column        dtype          stats
------        -----          -----
lang          stringclasses  2 values
license       stringclasses  13 values
stderr        stringlengths  0 to 343
commit        stringlengths  40 to 40
returncode    int64          0 to 128
repos         stringlengths  6 to 87.7k
new_contents  stringlengths  0 to 6.23M
new_file      stringlengths  3 to 311
old_contents  stringlengths  0 to 6.23M
message       stringlengths  6 to 9.1k
old_file      stringlengths  3 to 311
subject       stringlengths  0 to 4k
git_diff      stringlengths  0 to 6.31M
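Each row below is one commit: the file contents before and after the change (old_contents, new_contents), the paths (old_file, new_file), the commit metadata (commit, message, subject, repos), and the unified diff (git_diff). As a minimal sketch of consuming rows with this schema, assuming they are exported as JSON Lines: the CommitRow record, the rows.jsonl file name, and the choice of Jackson are illustrative assumptions, not part of the dataset (Jackson maps records natively from jackson-databind 2.12 onwards).

```java
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;

import com.fasterxml.jackson.databind.ObjectMapper;

public class ReadCommitRows {
    // Mirrors the schema above: every column is a string except returncode.
    // Hypothetical record for illustration; field names match the columns.
    record CommitRow(String lang, String license, String stderr, String commit,
                     int returncode, String repos, String new_contents,
                     String new_file, String old_contents, String message,
                     String old_file, String subject, String git_diff) {}

    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        // "rows.jsonl" is an assumed export: one JSON object per line.
        try (var lines = Files.lines(Path.of("rows.jsonl"))) {
            lines.forEach(line -> {
                try {
                    CommitRow row = mapper.readValue(line, CommitRow.class);
                    System.out.printf("%s %s %s%n",
                            row.lang(), row.license(), row.new_file());
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            });
        }
    }
}
```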
lang: Java
license: mit
commit: 084441e3789cac0c9d462ed88fd2d4e628419907
returncode: 0
repos: rnicoll/learn_syllabus_plus_sync,rnicoll/learn_syllabus_plus_sync
new_contents:
package uk.ac.ed.learn9.bb.timetabling.service; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import javax.sql.DataSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; /** * Service for monitoring which courses in Learn are synchronised from EUGEX. */ @Service public class EugexService { @Autowired private DataSource stagingDataSource; @Autowired private DataSource eugexDataSource; /** * Synchronises details of courses in EUGEX marked as active in the VLE, * to the staging database. * @throws SQLException */ public void synchroniseVleActiveCourses() throws SQLException { final Connection stagingDatabase = this.getStagingDataSource().getConnection(); try { final Connection eugexDatabase = this.getEugexDataSource().getConnection(); try { this.synchroniseVleActiveCourses(stagingDatabase, eugexDatabase); } finally { eugexDatabase.close(); } } finally { stagingDatabase.close(); } } /** * Synchronises details of courses that are copied from EUGEX to Learn, * from the EUGEX database into the staging database. * * @param stagingDatabase a connection to the staging database. * @param eugexDatabase a connection to the EUGEX database. * @throws SQLException */ private void synchroniseVleActiveCourses(final Connection stagingDatabase, final Connection eugexDatabase) throws SQLException { stagingDatabase.setAutoCommit(false); try { final PreparedStatement sourceStatement = eugexDatabase.prepareStatement( "SELECT VCL1_COURSE_CODE course_code, VCL2_COURSE_OCCURENCE occurrence_code, " + "VCL3_COURSE_YEAR_CODE academic_year, VCL4_COURSE_PERIOD period_code, VCL13_WEBCT_ACTIVE webct_active " + "FROM EUGEX_VLE_COURSES_VW " + "ORDER BY VCL3_COURSE_YEAR_CODE, VCL1_COURSE_CODE, VCL2_COURSE_OCCURENCE, VCL4_COURSE_PERIOD" ); try { final PreparedStatement destinationStatement = stagingDatabase.prepareStatement( "SELECT tt_module_id, webct_active, cache_course_code course_code, " + "cache_occurrence_code occurrence_code, tt_academic_year academic_year, " + "cache_semester_code period_code " + "FROM module " + "WHERE cache_course_code IS NOT NULL " + "AND tt_academic_year IS NOT NULL " + "ORDER BY tt_academic_year, cache_course_code, cache_occurrence_code, cache_semester_code", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE ); try { final ResultSet sourceRs = sourceStatement.executeQuery(); try { final ResultSet destinationRs = destinationStatement.executeQuery(); try { this.doSynchroniseVleActiveCourses(destinationRs, sourceRs); } finally { destinationRs.close(); } } finally { sourceRs.close(); } } finally { destinationStatement.close(); } } finally { sourceStatement.close(); } stagingDatabase.commit(); } finally { // Roll back any uncommitted changes, then set autocommit on again. stagingDatabase.rollback(); stagingDatabase.setAutoCommit(true); } } /** * Does the actual copying of the WEBCT_ACTIVE field from EUGEX into the local * database. 
* @param destinationRs * @param sourceRs * @throws SQLException */ private void doSynchroniseVleActiveCourses(final ResultSet destinationRs, final ResultSet sourceRs) throws SQLException { CourseKey sourceCourse; if (sourceRs.next()) { sourceCourse = new CourseKey(sourceRs.getString("academic_year"), sourceRs.getString("course_code"), sourceRs.getString("occurrence_code"), sourceRs.getString("period_code")); } else { sourceCourse = null; } while (destinationRs.next()) { if (null == sourceCourse) { destinationRs.updateNull(2); destinationRs.updateRow(); continue; } CourseKey destinationCourse = new CourseKey(destinationRs.getString("academic_year"), destinationRs.getString("course_code"), destinationRs.getString("occurrence_code"), destinationRs.getString("period_code")); int comparison = destinationCourse.compareTo(sourceCourse); // If we're too far ahead, continue through the source we find a match // or run out of data. while (comparison > 0) { if (sourceRs.next()) { sourceCourse = new CourseKey(sourceRs.getString("academic_year"), sourceRs.getString("course_code"), sourceRs.getString("occurrence_code"), sourceRs.getString("period_code")); comparison = destinationCourse.compareTo(sourceCourse); } else { // End of data destinationRs.updateNull(2); destinationRs.updateRow(); sourceCourse = null; break; } } if (comparison < 0) { // Not yet at a match, jump to the next row destinationRs.updateNull(2); destinationRs.updateRow(); continue; } else if (comparison == 0) { destinationRs.updateString(2, sourceRs.getString("webct_active")); destinationRs.updateRow(); } } } /** * Gets the data source for the EUGEX database. * * @return the data source for the EUGEX database. */ public DataSource getEugexDataSource() { return eugexDataSource; } /** * Gets the data source for the staging database. * * @return the staging database data source. */ public DataSource getStagingDataSource() { return stagingDataSource; } /** * Sets the EUGEX database data source. * * @param eugexDataSource the EUGEX database data source to set. */ public void setEugexDataSource(DataSource eugexDataSource) { this.eugexDataSource = eugexDataSource; } /** * Sets the staging database data source. * * @param dataSource the staging database data source to set. 
*/ public void setStagingDataSource(DataSource dataSource) { this.stagingDataSource = dataSource; } private static class CourseKey extends Object implements Comparable<CourseKey> { private final String ayrCode; private final String courseCode; private final String occurrenceCode; private final String periodCode; private CourseKey(final String setAyrCode, final String setCourseCode, final String setOccurrenceCode, final String setPeriodCode) { this.ayrCode = setAyrCode; this.courseCode = setCourseCode; this.occurrenceCode = setOccurrenceCode; this.periodCode = setPeriodCode; } @Override public int compareTo(final CourseKey other) { if (this.ayrCode.equals(other.ayrCode)) { if (this.courseCode.equals(other.courseCode)) { if (this.occurrenceCode.equals(other.occurrenceCode)) { if (this.periodCode.equals(other.periodCode)) { return 0; } else { return this.periodCode.compareTo(other.periodCode); } } else { return this.occurrenceCode.compareTo(other.occurrenceCode); } } else { return this.courseCode.compareTo(other.courseCode); } } else { return this.ayrCode.compareTo(other.ayrCode); } } @Override public boolean equals(final Object o) { if (null == o) { return false; } if (!(o instanceof CourseKey)) { return false; } final CourseKey other = (CourseKey)o; return this.compareTo(other) == 0; } @Override public int hashCode() { int hash = 1; hash = hash * 31 + this.ayrCode.hashCode(); hash = hash * 31 + this.courseCode.hashCode(); hash = hash * 31 + this.occurrenceCode.hashCode(); hash = hash * 31 + this.periodCode.hashCode(); return hash; } /** * @return the ayrCode */ public String getAyrCode() { return ayrCode; } /** * @return the courseCode */ public String getCourseCode() { return courseCode; } /** * @return the occurrenceCode */ public String getOccurrenceCode() { return occurrenceCode; } /** * @return the periodCode */ public String getPeriodCode() { return periodCode; } } }
new_file: src/main/java/uk/ac/ed/learn9/bb/timetabling/service/EugexService.java
old_contents:
package uk.ac.ed.learn9.bb.timetabling.service; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import javax.sql.DataSource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; /** * Service for monitoring which courses in Learn are synchronised from EUGEX. */ @Service public class EugexService { @Autowired private DataSource stagingDataSource; @Autowired private DataSource eugexDataSource; /** * Synchronises details of courses in EUGEX marked as active in the VLE, * to the staging database. * @throws SQLException */ public void synchroniseVleActiveCourses() throws SQLException { final Connection stagingDatabase = this.getStagingDataSource().getConnection(); try { final Connection eugexDatabase = this.getEugexDataSource().getConnection(); try { this.synchroniseVleActiveCourses(stagingDatabase, eugexDatabase); } finally { eugexDatabase.close(); } } finally { stagingDatabase.close(); } } /** * Synchronises details of courses that are copied from EUGEX to Learn, * from the EUGEX database into the staging database. * * @param stagingDatabase a connection to the staging database. * @param eugexDatabase a connection to the EUGEX database. * @throws SQLException */ private void synchroniseVleActiveCourses(final Connection stagingDatabase, final Connection eugexDatabase) throws SQLException { stagingDatabase.setAutoCommit(false); try { final PreparedStatement sourceStatement = eugexDatabase.prepareStatement( "SELECT VCL1_COURSE_CODE course_code, VCL2_COURSE_OCCURENCE occurrence_code, " + "VCL3_COURSE_YEAR_CODE academic_year, VCL4_COURSE_PERIOD period_code, VCL13_WEBCT_ACTIVE webct_active " + "FROM EUGEX_VLE_COURSES_VW " + "ORDER BY VCL3_COURSE_YEAR_CODE, VCL1_COURSE_CODE, VCL2_COURSE_OCCURENCE, VCL4_COURSE_PERIOD" ); try { final PreparedStatement destinationStatement = stagingDatabase.prepareStatement( "SELECT tt_module_id, webct_active, occurrence_course_code course_code, " + "cache_occurrence_code occurrence_code, tt_academic_year academic_year, " + "cache_semester_code period_code " + "FROM module " + "WHERE occurrence_course_code IS NOT NULL " + "AND tt_academic_year IS NOT NULL " + "ORDER BY tt_academic_year, occurrence_course_code, cache_occurrence_code, cache_semester_code", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE ); try { final ResultSet sourceRs = sourceStatement.executeQuery(); try { final ResultSet destinationRs = destinationStatement.executeQuery(); try { this.doSynchroniseVleActiveCourses(destinationRs, sourceRs); } finally { destinationRs.close(); } } finally { sourceRs.close(); } } finally { destinationStatement.close(); } } finally { sourceStatement.close(); } stagingDatabase.commit(); } finally { // Roll back any uncommitted changes, then set autocommit on again. stagingDatabase.rollback(); stagingDatabase.setAutoCommit(true); } } /** * Does the actual copying of the WEBCT_ACTIVE field from EUGEX into the local * database. 
* @param destinationRs * @param sourceRs * @throws SQLException */ private void doSynchroniseVleActiveCourses(final ResultSet destinationRs, final ResultSet sourceRs) throws SQLException { CourseKey sourceCourse; if (sourceRs.next()) { sourceCourse = new CourseKey(sourceRs.getString("academic_year"), sourceRs.getString("course_code"), sourceRs.getString("occurrence_code"), sourceRs.getString("period_code")); } else { sourceCourse = null; } while (destinationRs.next()) { if (null == sourceCourse) { destinationRs.updateNull(2); destinationRs.updateRow(); continue; } CourseKey destinationCourse = new CourseKey(destinationRs.getString("academic_year"), destinationRs.getString("course_code"), destinationRs.getString("occurrence_code"), destinationRs.getString("period_code")); int comparison = destinationCourse.compareTo(sourceCourse); // If we're too far ahead, continue through the source we find a match // or run out of data. while (comparison > 0) { if (sourceRs.next()) { sourceCourse = new CourseKey(sourceRs.getString("academic_year"), sourceRs.getString("course_code"), sourceRs.getString("occurrence_code"), sourceRs.getString("period_code")); comparison = destinationCourse.compareTo(sourceCourse); } else { // End of data destinationRs.updateNull(2); destinationRs.updateRow(); sourceCourse = null; break; } } if (comparison < 0) { // Not yet at a match, jump to the next row destinationRs.updateNull(2); destinationRs.updateRow(); continue; } else if (comparison == 0) { destinationRs.updateString(2, sourceRs.getString("webct_active")); destinationRs.updateRow(); } } } /** * Gets the data source for the EUGEX database. * * @return the data source for the EUGEX database. */ public DataSource getEugexDataSource() { return eugexDataSource; } /** * Gets the data source for the staging database. * * @return the staging database data source. */ public DataSource getStagingDataSource() { return stagingDataSource; } /** * Sets the EUGEX database data source. * * @param eugexDataSource the EUGEX database data source to set. */ public void setEugexDataSource(DataSource eugexDataSource) { this.eugexDataSource = eugexDataSource; } /** * Sets the staging database data source. * * @param dataSource the staging database data source to set. 
*/ public void setStagingDataSource(DataSource dataSource) { this.stagingDataSource = dataSource; } private static class CourseKey extends Object implements Comparable<CourseKey> { private final String ayrCode; private final String courseCode; private final String occurrenceCode; private final String periodCode; private CourseKey(final String setAyrCode, final String setCourseCode, final String setOccurrenceCode, final String setPeriodCode) { this.ayrCode = setAyrCode; this.courseCode = setCourseCode; this.occurrenceCode = setOccurrenceCode; this.periodCode = setPeriodCode; } @Override public int compareTo(final CourseKey other) { if (this.ayrCode.equals(other.ayrCode)) { if (this.courseCode.equals(other.courseCode)) { if (this.occurrenceCode.equals(other.occurrenceCode)) { if (this.periodCode.equals(other.periodCode)) { return 0; } else { return this.periodCode.compareTo(other.periodCode); } } else { return this.occurrenceCode.compareTo(other.occurrenceCode); } } else { return this.courseCode.compareTo(other.courseCode); } } else { return this.ayrCode.compareTo(other.ayrCode); } } @Override public boolean equals(final Object o) { if (null == o) { return false; } if (!(o instanceof CourseKey)) { return false; } final CourseKey other = (CourseKey)o; return this.compareTo(other) == 0; } @Override public int hashCode() { int hash = 1; hash = hash * 31 + this.ayrCode.hashCode(); hash = hash * 31 + this.courseCode.hashCode(); hash = hash * 31 + this.occurrenceCode.hashCode(); hash = hash * 31 + this.periodCode.hashCode(); return hash; } /** * @return the ayrCode */ public String getAyrCode() { return ayrCode; } /** * @return the courseCode */ public String getCourseCode() { return courseCode; } /** * @return the occurrenceCode */ public String getOccurrenceCode() { return occurrenceCode; } /** * @return the periodCode */ public String getPeriodCode() { return periodCode; } } }
message: Corrected field names in EugexService
old_file: src/main/java/uk/ac/ed/learn9/bb/timetabling/service/EugexService.java
subject: Corrected field names in EugexService
git_diff:
<ide><path>src/main/java/uk/ac/ed/learn9/bb/timetabling/service/EugexService.java
<ide> );
<ide> try {
<ide> final PreparedStatement destinationStatement = stagingDatabase.prepareStatement(
<del> "SELECT tt_module_id, webct_active, occurrence_course_code course_code, "
<add> "SELECT tt_module_id, webct_active, cache_course_code course_code, "
<ide> + "cache_occurrence_code occurrence_code, tt_academic_year academic_year, "
<ide> + "cache_semester_code period_code "
<ide> + "FROM module "
<del> + "WHERE occurrence_course_code IS NOT NULL "
<add> + "WHERE cache_course_code IS NOT NULL "
<ide> + "AND tt_academic_year IS NOT NULL "
<del> + "ORDER BY tt_academic_year, occurrence_course_code, cache_occurrence_code, cache_semester_code",
<add> + "ORDER BY tt_academic_year, cache_course_code, cache_occurrence_code, cache_semester_code",
<ide> ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE
<ide> );
<ide> try {
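The diff above touches only three SQL column names, but the code around it is a textbook sorted merge-join: both queries ORDER BY the same four-part key, and doSynchroniseVleActiveCourses advances whichever cursor is behind, using CourseKey.compareTo to decide. That nested compareTo chain is simply a lexicographic comparison over the four fields. Here is a standalone sketch of the same ordering with Comparator.comparing; the record and the sample key values are illustrative, not taken from the repository:

```java
import java.util.Comparator;

public class CourseKeyOrderDemo {
    // Illustrative stand-in for the private CourseKey class in EugexService.
    record CourseKey(String ayrCode, String courseCode,
                     String occurrenceCode, String periodCode) {}

    // Lexicographic ordering over the four fields, equivalent to the
    // nested if/compareTo chain in the original compareTo method.
    static final Comparator<CourseKey> ORDER =
            Comparator.comparing(CourseKey::ayrCode)
                      .thenComparing(CourseKey::courseCode)
                      .thenComparing(CourseKey::occurrenceCode)
                      .thenComparing(CourseKey::periodCode);

    public static void main(String[] args) {
        CourseKey a = new CourseKey("2012/3", "MATH08057", "SV1", "SEM1");
        CourseKey b = new CourseKey("2012/3", "MATH08057", "SV1", "SEM2");
        // Negative result: a sorts before b, matching the ORDER BY clauses
        // that both the EUGEX and staging queries rely on.
        System.out.println(ORDER.compare(a, b));
    }
}
```

Since String.equals agrees with String.compareTo on equality, the declarative chain and the original nested version induce the same order, and keeping both ResultSets sorted identically is what lets the merge run in a single pass.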
lang: Java
license: mpl-2.0
commit: fed84fc82a324a8db999badf91dd76ebab6278f8
returncode: 0
repos: JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core
new_contents:
/************************************************************************* * * $RCSfile: Process.java,v $ * * $Revision: 1.3 $ * * last change: $Author: obo $ $Date: 2004-09-08 14:12:54 $ * * The Contents of this file are made available subject to the terms of * either of the following licenses * * - GNU Lesser General Public License Version 2.1 * - Sun Industry Standards Source License Version 1.1 * * Sun Microsystems Inc., October, 2000 * * GNU Lesser General Public License Version 2.1 * ============================================= * Copyright 2000 by Sun Microsystems, Inc. * 901 San Antonio Road, Palo Alto, CA 94303, USA * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License version 2.1, as published by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, * MA 02111-1307 USA * * * Sun Industry Standards Source License Version 1.1 * ================================================= * The contents of this file are subject to the Sun Industry Standards * Source License Version 1.1 (the "License"); You may not use this file * except in compliance with the License. You may obtain a copy of the * License at http://www.openoffice.org/license.html. * * Software provided under this License is provided on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, * WITHOUT LIMITATION, WARRANTIES THAT THE SOFTWARE IS FREE OF DEFECTS, * MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE, OR NON-INFRINGING. * See the License for the specific provisions governing your rights and * obligations concerning the Software. * * The Initial Developer of the Original Code is: Sun Microsystems, Inc. * * Copyright: 2000 by Sun Microsystems, Inc. * * All Rights Reserved. * * Contributor(s): _______________________________________ * */ package com.sun.star.wizards.web; import java.io.File; import java.io.IOException; import java.util.Hashtable; import java.util.Iterator; import java.util.Map; import javax.xml.transform.Templates; import javax.xml.transform.Transformer; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import org.w3c.dom.Document; import com.sun.star.lang.XMultiServiceFactory; import com.sun.star.wizards.common.ConfigSet; import com.sun.star.wizards.common.FileAccess; import com.sun.star.wizards.common.UCB; import com.sun.star.wizards.ui.event.Task; import com.sun.star.wizards.web.data.CGContent; import com.sun.star.wizards.web.data.CGDocument; import com.sun.star.wizards.web.data.CGExporter; import com.sun.star.wizards.web.data.CGLayout; import com.sun.star.wizards.web.data.CGPublish; import com.sun.star.wizards.web.data.CGSettings; import com.sun.star.wizards.web.export.Exporter; /** * @author rpiterman * This class is used to process a CGSession object * and generate a site. </br> * it does the following: <br/> * 1. create a temporary directory.<br/> * 2. export documents to the temporary directory.<br/> * 3. generate the TOC page, includes copying images from the * web wizard work directory and other layout files.<br/> * 4. 
publish, or copy, from the temporary directory to * different destinations.<br/> * 5. delete the temporary directory.<br/> * <br/> * to follow up the status/errors it uses a TaskListener object, * and an ErrorHandler. <br/> * in practice, the TaskListener is the status dialog, * and the Errorhandler does the interaction with the user, * if something goes wrong.<br/> * Note that this class takes it in count that * the given session object is prepared for it - * all preparations are done in WWD_Events.finishWizard methods. * <br/> * <br/> * * note on error handling: <br/> * on "catch" clauses I tries to decide whether the * exception is fatal or not. For fatal exception an error message * is displayed (or rather: the errorHandler is being called...) * and a false is returned. * In less-fatal errors, the errorHandler "should decide" which means, * the user is given the option to "OK" or to "Cancel" and depending * on that interaction I cary on. */ public class Process implements Runnable, WebWizardConst, ProcessErrors { private static final int TASKS_PER_DOC = 5; private static final int TASKS_PER_XSL = 2; private static final int TASKS_PER_PUBLISH = 2; private static final int TASKS_IN_PREPARE = 1; private static final int TASKS_IN_EXPORT = 2; private static final int TASKS_IN_GENERATE = 2; private static final int TASKS_IN_PUBLISH = 2; private static final int TASKS_IN_FINISHUP = 1; private CGSettings settings; private XMultiServiceFactory xmsf; private ErrorHandler errorHandler; private String tempDir; private FileAccess fileAccess; private UCB ucb; public Task myTask; /** * This is a cache for exporters, so I do not need to * instanciate the same exporter more than once. */ private Map exporters = new Hashtable(3); private boolean result; public Process( CGSettings settings, XMultiServiceFactory xmsf, ErrorHandler er ) throws Exception { this.xmsf = xmsf; this.settings = settings; fileAccess = new FileAccess(xmsf); errorHandler = er; ucb = new UCB(xmsf); int taskSteps = getTaskSteps(); myTask = new Task(TASK,TASK_PREPARE, taskSteps); } /** * @return to how many destinations should the * generated site be published. */ private int countPublish() { int count = 0; ConfigSet publishers = settings.cp_DefaultSession.cp_Publishing; for (int i = 0; i<publishers.getSize(); i++) if (((CGPublish)publishers.getElementAt(i)).cp_Publish) count++; return count; } /** * @return the number of task steps that this * session should have */ private int getTaskSteps() { int docs = settings.cp_DefaultSession.cp_Content.cp_Documents.getSize(); int xsl = 0; try { xsl = settings.cp_DefaultSession.getLayout().getTemplates(xmsf).size(); } catch (Exception ex) { } int publish = countPublish(); int taskSteps = TASKS_IN_PREPARE + TASKS_IN_EXPORT + docs * TASKS_PER_DOC + TASKS_IN_GENERATE + xsl * TASKS_PER_XSL + TASKS_IN_PUBLISH + publish * TASKS_PER_PUBLISH + TASKS_IN_FINISHUP; return taskSteps; } /** * does the job */ public void run() { myTask.start(); try { try { /* * I use here '&&' so if one of the * methods returns false, the next * will not be called. */ result = createTempDir(myTask) && export(myTask) && generate(tempDir,myTask) && publish(tempDir,myTask); } finally { //cleanup must be called. result = result & cleanup(myTask); } } catch (Exception ex) { result = false; } if (!result) myTask.fail(); //this is a bug protection. while (myTask.getStatus() < myTask.getMax()) myTask.advance(true); } /** * creates a temporary directory. 
* @param task * @return true should continue */ private boolean createTempDir(Task task) { tempDir = fileAccess.createNewDir(getSOTempDir(xmsf), "wwiztemp"); if (tempDir == null) { error(null,null,ERROR_MKDIR,ErrorHandler.ERROR_PROCESS_FATAL); return false; } else { task.advance(true); return true; } } /** * @param xmsf * @return the staroffice /openoffice temporary directory */ static String getSOTempDir(XMultiServiceFactory xmsf) { try { String s = FileAccess.getOfficePath(xmsf,"Temp",""); return s; } catch (Exception e) {} return null; } // CLEANUP /** * delete the temporary directory * @return true should continue */ private boolean cleanup(Task task) { task.setSubtaskName(TASK_FINISH); boolean b = cleanup(tempDir); if (!b) error(null,null,ERROR_CLEANUP,ErrorHandler.ERROR_WARNING); task.advance(b); return b; } /** * deletes the given directory * @param dir the directory to delete * @return true if should continue */ private boolean cleanup(String dir) { boolean success = true; if (dir != null && fileAccess.exists(dir,false)) { String[] files = fileAccess.listFiles(dir,true); for (int i = 0; i < files.length; i++) { if (fileAccess.isDirectory(files[i])) success = success && cleanup(files[i]); else success = success && fileAccess.delete(files[i]); } } return success && fileAccess.delete(dir); } /** * This method is used to copy style files to a target * Directory: css and background. * Note that this method is static since it is * also used when displaying a "preview" */ public static void copyMedia(UCB copy, CGSettings settings, String targetDir, Task task ) throws Exception { //1. .css String sourceDir = FileAccess.connectURLs(settings.workPath , "styles"); String filename = settings.cp_DefaultSession.getStyle().cp_CssHref; copy.copy(sourceDir,filename,targetDir,"style.css"); task.advance(true); //2. background image String background = settings.cp_DefaultSession.cp_Design.cp_BackgroundImage; if (background != null && !background.equals("")) { sourceDir = FileAccess.getParentDir(background); filename = background.substring(sourceDir.length()); copy.copy(sourceDir,filename,targetDir + "/images","background.gif"); } task.advance(true); } /** * Copy "static" files (which are always the same, * thus not user-input-dependant) to a target directory. * Note that this method is static since it is * also used when displaying a "preview" * @param copy * @param settings * @param targetDir * @throws Exception */ public static void copyStaticImages(UCB copy, CGSettings settings, String targetDir) throws Exception { copy.copy(FileAccess.connectURLs(settings.workPath , "images") ,targetDir+"/images"); } /** * publish the given directory. * @param dir the source directory to publish from * @param task task tracking. 
* @return true if should continue */ private boolean publish(String dir, Task task ) { task.setSubtaskName(TASK_PUBLISH_PREPARE); ConfigSet set = settings.cp_DefaultSession.cp_Publishing; try { copyMedia(ucb, settings, dir,task); copyStaticImages(ucb,settings,dir); task.advance(true); } catch (Exception ex) { //error in copying media error(ex, "", ERROR_PUBLISH_MEDIA, ErrorHandler.ERROR_PROCESS_FATAL); return false; } boolean result = true; for (int i = 0; i < set.getSize(); i++) { CGPublish p = (CGPublish)set.getElementAt(i); if (p.cp_Publish) { String key = (String)set.getKey(p); task.setSubtaskName(key); if (key.equals(ZIP_PUBLISHER)) fileAccess.delete(p.cp_URL); if (!publish(dir, p, ucb, task)) { return false; } } } return result; } /** * publish the given directory to the * given target CGPublish. * @param dir the dir to copy from * @param publish the object that specifies the target * @param copy ucb encapsulation * @param task task tracking * @return true if should continue */ private boolean publish(String dir,CGPublish publish,UCB copy,Task task) { try { //copy.deleteDirContent(publish.url); task.advance(true); copy.copy(dir,publish.url); task.advance(true); return true; } catch (Exception e) { task.advance(false); return error(e,publish, ERROR_PUBLISH,ErrorHandler.ERROR_NORMAL_IGNORE); } } //GENERATING METHODS /** * Generates the TOC pages for the current session. * @param targetDir generating to this directory. */ public boolean generate(String targetDir, Task task) { boolean result = false; task.setSubtaskName(TASK_GENERATE_PREPARE); CGLayout layout = settings.cp_DefaultSession.getLayout(); try { /* * here I create the DOM of the TOC to pass to the XSL */ Document doc = (Document)settings.cp_DefaultSession.createDOM(); generate(xmsf,layout,doc ,fileAccess,targetDir,task); } catch (Exception ex) { error(ex, "" , ERROR_GENERATE_XSLT ,ErrorHandler.ERROR_PROCESS_FATAL); return false; } /* copy files which are not xsl from layout directory to * website root. */ try { task.setSubtaskName(TASK_GENERATE_COPY); copyLayoutFiles(ucb,fileAccess,settings,layout,targetDir); task.advance(true); result = true; } catch (Exception ex) { task.advance(false); return error(ex,null,ERROR_GENERATE_COPY,ErrorHandler.ERROR_NORMAL_ABORT); } return result; } /** * copies layout files which are not .xsl files * to the target directory. * @param ucb UCB encapsulatzion object * @param fileAccess filaAccess encapsulation object * @param settings web wizard settings * @param layout the layout object * @param targetDir the target directory to copy to * @throws Exception */ public static void copyLayoutFiles(UCB ucb, FileAccess fileAccess, CGSettings settings, CGLayout layout, String targetDir) throws Exception { String filesPath = fileAccess.getURL( FileAccess.connectURLs(settings.workPath , "layouts/"), layout.cp_FSName ); ucb.copy(filesPath,targetDir,new ExtensionVerifier("xsl")); } /** * generates the TOC page for the given layout. * This method might generate more than one file, depending * on how many .xsl files are in the * directory specifies by the given layout object. * @param xmsf * @param layout specifies the layout to use. * @param doc the DOM representation of the web wizard session * @param fileAccess encapsulation of FileAccess * @param targetPath target directory * @param task * @throws Exception */ public static void generate( XMultiServiceFactory xmsf, CGLayout layout, Document doc, FileAccess fileAccess, String targetPath, Task task) throws Exception { /* * a map that contains xsl templates. 
the keys are the xsl file names. */ Map templates = layout.getTemplates(xmsf); task.advance(true,TASK_GENERATE_XSL); /* * each template generates a page. */ for (Iterator i = templates.keySet().iterator() ; i.hasNext(); ) { String key = ""; key = (String)i.next(); Transformer transformer = ((Templates)templates.get(key)).newTransformer(); doc.normalize(); task.advance(true); /* * The target file name is like the xsl template filename * without the .xsl extension. */ String fn = fileAccess.getPath( targetPath, key.substring(0,key.length()-4)); File f = new File(fn); transformer.transform( new DOMSource(doc), new StreamResult(f) ); task.advance(true); } } /** * I broke the export method to two methods * in a time where a tree with more than one contents was planned. * I left it that way, because it may be used in the future. * @param task * @return */ private boolean export(Task task) { return export(settings.cp_DefaultSession.cp_Content, tempDir, task); } /** * This method could actually, with light modification, use recursion. * In the present situation, where we only use a "flat" list of * documents, instead of the original plan to use a tree, * the recursion is not implemented. * @param content the content ( directory-like, contains documents) * @param dir (target directory for exporting this content. * @param task * @return true if should continue */ private boolean export(CGContent content, String dir, Task task) { int toPerform = 1; String contentDir = dir; try { task.setSubtaskName(TASK_EXPORT_PREPARE); /* 1. create a content directory. * each content (at the moment there is only one :-( ) * is created in its own directory. * faileure here is fatal. */ contentDir = fileAccess.createNewDir(dir,content.cp_Name); if (contentDir == null || contentDir.equals("") ) throw new IOException("Directory " + dir + " could not be created."); content.dirName = FileAccess.getFilename(contentDir); task.advance(true,TASK_EXPORT_DOCUMENTS); toPerform--; /*2. export all documents and sub contents. * (at the moment, only documents, no subcontents) */ Object item = null; for (int i = 0; i < content.cp_Documents.getSize(); i++) { try { item = content.cp_Documents.getElementAt(i); /* * In present this is always the case. * may be in the future, when * a tree is used, it will be abit different. */ if (item instanceof CGDocument) { if (!export((CGDocument) item, contentDir,task)) return false; } else /* * we never get here since we * did not implement sub-contents. */ if (!export((CGContent) item, contentDir,task)) return false; } catch (SecurityException sx) { // nonfatal if (!error(sx,item, ERROR_EXPORT_SECURITY,ErrorHandler.ERROR_NORMAL_IGNORE)) return false; result = false; } } } catch (IOException iox) { //nonfatal return error(iox,content,ERROR_EXPORT_IO,ErrorHandler.ERROR_NORMAL_IGNORE); } catch (SecurityException se) { //nonfatal return error(se,content,ERROR_EXPORT_SECURITY,ErrorHandler.ERROR_NORMAL_IGNORE); } failTask(task,toPerform); return true; } /** * exports a single document * @param doc the document to export * @param dir the target directory * @param task task tracking * @return true if should continue */ private boolean export(CGDocument doc, String dir,Task task) { //first I check if the document was already validated... 
if (!doc.valid) try { doc.validate(xmsf,null); } catch (Exception ex){ //fatal error(ex,doc,ERROR_DOC_VALIDATE,ErrorHandler.ERROR_PROCESS_FATAL); return false; } //get the exporter specified for this document CGExporter exporter = (CGExporter)settings.cp_Exporters.getElement(doc.cp_Exporter); try { /* * here I calculate the destination filename. * I take the original filename (docFilename), substract the extension, (docExt) -> (fn) * and find an available filename which starts with * this filename, but with the new extension. (destExt) */ String docFilename = FileAccess.getFilename(doc.cp_URL); String docExt = FileAccess.getExtension(docFilename); String fn = doc.localFilename.substring(0,doc.localFilename.length()-docExt.length()-1); //filename without extension /* * the copyExporter does not change * the extension of the target... */ String destExt = ( exporter.cp_Extension.equals("") ? FileAccess.getExtension(docFilename) : exporter.cp_Extension ); /* if this filter needs to export to its own directory... * this is the case in, for example, impress html export */ if (exporter.cp_OwnDirectory) { //+++ dir = fileAccess.createNewDir(dir, fn ); doc.dirName = FileAccess.getFilename(dir); } /* * if two files with the same name * need to be exported ? So here * i get a new filename, so I do not * overwrite files... */ String file = fileAccess.getNewFile(dir,fn,destExt); /* set filename with extension. * this will be used by the exporter, * and to generate the TOC. */ doc.urlFilename = FileAccess.getFilename(file); task.advance(true); try { //export getExporter(exporter).export(doc, file, xmsf, task); task.advance(true); } /* * getExporter(..) throws * IllegalAccessException, InstantiationException, ClassNotFoundException * export() throws Exception */ catch (Exception ex) { //nonfatal if (!error(ex, doc, ERROR_EXPORT, ErrorHandler.ERROR_NORMAL_IGNORE)) return false; } } catch (Exception ex) { //nonfatal if (!error(ex,doc,ERROR_EXPORT_MKDIR,ErrorHandler.ERROR_NORMAL_ABORT)) return false; } return true; } /** * submit an error. * @param ex the exception * @param arg1 error argument * @param arg2 error argument 2 * @param errType error type * @return the interaction result */ private boolean error(Exception ex, Object arg1, int arg2, int errType) { result = false; return errorHandler.error(ex,arg1,arg2,errType); } /** * advances the given task in the given count of steps, * marked as failed. * @param task the task to advance * @param count the number of steps to advance */ private void failTask(Task task, int count) { while (count-- > 0) task.advance(false); } /** * creates an instance of the exporter class * as specified by the * exporter object. * @param export specifies the exporter to be created * @return the Exporter instance * @throws ClassNotFoundException * @throws IllegalAccessException * @throws InstantiationException */ private Exporter createExporter(CGExporter export) throws ClassNotFoundException, IllegalAccessException, InstantiationException { Exporter e = (Exporter) Class.forName(export.cp_ExporterClass).newInstance(); e.init(export); return e; } /** * searches the an exporter for the given CGExporter object * in the cache. * If its not there, creates it, stores it in the cache and * returns it. * @param export specifies the needed exporter. 
* @return an Exporter instance * @throws ClassNotFoundException thrown when using Class.forName(string) * @throws IllegalAccessException thrown when using Class.forName(string) * @throws InstantiationException thrown when using Class.forName(string) */ private Exporter getExporter(CGExporter export) throws ClassNotFoundException, IllegalAccessException, InstantiationException { Exporter exp = (Exporter)exporters.get(export); if (exp == null) { exp = createExporter(export); exporters.put(export,exp); } return exp; } /** * @return tru if everything went smooth, false * if error(s) accured. */ public boolean getResult() { return (myTask.getFailed() == 0) && result; } }
new_file: wizards/com/sun/star/wizards/web/Process.java
old_contents:
/************************************************************************* * * $RCSfile: Process.java,v $ * * $Revision: 1.2 $ * * last change: $Author: kz $ $Date: 2004-05-19 13:12:58 $ * * The Contents of this file are made available subject to the terms of * either of the following licenses * * - GNU Lesser General Public License Version 2.1 * - Sun Industry Standards Source License Version 1.1 * * Sun Microsystems Inc., October, 2000 * * GNU Lesser General Public License Version 2.1 * ============================================= * Copyright 2000 by Sun Microsystems, Inc. * 901 San Antonio Road, Palo Alto, CA 94303, USA * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License version 2.1, as published by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, * MA 02111-1307 USA * * * Sun Industry Standards Source License Version 1.1 * ================================================= * The contents of this file are subject to the Sun Industry Standards * Source License Version 1.1 (the "License"); You may not use this file * except in compliance with the License. You may obtain a copy of the * License at http://www.openoffice.org/license.html. * * Software provided under this License is provided on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, * WITHOUT LIMITATION, WARRANTIES THAT THE SOFTWARE IS FREE OF DEFECTS, * MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE, OR NON-INFRINGING. * See the License for the specific provisions governing your rights and * obligations concerning the Software. * * The Initial Developer of the Original Code is: Sun Microsystems, Inc. * * Copyright: 2000 by Sun Microsystems, Inc. * * All Rights Reserved. * * Contributor(s): _______________________________________ * */ package com.sun.star.wizards.web; import java.io.*; import java.util.Hashtable; import java.util.Iterator; import java.util.Map; import javax.xml.transform.Templates; import javax.xml.transform.Transformer; //import javax.xml.transform.dom.DOMResult; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import org.w3c.dom.*; import com.sun.star.lang.XMultiServiceFactory; import com.sun.star.wizards.common.*; import com.sun.star.wizards.ui.event.*; import com.sun.star.wizards.web.data.*; import com.sun.star.wizards.web.export.Exporter; /** * @author rpiterman * This class is used to process a CGSession object * and generate a site. </br> * it does the following: <br/> * 1. create a temporary directory.<br/> * 2. export documents to the temporary directory.<br/> * 3. generate the TOC page, includes copying images from the * web wizard work directory and other layout files.<br/> * 4. publish, or copy, from the temporary directory to * different destinations.<br/> * 5. delete the temporary directory.<br/> * <br/> * to follow up the status/errors it uses a TaskListener object, * and an ErrorHandler. 
<br/> * in practice, the TaskListener is the status dialog, * and the Errorhandler does the interaction with the user, * if something goes wrong.<br/> * Note that this class takes it in count that * the given session object is prepared for it - * all preparations are done in WWD_Events.finishWizard methods. * <br/> * <br/> * * note on error handling: <br/> * on "catch" clauses I tries to decide whether the * exception is fatal or not. For fatal exception an error message * is displayed (or rather: the errorHandler is being called...) * and a false is returned. * In less-fatal errors, the errorHandler "should decide" which means, * the user is given the option to "OK" or to "Cancel" and depending * on that interaction I cary on. */ public class Process implements Runnable, WebWizardConst, ProcessErrors { private static final int TASKS_PER_DOC = 5; private static final int TASKS_PER_XSL = 2; private static final int TASKS_PER_PUBLISH = 2; private static final int TASKS_IN_PREPARE = 1; private static final int TASKS_IN_EXPORT = 2; private static final int TASKS_IN_GENERATE = 2; private static final int TASKS_IN_PUBLISH = 2; private static final int TASKS_IN_FINISHUP = 1; private CGSettings settings; private XMultiServiceFactory xmsf; private ErrorHandler errorHandler; private String tempDir; private FileAccess fileAccess; private UCB ucb; public Task myTask; /** * This is a cache for exporters, so I do not need to * instanciate the same exporter more than once. */ private Map exporters = new Hashtable(3); private boolean result; public Process( CGSettings settings, XMultiServiceFactory xmsf, ErrorHandler er ) throws Exception { this.xmsf = xmsf; this.settings = settings; fileAccess = new FileAccess(xmsf); errorHandler = er; ucb = new UCB(xmsf); int taskSteps = getTaskSteps(); myTask = new Task(TASK,TASK_PREPARE, taskSteps); } /** * @return to how many destinations should the * generated site be published. */ private int countPublish() { int count = 0; ConfigSet publishers = settings.cp_DefaultSession.cp_Publishing; for (int i = 0; i<publishers.getSize(); i++) if (((CGPublish)publishers.getElementAt(i)).cp_Publish) count++; return count; } /** * @return the number of task steps that this * session should have */ private int getTaskSteps() { int docs = settings.cp_DefaultSession.cp_Content.cp_Documents.getSize(); int xsl = 0; try { xsl = settings.cp_DefaultSession.getLayout().getTemplates(xmsf).size(); } catch (Exception ex) { } int publish = countPublish(); int taskSteps = TASKS_IN_PREPARE + TASKS_IN_EXPORT + docs * TASKS_PER_DOC + TASKS_IN_GENERATE + xsl * TASKS_PER_XSL + TASKS_IN_PUBLISH + publish * TASKS_PER_PUBLISH + TASKS_IN_FINISHUP; return taskSteps; } /** * does the job */ public void run() { myTask.start(); try { try { /* * I use here '&&' so if one of the * methods returns false, the next * will not be called. */ result = createTempDir(myTask) && export(myTask) && generate(tempDir,myTask) && publish(tempDir,myTask); } finally { //cleanup must be called. result = result & cleanup(myTask); } } catch (Exception ex) { result = false; } if (!result) myTask.fail(); //this is a bug protection. while (myTask.getStatus() < myTask.getMax()) myTask.advance(true); } /** * creates a temporary directory. 
* @param task * @return true should continue */ private boolean createTempDir(Task task) { tempDir = fileAccess.createNewDir(getSOTempDir(xmsf), "wwiztemp"); if (tempDir == null) { error(null,null,ERROR_MKDIR,ErrorHandler.ERROR_PROCESS_FATAL); return false; } else { task.advance(true); return true; } } /** * @param xmsf * @return the staroffice /openoffice temporary directory */ static String getSOTempDir(XMultiServiceFactory xmsf) { try { String s = FileAccess.getOfficePath(xmsf,"Temp",""); return s; } catch (Exception e) {} return null; } // CLEANUP /** * delete the temporary directory * @return true should continue */ private boolean cleanup(Task task) { task.setSubtaskName(TASK_FINISH); boolean b = cleanup(tempDir); if (!b) error(null,null,ERROR_CLEANUP,ErrorHandler.ERROR_WARNING); task.advance(b); return b; } /** * deletes the given directory * @param dir the directory to delete * @return true if should continue */ private boolean cleanup(String dir) { boolean success = true; if (dir != null && fileAccess.exists(dir,false)) { String[] files = fileAccess.listFiles(dir,true); for (int i = 0; i < files.length; i++) { if (fileAccess.isDirectory(files[i])) success = success && cleanup(files[i]); else success = success && fileAccess.delete(files[i]); } } return success && fileAccess.delete(dir); } /** * This method is used to copy style files to a target * Directory: css and background. * Note that this method is static since it is * also used when displaying a "preview" */ public static void copyMedia(UCB copy, CGSettings settings, String targetDir, Task task ) throws Exception { //1. .css String sourceDir = FileAccess.connectURLs(settings.workPath , "styles"); String filename = settings.cp_DefaultSession.getStyle().cp_CssHref; copy.copy(sourceDir,filename,targetDir,"style.css"); task.advance(true); //2. background image String background = settings.cp_DefaultSession.cp_Design.cp_BackgroundImage; if (background != null && !background.equals("")) { sourceDir = FileAccess.getParentDir(background); filename = background.substring(sourceDir.length()); copy.copy(sourceDir,filename,targetDir + "/images","background.gif"); } task.advance(true); } /** * Copy "static" files (which are always the same, * thus not user-input-dependant) to a target directory. * Note that this method is static since it is * also used when displaying a "preview" * @param copy * @param settings * @param targetDir * @throws Exception */ public static void copyStaticImages(UCB copy, CGSettings settings, String targetDir) throws Exception { copy.copy(FileAccess.connectURLs(settings.workPath , "images") ,targetDir+"/images"); } /** * publish the given directory. * @param dir the source directory to publish from * @param task task tracking. * @return true if should continue */ private boolean publish(String dir, Task task ) { task.setSubtaskName(TASK_PUBLISH_PREPARE); ConfigSet set = settings.cp_DefaultSession.cp_Publishing; try { copyMedia(ucb, settings, dir,task); copyStaticImages(ucb,settings,dir); task.advance(true); } catch (Exception ex) { //error in copying media error(ex, "", ERROR_PUBLISH_MEDIA, ErrorHandler.ERROR_PROCESS_FATAL); return false; } boolean result = true; for (int i = 0; i < set.getSize(); i++) { CGPublish p = (CGPublish)set.getElementAt(i); if (p.cp_Publish) { String key = (String)set.getKey(p); task.setSubtaskName(key); if (!publish(dir, p, ucb, task)) { return false; } } } return result; } /** * publish the given directory to the * given target CGPublish. 
* @param dir the dir to copy from * @param publish the object that specifies the target * @param copy ucb encapsulation * @param task task tracking * @return true if should continue */ private boolean publish(String dir,CGPublish publish,UCB copy,Task task) { try { copy.deleteDirContent(publish.url); task.advance(true); copy.copy(dir,publish.url); task.advance(true); return true; } catch (Exception e) { task.advance(false); return error(e,publish, ERROR_PUBLISH,ErrorHandler.ERROR_NORMAL_IGNORE); } } //GENERATING METHODS /** * Generates the TOC pages for the current session. * @param targetDir generating to this directory. */ public boolean generate(String targetDir, Task task) { boolean result = false; task.setSubtaskName(TASK_GENERATE_PREPARE); CGLayout layout = settings.cp_DefaultSession.getLayout(); try { /* * here I create the DOM of the TOC to pass to the XSL */ Document doc = (Document)settings.cp_DefaultSession.createDOM(); generate(xmsf,layout,doc ,fileAccess,targetDir,task); } catch (Exception ex) { error(ex, "" , ERROR_GENERATE_XSLT ,ErrorHandler.ERROR_PROCESS_FATAL); return false; } /* copy files which are not xsl from layout directory to * website root. */ try { task.setSubtaskName(TASK_GENERATE_COPY); copyLayoutFiles(ucb,fileAccess,settings,layout,targetDir); task.advance(true); result = true; } catch (Exception ex) { task.advance(false); return error(ex,null,ERROR_GENERATE_COPY,ErrorHandler.ERROR_NORMAL_ABORT); } return result; } /** * copies layout files which are not .xsl files * to the target directory. * @param ucb UCB encapsulatzion object * @param fileAccess filaAccess encapsulation object * @param settings web wizard settings * @param layout the layout object * @param targetDir the target directory to copy to * @throws Exception */ public static void copyLayoutFiles(UCB ucb, FileAccess fileAccess, CGSettings settings, CGLayout layout, String targetDir) throws Exception { String filesPath = fileAccess.getURL( FileAccess.connectURLs(settings.workPath , "layouts/"), layout.cp_FSName ); ucb.copy(filesPath,targetDir,new ExtensionVerifier("xsl")); String icon = settings.cp_DefaultSession.cp_GeneralInfo.cp_Icon; if ((icon!=null) && (!icon.equals(""))) { String icon2 = FileAccess.connectURLs(targetDir , "images/favicon.ico"); fileAccess.copy(icon,icon2); } } /** * generates the TOC page for the given layout. * This method might generate more than one file, depending * on how many .xsl files are in the * directory specifies by the given layout object. * @param xmsf * @param layout specifies the layout to use. * @param doc the DOM representation of the web wizard session * @param fileAccess encapsulation of FileAccess * @param targetPath target directory * @param task * @throws Exception */ public static void generate( XMultiServiceFactory xmsf, CGLayout layout, Document doc, FileAccess fileAccess, String targetPath, Task task) throws Exception { /* * a map that contains xsl templates. the keys are the xsl file names. */ Map templates = layout.getTemplates(xmsf); task.advance(true,TASK_GENERATE_XSL); /* * each template generates a page. */ for (Iterator i = templates.keySet().iterator() ; i.hasNext(); ) { String key = ""; key = (String)i.next(); Transformer transformer = ((Templates)templates.get(key)).newTransformer(); doc.normalize(); task.advance(true); /* * The target file name is like the xsl template filename * without the .xsl extension. 
*/ String fn = fileAccess.getPath( targetPath, key.substring(0,key.length()-4)); File f = new File(fn); transformer.transform( new DOMSource(doc), new StreamResult(f) ); task.advance(true); } } /** * I broke the export method to two methods * in a time where a tree with more than one contents was planned. * I left it that way, because it may be used in the future. * @param task * @return */ private boolean export(Task task) { return export(settings.cp_DefaultSession.cp_Content, tempDir, task); } /** * This method could actually, with light modification, use recursion. * In the present situation, where we only use a "flat" list of * documents, instead of the original plan to use a tree, * the recursion is not implemented. * @param content the content ( directory-like, contains documents) * @param dir (target directory for exporting this content. * @param task * @return true if should continue */ private boolean export(CGContent content, String dir, Task task) { int toPerform = 1; String contentDir = dir; try { task.setSubtaskName(TASK_EXPORT_PREPARE); /* 1. create a content directory. * each content (at the moment there is only one :-( ) * is created in its own directory. * faileure here is fatal. */ contentDir = fileAccess.createNewDir(dir,content.cp_Name); if (contentDir == null || contentDir.equals("") ) throw new IOException("Directory " + dir + " could not be created."); content.dirName = fileAccess.getFilename(contentDir); task.advance(true,TASK_EXPORT_DOCUMENTS); toPerform--; /*2. export all documents and sub contents. * (at the moment, only documents, no subcontents) */ Object item = null; for (int i = 0; i < content.cp_Documents.getSize(); i++) { try { item = content.cp_Documents.getElementAt(i); /* * In present this is always the case. * may be in the future, when * a tree is used, it will be abit different. */ if (item instanceof CGDocument) { if (!export((CGDocument) item, contentDir,task)) return false; } else /* * we never get here since we * did not implement sub-contents. */ if (!export((CGContent) item, contentDir,task)) return false; } catch (SecurityException sx) { // nonfatal if (!error(sx,item, ERROR_EXPORT_SECURITY,ErrorHandler.ERROR_NORMAL_IGNORE)) return false; result = false; } } } catch (IOException iox) { //nonfatal return error(iox,content,ERROR_EXPORT_IO,ErrorHandler.ERROR_NORMAL_IGNORE); } catch (SecurityException se) { //nonfatal return error(se,content,ERROR_EXPORT_SECURITY,ErrorHandler.ERROR_NORMAL_IGNORE); } failTask(task,toPerform); return true; } /** * exports a single document * @param doc the document to export * @param dir the target directory * @param task task tracking * @return true if should continue */ private boolean export(CGDocument doc, String dir,Task task) { //first I check if the document was already validated... if (!doc.valid) try { doc.validate(xmsf,null); } catch (Exception ex){ //fatal error(ex,doc,ERROR_DOC_VALIDATE,ErrorHandler.ERROR_PROCESS_FATAL); return false; } //get the exporter specified for this document CGExporter exporter = (CGExporter)settings.cp_Exporters.getElement(doc.cp_Exporter); try { /* * here I calculate the destination filename. * I take the original filename (docFilename), substract the extension, (docExt) -> (fn) * and find an available filename which starts with * this filename, but with the new extension. 
(destExt) */ String docFilename = fileAccess.getFilename(doc.cp_URL); String docExt = FileAccess.getExtension(docFilename); String fn = doc.localFilename.substring(0,doc.localFilename.length()-docExt.length()-1); //filename without extension /* * the copyExporter does not change * the extension of the target... */ String destExt = ( exporter.cp_Extension.equals("") ? FileAccess.getExtension(docFilename) : exporter.cp_Extension ); /* if this filter needs to export to its own directory... * this is the case in, for example, impress html export */ if (exporter.cp_OwnDirectory) { //+++ dir = fileAccess.createNewDir(dir, fn ); doc.dirName = fileAccess.getFilename(dir); } /* * if two files with the same name * need to be exported ? So here * i get a new filename, so I do not * overwrite files... */ String file = fileAccess.getNewFile(dir,fn,destExt); /* set filename with extension. * this will be used by the exporter, * and to generate the TOC. */ doc.urlFilename = fileAccess.getFilename(file); task.advance(true); try { //export getExporter(exporter).export(doc, file, xmsf, task); task.advance(true); } /* * getExporter(..) throws * IllegalAccessException, InstantiationException, ClassNotFoundException * export() throws Exception */ catch (Exception ex) { //nonfatal if (!error(ex, doc, ERROR_EXPORT, ErrorHandler.ERROR_NORMAL_IGNORE)) return false; } } catch (Exception ex) { //nonfatal if (!error(ex,doc,ERROR_EXPORT_MKDIR,ErrorHandler.ERROR_NORMAL_ABORT)) return false; } return true; } /** * submit an error. * @param ex the exception * @param arg1 error argument * @param arg2 error argument 2 * @param errType error type * @return the interaction result */ private boolean error(Exception ex, Object arg1, int arg2, int errType) { result = false; return errorHandler.error(ex,arg1,arg2,errType); } /** * advances the given task in the given count of steps, * marked as failed. * @param task the task to advance * @param count the number of steps to advance */ private void failTask(Task task, int count) { while (count-- > 0) task.advance(false); } /** * creates an instance of the exporter class * as specified by the * exporter object. * @param export specifies the exporter to be created * @return the Exporter instance * @throws ClassNotFoundException * @throws IllegalAccessException * @throws InstantiationException */ private Exporter createExporter(CGExporter export) throws ClassNotFoundException, IllegalAccessException, InstantiationException { Exporter e = (Exporter) Class.forName(export.cp_ExporterClass).newInstance(); e.init(export); return e; } /** * searches the an exporter for the given CGExporter object * in the cache. * If its not there, creates it, stores it in the cache and * returns it. * @param export specifies the needed exporter. * @return an Exporter instance * @throws ClassNotFoundException thrown when using Class.forName(string) * @throws IllegalAccessException thrown when using Class.forName(string) * @throws InstantiationException thrown when using Class.forName(string) */ private Exporter getExporter(CGExporter export) throws ClassNotFoundException, IllegalAccessException, InstantiationException { Exporter exp = (Exporter)exporters.get(export); if (exp == null) { exp = createExporter(export); exporters.put(export,exp); } return exp; } /** * @return tru if everything went smooth, false * if error(s) accured. */ public boolean getResult() { return (myTask.getFailed() == 0) && result; } }
INTEGRATION: CWS qwizards2 (1.2.2); FILE MERGED 2004/07/19 13:36:53 rpiterman 1.2.2.1: WebWizard spec revision removed fav icon added ZIP delete removed directory delete changed imports changed static calls to be nicer
wizards/com/sun/star/wizards/web/Process.java
INTEGRATION: CWS qwizards2 (1.2.2); FILE MERGED 2004/07/19 13:36:53 rpiterman 1.2.2.1: WebWizard spec revision removed fav icon added ZIP delete removed directory delete changed imports changed static calls to be nicer
<ide><path>izards/com/sun/star/wizards/web/Process.java <ide> * <ide> * $RCSfile: Process.java,v $ <ide> * <del> * $Revision: 1.2 $ <del> * <del> * last change: $Author: kz $ $Date: 2004-05-19 13:12:58 $ <add> * $Revision: 1.3 $ <add> * <add> * last change: $Author: obo $ $Date: 2004-09-08 14:12:54 $ <ide> * <ide> * The Contents of this file are made available subject to the terms of <ide> * either of the following licenses <ide> */ <ide> package com.sun.star.wizards.web; <ide> <del>import java.io.*; <add>import java.io.File; <add>import java.io.IOException; <ide> import java.util.Hashtable; <ide> import java.util.Iterator; <ide> import java.util.Map; <ide> <ide> import javax.xml.transform.Templates; <ide> import javax.xml.transform.Transformer; <del>//import javax.xml.transform.dom.DOMResult; <ide> import javax.xml.transform.dom.DOMSource; <ide> import javax.xml.transform.stream.StreamResult; <ide> <del>import org.w3c.dom.*; <add>import org.w3c.dom.Document; <ide> <ide> import com.sun.star.lang.XMultiServiceFactory; <del> <del>import com.sun.star.wizards.common.*; <del>import com.sun.star.wizards.ui.event.*; <del>import com.sun.star.wizards.web.data.*; <add>import com.sun.star.wizards.common.ConfigSet; <add>import com.sun.star.wizards.common.FileAccess; <add>import com.sun.star.wizards.common.UCB; <add>import com.sun.star.wizards.ui.event.Task; <add>import com.sun.star.wizards.web.data.CGContent; <add>import com.sun.star.wizards.web.data.CGDocument; <add>import com.sun.star.wizards.web.data.CGExporter; <add>import com.sun.star.wizards.web.data.CGLayout; <add>import com.sun.star.wizards.web.data.CGPublish; <add>import com.sun.star.wizards.web.data.CGSettings; <ide> import com.sun.star.wizards.web.export.Exporter; <ide> <ide> <ide> String key = (String)set.getKey(p); <ide> task.setSubtaskName(key); <ide> <add> if (key.equals(ZIP_PUBLISHER)) <add> fileAccess.delete(p.cp_URL); <add> <ide> if (!publish(dir, p, ucb, task)) { <ide> return false; <ide> } <ide> */ <ide> private boolean publish(String dir,CGPublish publish,UCB copy,Task task) { <ide> try { <del> copy.deleteDirContent(publish.url); <add> //copy.deleteDirContent(publish.url); <ide> task.advance(true); <ide> copy.copy(dir,publish.url); <ide> task.advance(true); <ide> String filesPath = fileAccess.getURL( <ide> FileAccess.connectURLs(settings.workPath , "layouts/"), layout.cp_FSName ); <ide> ucb.copy(filesPath,targetDir,new ExtensionVerifier("xsl")); <del> String icon = settings.cp_DefaultSession.cp_GeneralInfo.cp_Icon; <del> if ((icon!=null) && (!icon.equals(""))) { <del> String icon2 = FileAccess.connectURLs(targetDir , "images/favicon.ico"); <del> fileAccess.copy(icon,icon2); <del> } <add> <ide> } <ide> <ide> /** <ide> contentDir = fileAccess.createNewDir(dir,content.cp_Name); <ide> if (contentDir == null || contentDir.equals("") ) <ide> throw new IOException("Directory " + dir + " could not be created."); <del> content.dirName = fileAccess.getFilename(contentDir); <add> content.dirName = FileAccess.getFilename(contentDir); <ide> <ide> task.advance(true,TASK_EXPORT_DOCUMENTS); <ide> toPerform--; <ide> * and find an available filename which starts with <ide> * this filename, but with the new extension. 
(destExt) <ide> */ <del> String docFilename = fileAccess.getFilename(doc.cp_URL); <add> String docFilename = FileAccess.getFilename(doc.cp_URL); <ide> <ide> String docExt = FileAccess.getExtension(docFilename); <ide> String fn = doc.localFilename.substring(0,doc.localFilename.length()-docExt.length()-1); //filename without extension <ide> */ <ide> if (exporter.cp_OwnDirectory) { //+++ <ide> dir = fileAccess.createNewDir(dir, fn ); <del> doc.dirName = fileAccess.getFilename(dir); <add> doc.dirName = FileAccess.getFilename(dir); <ide> } <ide> <ide> /* <ide> * this will be used by the exporter, <ide> * and to generate the TOC. <ide> */ <del> doc.urlFilename = fileAccess.getFilename(file); <add> doc.urlFilename = FileAccess.getFilename(file); <ide> <ide> task.advance(true); <ide>
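Several hunks in the diff above replace instance-qualified calls such as fileAccess.getFilename(..) with the class-qualified FileAccess.getFilename(..), the "changed static calls to be nicer" item in the commit message. A small sketch of why that matters, using hypothetical names (PathUtil stands in for FileAccess):

// Hypothetical example, not part of the wizard code.
class PathUtil {
    static String filename(String url) {
        return url.substring(url.lastIndexOf('/') + 1);
    }
}

class StaticCallDemo {
    public static void main(String[] args) {
        PathUtil util = new PathUtil();
        // Compiles, but misleads: the call is resolved at compile time
        // from the declared type and never touches the instance.
        String before = util.filename("file:///tmp/site/index.html");
        // The form the diff prefers: qualify with the class name.
        String after = PathUtil.filename("file:///tmp/site/index.html");
        System.out.println(before.equals(after)); // true
    }
}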
Java
apache-2.0
7540801ef1e9cf3be3cdd33a31cf1e5410d6da76
0
hurricup/intellij-community,semonte/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,xfournet/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,allotria/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,asedunov/intellij-community,hurricup/intellij-community,xfournet/intellij-community,da1z/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,allotria/intellij-community,ibinti/intellij-community,semonte/intellij-community,semonte/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,allotria/intellij-community,vvv1559/intellij-community,allotria/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,hurricup/intellij-community,apixandru/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,FHannes/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,vvv1559/intellij-community,allotria/intellij-community,signed/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,apixandru/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,semonte/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,mglukhikh/intellij-commun
ity,hurricup/intellij-community,allotria/intellij-community,xfournet/intellij-community,FHannes/intellij-community,retomerz/intellij-community,asedunov/intellij-community,apixandru/intellij-community,retomerz/intellij-community,allotria/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,apixandru/intellij-community,da1z/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,allotria/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,signed/intellij-community,FHannes/intellij-community,semonte/intellij-community,signed/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,da1z/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,FHannes/intellij-community,fitermay/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,asedunov/intellij-community,xfournet/intellij-community,semonte/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,fitermay/intellij-community,xfournet/intellij-community,retomerz/intellij-community,fitermay/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,signed/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,da1z/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,signed/intellij-community,signed/intellij-community,ibinti/intellij-community,signed/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,signed/intellij-community,signed/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,FHannes/intellij-community,asedunov/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,hurricup/intellij-community,asedunov/intellij-community,asedunov/intellij-community,FHannes/intellij-community,hurricup/intellij-community,fitermay/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,ide
a4bsd/idea4bsd,retomerz/intellij-community,hurricup/intellij-community,da1z/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,hurricup/intellij-community,xfournet/intellij-community,semonte/intellij-community,FHannes/intellij-community,da1z/intellij-community,vvv1559/intellij-community,da1z/intellij-community,salguarnieri/intellij-community
package com.jetbrains.jsonSchema.impl; import com.intellij.testFramework.PlatformTestUtil; import com.intellij.util.concurrency.Semaphore; import org.junit.Assert; import org.junit.Test; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; /** * @author Irina.Chernushina on 8/29/2015. */ public class JsonSchemaReadTest { @org.junit.Test public void testReadSchemaItself() throws Exception { final File file = new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/schema.json"); Assert.assertTrue(file.exists()); final JsonSchemaReader reader = new JsonSchemaReader(); final JsonSchemaObject read = reader.read(new FileReader(file)); Assert.assertEquals("http://json-schema.org/draft-04/schema#", read.getId()); Assert.assertTrue(read.getDefinitions().containsKey("positiveInteger")); Assert.assertTrue(read.getProperties().containsKey("multipleOf")); Assert.assertTrue(read.getProperties().containsKey("type")); Assert.assertTrue(read.getProperties().containsKey("additionalProperties")); Assert.assertEquals(2, read.getProperties().get("additionalItems").getAnyOf().size()); Assert.assertEquals("#", read.getProperties().get("additionalItems").getAnyOf().get(1).getRef()); final JsonSchemaObject required = read.getProperties().get("required"); Assert.assertEquals(JsonSchemaType._array, required.getType()); Assert.assertEquals(1, required.getMinItems().intValue()); Assert.assertEquals(JsonSchemaType._string, required.getItemsSchema().getType()); final JsonSchemaObject minLength = read.getProperties().get("minLength"); Assert.assertNotNull(minLength.getAllOf()); final List<JsonSchemaObject> minLengthAllOf = minLength.getAllOf(); boolean haveIntegerType = false; Integer defaultValue = null; Integer minValue = null; for (JsonSchemaObject object : minLengthAllOf) { haveIntegerType |= JsonSchemaType._integer.equals(object.getType()); if (object.getDefault() instanceof Number) { defaultValue = ((Number)object.getDefault()).intValue(); } if (object.getMinimum() != null) { minValue = object.getMinimum().intValue(); } } Assert.assertTrue(haveIntegerType); Assert.assertEquals(0, defaultValue.intValue()); Assert.assertEquals(0, minValue.intValue()); } @Test public void testReadSchemaWithCustomTags() throws Exception { final File file = new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/withNotesCustomTag.json"); Assert.assertTrue(file.exists()); final JsonSchemaReader reader = new JsonSchemaReader(); final JsonSchemaObject read = reader.read(new FileReader(file)); Assert.assertTrue(read.getDefinitions().get("common").getProperties().containsKey("id")); } @Test public void testReadSchemaWithWrongRequired() throws Exception { testSchemaReadNotHung(new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/WithWrongRequired.json")); } @Test public void testReadSchemaWithWrongItems() throws Exception { testSchemaReadNotHung(new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/WithWrongItems.json")); } private static void testSchemaReadNotHung(final File file) throws IOException { // because of threading if (Runtime.getRuntime().availableProcessors() < 2) return; Assert.assertTrue(file.exists()); final AtomicBoolean done = new AtomicBoolean(); final AtomicReference<IOException> error = new AtomicReference<>(); final Semaphore semaphore = new 
Semaphore(); semaphore.down(); final Thread thread = new Thread(new Runnable() { @Override public void run() { final JsonSchemaReader reader = new JsonSchemaReader(); try { reader.read(new FileReader(file)); done.set(true); } catch (IOException e) { error.set(e); } finally { semaphore.up(); } } }, "read test json schema " + file.getName()); try { thread.start(); semaphore.waitFor(TimeUnit.SECONDS.toMillis(120)); if (error.get() != null) throw error.get(); Assert.assertTrue("Reading test schema hung!", done.get()); } finally { thread.interrupt(); } } }
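testSchemaReadNotHung above runs the parse on a watchdog-guarded worker thread: a semaphore is lowered before the thread starts, raised in finally, waited on with a timeout, and the worker is interrupted in finally so a hung parser cannot leak. Below is a generalized sketch of the same pattern written against plain java.util.concurrent (the Semaphore in the test is IntelliJ's internal utility, not java.util.concurrent.Semaphore); runWithTimeout is a hypothetical helper, not platform API.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

final class Watchdog {
    static void runWithTimeout(Runnable work, long seconds) throws Exception {
        CountDownLatch done = new CountDownLatch(1);
        AtomicReference<Throwable> error = new AtomicReference<>();
        Thread worker = new Thread(() -> {
            try {
                work.run();
            } catch (Throwable t) {
                error.set(t);
            } finally {
                done.countDown(); // always release the waiter
            }
        }, "watchdog-worker");
        try {
            worker.start();
            if (!done.await(seconds, TimeUnit.SECONDS)) {
                throw new AssertionError("work hung for " + seconds + "s");
            }
            if (error.get() != null) {
                throw new Exception(error.get());
            }
        } finally {
            worker.interrupt(); // never leak the thread, hung or not
        }
    }
}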
json/tests/test/com/jetbrains/jsonSchema/impl/JsonSchemaReadTest.java
package com.jetbrains.jsonSchema.impl; import com.intellij.testFramework.PlatformTestUtil; import com.intellij.util.concurrency.Semaphore; import org.junit.Assert; import org.junit.Test; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; /** * @author Irina.Chernushina on 8/29/2015. */ public class JsonSchemaReadTest { @org.junit.Test public void testReadSchemaItself() throws Exception { final File file = new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/schema.json"); Assert.assertTrue(file.exists()); final JsonSchemaReader reader = new JsonSchemaReader(); final JsonSchemaObject read = reader.read(new FileReader(file)); Assert.assertEquals("http://json-schema.org/draft-04/schema#", read.getId()); Assert.assertTrue(read.getDefinitions().containsKey("positiveInteger")); Assert.assertTrue(read.getProperties().containsKey("multipleOf")); Assert.assertTrue(read.getProperties().containsKey("type")); Assert.assertTrue(read.getProperties().containsKey("additionalProperties")); Assert.assertEquals(2, read.getProperties().get("additionalItems").getAnyOf().size()); Assert.assertEquals("#", read.getProperties().get("additionalItems").getAnyOf().get(1).getRef()); final JsonSchemaObject required = read.getProperties().get("required"); Assert.assertEquals(JsonSchemaType._array, required.getType()); Assert.assertEquals(1, required.getMinItems().intValue()); Assert.assertEquals(JsonSchemaType._string, required.getItemsSchema().getType()); final JsonSchemaObject minLength = read.getProperties().get("minLength"); Assert.assertNotNull(minLength.getAllOf()); final List<JsonSchemaObject> minLengthAllOf = minLength.getAllOf(); boolean haveIntegerType = false; Integer defaultValue = null; Integer minValue = null; for (JsonSchemaObject object : minLengthAllOf) { haveIntegerType |= JsonSchemaType._integer.equals(object.getType()); if (object.getDefault() instanceof Number) { defaultValue = ((Number)object.getDefault()).intValue(); } if (object.getMinimum() != null) { minValue = object.getMinimum().intValue(); } } Assert.assertTrue(haveIntegerType); Assert.assertEquals(0, defaultValue.intValue()); Assert.assertEquals(0, minValue.intValue()); } @Test public void testReadSchemaWithCustomTags() throws Exception { final File file = new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/withNotesCustomTag.json"); Assert.assertTrue(file.exists()); final JsonSchemaReader reader = new JsonSchemaReader(); final JsonSchemaObject read = reader.read(new FileReader(file)); Assert.assertTrue(read.getDefinitions().get("common").getProperties().containsKey("id")); } @Test public void testReadSchemaWithWrongRequired() throws Exception { testSchemaReadNotHung(new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/withWrongRequired.json")); } @Test public void testReadSchemaWithWrongItems() throws Exception { testSchemaReadNotHung(new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/withWrongItems.json")); } private static void testSchemaReadNotHung(final File file) throws IOException { Assert.assertTrue(file.exists()); final AtomicBoolean done = new AtomicBoolean(); final AtomicReference<IOException> error = new AtomicReference<>(); final Semaphore semaphore = new Semaphore(); semaphore.down(); new Thread(new Runnable() { @Override public void run() { 
final JsonSchemaReader reader = new JsonSchemaReader(); try { reader.read(new FileReader(file)); done.set(true); } catch (IOException e) { error.set(e); } finally { semaphore.up(); } } }, "read test json schema " + file.getName()).start(); semaphore.waitFor(TimeUnit.SECONDS.toMillis(60)); if (error.get() != null) throw error.get(); Assert.assertTrue("Reading test schema hung!", done.get()); } }
fix test, sorry (filenames letters case) + always kill the second thread
json/tests/test/com/jetbrains/jsonSchema/impl/JsonSchemaReadTest.java
fix test, sorry (filenames letters case) + always kill the second thread
<ide><path>son/tests/test/com/jetbrains/jsonSchema/impl/JsonSchemaReadTest.java <ide> <ide> @Test <ide> public void testReadSchemaWithWrongRequired() throws Exception { <del> testSchemaReadNotHung(new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/withWrongRequired.json")); <add> testSchemaReadNotHung(new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/WithWrongRequired.json")); <ide> } <ide> <ide> @Test <ide> public void testReadSchemaWithWrongItems() throws Exception { <del> testSchemaReadNotHung(new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/withWrongItems.json")); <add> testSchemaReadNotHung(new File(PlatformTestUtil.getCommunityPath(), "json/tests/testData/jsonSchema/WithWrongItems.json")); <ide> } <ide> <ide> private static void testSchemaReadNotHung(final File file) throws IOException { <add> // because of threading <add> if (Runtime.getRuntime().availableProcessors() < 2) return; <add> <ide> Assert.assertTrue(file.exists()); <ide> <ide> final AtomicBoolean done = new AtomicBoolean(); <ide> final AtomicReference<IOException> error = new AtomicReference<>(); <ide> final Semaphore semaphore = new Semaphore(); <ide> semaphore.down(); <del> new Thread(new Runnable() { <add> final Thread thread = new Thread(new Runnable() { <ide> @Override <ide> public void run() { <ide> final JsonSchemaReader reader = new JsonSchemaReader(); <ide> } <ide> catch (IOException e) { <ide> error.set(e); <del> } finally { <add> } <add> finally { <ide> semaphore.up(); <ide> } <ide> } <del> }, "read test json schema " + file.getName()).start(); <del> semaphore.waitFor(TimeUnit.SECONDS.toMillis(60)); <del> if (error.get() != null) throw error.get(); <del> Assert.assertTrue("Reading test schema hung!", done.get()); <add> }, "read test json schema " + file.getName()); <add> try { <add> thread.start(); <add> semaphore.waitFor(TimeUnit.SECONDS.toMillis(120)); <add> if (error.get() != null) throw error.get(); <add> Assert.assertTrue("Reading test schema hung!", done.get()); <add> } finally { <add> thread.interrupt(); <add> } <ide> } <ide> }
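The first two hunks in the diff restore the exact case of the test-data filenames (the "filenames letters case" part of the commit message): a path like withWrongRequired.json resolves on case-insensitive filesystems (NTFS, default HFS+) even though the checked-in file is WithWrongRequired.json, but on Linux only the exact-case path exists. A small sketch of the pitfall, with illustrative paths:

import java.io.File;

// Hypothetical paths illustrating the case-sensitivity mismatch the
// commit fixes; only the exact-case name exists on disk.
class CaseCheck {
    public static void main(String[] args) {
        File wrongCase = new File("testData/withWrongRequired.json");
        File rightCase = new File("testData/WithWrongRequired.json");
        // On Linux only rightCase.exists() is true; on case-insensitive
        // filesystems both are, which is how the mismatch survived
        // local runs but broke elsewhere.
        System.out.println(wrongCase.exists() + " / " + rightCase.exists());
    }
}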
Java
apache-2.0
2e6a46f7e729a94b61a961b624e5e3acea6da4c3
0
polopoly/typica
// // typica - A client library for Amazon Web Services // Copyright (C) 2007,2008,2009 Xerox Corporation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package com.xerox.amazonws.ec2; import java.io.IOException; import java.net.MalformedURLException; import java.util.*; import javax.xml.bind.JAXBException; import com.xerox.amazonws.typica.jaxb.*; import org.apache.commons.codec.binary.Base64; import org.apache.commons.httpclient.HttpMethodBase; import org.apache.commons.httpclient.methods.GetMethod; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.xerox.amazonws.common.AWSException; import com.xerox.amazonws.common.AWSQueryConnection; import com.xerox.amazonws.typica.jaxb.AllocateAddressResponse; import com.xerox.amazonws.typica.jaxb.AssociateAddressResponse; import com.xerox.amazonws.typica.jaxb.AttachmentSetResponseType; import com.xerox.amazonws.typica.jaxb.AttachmentSetItemResponseType; import com.xerox.amazonws.typica.jaxb.AttachVolumeResponse; import com.xerox.amazonws.typica.jaxb.AvailabilityZoneItemType; import com.xerox.amazonws.typica.jaxb.AvailabilityZoneMessageType; import com.xerox.amazonws.typica.jaxb.AvailabilityZoneSetType; import com.xerox.amazonws.typica.jaxb.AuthorizeSecurityGroupIngressResponse; import com.xerox.amazonws.typica.jaxb.BlockDeviceMappingType; import com.xerox.amazonws.typica.jaxb.BlockDeviceMappingItemType; import com.xerox.amazonws.typica.jaxb.BundleInstanceResponse; import com.xerox.amazonws.typica.jaxb.BundleInstanceTaskType; import com.xerox.amazonws.typica.jaxb.CancelBundleTaskResponse; import com.xerox.amazonws.typica.jaxb.CreateImageResponse; import com.xerox.amazonws.typica.jaxb.CreateKeyPairResponse; import com.xerox.amazonws.typica.jaxb.CreateSnapshotResponse; import com.xerox.amazonws.typica.jaxb.CreateVolumeResponse; import com.xerox.amazonws.typica.jaxb.ConfirmProductInstanceResponse; import com.xerox.amazonws.typica.jaxb.CreateSecurityGroupResponse; import com.xerox.amazonws.typica.jaxb.DeleteKeyPairResponse; import com.xerox.amazonws.typica.jaxb.DeleteSecurityGroupResponse; import com.xerox.amazonws.typica.jaxb.DeleteSnapshotResponse; import com.xerox.amazonws.typica.jaxb.DeleteVolumeResponse; import com.xerox.amazonws.typica.jaxb.DeregisterImageResponse; import com.xerox.amazonws.typica.jaxb.DescribeAddressesResponse; import com.xerox.amazonws.typica.jaxb.DescribeAddressesResponseInfoType; import com.xerox.amazonws.typica.jaxb.DescribeAddressesResponseItemType; import com.xerox.amazonws.typica.jaxb.DescribeAvailabilityZonesResponse; import com.xerox.amazonws.typica.jaxb.DescribeBundleTasksResponse; import com.xerox.amazonws.typica.jaxb.DescribeBundleTasksItemType; import com.xerox.amazonws.typica.jaxb.DescribeImageAttributeResponse; import com.xerox.amazonws.typica.jaxb.DescribeImagesResponse; import com.xerox.amazonws.typica.jaxb.DescribeImagesResponseInfoType; import com.xerox.amazonws.typica.jaxb.DescribeImagesResponseItemType; import 
com.xerox.amazonws.typica.jaxb.DescribeInstancesResponse; import com.xerox.amazonws.typica.jaxb.DescribeReservedInstancesResponse; import com.xerox.amazonws.typica.jaxb.DescribeReservedInstancesResponseSetItemType; import com.xerox.amazonws.typica.jaxb.DescribeReservedInstancesOfferingsResponse; import com.xerox.amazonws.typica.jaxb.DescribeReservedInstancesOfferingsResponseSetItemType; import com.xerox.amazonws.typica.jaxb.DescribeSnapshotsResponse; import com.xerox.amazonws.typica.jaxb.DescribeSnapshotsSetResponseType; import com.xerox.amazonws.typica.jaxb.DescribeSnapshotsSetItemResponseType; import com.xerox.amazonws.typica.jaxb.DescribeVolumesResponse; import com.xerox.amazonws.typica.jaxb.DescribeVolumesSetResponseType; import com.xerox.amazonws.typica.jaxb.DescribeVolumesSetItemResponseType; import com.xerox.amazonws.typica.jaxb.DescribeKeyPairsResponse; import com.xerox.amazonws.typica.jaxb.DescribeKeyPairsResponseInfoType; import com.xerox.amazonws.typica.jaxb.DescribeKeyPairsResponseItemType; import com.xerox.amazonws.typica.jaxb.DescribeRegionsResponse; import com.xerox.amazonws.typica.jaxb.DescribeSecurityGroupsResponse; import com.xerox.amazonws.typica.jaxb.DetachVolumeResponse; import com.xerox.amazonws.typica.jaxb.DisassociateAddressResponse; import com.xerox.amazonws.typica.jaxb.GetConsoleOutputResponse; import com.xerox.amazonws.typica.jaxb.GetPasswordDataResponse; import com.xerox.amazonws.typica.jaxb.GroupItemType; import com.xerox.amazonws.typica.jaxb.GroupSetType; import com.xerox.amazonws.typica.jaxb.InstanceStateChangeSetType; import com.xerox.amazonws.typica.jaxb.InstanceStateChangeType; import com.xerox.amazonws.typica.jaxb.IpPermissionSetType; /** * A Java wrapper for the EC2 web services API */ public class Jec2 extends AWSQueryConnection { private static Log logger = LogFactory.getLog(Jec2.class); /** * Initializes the ec2 service with your AWS login information. * * @param awsAccessId Your user key into AWS * @param awsSecretKey The secret string used to generate signatures for authentication. */ public Jec2(String awsAccessId, String awsSecretKey) { this(awsAccessId, awsSecretKey, true); } /** * Initializes the ec2 service with your AWS login information. * * @param awsAccessId Your user key into AWS * @param awsSecretKey The secret string used to generate signatures for authentication. * @param isSecure True if the data should be encrypted on the wire on the way to or from EC2. */ public Jec2(String awsAccessId, String awsSecretKey, boolean isSecure) { this(awsAccessId, awsSecretKey, isSecure, "ec2.amazonaws.com"); } /** * Initializes the ec2 service with your AWS login information. * * @param awsAccessId Your user key into AWS * @param awsSecretKey The secret string used to generate signatures for authentication. * @param isSecure True if the data should be encrypted on the wire on the way to or from EC2. * @param server Which host to connect to. Usually, this will be ec2.amazonaws.com */ public Jec2(String awsAccessId, String awsSecretKey, boolean isSecure, String server) { this(awsAccessId, awsSecretKey, isSecure, server, isSecure ? 443 : 80); } /** * Initializes the ec2 service with your AWS login information. * * @param awsAccessId Your user key into AWS * @param awsSecretKey The secret string used to generate signatures for authentication. * @param isSecure True if the data should be encrypted on the wire on the way to or from EC2. * @param server Which host to connect to.
Usually, this will be ec2.amazonaws.com * @param port Which port to use. */ public Jec2(String awsAccessId, String awsSecretKey, boolean isSecure, String server, int port) { super(awsAccessId, awsSecretKey, isSecure, server, port); ArrayList<String> vals = new ArrayList<String>(); vals.add("2009-11-30"); super.headers.put("Version", vals); } /** * Creates an AMI that uses an EBS root device. * * @param instanceId An instance's id ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @param name a name to associate with the image * @param description a descriptive string to attach to the image * @param noReboot normally false; if set to true, instance is not shutdown first. * NOTE: filesystem integrity isn't guaranteed when noReboot=true * @return image ID * @throws EC2Exception wraps checked exceptions */ public String createImage(String instanceId, String name, String description, boolean noReboot) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("InstanceId", instanceId); params.put("Name", name); if (description != null && !description.trim().equals("")) { params.put("Description", description); } if (noReboot) { params.put("NoReboot", "true"); } GetMethod method = new GetMethod(); try { CreateImageResponse response = makeRequestInt(method, "CreateImage", params, CreateImageResponse.class); return response.getImageId(); } finally { method.releaseConnection(); } } /** * Register an S3 based AMI. * * @param imageLocation An AMI path within S3. * @return A unique AMI ID that can be used to create and manage instances of this AMI. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public String registerImage(String imageLocation) throws EC2Exception { return registerImage(imageLocation, null, null, null, null, null, null, null); } /** * Register a snapshot as an EBS backed AMI * * @param imageLocation An AMI path within S3. * @return A unique AMI ID that can be used to create and manage instances of this AMI. 
* @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public String registerImage(String name, String description, String architecture, String kernelId, String ramdiskId, String rootDeviceName, List<BlockDeviceMapping> blockDeviceMappings) throws EC2Exception { return registerImage(null, name, description, architecture, kernelId, ramdiskId, rootDeviceName, blockDeviceMappings); } protected String registerImage(String imageLocation, String name, String description, String architecture, String kernelId, String ramdiskId, String rootDeviceName, List<BlockDeviceMapping> blockDeviceMappings) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); if (imageLocation != null && !imageLocation.trim().equals("")) { params.put("ImageLocation", imageLocation); } if (name != null && !name.trim().equals("")) { params.put("Name", name); } if (description != null && !description.trim().equals("")) { params.put("Description", description); } if (architecture != null && !architecture.trim().equals("")) { params.put("Architecture", architecture); } if (kernelId != null && !kernelId.trim().equals("")) { params.put("KernelId", kernelId); } if (ramdiskId != null && !ramdiskId.trim().equals("")) { params.put("RamdiskId", ramdiskId); } if (rootDeviceName != null && !rootDeviceName.trim().equals("")) { params.put("RootDeviceName", rootDeviceName); } if (blockDeviceMappings != null) { for(int i = 0; i < blockDeviceMappings.size(); i++) { BlockDeviceMapping bdm = blockDeviceMappings.get(i); params.put("BlockDeviceMapping." + (i + 1) + ".VirtualName", bdm.getVirtualName()); params.put("BlockDeviceMapping." + (i + 1) + ".DeviceName", bdm.getDeviceName()); } } GetMethod method = new GetMethod(); try { RegisterImageResponse response = makeRequestInt(method, "RegisterImage", params, RegisterImageResponse.class); return response.getImageId(); } finally { method.releaseConnection(); } } /** * Deregister the given AMI. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public void deregisterImage(String imageId) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("ImageId", imageId); GetMethod method = new GetMethod(); try { DeregisterImageResponse response = makeRequestInt(method, "DeregisterImage", params, DeregisterImageResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not deregister image : "+imageId+". No reason given."); } } finally { method.releaseConnection(); } } /** * Describe the given AMIs. * * @param imageIds An array of AMI IDs as returned by {@link #registerImage(String)}. * @return A list of {@link ImageDescription} instances describing each AMI ID. * @throws EC2Exception wraps checked exceptions */ public List<ImageDescription> describeImages(String[] imageIds) throws EC2Exception { return describeImages(Arrays.asList(imageIds)); } /** * Describe the given AMIs. * * @param imageIds A list of AMI IDs as returned by {@link #registerImage(String)}. * @return A list of {@link ImageDescription} instances describing each AMI ID. 
* @throws EC2Exception wraps checked exceptions */ public List<ImageDescription> describeImages(List<String> imageIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<imageIds.size(); i++) { params.put("ImageId."+(i+1), imageIds.get(i)); } return describeImages(params); } /** * Describe the AMIs belonging to the supplied owners. * * @param owners A list of owners. * @return A list of {@link ImageDescription} instances describing each AMI ID. * @throws EC2Exception wraps checked exceptions */ public List<ImageDescription> describeImagesByOwner(List<String> owners) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<owners.size(); i++) { params.put("Owner."+(i+1), owners.get(i)); } return describeImages(params); } /** * Describe the AMIs executable by supplied users. * * @param users A list of users. * @return A list of {@link ImageDescription} instances describing each AMI ID. * @throws EC2Exception wraps checked exceptions */ public List<ImageDescription> describeImagesByExecutability(List<String> users) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<users.size(); i++) { params.put("ExecutableBy."+(i+1), users.get(i)); } return describeImages(params); } /** * Describe the AMIs that match the intersection of the criteria supplied * * @param imageIds A list of AMI IDs as returned by {@link #registerImage(String)}. * @param owners A list of owners. * @param users A list of users. * @return A list of {@link ImageDescription} instances describing each AMI ID. * @throws EC2Exception wraps checked exceptions */ public List<ImageDescription> describeImages(List<String> imageIds, List<String> owners, List<String> users) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<imageIds.size(); i++) { params.put("ImageId."+(i+1), imageIds.get(i)); } for (int i=0 ; i<owners.size(); i++) { params.put("Owner."+(i+1), owners.get(i)); } for (int i=0 ; i<users.size(); i++) { params.put("ExecutableBy."+(i+1), users.get(i)); } return describeImages(params); } protected List<ImageDescription> describeImages(Map<String, String> params) throws EC2Exception { GetMethod method = new GetMethod(); try { DescribeImagesResponse response = makeRequestInt(method, "DescribeImages", params, DescribeImagesResponse.class); List<ImageDescription> result = new ArrayList<ImageDescription>(); DescribeImagesResponseInfoType set = response.getImagesSet(); Iterator set_iter = set.getItems().iterator(); while (set_iter.hasNext()) { DescribeImagesResponseItemType item = (DescribeImagesResponseItemType) set_iter .next(); ArrayList<String> codes = new ArrayList<String>(); ProductCodesSetType code_set = item.getProductCodes(); if (code_set != null) { for (ProductCodesSetItemType code : code_set.getItems()) { codes.add(code.getProductCode()); } } result.add(new ImageDescription(item.getImageId(), item.getImageLocation(), item.getImageOwnerId(), item.getImageState(), item.isIsPublic(), codes, item.getArchitecture(), item.getImageType(), item.getKernelId(), item.getRamdiskId(), item.getPlatform())); } return result; } finally { method.releaseConnection(); } } /** * Requests reservation of a number of instances. * <p> * This will begin launching those instances for which a reservation was * successfully obtained. * <p> * If less than <code>minCount</code> instances are available no instances * will be reserved. 
* <p> * NOTE: this method defaults to the AWS desired "public" addressing type. * NOTE: this method defaults to the small(traditional) instance type. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @param minCount The minimum number of instances to attempt to reserve. * @param maxCount The maximum number of instances to attempt to reserve. * @param groupSet A (possibly empty) set of security group definitions. * @param userData User supplied data that will be made available to the instance(s) * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that * have been reserved. * @throws EC2Exception wraps checked exceptions */ public ReservationDescription runInstances(String imageId, int minCount, int maxCount, List<String> groupSet, String userData, String keyName) throws EC2Exception { return runInstances(imageId, minCount, maxCount, groupSet, userData, keyName, true, InstanceType.DEFAULT); } /** * Requests reservation of a number of instances. * <p> * This will begin launching those instances for which a reservation was * successfully obtained. * <p> * If less than <code>minCount</code> instances are available no instances * will be reserved. * NOTE: this method defaults to the small(traditional) instance type. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @param minCount The minimum number of instances to attempt to reserve. * @param maxCount The maximum number of instances to attempt to reserve. * @param groupSet A (possibly empty) set of security group definitions. * @param userData User supplied data that will be made available to the instance(s) * @param publicAddr sets addressing mode to public * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that * have been reserved. * @throws EC2Exception wraps checked exceptions */ public ReservationDescription runInstances(String imageId, int minCount, int maxCount, List<String> groupSet, String userData, String keyName, boolean publicAddr) throws EC2Exception { return runInstances(imageId, minCount, maxCount, groupSet, userData, keyName, publicAddr, InstanceType.DEFAULT); } /** * Requests reservation of a number of instances. * <p> * This will begin launching those instances for which a reservation was * successfully obtained. * <p> * If less than <code>minCount</code> instances are available no instances * will be reserved. * NOTE: this method defaults to the AWS desired "public" addressing type. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @param minCount The minimum number of instances to attempt to reserve. * @param maxCount The maximum number of instances to attempt to reserve. * @param groupSet A (possibly empty) set of security group definitions. * @param userData User supplied data that will be made available to the instance(s) * @param type instance type * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that * have been reserved. * @throws EC2Exception wraps checked exceptions */ public ReservationDescription runInstances(String imageId, int minCount, int maxCount, List<String> groupSet, String userData, String keyName, InstanceType type) throws EC2Exception { return runInstances(imageId, minCount, maxCount, groupSet, userData, keyName, true, type); } /** * Requests reservation of a number of instances. * <p> * This will begin launching those instances for which a reservation was * successfully obtained. 
* <p> * If less than <code>minCount</code> instances are available no instances * will be reserved. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @param minCount The minimum number of instances to attempt to reserve. * @param maxCount The maximum number of instances to attempt to reserve. * @param groupSet A (possibly empty) set of security group definitions. * @param userData User supplied data that will be made available to the instance(s) * @param publicAddr sets addressing mode to public * @param type instance type * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that * have been reserved. * @throws EC2Exception wraps checked exceptions */ public ReservationDescription runInstances(String imageId, int minCount, int maxCount, List<String> groupSet, String userData, String keyName, boolean publicAddr, InstanceType type) throws EC2Exception { return runInstances(imageId, minCount, maxCount, groupSet, userData, keyName, publicAddr, type, null, null, null, null); } /** * Requests reservation of a number of instances. * <p> * This will begin launching those instances for which a reservation was * successfully obtained. * <p> * If less than <code>minCount</code> instances are available no instances * will be reserved. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @param minCount The minimum number of instances to attempt to reserve. * @param maxCount The maximum number of instances to attempt to reserve. * @param groupSet A (possibly empty) set of security group definitions. * @param userData User supplied data that will be made available to the instance(s) * @param publicAddr sets addressing mode to public * @param type instance type * @param availabilityZone the zone in which to launch the instance(s) * @param kernelId id of the kernel with which to launch the instance(s) * @param ramdiskId id of the RAM disk with which to launch the instance(s) * @param blockDeviceMappings mappings of virtual to device names * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that * have been reserved. * @throws EC2Exception wraps checked exceptions */ public ReservationDescription runInstances(String imageId, int minCount, int maxCount, List<String> groupSet, String userData, String keyName, boolean publicAddr, InstanceType type, String availabilityZone, String kernelId, String ramdiskId, List<BlockDeviceMapping> blockDeviceMappings) throws EC2Exception { LaunchConfiguration lc = new LaunchConfiguration(imageId); lc.setMinCount(minCount); lc.setMaxCount(maxCount); lc.setSecurityGroup(groupSet); if (userData != null) { lc.setUserData(userData.getBytes()); } lc.setKeyName(keyName); lc.setInstanceType(type); lc.setAvailabilityZone(availabilityZone); lc.setKernelId(kernelId); lc.setRamdiskId(ramdiskId); lc.setBlockDevicemappings(blockDeviceMappings); lc.setPublicAddressing(publicAddr); return runInstances(lc); } /** * Requests reservation of a number of instances. * <p> * This will begin launching those instances for which a reservation was * successfully obtained. * <p> * If less than <code>minCount</code> instances are available no instances * will be reserved. * * @param lc object containing launch configuration * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that * have been reserved.
* @throws EC2Exception wraps checked exceptions */ public ReservationDescription runInstances(LaunchConfiguration lc) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("ImageId", lc.getImageId()); params.put("MinCount", "" + lc.getMinCount()); params.put("MaxCount", "" + lc.getMaxCount()); byte[] userData = lc.getUserData(); if (userData != null && userData.length > 0) { params.put("UserData", new String(Base64.encodeBase64(userData))); } params.put("AddressingType", lc.isPublicAddressing()?"public":"private"); String keyName = lc.getKeyName(); if (keyName != null && !keyName.trim().equals("")) { params.put("KeyName", keyName); } if (lc.getSecurityGroup() != null) { for(int i = 0; i < lc.getSecurityGroup().size(); i++) { params.put("SecurityGroup." + (i + 1), lc.getSecurityGroup().get(i)); } } params.put("InstanceType", lc.getInstanceType().getTypeId()); if (lc.getAvailabilityZone() != null && !lc.getAvailabilityZone().trim().equals("")) { params.put("Placement.AvailabilityZone", lc.getAvailabilityZone()); } if (lc.getKernelId() != null && !lc.getKernelId().trim().equals("")) { params.put("KernelId", lc.getKernelId()); } if (lc.getRamdiskId() != null && !lc.getRamdiskId().trim().equals("")) { params.put("RamdiskId", lc.getRamdiskId()); } if (lc.getBlockDevicemappings() != null) { for(int i = 0; i < lc.getBlockDevicemappings().size(); i++) { BlockDeviceMapping bdm = lc.getBlockDevicemappings().get(i); params.put("BlockDeviceMapping." + (i + 1) + ".VirtualName", bdm.getVirtualName()); params.put("BlockDeviceMapping." + (i + 1) + ".DeviceName", bdm.getDeviceName()); } } if (lc.isMonitoring()) { params.put("Monitoring.Enabled", "true"); } GetMethod method = new GetMethod(); try { RunInstancesResponse response = makeRequestInt(method, "RunInstances", params, RunInstancesResponse.class); ReservationDescription res = new ReservationDescription(response.getRequestId(), response.getOwnerId(), response.getReservationId()); GroupSetType grp_set = response.getGroupSet(); Iterator groups_iter = grp_set.getItems().iterator(); while (groups_iter.hasNext()) { GroupItemType rsp_item = (GroupItemType) groups_iter.next(); res.addGroup(rsp_item.getGroupId()); } RunningInstancesSetType set = response.getInstancesSet(); Iterator instances_iter = set.getItems().iterator(); while (instances_iter.hasNext()) { RunningInstancesItemType rsp_item = (RunningInstancesItemType) instances_iter .next(); res.addInstance(rsp_item.getImageId(), rsp_item.getInstanceId(), rsp_item.getPrivateDnsName(), rsp_item.getDnsName(), rsp_item.getInstanceState(), rsp_item.getReason(), rsp_item.getKeyName(), rsp_item.getLaunchTime().toGregorianCalendar(), InstanceType.getTypeFromString(rsp_item.getInstanceType()), rsp_item.getPlacement().getAvailabilityZone(), rsp_item.getKernelId(), rsp_item.getRamdiskId(), rsp_item.getPlatform(), rsp_item.getMonitoring().getState().equals("true"), rsp_item.getSubnetId(), rsp_item.getPrivateIpAddress(), rsp_item.getIpAddress()); } return res; } finally { method.releaseConnection(); } } /** * Starts a selection of stopped instances. * * @param instanceIds An array of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link InstanceStateChangeDescription} instances. 
* @throws EC2Exception wraps checked exceptions */ public List<InstanceStateChangeDescription> startInstances(String[] instanceIds) throws EC2Exception { return this.startInstances(Arrays.asList(instanceIds)); } /** * Starts a selection of stopped instances. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link InstanceStateChangeDescription} instances. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public List<InstanceStateChangeDescription> startInstances(List<String> instanceIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { StartInstancesResponse response = makeRequestInt(method, "StartInstances", params, StartInstancesResponse.class); List<InstanceStateChangeDescription> res = new ArrayList<InstanceStateChangeDescription>(); InstanceStateChangeSetType set = response.getInstancesSet(); Iterator instances_iter = set.getItems().iterator(); while (instances_iter.hasNext()) { InstanceStateChangeType rsp_item = (InstanceStateChangeType)instances_iter.next(); res.add(new InstanceStateChangeDescription( rsp_item.getInstanceId(), rsp_item.getPreviousState().getName(), rsp_item.getPreviousState().getCode(), rsp_item.getCurrentState().getName(), rsp_item.getCurrentState().getCode())); } return res; } finally { method.releaseConnection(); } } /** * Stops a selection of running instances. * * @param instanceIds An array of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @param force forces the instance to stop. bypasses filesystem flush. Use with caution! * @return A list of {@link InstanceStateChangeDescription} instances. * @throws EC2Exception wraps checked exceptions */ public List<InstanceStateChangeDescription> stopInstances(String[] instanceIds, boolean force) throws EC2Exception { return this.stopInstances(Arrays.asList(instanceIds), force); } /** * Stops a selection of running instances. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @param force forces the instance to stop. bypasses filesystem flush. Use with caution! * @return A list of {@link InstanceStateChangeDescription} instances. 
* @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public List<InstanceStateChangeDescription> stopInstances(List<String> instanceIds, boolean force) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { StopInstancesResponse response = makeRequestInt(method, "StopInstances", params, StopInstancesResponse.class); List<InstanceStateChangeDescription> res = new ArrayList<InstanceStateChangeDescription>(); InstanceStateChangeSetType set = response.getInstancesSet(); Iterator instances_iter = set.getItems().iterator(); while (instances_iter.hasNext()) { InstanceStateChangeType rsp_item = (InstanceStateChangeType)instances_iter.next(); res.add(new InstanceStateChangeDescription( rsp_item.getInstanceId(), rsp_item.getPreviousState().getName(), rsp_item.getPreviousState().getCode(), rsp_item.getCurrentState().getName(), rsp_item.getCurrentState().getCode())); } return res; } finally { method.releaseConnection(); } } /** * Terminates a selection of running instances. * * @param instanceIds An array of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link InstanceStateChangeDescription} instances. * @throws EC2Exception wraps checked exceptions */ public List<InstanceStateChangeDescription> terminateInstances(String[] instanceIds) throws EC2Exception { return this.terminateInstances(Arrays.asList(instanceIds)); } /** * Terminates a selection of running instances. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link InstanceStateChangeDescription} instances. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public List<InstanceStateChangeDescription> terminateInstances(List<String> instanceIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { TerminateInstancesResponse response = makeRequestInt(method, "TerminateInstances", params, TerminateInstancesResponse.class); List<InstanceStateChangeDescription> res = new ArrayList<InstanceStateChangeDescription>(); InstanceStateChangeSetType set = response.getInstancesSet(); Iterator instances_iter = set.getItems().iterator(); while (instances_iter.hasNext()) { InstanceStateChangeType rsp_item = (InstanceStateChangeType)instances_iter.next(); res.add(new InstanceStateChangeDescription( rsp_item.getInstanceId(), rsp_item.getPreviousState().getName(), rsp_item.getPreviousState().getCode(), rsp_item.getCurrentState().getName(), rsp_item.getCurrentState().getCode())); } return res; } finally { method.releaseConnection(); } } /** * Gets a list of running instances. * <p> * If the array of instance IDs is empty then a list of all instances owned * by the caller will be returned. Otherwise the list will contain * information for the requested instances only. * * @param instanceIds An array of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link com.xerox.amazonws.ec2.ReservationDescription} instances. 
* @throws EC2Exception wraps checked exceptions */ public List<ReservationDescription> describeInstances(String[] instanceIds) throws EC2Exception { return this.describeInstances(Arrays.asList(instanceIds)); } /** * Gets a list of running instances. * <p> * If the list of instance IDs is empty then a list of all instances owned * by the caller will be returned. Otherwise the list will contain * information for the requested instances only. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link com.xerox.amazonws.ec2.ReservationDescription} instances. * @throws EC2Exception wraps checked exceptions */ public List<ReservationDescription> describeInstances(List<String> instanceIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { DescribeInstancesResponse response = makeRequestInt(method, "DescribeInstances", params, DescribeInstancesResponse.class); List<ReservationDescription> result = new ArrayList<ReservationDescription>(); ReservationSetType res_set = response.getReservationSet(); for (ReservationInfoType item : res_set.getItems()) { ReservationDescription res = new ReservationDescription(response.getRequestId(), item.getOwnerId(), item.getReservationId()); GroupSetType grp_set = item.getGroupSet(); for (GroupItemType rsp_item : grp_set.getItems()) { res.addGroup(rsp_item.getGroupId()); } RunningInstancesSetType set = item.getInstancesSet(); for (RunningInstancesItemType rsp_item : set.getItems()) { res.addInstance(rsp_item.getImageId(), rsp_item.getInstanceId(), rsp_item.getPrivateDnsName(), rsp_item.getDnsName(), rsp_item.getInstanceState(), rsp_item.getReason(), rsp_item.getKeyName(), rsp_item.getLaunchTime().toGregorianCalendar(), InstanceType.getTypeFromString(rsp_item.getInstanceType()), rsp_item.getPlacement().getAvailabilityZone(), rsp_item.getKernelId(), rsp_item.getRamdiskId(), rsp_item.getPlatform(), rsp_item.getMonitoring().getState().equals("true"), rsp_item.getSubnetId(), rsp_item.getPrivateIpAddress(), rsp_item.getIpAddress()); } result.add(res); } return result; } finally { method.releaseConnection(); } } /** * Reboot a selection of running instances. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public void rebootInstances(String [] instanceIds) throws EC2Exception { this.rebootInstances(Arrays.asList(instanceIds)); } /** * Reboot a selection of running instances. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public void rebootInstances(List<String> instanceIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { RebootInstancesResponse response = makeRequestInt(method, "RebootInstances", params, RebootInstancesResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not reboot instances. No reason given."); } } finally { method.releaseConnection(); } } /** * Get an instance's console output. 
* * @param instanceId An instance's id ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return ({@link ConsoleOutput}) * @throws EC2Exception wraps checked exceptions */ public ConsoleOutput getConsoleOutput(String instanceId) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("InstanceId", instanceId); GetMethod method = new GetMethod(); try { GetConsoleOutputResponse response = makeRequestInt(method, "GetConsoleOutput", params, GetConsoleOutputResponse.class); return new ConsoleOutput(response.getRequestId(), response.getInstanceId(), response.getTimestamp().toGregorianCalendar(), new String(Base64.decodeBase64(response.getOutput().getBytes()))); } finally { method.releaseConnection(); } } /** * Get a Windows instance's admin password. * * @param instanceId An instance's id ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return password data * @throws EC2Exception wraps checked exceptions */ public String getPasswordData(String instanceId) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("InstanceId", instanceId); GetMethod method = new GetMethod(); try { GetPasswordDataResponse response = makeRequestInt(method, "GetPasswordData", params, GetPasswordDataResponse.class); return response.getPasswordData(); } finally { method.releaseConnection(); } } /** * Creates a security group. * * @param name The name of the security group. * @param desc The description of the security group. * @throws EC2Exception wraps checked exceptions */ public void createSecurityGroup(String name, String desc) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("GroupName", name); params.put("GroupDescription", desc); GetMethod method = new GetMethod(); try { CreateSecurityGroupResponse response = makeRequestInt(method, "CreateSecurityGroup", params, CreateSecurityGroupResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not create security group : "+name+". No reason given."); } } finally { method.releaseConnection(); } } /** * Deletes a security group. * * @param name The name of the security group. * @throws EC2Exception wraps checked exceptions */ public void deleteSecurityGroup(String name) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("GroupName", name); GetMethod method = new GetMethod(); try { DeleteSecurityGroupResponse response = makeRequestInt(method, "DeleteSecurityGroup", params, DeleteSecurityGroupResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not delete security group : "+name+". No reason given."); } } finally { method.releaseConnection(); } } /** * Gets a list of security groups and their associated permissions. * * @param groupNames An array of groups to describe. * @return A list of groups ({@link GroupDescription}. * @throws EC2Exception wraps checked exceptions */ public List<GroupDescription> describeSecurityGroups(String[] groupNames) throws EC2Exception { return describeSecurityGroups(Arrays.asList(groupNames)); } /** * Gets a list of security groups and their associated permissions. * * @param groupNames A list of groups to describe. * @return A list of groups ({@link GroupDescription}. 
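	 * <p>
	 * For example, assuming {@code ec2} is a configured client (the group name
	 * is illustrative):
	 * <pre>
	 * List&lt;GroupDescription&gt; groups =
	 *         ec2.describeSecurityGroups(Arrays.asList("default"));
	 * </pre>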
* @throws EC2Exception wraps checked exceptions */ public List<GroupDescription> describeSecurityGroups(List<String> groupNames) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<groupNames.size(); i++) { params.put("GroupName."+(i+1), groupNames.get(i)); } GetMethod method = new GetMethod(); try { DescribeSecurityGroupsResponse response = makeRequestInt(method, "DescribeSecurityGroups", params, DescribeSecurityGroupsResponse.class); List<GroupDescription> result = new ArrayList<GroupDescription>(); SecurityGroupSetType rsp_set = response.getSecurityGroupInfo(); Iterator set_iter = rsp_set.getItems().iterator(); while (set_iter.hasNext()) { SecurityGroupItemType item = (SecurityGroupItemType) set_iter .next(); GroupDescription group = new GroupDescription(item.getGroupName(), item.getGroupDescription(), item.getOwnerId()); IpPermissionSetType perms = item.getIpPermissions(); Iterator perm_iter = perms.getItems().iterator(); while (perm_iter.hasNext()) { IpPermissionType perm = (IpPermissionType) perm_iter.next(); GroupDescription.IpPermission group_perms = group .addPermission(perm.getIpProtocol(), perm.getFromPort(), perm.getToPort()); Iterator group_iter = perm.getGroups().getItems().iterator(); while (group_iter.hasNext()) { UserIdGroupPairType uid_group = (UserIdGroupPairType) group_iter .next(); group_perms.addUserGroupPair(uid_group.getUserId(), uid_group.getGroupName()); } Iterator iprange_iter = perm.getIpRanges().getItems().iterator(); while (iprange_iter.hasNext()) { IpRangeItemType range = (IpRangeItemType) iprange_iter .next(); group_perms.addIpRange(range.getCidrIp()); } } result.add(group); } return result; } finally { method.releaseConnection(); } } /** * Adds incoming permissions to a security group. * * @param groupName name of group to modify * @param secGroupName name of security group to authorize access to * @param secGroupOwnerId owner of security group to authorize access to * @throws EC2Exception wraps checked exceptions */ public void authorizeSecurityGroupIngress(String groupName, String secGroupName, String secGroupOwnerId) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("GroupName", groupName); params.put("SourceSecurityGroupOwnerId", secGroupOwnerId); params.put("SourceSecurityGroupName", secGroupName); GetMethod method = new GetMethod(); try { AuthorizeSecurityGroupIngressResponse response = makeRequestInt(method, "AuthorizeSecurityGroupIngress", params, AuthorizeSecurityGroupIngressResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not authorize security ingress : "+groupName+". No reason given."); } } finally { method.releaseConnection(); } } /** * Adds incoming permissions to a security group. * * @param groupName name of group to modify * @param ipProtocol protocol to authorize (tcp, udp, icmp) * @param fromPort bottom of port range to authorize * @param toPort top of port range to authorize * @param cidrIp CIDR IP range to authorize (i.e. 
0.0.0.0/0) * @throws EC2Exception wraps checked exceptions */ public void authorizeSecurityGroupIngress(String groupName, String ipProtocol, int fromPort, int toPort, String cidrIp) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("GroupName", groupName); params.put("IpProtocol", ipProtocol); params.put("FromPort", ""+fromPort); params.put("ToPort", ""+toPort); params.put("CidrIp", cidrIp); GetMethod method = new GetMethod(); try { AuthorizeSecurityGroupIngressResponse response = makeRequestInt(method, "AuthorizeSecurityGroupIngress", params, AuthorizeSecurityGroupIngressResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not authorize security ingress : "+groupName+". No reason given."); } } finally { method.releaseConnection(); } } /** * Revokes incoming permissions from a security group. * * @param groupName name of group to modify * @param secGroupName name of security group to revoke access from * @param secGroupOwnerId owner of security group to revoke access from * @throws EC2Exception wraps checked exceptions */ public void revokeSecurityGroupIngress(String groupName, String secGroupName, String secGroupOwnerId) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("GroupName", groupName); params.put("SourceSecurityGroupOwnerId", secGroupOwnerId); params.put("SourceSecurityGroupName", secGroupName); GetMethod method = new GetMethod(); try { RevokeSecurityGroupIngressResponse response = makeRequestInt(method, "RevokeSecurityGroupIngress", params, RevokeSecurityGroupIngressResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not revoke security ingress : "+groupName+". No reason given."); } } finally { method.releaseConnection(); } } /** * Revokes incoming permissions from a security group. * * @param groupName name of group to modify * @param ipProtocol protocol to revoke (tcp, udp, icmp) * @param fromPort bottom of port range to revoke * @param toPort top of port range to revoke * @param cidrIp CIDR IP range to revoke (i.e. 0.0.0.0/0) * @throws EC2Exception wraps checked exceptions */ public void revokeSecurityGroupIngress(String groupName, String ipProtocol, int fromPort, int toPort, String cidrIp) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("GroupName", groupName); params.put("IpProtocol", ipProtocol); params.put("FromPort", ""+fromPort); params.put("ToPort", ""+toPort); params.put("CidrIp", cidrIp); GetMethod method = new GetMethod(); try { RevokeSecurityGroupIngressResponse response = makeRequestInt(method, "RevokeSecurityGroupIngress", params, RevokeSecurityGroupIngressResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not revoke security ingress : "+groupName+". No reason given."); } } finally { method.releaseConnection(); } } /** * Creates a public/private keypair. * * @param keyName Name of the keypair. * @return A keypair description ({@link KeyPairInfo}). 
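	 * <p>
	 * Sketch (the key name is arbitrary); the returned material is the private
	 * key, which EC2 only hands out at creation time:
	 * <pre>
	 * KeyPairInfo kp = ec2.createKeyPair("my-keypair");  // arbitrary key name
	 * System.out.println(kp.getMaterial());
	 * </pre>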
* @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public KeyPairInfo createKeyPair(String keyName) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("KeyName", keyName); GetMethod method = new GetMethod(); try { CreateKeyPairResponse response = makeRequestInt(method, "CreateKeyPair", params, CreateKeyPairResponse.class); return new KeyPairInfo(response.getKeyName(), response.getKeyFingerprint(), response.getKeyMaterial()); } finally { method.releaseConnection(); } } /** * Lists public/private keypairs. * * @param keyIds An array of keypairs. * @return A list of keypair descriptions ({@link KeyPairInfo}). * @throws EC2Exception wraps checked exceptions */ public List<KeyPairInfo> describeKeyPairs(String[] keyIds) throws EC2Exception { return describeKeyPairs(Arrays.asList(keyIds)); } /** * Lists public/private keypairs. NOTE: the KeyPairInfo.getMaterial() method will return null * because this API call doesn't return the keypair material. * * @param keyIds A list of keypairs. * @return A list of keypair descriptions ({@link KeyPairInfo}). * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public List<KeyPairInfo> describeKeyPairs(List<String> keyIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<keyIds.size(); i++) { params.put("KeyName."+(i+1), keyIds.get(i)); } GetMethod method = new GetMethod(); try { DescribeKeyPairsResponse response = makeRequestInt(method, "DescribeKeyPairs", params, DescribeKeyPairsResponse.class); List<KeyPairInfo> result = new ArrayList<KeyPairInfo>(); DescribeKeyPairsResponseInfoType set = response.getKeySet(); Iterator set_iter = set.getItems().iterator(); while (set_iter.hasNext()) { DescribeKeyPairsResponseItemType item = (DescribeKeyPairsResponseItemType) set_iter.next(); result.add(new KeyPairInfo(item.getKeyName(), item.getKeyFingerprint(), null)); } return result; } finally { method.releaseConnection(); } } /** * Deletes a public/private keypair. * * @param keyName Name of the keypair. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public void deleteKeyPair(String keyName) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("KeyName", keyName); GetMethod method = new GetMethod(); try { DeleteKeyPairResponse response = makeRequestInt(method, "DeleteKeyPair", params, DeleteKeyPairResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not delete keypair : "+keyName+". No reason given."); } } finally { method.releaseConnection(); } } /** * Enumerates image list attribute operation types. */ public enum ImageListAttributeOperationType { add, remove } /** * Modifies an attribute by the given items with the given operation. * * @param imageId The ID of the AMI to modify the attributes for. * @param attribute The name of the attribute to change. * @param operationType The name of the operation to change. May be add or remove. 
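	 * <p>
	 * For instance, granting launch permission to another account (both ids
	 * below are placeholders):
	 * <pre>
	 * ImageListAttribute attr = new LaunchPermissionAttribute();
	 * attr.addImageListAttributeItem(
	 *         ImageListAttribute.ImageListAttributeItemType.userId,
	 *         "123456789012");                          // placeholder account id
	 * ec2.modifyImageAttribute("ami-12345678", attr,    // placeholder AMI id
	 *         Jec2.ImageListAttributeOperationType.add);
	 * </pre>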
* @throws EC2Exception wraps checked exceptions */ public void modifyImageAttribute(String imageId, ImageListAttribute attribute, ImageListAttributeOperationType operationType) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("ImageId", imageId); if (attribute.getType().equals(ImageAttribute.ImageAttributeType.launchPermission)) { params.put("Attribute", "launchPermission"); switch (operationType) { case add: params.put("OperationType", "add"); break; case remove: params.put("OperationType", "remove"); break; default: throw new IllegalArgumentException("Unknown attribute operation."); } } else if (attribute.getType().equals(ImageAttribute.ImageAttributeType.productCodes)) { params.put("Attribute", "productCodes"); } int gNum = 1; int iNum = 1; int pNum = 1; for(ImageListAttributeItem item : attribute.getImageListAttributeItems()) { switch (item.getType()) { case group: params.put("UserGroup."+gNum, item.getValue()); gNum++; break; case userId: params.put("UserId."+iNum, item.getValue()); iNum++; break; case productCode: params.put("ProductCode."+pNum, item.getValue()); pNum++; break; default: throw new IllegalArgumentException("Unknown item type."); } } GetMethod method = new GetMethod(); try { ModifyImageAttributeResponse response = makeRequestInt(method, "ModifyImageAttribute", params, ModifyImageAttributeResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not reset image attribute. No reason given."); } } finally { method.releaseConnection(); } } /** * Resets an attribute on an AMI. * * @param imageId The AMI to reset the attribute on. * @param imageAttribute The attribute type to reset. * @throws EC2Exception wraps checked exceptions */ public void resetImageAttribute(String imageId, ImageAttribute.ImageAttributeType imageAttribute) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("ImageId", imageId); if (imageAttribute.equals(ImageAttribute.ImageAttributeType.launchPermission)) { params.put("Attribute", "launchPermission"); } else if (imageAttribute.equals(ImageAttribute.ImageAttributeType.productCodes)) { throw new IllegalArgumentException("Cannot reset productCodes attribute"); } GetMethod method = new GetMethod(); try { ResetImageAttributeResponse response = makeRequestInt(method, "ResetImageAttribute", params, ResetImageAttributeResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not reset image attribute. No reason given."); } } finally { method.releaseConnection(); } } /** * Describes an attribute of an AMI. * * @param imageId The AMI for which the attribute is described. * @param imageAttribute The attribute type to describe. * @return An object containing the imageId and a list of list attribute item types and values. 
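	 * <p>
	 * For example (the AMI id is a placeholder):
	 * <pre>
	 * DescribeImageAttributeResult res = ec2.describeImageAttribute(
	 *         "ami-12345678", ImageAttribute.ImageAttributeType.launchPermission);
	 * </pre>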
* @throws EC2Exception wraps checked exceptions */ public DescribeImageAttributeResult describeImageAttribute(String imageId, ImageAttribute.ImageAttributeType imageAttribute) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("ImageId", imageId); if (imageAttribute.equals(ImageAttribute.ImageAttributeType.launchPermission)) { params.put("Attribute", "launchPermission"); } else if (imageAttribute.equals(ImageAttribute.ImageAttributeType.productCodes)) { params.put("Attribute", "productCodes"); } GetMethod method = new GetMethod(); try { DescribeImageAttributeResponse response = makeRequestInt(method, "DescribeImageAttribute", params, DescribeImageAttributeResponse.class); ImageListAttribute attribute = null; if (response.getLaunchPermission() != null) { LaunchPermissionListType list = response.getLaunchPermission(); attribute = new LaunchPermissionAttribute(); java.util.ListIterator i = list.getItems().listIterator(); while (i.hasNext()) { LaunchPermissionItemType item = (LaunchPermissionItemType) i.next(); if (item.getGroup() != null) { attribute.addImageListAttributeItem(ImageListAttribute.ImageListAttributeItemType.group, item.getGroup()); } else if (item.getUserId() != null) { attribute.addImageListAttributeItem(ImageListAttribute.ImageListAttributeItemType.userId, item.getUserId()); } } } else if (response.getProductCodes() != null) { ProductCodeListType list = response.getProductCodes(); attribute = new ProductCodesAttribute(); java.util.ListIterator i = list.getItems().listIterator(); while (i.hasNext()) { ProductCodeItemType item = (ProductCodeItemType) i.next(); if (item.getProductCode() != null) { attribute.addImageListAttributeItem(ImageListAttribute.ImageListAttributeItemType.productCode, item.getProductCode()); } } } ArrayList<String> codes = new ArrayList<String>(); ProductCodeListType set = response.getProductCodes(); if (set != null) { for (ProductCodeItemType code : set.getItems()) { codes.add(code.getProductCode()); } } NullableAttributeValueType val = response.getKernel(); String kernel = (val != null)?val.getValue():""; val = response.getRamdisk(); String ramdisk = (val != null)?val.getValue():""; ArrayList<BlockDeviceMapping> bdm = new ArrayList<BlockDeviceMapping>(); BlockDeviceMappingType bdmSet = response.getBlockDeviceMapping(); if (bdmSet != null) { for (BlockDeviceMappingItemType mapping : bdmSet.getItems()) { bdm.add(new BlockDeviceMapping(mapping.getVirtualName(), mapping.getDeviceName())); } } return new DescribeImageAttributeResult(response.getImageId(), attribute, codes, kernel, ramdisk, bdm); } finally { method.releaseConnection(); } } /** * Returns true if the productCode is associated with the instance. * * @param instanceId An instance's id ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. 
	 * @param productCode the code for the project you registered with AWS
	 * @return null if no relationship exists, otherwise information about the owner
	 * @throws EC2Exception wraps checked exceptions
	 */
	public ProductInstanceInfo confirmProductInstance(String instanceId, String productCode) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("InstanceId", instanceId);
		params.put("ProductCode", productCode);
		GetMethod method = new GetMethod();
		try {
			ConfirmProductInstanceResponse response =
					makeRequestInt(method, "ConfirmProductInstance", params, ConfirmProductInstanceResponse.class);
			if (response.isReturn()) {
				return new ProductInstanceInfo(instanceId, productCode, response.getOwnerId());
			}
			else return null;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Returns a list of availability zones and their status.
	 *
	 * @param zones a list of zones to limit the results, or null
	 * @return a list of zones and their availability
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<AvailabilityZone> describeAvailabilityZones(List<String> zones) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		if (zones != null && zones.size() > 0) {
			for (int i=0 ; i<zones.size(); i++) {
				params.put("ZoneName."+(i+1), zones.get(i));
			}
		}
		GetMethod method = new GetMethod();
		try {
			DescribeAvailabilityZonesResponse response =
					makeRequestInt(method, "DescribeAvailabilityZones", params, DescribeAvailabilityZonesResponse.class);
			List<AvailabilityZone> ret = new ArrayList<AvailabilityZone>();
			AvailabilityZoneSetType set = response.getAvailabilityZoneInfo();
			Iterator set_iter = set.getItems().iterator();
			while (set_iter.hasNext()) {
				AvailabilityZoneItemType item = (AvailabilityZoneItemType) set_iter.next();
				List<String> messages = new ArrayList<String>();
				for (AvailabilityZoneMessageType msg : item.getMessageSet().getItems()) {
					messages.add(msg.getMessage());
				}
				ret.add(new AvailabilityZone(item.getZoneName(), item.getZoneState(),
										item.getRegionName(), messages));
			}
			return ret;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Returns a list of addresses associated with this account.
	 *
	 * @param addresses a list of addresses to limit the results, or null
	 * @return a list of addresses and their associated instance
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<AddressInfo> describeAddresses(List<String> addresses) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		if (addresses != null && addresses.size() > 0) {
			for (int i=0 ; i<addresses.size(); i++) {
				params.put("PublicIp."+(i+1), addresses.get(i));
			}
		}
		GetMethod method = new GetMethod();
		try {
			DescribeAddressesResponse response =
					makeRequestInt(method, "DescribeAddresses", params, DescribeAddressesResponse.class);
			List<AddressInfo> ret = new ArrayList<AddressInfo>();
			DescribeAddressesResponseInfoType set = response.getAddressesSet();
			Iterator set_iter = set.getItems().iterator();
			while (set_iter.hasNext()) {
				DescribeAddressesResponseItemType item = (DescribeAddressesResponseItemType) set_iter.next();
				ret.add(new AddressInfo(item.getPublicIp(), item.getInstanceId()));
			}
			return ret;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Allocates an address for this account.
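	 * <p>
	 * A typical elastic IP flow pairs this with {@link #associateAddress} (the
	 * instance id below is a placeholder):
	 * <pre>
	 * String ip = ec2.allocateAddress();
	 * ec2.associateAddress("i-12345678", ip);   // placeholder instance id
	 * </pre>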
	 *
	 * @return the new address allocated
	 * @throws EC2Exception wraps checked exceptions
	 */
	public String allocateAddress() throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		GetMethod method = new GetMethod();
		try {
			AllocateAddressResponse response =
					makeRequestInt(method, "AllocateAddress", params, AllocateAddressResponse.class);
			return response.getPublicIp();
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Associates an address with an instance.
	 *
	 * @param instanceId the instance
	 * @param publicIp the ip address to associate
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void associateAddress(String instanceId, String publicIp) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("InstanceId", instanceId);
		params.put("PublicIp", publicIp);
		GetMethod method = new GetMethod();
		try {
			AssociateAddressResponse response =
					makeRequestInt(method, "AssociateAddress", params, AssociateAddressResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not associate address with instance (no reason given).");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Disassociates an address from an instance.
	 *
	 * @param publicIp the ip address to disassociate
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void disassociateAddress(String publicIp) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("PublicIp", publicIp);
		GetMethod method = new GetMethod();
		try {
			DisassociateAddressResponse response =
					makeRequestInt(method, "DisassociateAddress", params, DisassociateAddressResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not disassociate address from instance (no reason given).");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Releases an address.
	 *
	 * @param publicIp the ip address to release
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void releaseAddress(String publicIp) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("PublicIp", publicIp);
		GetMethod method = new GetMethod();
		try {
			ReleaseAddressResponse response =
					makeRequestInt(method, "ReleaseAddress", params, ReleaseAddressResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not release address (no reason given).");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Creates an EBS volume either by size, or from a snapshot. The zone must be the same as
	 * that of the instance you wish to attach it to.
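	 * <p>
	 * For example, a new 10 GiB volume (the zone name is illustrative):
	 * <pre>
	 * VolumeInfo vol = ec2.createVolume("10", null, "us-east-1a");
	 * </pre>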
	 *
	 * @param size the size of the volume in gigabytes
	 * @param snapshotId the snapshot from which to create the new volume
	 * @param zoneName the availability zone for the new volume
	 * @return information about the volume
	 * @throws EC2Exception wraps checked exceptions
	 */
	public VolumeInfo createVolume(String size, String snapshotId, String zoneName) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		if (size != null && !size.equals("")) {
			params.put("Size", size);
		}
		params.put("SnapshotId", (snapshotId==null)?"":snapshotId);
		params.put("AvailabilityZone", zoneName);
		GetMethod method = new GetMethod();
		try {
			CreateVolumeResponse response =
					makeRequestInt(method, "CreateVolume", params, CreateVolumeResponse.class);
			return new VolumeInfo(response.getVolumeId(), response.getSize(),
					response.getSnapshotId(), response.getAvailabilityZone(),
					response.getStatus(), response.getCreateTime().toGregorianCalendar());
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Deletes the EBS volume.
	 *
	 * @param volumeId the id of the volume to be deleted
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void deleteVolume(String volumeId) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("VolumeId", volumeId);
		GetMethod method = new GetMethod();
		try {
			DeleteVolumeResponse response =
					makeRequestInt(method, "DeleteVolume", params, DeleteVolumeResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not delete volume (no reason given).");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Gets a list of EBS volumes for this account.
	 * <p>
	 * If the array of volume IDs is empty then a list of all volumes owned
	 * by the caller will be returned. Otherwise the list will contain
	 * information for the requested volumes only.
	 *
	 * @param volumeIds An array of volumes ({@link com.xerox.amazonws.ec2.VolumeInfo}).
	 * @return A list of {@link com.xerox.amazonws.ec2.VolumeInfo} volumes.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<VolumeInfo> describeVolumes(String[] volumeIds) throws EC2Exception {
		return this.describeVolumes(Arrays.asList(volumeIds));
	}

	/**
	 * Gets a list of EBS volumes for this account.
	 * <p>
	 * If the list of volume IDs is empty then a list of all volumes owned
	 * by the caller will be returned. Otherwise the list will contain
	 * information for the requested volumes only.
	 *
	 * @param volumeIds A list of volumes ({@link com.xerox.amazonws.ec2.VolumeInfo}).
	 * @return A list of {@link com.xerox.amazonws.ec2.VolumeInfo} volumes.
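	 * <p>
	 * For example, listing every volume in the account:
	 * <pre>
	 * List&lt;VolumeInfo&gt; vols = ec2.describeVolumes(new ArrayList&lt;String&gt;());
	 * </pre>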
* @throws EC2Exception wraps checked exceptions */ public List<VolumeInfo> describeVolumes(List<String> volumeIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<volumeIds.size(); i++) { params.put("VolumeId."+(i+1), volumeIds.get(i)); } GetMethod method = new GetMethod(); try { DescribeVolumesResponse response = makeRequestInt(method, "DescribeVolumes", params, DescribeVolumesResponse.class); List<VolumeInfo> result = new ArrayList<VolumeInfo>(); DescribeVolumesSetResponseType res_set = response.getVolumeSet(); Iterator reservations_iter = res_set.getItems().iterator(); while (reservations_iter.hasNext()) { DescribeVolumesSetItemResponseType item = (DescribeVolumesSetItemResponseType) reservations_iter.next(); VolumeInfo vol = new VolumeInfo(item.getVolumeId(), item.getSize(), item.getSnapshotId(), item.getAvailabilityZone(), item.getStatus(), item.getCreateTime().toGregorianCalendar()); AttachmentSetResponseType set = item.getAttachmentSet(); Iterator attachments_iter = set.getItems().iterator(); while (attachments_iter.hasNext()) { AttachmentSetItemResponseType as_item = (AttachmentSetItemResponseType) attachments_iter .next(); vol.addAttachmentInfo(as_item.getVolumeId(), as_item.getInstanceId(), as_item.getDevice(), as_item.getStatus(), as_item.getAttachTime().toGregorianCalendar()); } result.add(vol); } return result; } finally { method.releaseConnection(); } } /** * Attaches an EBS volume to an instance. * * @param volumeId the id of the volume * @param instanceId the id of the instance * @param device the device name for the attached volume * @return the information about this attachment * @throws EC2Exception wraps checked exceptions */ public AttachmentInfo attachVolume(String volumeId, String instanceId, String device) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("VolumeId", volumeId); params.put("InstanceId", instanceId); params.put("Device", device); GetMethod method = new GetMethod(); try { AttachVolumeResponse response = makeRequestInt(method, "AttachVolume", params, AttachVolumeResponse.class); return new AttachmentInfo(response.getVolumeId(), response.getInstanceId(), response.getDevice(), response.getStatus(), response.getAttachTime().toGregorianCalendar()); } finally { method.releaseConnection(); } } /** * Detaches an EBS volume from an instance. * * @param volumeId the id of the volume * @param instanceId the id of the instance * @param device the device name for the attached volume * @param force if true, forces the detachment, only use if normal detachment fails * @return the information about this attachment * @throws EC2Exception wraps checked exceptions */ public AttachmentInfo detachVolume(String volumeId, String instanceId, String device, boolean force) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("VolumeId", volumeId); params.put("InstanceId", (instanceId==null)?"":instanceId); params.put("Device", (device==null)?"":device); params.put("Force", force?"true":"false"); GetMethod method = new GetMethod(); try { DetachVolumeResponse response = makeRequestInt(method, "DetachVolume", params, DetachVolumeResponse.class); return new AttachmentInfo(response.getVolumeId(), response.getInstanceId(), response.getDevice(), response.getStatus(), response.getAttachTime().toGregorianCalendar()); } finally { method.releaseConnection(); } } /** * Creates a snapshot of the EBS Volume. 
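	 * <p>
	 * Sketch (the volume id is a placeholder):
	 * <pre>
	 * SnapshotInfo snap = ec2.createSnapshot("vol-12345678", "nightly backup");
	 * </pre>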
	 *
	 * @param volumeId the id of the volume
	 * @param description an optional descriptive string (256 chars max)
	 * @return information about the snapshot
	 * @throws EC2Exception wraps checked exceptions
	 */
	public SnapshotInfo createSnapshot(String volumeId, String description) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("VolumeId", volumeId);
		params.put("Description", description);
		GetMethod method = new GetMethod();
		try {
			CreateSnapshotResponse response =
					makeRequestInt(method, "CreateSnapshot", params, CreateSnapshotResponse.class);
			return new SnapshotInfo(response.getSnapshotId(), response.getVolumeId(),
									response.getStatus(),
									response.getStartTime().toGregorianCalendar(),
									response.getProgress(), response.getOwnerId(),
									response.getVolumeSize(), response.getDescription());
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Deletes the snapshot.
	 *
	 * @param snapshotId the id of the snapshot
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void deleteSnapshot(String snapshotId) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("SnapshotId", snapshotId);
		GetMethod method = new GetMethod();
		try {
			DeleteSnapshotResponse response =
					makeRequestInt(method, "DeleteSnapshot", params, DeleteSnapshotResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not delete snapshot (no reason given).");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Gets a list of EBS snapshots for this account.
	 * <p>
	 * If the array of snapshot IDs is empty then a list of all snapshots owned
	 * by the caller will be returned. Otherwise the list will contain
	 * information for the requested snapshots only.
	 *
	 * @param snapshotIds An array of snapshots ({@link com.xerox.amazonws.ec2.SnapshotInfo}).
	 * @return A list of {@link com.xerox.amazonws.ec2.SnapshotInfo} snapshots.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<SnapshotInfo> describeSnapshots(String[] snapshotIds) throws EC2Exception {
		return this.describeSnapshots(Arrays.asList(snapshotIds));
	}

	/**
	 * Gets a list of EBS snapshots for this account.
	 * <p>
	 * If the list of snapshot IDs is empty then a list of all snapshots owned
	 * by the caller will be returned. Otherwise the list will contain
	 * information for the requested snapshots only.
	 *
	 * @param snapshotIds A list of snapshots ({@link com.xerox.amazonws.ec2.SnapshotInfo}).
	 * @return A list of {@link com.xerox.amazonws.ec2.SnapshotInfo} snapshots.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<SnapshotInfo> describeSnapshots(List<String> snapshotIds) throws EC2Exception {
		return describeSnapshots(snapshotIds, null, null);
	}

	/**
	 * Gets a list of EBS snapshots for this account.
	 * <p>
	 * If the list of snapshot IDs is empty then a list of all snapshots owned
	 * by the caller will be returned. Otherwise the list will contain
	 * information for the requested snapshots only.
	 *
	 * @param snapshotIds A list of snapshots ({@link com.xerox.amazonws.ec2.SnapshotInfo}).
	 * @param owner limits results to snapshots owned by this user
	 * @param restorableBy limits results to accounts that can create volumes from this snapshot
	 * @return A list of {@link com.xerox.amazonws.ec2.SnapshotInfo} snapshots.
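	 * <p>
	 * For example, every snapshot this account owns ("self" is the API's
	 * shorthand for the calling account):
	 * <pre>
	 * List&lt;SnapshotInfo&gt; snaps =
	 *         ec2.describeSnapshots(new ArrayList&lt;String&gt;(), "self", null);
	 * </pre>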
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<SnapshotInfo> describeSnapshots(List<String> snapshotIds, String owner, String restorableBy) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		for (int i=0 ; i<snapshotIds.size(); i++) {
			params.put("SnapshotId."+(i+1), snapshotIds.get(i));
		}
		if (owner != null) {
			params.put("Owner", owner);
		}
		if (restorableBy != null) {
			// previously this passed the owner value, which silently broke the filter
			params.put("RestorableBy", restorableBy);
		}
		GetMethod method = new GetMethod();
		try {
			DescribeSnapshotsResponse response =
					makeRequestInt(method, "DescribeSnapshots", params, DescribeSnapshotsResponse.class);
			List<SnapshotInfo> result = new ArrayList<SnapshotInfo>();
			DescribeSnapshotsSetResponseType res_set = response.getSnapshotSet();
			Iterator reservations_iter = res_set.getItems().iterator();
			while (reservations_iter.hasNext()) {
				DescribeSnapshotsSetItemResponseType item =
						(DescribeSnapshotsSetItemResponseType) reservations_iter.next();
				SnapshotInfo vol = new SnapshotInfo(item.getSnapshotId(), item.getVolumeId(),
									item.getStatus(),
									item.getStartTime().toGregorianCalendar(),
									item.getProgress(), item.getOwnerId(),
									item.getVolumeSize(), item.getDescription());
				result.add(vol);
			}
			return result;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Changes permission settings of a snapshot.
	 *
	 * @param snapshotId the snapshot you are addressing
	 * @param attribute for now, should be "createVolumePermission"
	 * @param opType either add or remove
	 * @param userId optional userId (this or userGroup)
	 * @param userGroup optional userGroup (this or userId)
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void modifySnapshotAttribute(String snapshotId, String attribute, OperationType opType, String userId, String userGroup) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("SnapshotId", snapshotId);
		if (userId != null) {
			params.put("UserId", userId);
		}
		if (userGroup != null) {
			params.put("UserGroup", userGroup);
		}
		params.put("Attribute", attribute);
		params.put("OperationType", opType.getTypeId());
		GetMethod method = new GetMethod();
		try {
			ModifySnapshotAttributeResponse response =
					makeRequestInt(method, "ModifySnapshotAttribute", params, ModifySnapshotAttributeResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not modify snapshot attribute : "+attribute+". No reason given.");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Returns a list of regions.
	 *
	 * @param regions a list of regions to limit the results, or null
	 * @return a list of regions and endpoints
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<RegionInfo> describeRegions(List<String> regions) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		if (regions != null && regions.size() > 0) {
			for (int i=0 ; i<regions.size(); i++) {
				params.put("Region."+(i+1), regions.get(i));
			}
		}
		GetMethod method = new GetMethod();
		try {
			DescribeRegionsResponse response =
					makeRequestInt(method, "DescribeRegions", params, DescribeRegionsResponse.class);
			List<RegionInfo> ret = new ArrayList<RegionInfo>();
			RegionSetType set = response.getRegionInfo();
			Iterator set_iter = set.getItems().iterator();
			while (set_iter.hasNext()) {
				RegionItemType item = (RegionItemType) set_iter.next();
				ret.add(new RegionInfo(item.getRegionName(), item.getRegionEndpoint()));
			}
			return ret;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Sets the region to use.
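	 * <p>
	 * For example, pointing the client at another region (the region name is
	 * illustrative):
	 * <pre>
	 * List&lt;RegionInfo&gt; regions = ec2.describeRegions(Arrays.asList("eu-west-1"));
	 * if (!regions.isEmpty()) {
	 *     ec2.setRegion(regions.get(0));
	 * }
	 * </pre>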
	 *
	 * @param region the region to use, from describeRegions()
	 */
	public void setRegion(RegionInfo region) {
		setServer(region.getUrl());
	}

	/**
	 * Sets the region URL to use.
	 *
	 * @param regionUrl the region URL to use, from RegionInfo.getUrl()
	 */
	public void setRegionUrl(String regionUrl) {
		setServer(regionUrl);
	}

	/**
	 * Initiates bundling of an instance running Windows.
	 *
	 * @param instanceId the Id of the instance to bundle
	 * @param accessId the accessId of the owner of the S3 bucket
	 * @param bucketName the name of the S3 bucket in which the AMI will be stored
	 * @param prefix the prefix to append to the AMI
	 * @param policy an UploadPolicy object containing policy parameters
	 * @return information about the bundle task
	 * @throws EC2Exception wraps checked exceptions
	 */
	public BundleInstanceInfo bundleInstance(String instanceId, String accessId, String bucketName, String prefix, UploadPolicy policy) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("InstanceId", instanceId);
		params.put("Storage.S3.AWSAccessKeyId", accessId);
		params.put("Storage.S3.Bucket", bucketName);
		params.put("Storage.S3.Prefix", prefix);
		String jsonPolicy = policy.getPolicyString();
		params.put("Storage.S3.UploadPolicy", jsonPolicy);
		params.put("Storage.S3.UploadPolicySignature",
					encode(getSecretAccessKey(), jsonPolicy, false, "HmacSHA1"));
		GetMethod method = new GetMethod();
		try {
			BundleInstanceResponse response =
					makeRequestInt(method, "BundleInstance", params, BundleInstanceResponse.class);
			BundleInstanceTaskType task = response.getBundleInstanceTask();
			return new BundleInstanceInfo(response.getRequestId(), task.getInstanceId(),
					task.getBundleId(), task.getState(), task.getStartTime().toGregorianCalendar(),
					task.getUpdateTime().toGregorianCalendar(), task.getStorage(),
					task.getProgress(), task.getError());
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Cancel a bundling operation.
	 *
	 * @param bundleId the Id of the bundle task to cancel
	 * @return information about the cancelled task
	 * @throws EC2Exception wraps checked exceptions
	 */
	public BundleInstanceInfo cancelBundleInstance(String bundleId) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("BundleId", bundleId);
		GetMethod method = new GetMethod();
		try {
			CancelBundleTaskResponse response =
					makeRequestInt(method, "CancelBundleTask", params, CancelBundleTaskResponse.class);
			BundleInstanceTaskType task = response.getBundleInstanceTask();
			return new BundleInstanceInfo(response.getRequestId(), task.getInstanceId(),
					task.getBundleId(), task.getState(), task.getStartTime().toGregorianCalendar(),
					task.getUpdateTime().toGregorianCalendar(), task.getStorage(),
					task.getProgress(), task.getError());
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Returns a list of current bundling tasks. An empty array causes all tasks to be returned.
	 *
	 * @param bundleIds the Ids of the bundle tasks to describe
	 * @return information about the bundle tasks
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<BundleInstanceInfo> describeBundleTasks(String [] bundleIds) throws EC2Exception {
		return this.describeBundleTasks(Arrays.asList(bundleIds));
	}

	/**
	 * Returns a list of current bundling tasks. An empty list causes all tasks to be returned.
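	 * <p>
	 * For example, fetching every current task:
	 * <pre>
	 * List&lt;BundleInstanceInfo&gt; tasks =
	 *         ec2.describeBundleTasks(new ArrayList&lt;String&gt;());
	 * </pre>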
	 *
	 * @param bundleIds the Ids of the bundle tasks to describe
	 * @return information about the bundle tasks
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<BundleInstanceInfo> describeBundleTasks(List<String> bundleIds) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		for (int i=0 ; i<bundleIds.size(); i++) {
			params.put("BundleId."+(i+1), bundleIds.get(i));
		}
		GetMethod method = new GetMethod();
		try {
			DescribeBundleTasksResponse response =
					makeRequestInt(method, "DescribeBundleTasks", params, DescribeBundleTasksResponse.class);
			List<BundleInstanceInfo> ret = new ArrayList<BundleInstanceInfo>();
			Iterator task_iter = response.getBundleInstanceTasksSet().getItems().iterator();
			while (task_iter.hasNext()) {
				BundleInstanceTaskType task = (BundleInstanceTaskType) task_iter.next();
				ret.add(new BundleInstanceInfo(response.getRequestId(), task.getInstanceId(),
						task.getBundleId(), task.getState(), task.getStartTime().toGregorianCalendar(),
						task.getUpdateTime().toGregorianCalendar(), task.getStorage(),
						task.getProgress(), task.getError()));
			}
			return ret;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Returns a list of the Reserved Instances that have been purchased with this account.
	 *
	 * @param instanceIds specific reserved instance ids to return, or null for all
	 * @return a list of reserved instance descriptions
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<ReservedInstances> describeReservedInstances(List<String> instanceIds) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		if (instanceIds != null) {
			for (int i=0 ; i<instanceIds.size(); i++) {
				params.put("ReservedInstanceId."+(i+1), instanceIds.get(i));
			}
		}
		GetMethod method = new GetMethod();
		try {
			DescribeReservedInstancesResponse response =
					makeRequestInt(method, "DescribeReservedInstances", params, DescribeReservedInstancesResponse.class);
			List<ReservedInstances> ret = new ArrayList<ReservedInstances>();
			Iterator task_iter = response.getReservedInstancesSet().getItems().iterator();
			while (task_iter.hasNext()) {
				DescribeReservedInstancesResponseSetItemType type =
						(DescribeReservedInstancesResponseSetItemType) task_iter.next();
				ret.add(new ReservedInstances(type.getReservedInstancesId(),
						InstanceType.getTypeFromString(type.getInstanceType()),
						type.getAvailabilityZone(), type.getStart().toGregorianCalendar(),
						type.getDuration(), type.getFixedPrice(), type.getUsagePrice(),
						type.getProductDescription(), type.getInstanceCount().intValue(),
						type.getState()));
			}
			return ret;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Returns a list of Reserved Instance offerings that are available for purchase.
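	 * <p>
	 * For example, offerings for the default (small) instance type in one zone
	 * (the zone name is illustrative):
	 * <pre>
	 * List&lt;ProductDescription&gt; offers = ec2.describeReservedInstancesOfferings(
	 *         null, InstanceType.DEFAULT, "us-east-1a", null);
	 * </pre>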
* * @param offeringIds specific reserved instance offering ids to return * @param instanceType the type of instance offering to be returned * @param availabilityZone the availability zone to get offerings for * @param productDescription limit results to those with a matching product description * @return a list of product descriptions * @throws EC2Exception wraps checked exceptions */ public List<ProductDescription> describeReservedInstancesOfferings(List<String> offeringIds, InstanceType instanceType, String availabilityZone, String productDescription) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); if (offeringIds != null) { for (int i=0 ; i<offeringIds.size(); i++) { params.put("ReservedInstancesOfferingId."+(i+1), offeringIds.get(i)); } } if (instanceType != null) { params.put("InstanceType", instanceType.getTypeId()); } if (availabilityZone != null) { params.put("AvailabilityZone", availabilityZone); } if (productDescription != null) { params.put("ProductDescription", productDescription); } GetMethod method = new GetMethod(); try { DescribeReservedInstancesOfferingsResponse response = makeRequestInt(method, "DescribeReservedInstancesOfferings", params, DescribeReservedInstancesOfferingsResponse.class); List<ProductDescription> ret = new ArrayList<ProductDescription>(); Iterator task_iter = response.getReservedInstancesOfferingsSet().getItems().iterator(); while (task_iter.hasNext()) { DescribeReservedInstancesOfferingsResponseSetItemType type = (DescribeReservedInstancesOfferingsResponseSetItemType) task_iter.next(); ret.add(new ProductDescription(type.getReservedInstancesOfferingId(), InstanceType.getTypeFromString(type.getInstanceType()), type.getAvailabilityZone(), type.getDuration(), type.getFixedPrice(), type.getUsagePrice(), type.getProductDescription())); } return ret; } finally { method.releaseConnection(); } } /** * This method purchases a reserved instance offering. * * NOTE: Use With Caution!!! This can cost a lot of money! 
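	 * <p>
	 * Sketch (the offering id is a placeholder; this call bills your account):
	 * <pre>
	 * String reservationId =
	 *         ec2.purchaseReservedInstancesOffering("offering-id", 1);
	 * </pre>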
* * @param offeringId the id of the offering to purchase * @param instanceCount the number of instances to reserve * @return id of reserved instances * @throws EC2Exception wraps checked exceptions */ public String purchaseReservedInstancesOffering(String offeringId, int instanceCount) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("ReservedInstancesOfferingId", offeringId); params.put("InstanceCount", ""+instanceCount); GetMethod method = new GetMethod(); try { PurchaseReservedInstancesOfferingResponse response = makeRequestInt(method, "PurchaseReservedInstancesOffering", params, PurchaseReservedInstancesOfferingResponse.class); return response.getReservedInstancesId(); } finally { method.releaseConnection(); } } /** * This method enables monitoring for some instances * * @param instanceIds the id of the instances to enable monitoring for * @return information about the monitoring state of those instances * @throws EC2Exception wraps checked exceptions */ public List<MonitoredInstanceInfo> monitorInstances(List<String> instanceIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { MonitorInstancesResponseType response = makeRequestInt(method, "MonitorInstances", params, MonitorInstancesResponseType.class); List<MonitoredInstanceInfo> ret = new ArrayList<MonitoredInstanceInfo>(); for (MonitorInstancesResponseSetItemType item : response.getInstancesSet().getItems()) { ret.add(new MonitoredInstanceInfo(item.getInstanceId(), item.getMonitoring().getState())); } return ret; } finally { method.releaseConnection(); } } /** * This method disables monitoring for some instances * * @param instanceIds the id of the instances to disable monitoring for * @return information about the monitoring state of those instances * @throws EC2Exception wraps checked exceptions */ public List<MonitoredInstanceInfo> unmonitorInstances(List<String> instanceIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { MonitorInstancesResponseType response = makeRequestInt(method, "UnmonitorInstances", params, MonitorInstancesResponseType.class); List<MonitoredInstanceInfo> ret = new ArrayList<MonitoredInstanceInfo>(); for (MonitorInstancesResponseSetItemType item : response.getInstancesSet().getItems()) { ret.add(new MonitoredInstanceInfo(item.getInstanceId(), item.getMonitoring().getState())); } return ret; } finally { method.releaseConnection(); } } public List<SpotPriceHistoryItem> describeSpotPriceHistory(Calendar start, Calendar end, String productDescription, InstanceType... instanceTypes) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); if (start != null) { params.put("StartTime", httpDate(start)); } if (end != null) { params.put("EndTime", httpDate(end)); } if (productDescription != null) { params.put("ProductDescription", productDescription); } for (int i = 0; i < instanceTypes.length; i++) { InstanceType instanceType = instanceTypes[i]; params.put("InstanceType." 
+ (i + 1), instanceType.getTypeId()); } GetMethod method = new GetMethod(); try { List<SpotPriceHistoryItem> ret = new ArrayList<SpotPriceHistoryItem>(); DescribeSpotPriceHistoryResponse response = makeRequestInt(method, "DescribeSpotPriceHistory", params, DescribeSpotPriceHistoryResponse.class); List<SpotPriceHistorySetItemType> items = response.getSpotPriceHistorySet().getItems(); if (items != null) { for (SpotPriceHistorySetItemType item : items) { ret.add(new SpotPriceHistoryItem(item)); } } return ret; } finally { method.releaseConnection(); } } protected <T> T makeRequestInt(HttpMethodBase method, String action, Map<String, String> params, Class<T> respType) throws EC2Exception { try { return makeRequest(method, action, params, respType); } catch (AWSException ex) { throw new EC2Exception(ex); } catch (JAXBException ex) { throw new EC2Exception("Problem parsing returned message.", ex); } catch (MalformedURLException ex) { throw new EC2Exception(ex.getMessage(), ex); } catch (IOException ex) { throw new EC2Exception(ex.getMessage(), ex); } } }
java/com/xerox/amazonws/ec2/Jec2.java
//
// typica - A client library for Amazon Web Services
// Copyright (C) 2007,2008,2009 Xerox Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

package com.xerox.amazonws.ec2;

import java.io.IOException;
import java.net.MalformedURLException;
import java.util.*;

import javax.xml.bind.JAXBException;

import com.xerox.amazonws.typica.jaxb.*;

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.httpclient.HttpMethodBase;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.xerox.amazonws.common.AWSException;
import com.xerox.amazonws.common.AWSQueryConnection;
import com.xerox.amazonws.typica.jaxb.IpPermissionSetType;

/**
 * A Java wrapper for the EC2 web services API.
 */
public class Jec2 extends AWSQueryConnection {

	private static Log logger = LogFactory.getLog(Jec2.class);

	/**
	 * Initializes the ec2 service with your AWS login information.
	 *
	 * @param awsAccessId Your user key into AWS
	 * @param awsSecretKey The secret string used to generate signatures for authentication.
	 */
	public Jec2(String awsAccessId, String awsSecretKey) {
		this(awsAccessId, awsSecretKey, true);
	}

	/**
	 * Initializes the ec2 service with your AWS login information.
	 *
	 * @param awsAccessId Your user key into AWS
	 * @param awsSecretKey The secret string used to generate signatures for authentication.
	 * @param isSecure True if the data should be encrypted on the wire on the way to or from EC2.
	 */
	public Jec2(String awsAccessId, String awsSecretKey, boolean isSecure) {
		this(awsAccessId, awsSecretKey, isSecure, "ec2.amazonaws.com");
	}

	/**
	 * Initializes the ec2 service with your AWS login information.
	 *
	 * @param awsAccessId Your user key into AWS
	 * @param awsSecretKey The secret string used to generate signatures for authentication.
	 * @param isSecure True if the data should be encrypted on the wire on the way to or from EC2.
	 * @param server Which host to connect to. Usually, this will be ec2.amazonaws.com
	 */
	public Jec2(String awsAccessId, String awsSecretKey, boolean isSecure, String server) {
		this(awsAccessId, awsSecretKey, isSecure, server, isSecure ? 443 : 80);
	}

	/**
	 * Initializes the ec2 service with your AWS login information.
	 *
	 * @param awsAccessId Your user key into AWS
	 * @param awsSecretKey The secret string used to generate signatures for authentication.
	 * @param isSecure True if the data should be encrypted on the wire on the way to or from EC2.
	 * @param server Which host to connect to. Usually, this will be ec2.amazonaws.com
	 * @param port Which port to use.
	 */
	public Jec2(String awsAccessId, String awsSecretKey, boolean isSecure, String server, int port) {
		super(awsAccessId, awsSecretKey, isSecure, server, port);
		ArrayList<String> vals = new ArrayList<String>();
		vals.add("2009-11-30");
		super.headers.put("Version", vals);
	}

	/**
	 * Register the given AMI.
	 *
	 * @param imageLocation An AMI path within S3.
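	 * For example (the credentials and manifest path are placeholders):
	 * <pre>
	 * Jec2 ec2 = new Jec2("accessId", "secretKey");    // placeholder credentials
	 * String imageId = ec2.registerImage("mybucket/image.manifest.xml");
	 * </pre>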
* @return A unique AMI ID that can be used to create and manage instances of this AMI. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public String registerImage(String imageLocation) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("ImageLocation", imageLocation); GetMethod method = new GetMethod(); try { RegisterImageResponse response = makeRequestInt(method, "RegisterImage", params, RegisterImageResponse.class); return response.getImageId(); } finally { method.releaseConnection(); } } /** * Deregister the given AMI. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public void deregisterImage(String imageId) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("ImageId", imageId); GetMethod method = new GetMethod(); try { DeregisterImageResponse response = makeRequestInt(method, "DeregisterImage", params, DeregisterImageResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not deregister image : "+imageId+". No reason given."); } } finally { method.releaseConnection(); } } /** * Describe the given AMIs. * * @param imageIds An array of AMI IDs as returned by {@link #registerImage(String)}. * @return A list of {@link ImageDescription} instances describing each AMI ID. * @throws EC2Exception wraps checked exceptions */ public List<ImageDescription> describeImages(String[] imageIds) throws EC2Exception { return describeImages(Arrays.asList(imageIds)); } /** * Describe the given AMIs. * * @param imageIds A list of AMI IDs as returned by {@link #registerImage(String)}. * @return A list of {@link ImageDescription} instances describing each AMI ID. * @throws EC2Exception wraps checked exceptions */ public List<ImageDescription> describeImages(List<String> imageIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<imageIds.size(); i++) { params.put("ImageId."+(i+1), imageIds.get(i)); } return describeImages(params); } /** * Describe the AMIs belonging to the supplied owners. * * @param owners A list of owners. * @return A list of {@link ImageDescription} instances describing each AMI ID. * @throws EC2Exception wraps checked exceptions */ public List<ImageDescription> describeImagesByOwner(List<String> owners) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<owners.size(); i++) { params.put("Owner."+(i+1), owners.get(i)); } return describeImages(params); } /** * Describe the AMIs executable by supplied users. * * @param users A list of users. * @return A list of {@link ImageDescription} instances describing each AMI ID. * @throws EC2Exception wraps checked exceptions */ public List<ImageDescription> describeImagesByExecutability(List<String> users) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<users.size(); i++) { params.put("ExecutableBy."+(i+1), users.get(i)); } return describeImages(params); } /** * Describe the AMIs that match the intersection of the criteria supplied * * @param imageIds A list of AMI IDs as returned by {@link #registerImage(String)}. * @param owners A list of owners. * @param users A list of users. * @return A list of {@link ImageDescription} instances describing each AMI ID. 
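	 * <p>
	 * For example, narrowing to one AMI owned by the caller ("self" is the
	 * API's shorthand for the calling account; the AMI id is a placeholder):
	 * <pre>
	 * List&lt;String&gt; none = new ArrayList&lt;String&gt;();
	 * List&lt;ImageDescription&gt; images = ec2.describeImages(
	 *         Arrays.asList("ami-12345678"), Arrays.asList("self"), none);
	 * </pre>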
* @throws EC2Exception wraps checked exceptions */ public List<ImageDescription> describeImages(List<String> imageIds, List<String> owners, List<String> users) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<imageIds.size(); i++) { params.put("ImageId."+(i+1), imageIds.get(i)); } for (int i=0 ; i<owners.size(); i++) { params.put("Owner."+(i+1), owners.get(i)); } for (int i=0 ; i<users.size(); i++) { params.put("ExecutableBy."+(i+1), users.get(i)); } return describeImages(params); } protected List<ImageDescription> describeImages(Map<String, String> params) throws EC2Exception { GetMethod method = new GetMethod(); try { DescribeImagesResponse response = makeRequestInt(method, "DescribeImages", params, DescribeImagesResponse.class); List<ImageDescription> result = new ArrayList<ImageDescription>(); DescribeImagesResponseInfoType set = response.getImagesSet(); Iterator set_iter = set.getItems().iterator(); while (set_iter.hasNext()) { DescribeImagesResponseItemType item = (DescribeImagesResponseItemType) set_iter .next(); ArrayList<String> codes = new ArrayList<String>(); ProductCodesSetType code_set = item.getProductCodes(); if (code_set != null) { for (ProductCodesSetItemType code : code_set.getItems()) { codes.add(code.getProductCode()); } } result.add(new ImageDescription(item.getImageId(), item.getImageLocation(), item.getImageOwnerId(), item.getImageState(), item.isIsPublic(), codes, item.getArchitecture(), item.getImageType(), item.getKernelId(), item.getRamdiskId(), item.getPlatform())); } return result; } finally { method.releaseConnection(); } } /** * Requests reservation of a number of instances. * <p> * This will begin launching those instances for which a reservation was * successfully obtained. * <p> * If less than <code>minCount</code> instances are available no instances * will be reserved. * <p> * NOTE: this method defaults to the AWS desired "public" addressing type. * NOTE: this method defaults to the small(traditional) instance type. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @param minCount The minimum number of instances to attempt to reserve. * @param maxCount The maximum number of instances to attempt to reserve. * @param groupSet A (possibly empty) set of security group definitions. * @param userData User supplied data that will be made available to the instance(s) * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that * have been reserved. * @throws EC2Exception wraps checked exceptions */ public ReservationDescription runInstances(String imageId, int minCount, int maxCount, List<String> groupSet, String userData, String keyName) throws EC2Exception { return runInstances(imageId, minCount, maxCount, groupSet, userData, keyName, true, InstanceType.DEFAULT); } /** * Requests reservation of a number of instances. * <p> * This will begin launching those instances for which a reservation was * successfully obtained. * <p> * If less than <code>minCount</code> instances are available no instances * will be reserved. * NOTE: this method defaults to the small(traditional) instance type. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @param minCount The minimum number of instances to attempt to reserve. * @param maxCount The maximum number of instances to attempt to reserve. * @param groupSet A (possibly empty) set of security group definitions. 
* @param userData User supplied data that will be made available to the instance(s) * @param publicAddr sets addressing mode to public * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that * have been reserved. * @throws EC2Exception wraps checked exceptions */ public ReservationDescription runInstances(String imageId, int minCount, int maxCount, List<String> groupSet, String userData, String keyName, boolean publicAddr) throws EC2Exception { return runInstances(imageId, minCount, maxCount, groupSet, userData, keyName, publicAddr, InstanceType.DEFAULT); } /** * Requests reservation of a number of instances. * <p> * This will begin launching those instances for which a reservation was * successfully obtained. * <p> * If less than <code>minCount</code> instances are available no instances * will be reserved. * NOTE: this method defaults to the AWS desired "public" addressing type. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @param minCount The minimum number of instances to attempt to reserve. * @param maxCount The maximum number of instances to attempt to reserve. * @param groupSet A (possibly empty) set of security group definitions. * @param userData User supplied data that will be made available to the instance(s) * @param type instance type * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that * have been reserved. * @throws EC2Exception wraps checked exceptions */ public ReservationDescription runInstances(String imageId, int minCount, int maxCount, List<String> groupSet, String userData, String keyName, InstanceType type) throws EC2Exception { return runInstances(imageId, minCount, maxCount, groupSet, userData, keyName, true, type); } /** * Requests reservation of a number of instances. * <p> * This will begin launching those instances for which a reservation was * successfully obtained. * <p> * If less than <code>minCount</code> instances are available no instances * will be reserved. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @param minCount The minimum number of instances to attempt to reserve. * @param maxCount The maximum number of instances to attempt to reserve. * @param groupSet A (possibly empty) set of security group definitions. * @param userData User supplied data that will be made available to the instance(s) * @param publicAddr sets addressing mode to public * @param type instance type * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that * have been reserved. * @throws EC2Exception wraps checked exceptions */ public ReservationDescription runInstances(String imageId, int minCount, int maxCount, List<String> groupSet, String userData, String keyName, boolean publicAddr, InstanceType type) throws EC2Exception { return runInstances(imageId, minCount, maxCount, groupSet, userData, keyName, publicAddr, type, null, null, null, null); } /** * Requests reservation of a number of instances. * <p> * This will begin launching those instances for which a reservation was * successfully obtained. * <p> * If less than <code>minCount</code> instances are available no instances * will be reserved. * * @param imageId An AMI ID as returned by {@link #registerImage(String)}. * @param minCount The minimum number of instances to attempt to reserve. * @param maxCount The maximum number of instances to attempt to reserve. * @param groupSet A (possibly empty) set of security group definitions. 
	 * @param userData User supplied data that will be made available to the instance(s)
	 * @param publicAddr sets addressing mode to public
	 * @param type instance type
	 * @param availabilityZone the zone in which to launch the instance(s)
	 * @param kernelId id of the kernel with which to launch the instance(s)
	 * @param ramdiskId id of the RAM disk with which to launch the instance(s)
	 * @param blockDeviceMappings mappings of virtual to device names
	 * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that
	 *         have been reserved.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public ReservationDescription runInstances(String imageId, int minCount, int maxCount,
			List<String> groupSet, String userData, String keyName, boolean publicAddr,
			InstanceType type, String availabilityZone, String kernelId, String ramdiskId,
			List<BlockDeviceMapping> blockDeviceMappings) throws EC2Exception {
		LaunchConfiguration lc = new LaunchConfiguration(imageId);
		lc.setMinCount(minCount);
		lc.setMaxCount(maxCount);
		lc.setSecurityGroup(groupSet);
		if (userData != null) {
			lc.setUserData(userData.getBytes());
		}
		lc.setKeyName(keyName);
		lc.setInstanceType(type);
		lc.setAvailabilityZone(availabilityZone);
		lc.setKernelId(kernelId);
		lc.setRamdiskId(ramdiskId);
		lc.setBlockDevicemappings(blockDeviceMappings);
		lc.setPublicAddressing(publicAddr);
		return runInstances(lc);
	}

	/**
	 * Requests reservation of a number of instances.
	 * <p>
	 * This will begin launching those instances for which a reservation was
	 * successfully obtained.
	 * <p>
	 * If less than <code>minCount</code> instances are available no instances
	 * will be reserved.
	 *
	 * @param lc object containing launch configuration
	 * @return A {@link com.xerox.amazonws.ec2.ReservationDescription} describing the instances that
	 *         have been reserved.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public ReservationDescription runInstances(LaunchConfiguration lc) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("ImageId", lc.getImageId());
		params.put("MinCount", "" + lc.getMinCount());
		params.put("MaxCount", "" + lc.getMaxCount());

		byte[] userData = lc.getUserData();
		if (userData != null && userData.length > 0) {
			params.put("UserData", new String(Base64.encodeBase64(userData)));
		}
		params.put("AddressingType", lc.isPublicAddressing()?"public":"private");
		String keyName = lc.getKeyName();
		if (keyName != null && !keyName.trim().equals("")) {
			params.put("KeyName", keyName);
		}

		if (lc.getSecurityGroup() != null) {
			for(int i = 0; i < lc.getSecurityGroup().size(); i++) {
				params.put("SecurityGroup." + (i + 1), lc.getSecurityGroup().get(i));
			}
		}
		params.put("InstanceType", lc.getInstanceType().getTypeId());
		if (lc.getAvailabilityZone() != null && !lc.getAvailabilityZone().trim().equals("")) {
			params.put("Placement.AvailabilityZone", lc.getAvailabilityZone());
		}
		if (lc.getKernelId() != null && !lc.getKernelId().trim().equals("")) {
			params.put("KernelId", lc.getKernelId());
		}
		if (lc.getRamdiskId() != null && !lc.getRamdiskId().trim().equals("")) {
			params.put("RamdiskId", lc.getRamdiskId());
		}
		if (lc.getBlockDevicemappings() != null) {
			for(int i = 0; i < lc.getBlockDevicemappings().size(); i++) {
				BlockDeviceMapping bdm = lc.getBlockDevicemappings().get(i);
				params.put("BlockDeviceMapping." + (i + 1) + ".VirtualName",
							bdm.getVirtualName());
				params.put("BlockDeviceMapping."
+ (i + 1) + ".DeviceName", bdm.getDeviceName()); } } if (lc.isMonitoring()) { params.put("Monitoring.Enabled", "true"); } GetMethod method = new GetMethod(); try { RunInstancesResponse response = makeRequestInt(method, "RunInstances", params, RunInstancesResponse.class); ReservationDescription res = new ReservationDescription(response.getRequestId(), response.getOwnerId(), response.getReservationId()); GroupSetType grp_set = response.getGroupSet(); Iterator groups_iter = grp_set.getItems().iterator(); while (groups_iter.hasNext()) { GroupItemType rsp_item = (GroupItemType) groups_iter.next(); res.addGroup(rsp_item.getGroupId()); } RunningInstancesSetType set = response.getInstancesSet(); Iterator instances_iter = set.getItems().iterator(); while (instances_iter.hasNext()) { RunningInstancesItemType rsp_item = (RunningInstancesItemType) instances_iter .next(); res.addInstance(rsp_item.getImageId(), rsp_item.getInstanceId(), rsp_item.getPrivateDnsName(), rsp_item.getDnsName(), rsp_item.getInstanceState(), rsp_item.getReason(), rsp_item.getKeyName(), rsp_item.getLaunchTime().toGregorianCalendar(), InstanceType.getTypeFromString(rsp_item.getInstanceType()), rsp_item.getPlacement().getAvailabilityZone(), rsp_item.getKernelId(), rsp_item.getRamdiskId(), rsp_item.getPlatform(), rsp_item.getMonitoring().getState().equals("true"), rsp_item.getSubnetId(), rsp_item.getPrivateIpAddress(), rsp_item.getIpAddress()); } return res; } finally { method.releaseConnection(); } } /** * Starts a selection of stopped instances. * * @param instanceIds An array of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link InstanceStateChangeDescription} instances. * @throws EC2Exception wraps checked exceptions */ public List<InstanceStateChangeDescription> startInstances(String[] instanceIds) throws EC2Exception { return this.startInstances(Arrays.asList(instanceIds)); } /** * Starts a selection of stopped instances. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link InstanceStateChangeDescription} instances. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public List<InstanceStateChangeDescription> startInstances(List<String> instanceIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { StartInstancesResponse response = makeRequestInt(method, "StartInstances", params, StartInstancesResponse.class); List<InstanceStateChangeDescription> res = new ArrayList<InstanceStateChangeDescription>(); InstanceStateChangeSetType set = response.getInstancesSet(); Iterator instances_iter = set.getItems().iterator(); while (instances_iter.hasNext()) { InstanceStateChangeType rsp_item = (InstanceStateChangeType)instances_iter.next(); res.add(new InstanceStateChangeDescription( rsp_item.getInstanceId(), rsp_item.getPreviousState().getName(), rsp_item.getPreviousState().getCode(), rsp_item.getCurrentState().getName(), rsp_item.getCurrentState().getCode())); } return res; } finally { method.releaseConnection(); } } /** * Stops a selection of running instances. * * @param instanceIds An array of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @param force forces the instance to stop. bypasses filesystem flush. Use with caution! 
* @return A list of {@link InstanceStateChangeDescription} instances. * @throws EC2Exception wraps checked exceptions */ public List<InstanceStateChangeDescription> stopInstances(String[] instanceIds, boolean force) throws EC2Exception { return this.stopInstances(Arrays.asList(instanceIds), force); } /** * Stops a selection of running instances. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @param force forces the instance to stop. bypasses filesystem flush. Use with caution! * @return A list of {@link InstanceStateChangeDescription} instances. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public List<InstanceStateChangeDescription> stopInstances(List<String> instanceIds, boolean force) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { StopInstancesResponse response = makeRequestInt(method, "StopInstances", params, StopInstancesResponse.class); List<InstanceStateChangeDescription> res = new ArrayList<InstanceStateChangeDescription>(); InstanceStateChangeSetType set = response.getInstancesSet(); Iterator instances_iter = set.getItems().iterator(); while (instances_iter.hasNext()) { InstanceStateChangeType rsp_item = (InstanceStateChangeType)instances_iter.next(); res.add(new InstanceStateChangeDescription( rsp_item.getInstanceId(), rsp_item.getPreviousState().getName(), rsp_item.getPreviousState().getCode(), rsp_item.getCurrentState().getName(), rsp_item.getCurrentState().getCode())); } return res; } finally { method.releaseConnection(); } } /** * Terminates a selection of running instances. * * @param instanceIds An array of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link InstanceStateChangeDescription} instances. * @throws EC2Exception wraps checked exceptions */ public List<InstanceStateChangeDescription> terminateInstances(String[] instanceIds) throws EC2Exception { return this.terminateInstances(Arrays.asList(instanceIds)); } /** * Terminates a selection of running instances. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link InstanceStateChangeDescription} instances. 
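	 * <p>
	 * A minimal usage sketch (hypothetical: {@code ec2} stands for an
	 * initialized instance of this class, and the instance id is a placeholder):
	 * <pre>{@code
	 * List<InstanceStateChangeDescription> changes =
	 *         ec2.terminateInstances(Arrays.asList("i-1a2b3c4d"));
	 * }</pre>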
* @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public List<InstanceStateChangeDescription> terminateInstances(List<String> instanceIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { TerminateInstancesResponse response = makeRequestInt(method, "TerminateInstances", params, TerminateInstancesResponse.class); List<InstanceStateChangeDescription> res = new ArrayList<InstanceStateChangeDescription>(); InstanceStateChangeSetType set = response.getInstancesSet(); Iterator instances_iter = set.getItems().iterator(); while (instances_iter.hasNext()) { InstanceStateChangeType rsp_item = (InstanceStateChangeType)instances_iter.next(); res.add(new InstanceStateChangeDescription( rsp_item.getInstanceId(), rsp_item.getPreviousState().getName(), rsp_item.getPreviousState().getCode(), rsp_item.getCurrentState().getName(), rsp_item.getCurrentState().getCode())); } return res; } finally { method.releaseConnection(); } } /** * Gets a list of running instances. * <p> * If the array of instance IDs is empty then a list of all instances owned * by the caller will be returned. Otherwise the list will contain * information for the requested instances only. * * @param instanceIds An array of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link com.xerox.amazonws.ec2.ReservationDescription} instances. * @throws EC2Exception wraps checked exceptions */ public List<ReservationDescription> describeInstances(String[] instanceIds) throws EC2Exception { return this.describeInstances(Arrays.asList(instanceIds)); } /** * Gets a list of running instances. * <p> * If the list of instance IDs is empty then a list of all instances owned * by the caller will be returned. Otherwise the list will contain * information for the requested instances only. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @return A list of {@link com.xerox.amazonws.ec2.ReservationDescription} instances. 
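	 * <p>
	 * A minimal usage sketch (hypothetical: {@code ec2} stands for an
	 * initialized instance of this class); an empty list requests all
	 * reservations owned by the caller:
	 * <pre>{@code
	 * List<ReservationDescription> reservations =
	 *         ec2.describeInstances(new ArrayList<String>());
	 * }</pre>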
* @throws EC2Exception wraps checked exceptions */ public List<ReservationDescription> describeInstances(List<String> instanceIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { DescribeInstancesResponse response = makeRequestInt(method, "DescribeInstances", params, DescribeInstancesResponse.class); List<ReservationDescription> result = new ArrayList<ReservationDescription>(); ReservationSetType res_set = response.getReservationSet(); for (ReservationInfoType item : res_set.getItems()) { ReservationDescription res = new ReservationDescription(response.getRequestId(), item.getOwnerId(), item.getReservationId()); GroupSetType grp_set = item.getGroupSet(); for (GroupItemType rsp_item : grp_set.getItems()) { res.addGroup(rsp_item.getGroupId()); } RunningInstancesSetType set = item.getInstancesSet(); for (RunningInstancesItemType rsp_item : set.getItems()) { res.addInstance(rsp_item.getImageId(), rsp_item.getInstanceId(), rsp_item.getPrivateDnsName(), rsp_item.getDnsName(), rsp_item.getInstanceState(), rsp_item.getReason(), rsp_item.getKeyName(), rsp_item.getLaunchTime().toGregorianCalendar(), InstanceType.getTypeFromString(rsp_item.getInstanceType()), rsp_item.getPlacement().getAvailabilityZone(), rsp_item.getKernelId(), rsp_item.getRamdiskId(), rsp_item.getPlatform(), rsp_item.getMonitoring().getState().equals("true"), rsp_item.getSubnetId(), rsp_item.getPrivateIpAddress(), rsp_item.getIpAddress()); } result.add(res); } return result; } finally { method.releaseConnection(); } } /** * Reboot a selection of running instances. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public void rebootInstances(String [] instanceIds) throws EC2Exception { this.rebootInstances(Arrays.asList(instanceIds)); } /** * Reboot a selection of running instances. * * @param instanceIds A list of instances ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public void rebootInstances(List<String> instanceIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<instanceIds.size(); i++) { params.put("InstanceId."+(i+1), instanceIds.get(i)); } GetMethod method = new GetMethod(); try { RebootInstancesResponse response = makeRequestInt(method, "RebootInstances", params, RebootInstancesResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not reboot instances. No reason given."); } } finally { method.releaseConnection(); } } /** * Get an instance's console output. * * @param instanceId An instance's id ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. 
	 * @return ({@link ConsoleOutput})
	 * @throws EC2Exception wraps checked exceptions
	 */
	public ConsoleOutput getConsoleOutput(String instanceId) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("InstanceId", instanceId);
		GetMethod method = new GetMethod();
		try {
			GetConsoleOutputResponse response =
					makeRequestInt(method, "GetConsoleOutput", params, GetConsoleOutputResponse.class);
			return new ConsoleOutput(response.getRequestId(), response.getInstanceId(),
					response.getTimestamp().toGregorianCalendar(),
					new String(Base64.decodeBase64(response.getOutput().getBytes())));
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Get a Windows instance's admin password.
	 *
	 * @param instanceId An instance's id ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}).
	 * @return password data
	 * @throws EC2Exception wraps checked exceptions
	 */
	public String getPasswordData(String instanceId) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("InstanceId", instanceId);
		GetMethod method = new GetMethod();
		try {
			GetPasswordDataResponse response =
					makeRequestInt(method, "GetPasswordData", params, GetPasswordDataResponse.class);
			return response.getPasswordData();
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Creates a security group.
	 *
	 * @param name The name of the security group.
	 * @param desc The description of the security group.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void createSecurityGroup(String name, String desc) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("GroupName", name);
		params.put("GroupDescription", desc);
		GetMethod method = new GetMethod();
		try {
			CreateSecurityGroupResponse response =
					makeRequestInt(method, "CreateSecurityGroup", params, CreateSecurityGroupResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not create security group : "+name+". No reason given.");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Deletes a security group.
	 *
	 * @param name The name of the security group.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void deleteSecurityGroup(String name) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("GroupName", name);
		GetMethod method = new GetMethod();
		try {
			DeleteSecurityGroupResponse response =
					makeRequestInt(method, "DeleteSecurityGroup", params, DeleteSecurityGroupResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not delete security group : "+name+". No reason given.");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Gets a list of security groups and their associated permissions.
	 *
	 * @param groupNames An array of groups to describe.
	 * @return A list of groups ({@link GroupDescription}).
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<GroupDescription> describeSecurityGroups(String[] groupNames)
			throws EC2Exception {
		return describeSecurityGroups(Arrays.asList(groupNames));
	}

	/**
	 * Gets a list of security groups and their associated permissions.
	 *
	 * @param groupNames A list of groups to describe.
	 * @return A list of groups ({@link GroupDescription}).
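	 * <p>
	 * A minimal usage sketch (hypothetical: {@code ec2} stands for an
	 * initialized instance of this class):
	 * <pre>{@code
	 * List<GroupDescription> groups =
	 *         ec2.describeSecurityGroups(Arrays.asList("default"));
	 * }</pre>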
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<GroupDescription> describeSecurityGroups(List<String> groupNames)
			throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		for (int i=0 ; i<groupNames.size(); i++) {
			params.put("GroupName."+(i+1), groupNames.get(i));
		}
		GetMethod method = new GetMethod();
		try {
			DescribeSecurityGroupsResponse response =
					makeRequestInt(method, "DescribeSecurityGroups", params, DescribeSecurityGroupsResponse.class);
			List<GroupDescription> result = new ArrayList<GroupDescription>();
			SecurityGroupSetType rsp_set = response.getSecurityGroupInfo();
			Iterator set_iter = rsp_set.getItems().iterator();
			while (set_iter.hasNext()) {
				SecurityGroupItemType item = (SecurityGroupItemType) set_iter.next();
				GroupDescription group = new GroupDescription(item.getGroupName(),
						item.getGroupDescription(), item.getOwnerId());
				IpPermissionSetType perms = item.getIpPermissions();
				Iterator perm_iter = perms.getItems().iterator();
				while (perm_iter.hasNext()) {
					IpPermissionType perm = (IpPermissionType) perm_iter.next();
					GroupDescription.IpPermission group_perms = group
							.addPermission(perm.getIpProtocol(), perm.getFromPort(),
									perm.getToPort());
					Iterator group_iter = perm.getGroups().getItems().iterator();
					while (group_iter.hasNext()) {
						UserIdGroupPairType uid_group = (UserIdGroupPairType) group_iter.next();
						group_perms.addUserGroupPair(uid_group.getUserId(),
								uid_group.getGroupName());
					}
					Iterator iprange_iter = perm.getIpRanges().getItems().iterator();
					while (iprange_iter.hasNext()) {
						IpRangeItemType range = (IpRangeItemType) iprange_iter.next();
						group_perms.addIpRange(range.getCidrIp());
					}
				}
				result.add(group);
			}
			return result;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Adds incoming permissions to a security group.
	 *
	 * @param groupName name of group to modify
	 * @param secGroupName name of security group to authorize access to
	 * @param secGroupOwnerId owner of security group to authorize access to
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void authorizeSecurityGroupIngress(String groupName, String secGroupName,
			String secGroupOwnerId) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("GroupName", groupName);
		params.put("SourceSecurityGroupOwnerId", secGroupOwnerId);
		params.put("SourceSecurityGroupName", secGroupName);
		GetMethod method = new GetMethod();
		try {
			AuthorizeSecurityGroupIngressResponse response =
					makeRequestInt(method, "AuthorizeSecurityGroupIngress", params, AuthorizeSecurityGroupIngressResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not authorize security ingress : "+groupName+". No reason given.");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Adds incoming permissions to a security group.
	 *
	 * @param groupName name of group to modify
	 * @param ipProtocol protocol to authorize (tcp, udp, icmp)
	 * @param fromPort bottom of port range to authorize
	 * @param toPort top of port range to authorize
	 * @param cidrIp CIDR IP range to authorize (e.g. 0.0.0.0/0)
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void authorizeSecurityGroupIngress(String groupName, String ipProtocol,
			int fromPort, int toPort, String cidrIp) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("GroupName", groupName);
		params.put("IpProtocol", ipProtocol);
		params.put("FromPort", ""+fromPort);
		params.put("ToPort", ""+toPort);
		params.put("CidrIp", cidrIp);
		GetMethod method = new GetMethod();
		try {
			AuthorizeSecurityGroupIngressResponse response =
					makeRequestInt(method, "AuthorizeSecurityGroupIngress", params, AuthorizeSecurityGroupIngressResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not authorize security ingress : "+groupName+". No reason given.");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Revokes incoming permissions from a security group.
	 *
	 * @param groupName name of group to modify
	 * @param secGroupName name of security group to revoke access from
	 * @param secGroupOwnerId owner of security group to revoke access from
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void revokeSecurityGroupIngress(String groupName, String secGroupName,
			String secGroupOwnerId) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("GroupName", groupName);
		params.put("SourceSecurityGroupOwnerId", secGroupOwnerId);
		params.put("SourceSecurityGroupName", secGroupName);
		GetMethod method = new GetMethod();
		try {
			RevokeSecurityGroupIngressResponse response =
					makeRequestInt(method, "RevokeSecurityGroupIngress", params, RevokeSecurityGroupIngressResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not revoke security ingress : "+groupName+". No reason given.");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Revokes incoming permissions from a security group.
	 *
	 * @param groupName name of group to modify
	 * @param ipProtocol protocol to revoke (tcp, udp, icmp)
	 * @param fromPort bottom of port range to revoke
	 * @param toPort top of port range to revoke
	 * @param cidrIp CIDR IP range to revoke (e.g. 0.0.0.0/0)
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void revokeSecurityGroupIngress(String groupName, String ipProtocol,
			int fromPort, int toPort, String cidrIp) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("GroupName", groupName);
		params.put("IpProtocol", ipProtocol);
		params.put("FromPort", ""+fromPort);
		params.put("ToPort", ""+toPort);
		params.put("CidrIp", cidrIp);
		GetMethod method = new GetMethod();
		try {
			RevokeSecurityGroupIngressResponse response =
					makeRequestInt(method, "RevokeSecurityGroupIngress", params, RevokeSecurityGroupIngressResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not revoke security ingress : "+groupName+". No reason given.");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Creates a public/private keypair.
	 *
	 * @param keyName Name of the keypair.
	 * @return A keypair description ({@link KeyPairInfo}).
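	 * <p>
	 * A minimal usage sketch (hypothetical: {@code ec2} stands for an
	 * initialized instance of this class); the returned material is the
	 * private key and is only available from this call:
	 * <pre>{@code
	 * KeyPairInfo key = ec2.createKeyPair("my-keypair");
	 * System.out.println(key.getMaterial());
	 * }</pre>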
* @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public KeyPairInfo createKeyPair(String keyName) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("KeyName", keyName); GetMethod method = new GetMethod(); try { CreateKeyPairResponse response = makeRequestInt(method, "CreateKeyPair", params, CreateKeyPairResponse.class); return new KeyPairInfo(response.getKeyName(), response.getKeyFingerprint(), response.getKeyMaterial()); } finally { method.releaseConnection(); } } /** * Lists public/private keypairs. * * @param keyIds An array of keypairs. * @return A list of keypair descriptions ({@link KeyPairInfo}). * @throws EC2Exception wraps checked exceptions */ public List<KeyPairInfo> describeKeyPairs(String[] keyIds) throws EC2Exception { return describeKeyPairs(Arrays.asList(keyIds)); } /** * Lists public/private keypairs. NOTE: the KeyPairInfo.getMaterial() method will return null * because this API call doesn't return the keypair material. * * @param keyIds A list of keypairs. * @return A list of keypair descriptions ({@link KeyPairInfo}). * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public List<KeyPairInfo> describeKeyPairs(List<String> keyIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<keyIds.size(); i++) { params.put("KeyName."+(i+1), keyIds.get(i)); } GetMethod method = new GetMethod(); try { DescribeKeyPairsResponse response = makeRequestInt(method, "DescribeKeyPairs", params, DescribeKeyPairsResponse.class); List<KeyPairInfo> result = new ArrayList<KeyPairInfo>(); DescribeKeyPairsResponseInfoType set = response.getKeySet(); Iterator set_iter = set.getItems().iterator(); while (set_iter.hasNext()) { DescribeKeyPairsResponseItemType item = (DescribeKeyPairsResponseItemType) set_iter.next(); result.add(new KeyPairInfo(item.getKeyName(), item.getKeyFingerprint(), null)); } return result; } finally { method.releaseConnection(); } } /** * Deletes a public/private keypair. * * @param keyName Name of the keypair. * @throws EC2Exception wraps checked exceptions * TODO: need to return request id */ public void deleteKeyPair(String keyName) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("KeyName", keyName); GetMethod method = new GetMethod(); try { DeleteKeyPairResponse response = makeRequestInt(method, "DeleteKeyPair", params, DeleteKeyPairResponse.class); if (!response.isReturn()) { throw new EC2Exception("Could not delete keypair : "+keyName+". No reason given."); } } finally { method.releaseConnection(); } } /** * Enumerates image list attribute operation types. */ public enum ImageListAttributeOperationType { add, remove } /** * Modifies an attribute by the given items with the given operation. * * @param imageId The ID of the AMI to modify the attributes for. * @param attribute The name of the attribute to change. * @param operationType The name of the operation to change. May be add or remove. 
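	 * <p>
	 * A minimal usage sketch (hypothetical: {@code ec2} stands for an
	 * initialized instance of this class; the AMI and account ids are
	 * placeholders):
	 * <pre>{@code
	 * LaunchPermissionAttribute attr = new LaunchPermissionAttribute();
	 * attr.addImageListAttributeItem(
	 *         ImageListAttribute.ImageListAttributeItemType.userId, "111122223333");
	 * ec2.modifyImageAttribute("ami-12345678", attr,
	 *         ImageListAttributeOperationType.add);
	 * }</pre>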
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void modifyImageAttribute(String imageId, ImageListAttribute attribute,
			ImageListAttributeOperationType operationType) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("ImageId", imageId);
		if (attribute.getType().equals(ImageAttribute.ImageAttributeType.launchPermission)) {
			params.put("Attribute", "launchPermission");
			switch (operationType) {
				case add: params.put("OperationType", "add"); break;
				case remove: params.put("OperationType", "remove"); break;
				default:
					throw new IllegalArgumentException("Unknown attribute operation.");
			}
		}
		else if (attribute.getType().equals(ImageAttribute.ImageAttributeType.productCodes)) {
			params.put("Attribute", "productCodes");
		}

		int gNum = 1;
		int iNum = 1;
		int pNum = 1;
		for(ImageListAttributeItem item : attribute.getImageListAttributeItems()) {
			switch (item.getType()) {
				case group: params.put("UserGroup."+gNum, item.getValue()); gNum++; break;
				case userId: params.put("UserId."+iNum, item.getValue()); iNum++; break;
				case productCode: params.put("ProductCode."+pNum, item.getValue()); pNum++; break;
				default:
					throw new IllegalArgumentException("Unknown item type.");
			}
		}
		GetMethod method = new GetMethod();
		try {
			ModifyImageAttributeResponse response =
					makeRequestInt(method, "ModifyImageAttribute", params, ModifyImageAttributeResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not modify image attribute. No reason given.");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Resets an attribute on an AMI.
	 *
	 * @param imageId The AMI to reset the attribute on.
	 * @param imageAttribute The attribute type to reset.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void resetImageAttribute(String imageId, ImageAttribute.ImageAttributeType imageAttribute)
			throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("ImageId", imageId);
		if (imageAttribute.equals(ImageAttribute.ImageAttributeType.launchPermission)) {
			params.put("Attribute", "launchPermission");
		}
		else if (imageAttribute.equals(ImageAttribute.ImageAttributeType.productCodes)) {
			throw new IllegalArgumentException("Cannot reset productCodes attribute");
		}
		GetMethod method = new GetMethod();
		try {
			ResetImageAttributeResponse response =
					makeRequestInt(method, "ResetImageAttribute", params, ResetImageAttributeResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not reset image attribute. No reason given.");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Describes an attribute of an AMI.
	 *
	 * @param imageId The AMI for which the attribute is described.
	 * @param imageAttribute The attribute type to describe.
	 * @return An object containing the imageId and a list of list attribute item types and values.
* @throws EC2Exception wraps checked exceptions */ public DescribeImageAttributeResult describeImageAttribute(String imageId, ImageAttribute.ImageAttributeType imageAttribute) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("ImageId", imageId); if (imageAttribute.equals(ImageAttribute.ImageAttributeType.launchPermission)) { params.put("Attribute", "launchPermission"); } else if (imageAttribute.equals(ImageAttribute.ImageAttributeType.productCodes)) { params.put("Attribute", "productCodes"); } GetMethod method = new GetMethod(); try { DescribeImageAttributeResponse response = makeRequestInt(method, "DescribeImageAttribute", params, DescribeImageAttributeResponse.class); ImageListAttribute attribute = null; if (response.getLaunchPermission() != null) { LaunchPermissionListType list = response.getLaunchPermission(); attribute = new LaunchPermissionAttribute(); java.util.ListIterator i = list.getItems().listIterator(); while (i.hasNext()) { LaunchPermissionItemType item = (LaunchPermissionItemType) i.next(); if (item.getGroup() != null) { attribute.addImageListAttributeItem(ImageListAttribute.ImageListAttributeItemType.group, item.getGroup()); } else if (item.getUserId() != null) { attribute.addImageListAttributeItem(ImageListAttribute.ImageListAttributeItemType.userId, item.getUserId()); } } } else if (response.getProductCodes() != null) { ProductCodeListType list = response.getProductCodes(); attribute = new ProductCodesAttribute(); java.util.ListIterator i = list.getItems().listIterator(); while (i.hasNext()) { ProductCodeItemType item = (ProductCodeItemType) i.next(); if (item.getProductCode() != null) { attribute.addImageListAttributeItem(ImageListAttribute.ImageListAttributeItemType.productCode, item.getProductCode()); } } } ArrayList<String> codes = new ArrayList<String>(); ProductCodeListType set = response.getProductCodes(); if (set != null) { for (ProductCodeItemType code : set.getItems()) { codes.add(code.getProductCode()); } } NullableAttributeValueType val = response.getKernel(); String kernel = (val != null)?val.getValue():""; val = response.getRamdisk(); String ramdisk = (val != null)?val.getValue():""; ArrayList<BlockDeviceMapping> bdm = new ArrayList<BlockDeviceMapping>(); BlockDeviceMappingType bdmSet = response.getBlockDeviceMapping(); if (bdmSet != null) { for (BlockDeviceMappingItemType mapping : bdmSet.getItems()) { bdm.add(new BlockDeviceMapping(mapping.getVirtualName(), mapping.getDeviceName())); } } return new DescribeImageAttributeResult(response.getImageId(), attribute, codes, kernel, ramdisk, bdm); } finally { method.releaseConnection(); } } /** * Returns true if the productCode is associated with the instance. * * @param instanceId An instance's id ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. 
	 * @param productCode the code for the project you registered with AWS
	 * @return null if no relationship exists, otherwise information about the owner
	 * @throws EC2Exception wraps checked exceptions
	 */
	public ProductInstanceInfo confirmProductInstance(String instanceId, String productCode)
			throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("InstanceId", instanceId);
		params.put("ProductCode", productCode);
		GetMethod method = new GetMethod();
		try {
			ConfirmProductInstanceResponse response =
					makeRequestInt(method, "ConfirmProductInstance", params, ConfirmProductInstanceResponse.class);
			if (response.isReturn()) {
				return new ProductInstanceInfo(instanceId, productCode, response.getOwnerId());
			}
			else return null;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Returns a list of availability zones and their status.
	 *
	 * @param zones a list of zones to limit the results, or null
	 * @return a list of zones and their availability
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<AvailabilityZone> describeAvailabilityZones(List<String> zones) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		if (zones != null && zones.size() > 0) {
			for (int i=0 ; i<zones.size(); i++) {
				params.put("ZoneName."+(i+1), zones.get(i));
			}
		}
		GetMethod method = new GetMethod();
		try {
			DescribeAvailabilityZonesResponse response =
					makeRequestInt(method, "DescribeAvailabilityZones", params, DescribeAvailabilityZonesResponse.class);
			List<AvailabilityZone> ret = new ArrayList<AvailabilityZone>();
			AvailabilityZoneSetType set = response.getAvailabilityZoneInfo();
			Iterator set_iter = set.getItems().iterator();
			while (set_iter.hasNext()) {
				AvailabilityZoneItemType item = (AvailabilityZoneItemType) set_iter.next();
				List<String> messages = new ArrayList<String>();
				for (AvailabilityZoneMessageType msg : item.getMessageSet().getItems()) {
					messages.add(msg.getMessage());
				}
				ret.add(new AvailabilityZone(item.getZoneName(), item.getZoneState(),
						item.getRegionName(), messages));
			}
			return ret;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Returns a list of addresses associated with this account.
	 *
	 * @param addresses a list of addresses to limit the results, or null
	 * @return a list of addresses and their associated instance
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<AddressInfo> describeAddresses(List<String> addresses) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		if (addresses != null && addresses.size() > 0) {
			for (int i=0 ; i<addresses.size(); i++) {
				params.put("PublicIp."+(i+1), addresses.get(i));
			}
		}
		GetMethod method = new GetMethod();
		try {
			DescribeAddressesResponse response =
					makeRequestInt(method, "DescribeAddresses", params, DescribeAddressesResponse.class);
			List<AddressInfo> ret = new ArrayList<AddressInfo>();
			DescribeAddressesResponseInfoType set = response.getAddressesSet();
			Iterator set_iter = set.getItems().iterator();
			while (set_iter.hasNext()) {
				DescribeAddressesResponseItemType item = (DescribeAddressesResponseItemType) set_iter.next();
				ret.add(new AddressInfo(item.getPublicIp(), item.getInstanceId()));
			}
			return ret;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Allocates an address for this account.
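	 * <p>
	 * A minimal usage sketch (hypothetical: {@code ec2} stands for an
	 * initialized instance of this class, and the instance id is a placeholder):
	 * <pre>{@code
	 * String ip = ec2.allocateAddress();
	 * ec2.associateAddress("i-1a2b3c4d", ip);
	 * }</pre>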
	 *
	 * @return the new address allocated
	 * @throws EC2Exception wraps checked exceptions
	 */
	public String allocateAddress() throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		GetMethod method = new GetMethod();
		try {
			AllocateAddressResponse response =
					makeRequestInt(method, "AllocateAddress", params, AllocateAddressResponse.class);
			return response.getPublicIp();
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Associates an address with an instance.
	 *
	 * @param instanceId the instance
	 * @param publicIp the ip address to associate
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void associateAddress(String instanceId, String publicIp) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("InstanceId", instanceId);
		params.put("PublicIp", publicIp);
		GetMethod method = new GetMethod();
		try {
			AssociateAddressResponse response =
					makeRequestInt(method, "AssociateAddress", params, AssociateAddressResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not associate address with instance (no reason given).");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Disassociates an address from an instance.
	 *
	 * @param publicIp the ip address to disassociate
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void disassociateAddress(String publicIp) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("PublicIp", publicIp);
		GetMethod method = new GetMethod();
		try {
			DisassociateAddressResponse response =
					makeRequestInt(method, "DisassociateAddress", params, DisassociateAddressResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not disassociate address from instance (no reason given).");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Releases an address.
	 *
	 * @param publicIp the ip address to release
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void releaseAddress(String publicIp) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("PublicIp", publicIp);
		GetMethod method = new GetMethod();
		try {
			ReleaseAddressResponse response =
					makeRequestInt(method, "ReleaseAddress", params, ReleaseAddressResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not release address (no reason given).");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Creates an EBS volume either by size, or from a snapshot. The zone must be the same as
	 * that of the instance you wish to attach it to.
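	 * <p>
	 * A minimal usage sketch (hypothetical: {@code ec2} stands for an
	 * initialized instance of this class); this creates an empty 10 GB
	 * volume rather than one based on a snapshot:
	 * <pre>{@code
	 * VolumeInfo vol = ec2.createVolume("10", null, "us-east-1a");
	 * }</pre>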
	 *
	 * @param size the size of the volume in gigabytes
	 * @param snapshotId the snapshot from which to create the new volume
	 * @param zoneName the availability zone for the new volume
	 * @return information about the volume
	 * @throws EC2Exception wraps checked exceptions
	 */
	public VolumeInfo createVolume(String size, String snapshotId, String zoneName) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		if (size != null && !size.equals("")) {
			params.put("Size", size);
		}
		params.put("SnapshotId", (snapshotId==null)?"":snapshotId);
		params.put("AvailabilityZone", zoneName);
		GetMethod method = new GetMethod();
		try {
			CreateVolumeResponse response =
					makeRequestInt(method, "CreateVolume", params, CreateVolumeResponse.class);
			return new VolumeInfo(response.getVolumeId(), response.getSize(),
					response.getSnapshotId(), response.getAvailabilityZone(), response.getStatus(),
					response.getCreateTime().toGregorianCalendar());
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Deletes the EBS volume.
	 *
	 * @param volumeId the id of the volume to be deleted
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void deleteVolume(String volumeId) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("VolumeId", volumeId);
		GetMethod method = new GetMethod();
		try {
			DeleteVolumeResponse response =
					makeRequestInt(method, "DeleteVolume", params, DeleteVolumeResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not delete volume (no reason given).");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Gets a list of EBS volumes for this account.
	 * <p>
	 * If the array of volume IDs is empty then a list of all volumes owned
	 * by the caller will be returned. Otherwise the list will contain
	 * information for the requested volumes only.
	 *
	 * @param volumeIds An array of volumes ({@link com.xerox.amazonws.ec2.VolumeInfo}).
	 * @return A list of {@link com.xerox.amazonws.ec2.VolumeInfo} volumes.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<VolumeInfo> describeVolumes(String[] volumeIds) throws EC2Exception {
		return this.describeVolumes(Arrays.asList(volumeIds));
	}

	/**
	 * Gets a list of EBS volumes for this account.
	 * <p>
	 * If the list of volume IDs is empty then a list of all volumes owned
	 * by the caller will be returned. Otherwise the list will contain
	 * information for the requested volumes only.
	 *
	 * @param volumeIds A list of volumes ({@link com.xerox.amazonws.ec2.VolumeInfo}).
	 * @return A list of {@link com.xerox.amazonws.ec2.VolumeInfo} volumes.
* @throws EC2Exception wraps checked exceptions */ public List<VolumeInfo> describeVolumes(List<String> volumeIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<volumeIds.size(); i++) { params.put("VolumeId."+(i+1), volumeIds.get(i)); } GetMethod method = new GetMethod(); try { DescribeVolumesResponse response = makeRequestInt(method, "DescribeVolumes", params, DescribeVolumesResponse.class); List<VolumeInfo> result = new ArrayList<VolumeInfo>(); DescribeVolumesSetResponseType res_set = response.getVolumeSet(); Iterator reservations_iter = res_set.getItems().iterator(); while (reservations_iter.hasNext()) { DescribeVolumesSetItemResponseType item = (DescribeVolumesSetItemResponseType) reservations_iter.next(); VolumeInfo vol = new VolumeInfo(item.getVolumeId(), item.getSize(), item.getSnapshotId(), item.getAvailabilityZone(), item.getStatus(), item.getCreateTime().toGregorianCalendar()); AttachmentSetResponseType set = item.getAttachmentSet(); Iterator attachments_iter = set.getItems().iterator(); while (attachments_iter.hasNext()) { AttachmentSetItemResponseType as_item = (AttachmentSetItemResponseType) attachments_iter .next(); vol.addAttachmentInfo(as_item.getVolumeId(), as_item.getInstanceId(), as_item.getDevice(), as_item.getStatus(), as_item.getAttachTime().toGregorianCalendar()); } result.add(vol); } return result; } finally { method.releaseConnection(); } } /** * Attaches an EBS volume to an instance. * * @param volumeId the id of the volume * @param instanceId the id of the instance * @param device the device name for the attached volume * @return the information about this attachment * @throws EC2Exception wraps checked exceptions */ public AttachmentInfo attachVolume(String volumeId, String instanceId, String device) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("VolumeId", volumeId); params.put("InstanceId", instanceId); params.put("Device", device); GetMethod method = new GetMethod(); try { AttachVolumeResponse response = makeRequestInt(method, "AttachVolume", params, AttachVolumeResponse.class); return new AttachmentInfo(response.getVolumeId(), response.getInstanceId(), response.getDevice(), response.getStatus(), response.getAttachTime().toGregorianCalendar()); } finally { method.releaseConnection(); } } /** * Detaches an EBS volume from an instance. * * @param volumeId the id of the volume * @param instanceId the id of the instance * @param device the device name for the attached volume * @param force if true, forces the detachment, only use if normal detachment fails * @return the information about this attachment * @throws EC2Exception wraps checked exceptions */ public AttachmentInfo detachVolume(String volumeId, String instanceId, String device, boolean force) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); params.put("VolumeId", volumeId); params.put("InstanceId", (instanceId==null)?"":instanceId); params.put("Device", (device==null)?"":device); params.put("Force", force?"true":"false"); GetMethod method = new GetMethod(); try { DetachVolumeResponse response = makeRequestInt(method, "DetachVolume", params, DetachVolumeResponse.class); return new AttachmentInfo(response.getVolumeId(), response.getInstanceId(), response.getDevice(), response.getStatus(), response.getAttachTime().toGregorianCalendar()); } finally { method.releaseConnection(); } } /** * Creates a snapshot of the EBS Volume. 
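	 * <p>
	 * A minimal usage sketch (hypothetical: {@code ec2} stands for an
	 * initialized instance of this class, and the volume id is a placeholder):
	 * <pre>{@code
	 * SnapshotInfo snap = ec2.createSnapshot("vol-12345678", "nightly backup");
	 * }</pre>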
	 *
	 * @param volumeId the id of the volume
	 * @param description an optional descriptive string (256 chars max)
	 * @return information about the snapshot
	 * @throws EC2Exception wraps checked exceptions
	 */
	public SnapshotInfo createSnapshot(String volumeId, String description) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("VolumeId", volumeId);
		params.put("Description", description);
		GetMethod method = new GetMethod();
		try {
			CreateSnapshotResponse response =
					makeRequestInt(method, "CreateSnapshot", params, CreateSnapshotResponse.class);
			return new SnapshotInfo(response.getSnapshotId(), response.getVolumeId(),
					response.getStatus(),
					response.getStartTime().toGregorianCalendar(),
					response.getProgress(), response.getOwnerId(), response.getVolumeSize(),
					response.getDescription());
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Deletes the snapshot.
	 *
	 * @param snapshotId the id of the snapshot
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void deleteSnapshot(String snapshotId) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("SnapshotId", snapshotId);
		GetMethod method = new GetMethod();
		try {
			DeleteSnapshotResponse response =
					makeRequestInt(method, "DeleteSnapshot", params, DeleteSnapshotResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not delete snapshot (no reason given).");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Gets a list of EBS snapshots for this account.
	 * <p>
	 * If the array of snapshot IDs is empty then a list of all snapshots owned
	 * by the caller will be returned. Otherwise the list will contain
	 * information for the requested snapshots only.
	 *
	 * @param snapshotIds An array of snapshots ({@link com.xerox.amazonws.ec2.SnapshotInfo}).
	 * @return A list of {@link com.xerox.amazonws.ec2.SnapshotInfo} snapshots.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<SnapshotInfo> describeSnapshots(String[] snapshotIds) throws EC2Exception {
		return this.describeSnapshots(Arrays.asList(snapshotIds));
	}

	/**
	 * Gets a list of EBS snapshots for this account.
	 * <p>
	 * If the list of snapshot IDs is empty then a list of all snapshots owned
	 * by the caller will be returned. Otherwise the list will contain
	 * information for the requested snapshots only.
	 *
	 * @param snapshotIds A list of snapshots ({@link com.xerox.amazonws.ec2.SnapshotInfo}).
	 * @return A list of {@link com.xerox.amazonws.ec2.SnapshotInfo} snapshots.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<SnapshotInfo> describeSnapshots(List<String> snapshotIds) throws EC2Exception {
		return describeSnapshots(snapshotIds, null, null);
	}

	/**
	 * Gets a list of EBS snapshots for this account.
	 * <p>
	 * If the list of snapshot IDs is empty then a list of all snapshots owned
	 * by the caller will be returned. Otherwise the list will contain
	 * information for the requested snapshots only.
	 *
	 * @param snapshotIds A list of snapshots ({@link com.xerox.amazonws.ec2.SnapshotInfo}).
	 * @param owner limits results to snapshots owned by this user
	 * @param restorableBy limits results to accounts that can create volumes from this snapshot
	 * @return A list of {@link com.xerox.amazonws.ec2.SnapshotInfo} snapshots.
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<SnapshotInfo> describeSnapshots(List<String> snapshotIds, String owner, String restorableBy)
			throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		for (int i=0 ; i<snapshotIds.size(); i++) {
			params.put("SnapshotId."+(i+1), snapshotIds.get(i));
		}
		if (owner != null) {
			params.put("Owner", owner);
		}
		if (restorableBy != null) {
			params.put("RestorableBy", restorableBy);
		}
		GetMethod method = new GetMethod();
		try {
			DescribeSnapshotsResponse response =
					makeRequestInt(method, "DescribeSnapshots", params, DescribeSnapshotsResponse.class);
			List<SnapshotInfo> result = new ArrayList<SnapshotInfo>();
			DescribeSnapshotsSetResponseType res_set = response.getSnapshotSet();
			Iterator reservations_iter = res_set.getItems().iterator();
			while (reservations_iter.hasNext()) {
				DescribeSnapshotsSetItemResponseType item =
						(DescribeSnapshotsSetItemResponseType) reservations_iter.next();
				SnapshotInfo vol = new SnapshotInfo(item.getSnapshotId(), item.getVolumeId(),
						item.getStatus(),
						item.getStartTime().toGregorianCalendar(),
						item.getProgress(), item.getOwnerId(), item.getVolumeSize(),
						item.getDescription());
				result.add(vol);
			}
			return result;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Changes permissions settings of a snapshot.
	 *
	 * @param snapshotId the snapshot you are addressing
	 * @param attribute for now, should be "createVolumePermission"
	 * @param opType either add or remove
	 * @param userId optional userId (this or userGroup)
	 * @param userGroup optional userGroup (this or userId)
	 * @throws EC2Exception wraps checked exceptions
	 */
	public void modifySnapshotAttribute(String snapshotId, String attribute,
			OperationType opType, String userId, String userGroup) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("SnapshotId", snapshotId);
		if (userId != null) {
			params.put("UserId", userId);
		}
		if (userGroup != null) {
			params.put("UserGroup", userGroup);
		}
		params.put("Attribute", attribute);
		params.put("OperationType", opType.getTypeId());
		GetMethod method = new GetMethod();
		try {
			ModifySnapshotAttributeResponse response =
					makeRequestInt(method, "ModifySnapshotAttribute", params, ModifySnapshotAttributeResponse.class);
			if (!response.isReturn()) {
				throw new EC2Exception("Could not modify snapshot attribute : "+attribute+". No reason given.");
			}
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Returns a list of regions.
	 *
	 * @param regions a list of regions to limit the results, or null
	 * @return a list of regions and endpoints
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<RegionInfo> describeRegions(List<String> regions) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		if (regions != null && regions.size() > 0) {
			for (int i=0 ; i<regions.size(); i++) {
				params.put("Region."+(i+1), regions.get(i));
			}
		}
		GetMethod method = new GetMethod();
		try {
			DescribeRegionsResponse response =
					makeRequestInt(method, "DescribeRegions", params, DescribeRegionsResponse.class);
			List<RegionInfo> ret = new ArrayList<RegionInfo>();
			RegionSetType set = response.getRegionInfo();
			Iterator set_iter = set.getItems().iterator();
			while (set_iter.hasNext()) {
				RegionItemType item = (RegionItemType) set_iter.next();
				ret.add(new RegionInfo(item.getRegionName(), item.getRegionEndpoint()));
			}
			return ret;
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Sets the region to use.
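	 * <p>
	 * A minimal usage sketch (hypothetical: {@code ec2} stands for an
	 * initialized instance of this class); subsequent requests are sent to
	 * the selected region's endpoint:
	 * <pre>{@code
	 * List<RegionInfo> regions = ec2.describeRegions(null);
	 * ec2.setRegion(regions.get(0));
	 * }</pre>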
	 *
	 * @param region the region to use, from describeRegions()
	 */
	public void setRegion(RegionInfo region) {
		setServer(region.getUrl());
	}

	/**
	 * Sets the region Url to use.
	 *
	 * @param regionUrl the region URL to use, from RegionInfo.getUrl()
	 */
	public void setRegionUrl(String regionUrl) {
		setServer(regionUrl);
	}

	/**
	 * Initiates bundling of an instance running Windows.
	 *
	 * @param instanceId the Id of the instance to bundle
	 * @param accessId the accessId of the owner of the S3 bucket
	 * @param bucketName the name of the S3 bucket in which the AMI will be stored
	 * @param prefix the prefix to append to the AMI
	 * @param policy an UploadPolicy object containing policy parameters
	 * @return information about the bundle task
	 * @throws EC2Exception wraps checked exceptions
	 */
	public BundleInstanceInfo bundleInstance(String instanceId, String accessId, String bucketName,
			String prefix, UploadPolicy policy) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("InstanceId", instanceId);
		params.put("Storage.S3.AWSAccessKeyId", accessId);
		params.put("Storage.S3.Bucket", bucketName);
		params.put("Storage.S3.Prefix", prefix);
		String jsonPolicy = policy.getPolicyString();
		params.put("Storage.S3.UploadPolicy", jsonPolicy);
		params.put("Storage.S3.UploadPolicySignature",
						encode(getSecretAccessKey(), jsonPolicy, false, "HmacSHA1"));
		GetMethod method = new GetMethod();
		try {
			BundleInstanceResponse response =
					makeRequestInt(method, "BundleInstance", params, BundleInstanceResponse.class);
			BundleInstanceTaskType task = response.getBundleInstanceTask();
			return new BundleInstanceInfo(response.getRequestId(), task.getInstanceId(),
					task.getBundleId(), task.getState(), task.getStartTime().toGregorianCalendar(),
					task.getUpdateTime().toGregorianCalendar(), task.getStorage(),
					task.getProgress(), task.getError());
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Cancel a bundling operation.
	 *
	 * @param bundleId the Id of the bundle task to cancel
	 * @return information about the cancelled task
	 * @throws EC2Exception wraps checked exceptions
	 */
	public BundleInstanceInfo cancelBundleInstance(String bundleId) throws EC2Exception {
		Map<String, String> params = new HashMap<String, String>();
		params.put("BundleId", bundleId);
		GetMethod method = new GetMethod();
		try {
			CancelBundleTaskResponse response =
					makeRequestInt(method, "CancelBundleTask", params, CancelBundleTaskResponse.class);
			BundleInstanceTaskType task = response.getBundleInstanceTask();
			return new BundleInstanceInfo(response.getRequestId(), task.getInstanceId(),
					task.getBundleId(), task.getState(), task.getStartTime().toGregorianCalendar(),
					task.getUpdateTime().toGregorianCalendar(), task.getStorage(),
					task.getProgress(), task.getError());
		} finally {
			method.releaseConnection();
		}
	}

	/**
	 * Returns a list of current bundling tasks. An empty array causes all tasks to be returned.
	 *
	 * @param bundleIds the ids of the bundle tasks to describe
	 * @return information about the bundle tasks
	 * @throws EC2Exception wraps checked exceptions
	 */
	public List<BundleInstanceInfo> describeBundleTasks(String [] bundleIds) throws EC2Exception {
		return this.describeBundleTasks(Arrays.asList(bundleIds));
	}

	/**
	 * Returns a list of current bundling tasks. An empty list causes all tasks to be returned.
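	 * <p>
	 * A minimal usage sketch (hypothetical: {@code ec2} stands for an
	 * initialized instance of this class); an empty list returns every task:
	 * <pre>{@code
	 * List<BundleInstanceInfo> tasks =
	 *         ec2.describeBundleTasks(new ArrayList<String>());
	 * }</pre>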
* * @param bundleIds the Ids of the bundle task to describe * @return information about the cancelled task * @throws EC2Exception wraps checked exceptions */ public List<BundleInstanceInfo> describeBundleTasks(List<String> bundleIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); for (int i=0 ; i<bundleIds.size(); i++) { params.put("BundleId."+(i+1), bundleIds.get(i)); } GetMethod method = new GetMethod(); try { DescribeBundleTasksResponse response = makeRequestInt(method, "DescribeBundleTasks", params, DescribeBundleTasksResponse.class); List<BundleInstanceInfo> ret = new ArrayList<BundleInstanceInfo>(); Iterator task_iter = response.getBundleInstanceTasksSet().getItems().iterator(); while (task_iter.hasNext()) { BundleInstanceTaskType task = (BundleInstanceTaskType) task_iter.next(); ret.add(new BundleInstanceInfo(response.getRequestId(), task.getInstanceId(), task.getBundleId(), task.getState(), task.getStartTime().toGregorianCalendar(), task.getUpdateTime().toGregorianCalendar(), task.getStorage(), task.getProgress(), task.getError())); } return ret; } finally { method.releaseConnection(); } } /** * Returns a list of Reserved Instance offerings that are available for purchase. * * @param instanceIds specific reserved instance offering ids to return * @throws EC2Exception wraps checked exceptions */ public List<ReservedInstances> describeReservedInstances(List<String> instanceIds) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); if (instanceIds != null) { for (int i=0 ; i<instanceIds.size(); i++) { params.put("ReservedInstanceId."+(i+1), instanceIds.get(i)); } } GetMethod method = new GetMethod(); try { DescribeReservedInstancesResponse response = makeRequestInt(method, "DescribeReservedInstances", params, DescribeReservedInstancesResponse.class); List<ReservedInstances> ret = new ArrayList<ReservedInstances>(); Iterator task_iter = response.getReservedInstancesSet().getItems().iterator(); while (task_iter.hasNext()) { DescribeReservedInstancesResponseSetItemType type = (DescribeReservedInstancesResponseSetItemType) task_iter.next(); ret.add(new ReservedInstances(type.getReservedInstancesId(), InstanceType.getTypeFromString(type.getInstanceType()), type.getAvailabilityZone(), type.getStart().toGregorianCalendar(), type.getDuration(), type.getFixedPrice(), type.getUsagePrice(), type.getProductDescription(), type.getInstanceCount().intValue(), type.getState())); } return ret; } finally { method.releaseConnection(); } } /** * Returns a list of Reserved Instance offerings that are available for purchase. 
* * @param offeringIds specific reserved instance offering ids to return * @param instanceType the type of instance offering to be returned * @param availabilityZone the availability zone to get offerings for * @param productDescription limit results to those with a matching product description * @return a list of product descriptions * @throws EC2Exception wraps checked exceptions */ public List<ProductDescription> describeReservedInstancesOfferings(List<String> offeringIds, InstanceType instanceType, String availabilityZone, String productDescription) throws EC2Exception { Map<String, String> params = new HashMap<String, String>(); if (offeringIds != null) { for (int i=0 ; i<offeringIds.size(); i++) { params.put("ReservedInstancesOfferingId."+(i+1), offeringIds.get(i)); } } if (instanceType != null) { params.put("InstanceType", instanceType.getTypeId()); } if (availabilityZone != null) { params.put("AvailabilityZone", availabilityZone); } if (productDescription != null) { params.put("ProductDescription", productDescription); } GetMethod method = new GetMethod(); try { DescribeReservedInstancesOfferingsResponse response = makeRequestInt(method, "DescribeReservedInstancesOfferings", params, DescribeReservedInstancesOfferingsResponse.class); List<ProductDescription> ret = new ArrayList<ProductDescription>(); Iterator task_iter = response.getReservedInstancesOfferingsSet().getItems().iterator(); while (task_iter.hasNext()) { DescribeReservedInstancesOfferingsResponseSetItemType type = (DescribeReservedInstancesOfferingsResponseSetItemType) task_iter.next(); ret.add(new ProductDescription(type.getReservedInstancesOfferingId(), InstanceType.getTypeFromString(type.getInstanceType()), type.getAvailabilityZone(), type.getDuration(), type.getFixedPrice(), type.getUsagePrice(), type.getProductDescription())); } return ret; } finally { method.releaseConnection(); } } /** * This method purchases a reserved instance offering. * * NOTE: Use With Caution!!! This can cost a lot of money! 
     *
     * @param offeringId the id of the offering to purchase
     * @param instanceCount the number of instances to reserve
     * @return id of reserved instances
     * @throws EC2Exception wraps checked exceptions
     */
    public String purchaseReservedInstancesOffering(String offeringId, int instanceCount) throws EC2Exception {
        Map<String, String> params = new HashMap<String, String>();
        params.put("ReservedInstancesOfferingId", offeringId);
        params.put("InstanceCount", ""+instanceCount);
        GetMethod method = new GetMethod();
        try {
            PurchaseReservedInstancesOfferingResponse response =
                    makeRequestInt(method, "PurchaseReservedInstancesOffering", params, PurchaseReservedInstancesOfferingResponse.class);
            return response.getReservedInstancesId();
        } finally {
            method.releaseConnection();
        }
    }

    /**
     * This method enables monitoring for some instances
     *
     * @param instanceIds the ids of the instances to enable monitoring for
     * @return information about the monitoring state of those instances
     * @throws EC2Exception wraps checked exceptions
     */
    public List<MonitoredInstanceInfo> monitorInstances(List<String> instanceIds) throws EC2Exception {
        Map<String, String> params = new HashMap<String, String>();
        for (int i=0 ; i<instanceIds.size(); i++) {
            params.put("InstanceId."+(i+1), instanceIds.get(i));
        }
        GetMethod method = new GetMethod();
        try {
            MonitorInstancesResponseType response =
                    makeRequestInt(method, "MonitorInstances", params, MonitorInstancesResponseType.class);
            List<MonitoredInstanceInfo> ret = new ArrayList<MonitoredInstanceInfo>();
            for (MonitorInstancesResponseSetItemType item : response.getInstancesSet().getItems()) {
                ret.add(new MonitoredInstanceInfo(item.getInstanceId(),
                                item.getMonitoring().getState()));
            }
            return ret;
        } finally {
            method.releaseConnection();
        }
    }

    /**
     * This method disables monitoring for some instances
     *
     * @param instanceIds the ids of the instances to disable monitoring for
     * @return information about the monitoring state of those instances
     * @throws EC2Exception wraps checked exceptions
     */
    public List<MonitoredInstanceInfo> unmonitorInstances(List<String> instanceIds) throws EC2Exception {
        Map<String, String> params = new HashMap<String, String>();
        for (int i=0 ; i<instanceIds.size(); i++) {
            params.put("InstanceId."+(i+1), instanceIds.get(i));
        }
        GetMethod method = new GetMethod();
        try {
            MonitorInstancesResponseType response =
                    makeRequestInt(method, "UnmonitorInstances", params, MonitorInstancesResponseType.class);
            List<MonitoredInstanceInfo> ret = new ArrayList<MonitoredInstanceInfo>();
            for (MonitorInstancesResponseSetItemType item : response.getInstancesSet().getItems()) {
                ret.add(new MonitoredInstanceInfo(item.getInstanceId(),
                                item.getMonitoring().getState()));
            }
            return ret;
        } finally {
            method.releaseConnection();
        }
    }

    /**
     * Returns the spot price history, optionally bounded by a time range and
     * filtered by product description and instance types.
     *
     * @param start the start of the time range, or null
     * @param end the end of the time range, or null
     * @param productDescription limit results to one product description, or null
     * @param instanceTypes the instance types to report prices for (none means all)
     * @return a list of spot price history items
     * @throws EC2Exception wraps checked exceptions
     */
    public List<SpotPriceHistoryItem> describeSpotPriceHistory(Calendar start, Calendar end, String productDescription,
                                            InstanceType... instanceTypes) throws EC2Exception {
        Map<String, String> params = new HashMap<String, String>();
        if (start != null) {
            params.put("StartTime", httpDate(start));
        }
        if (end != null) {
            params.put("EndTime", httpDate(end));
        }
        if (productDescription != null) {
            params.put("ProductDescription", productDescription);
        }
        for (int i = 0; i < instanceTypes.length; i++) {
            InstanceType instanceType = instanceTypes[i];
            params.put("InstanceType."
+ (i + 1), instanceType.getTypeId()); } GetMethod method = new GetMethod(); try { List<SpotPriceHistoryItem> ret = new ArrayList<SpotPriceHistoryItem>(); DescribeSpotPriceHistoryResponse response = makeRequestInt(method, "DescribeSpotPriceHistory", params, DescribeSpotPriceHistoryResponse.class); List<SpotPriceHistorySetItemType> items = response.getSpotPriceHistorySet().getItems(); if (items != null) { for (SpotPriceHistorySetItemType item : items) { ret.add(new SpotPriceHistoryItem(item)); } } return ret; } finally { method.releaseConnection(); } } protected <T> T makeRequestInt(HttpMethodBase method, String action, Map<String, String> params, Class<T> respType) throws EC2Exception { try { return makeRequest(method, action, params, respType); } catch (AWSException ex) { throw new EC2Exception(ex); } catch (JAXBException ex) { throw new EC2Exception("Problem parsing returned message.", ex); } catch (MalformedURLException ex) { throw new EC2Exception(ex.getMessage(), ex); } catch (IOException ex) { throw new EC2Exception(ex.getMessage(), ex); } } }
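The region, monitoring and spot-price calls above compose into a simple workflow. Below is a minimal usage sketch, not part of the library source: the credentials are placeholders, and it assumes typica's two-argument Jec2(accessKeyId, secretKey) constructor.

import java.util.List;

import com.xerox.amazonws.ec2.EC2Exception;
import com.xerox.amazonws.ec2.Jec2;
import com.xerox.amazonws.ec2.RegionInfo;
import com.xerox.amazonws.ec2.SpotPriceHistoryItem;

public class Jec2UsageSketch {
    public static void main(String[] args) throws EC2Exception {
        // Placeholder credentials; substitute real AWS keys.
        Jec2 ec2 = new Jec2("ACCESS_KEY_ID", "SECRET_ACCESS_KEY");

        // List all regions (null means no filter) and point the client at the first one.
        List<RegionInfo> regions = ec2.describeRegions(null);
        if (!regions.isEmpty()) {
            ec2.setRegion(regions.get(0));
        }

        // Fetch spot price history with no time range, product or type filter;
        // the varargs parameter can simply be left empty.
        List<SpotPriceHistoryItem> history = ec2.describeSpotPriceHistory(null, null, null);
        System.out.println("spot price samples: " + history.size());
    }
}

Passing null everywhere mirrors the null checks in the methods above, which only add a request parameter when a filter is supplied.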
added createImage, new registerImage in support of EBS images git-svn-id: ed72533e8e76b4727d8751eef669f13bfa24ccfa@302 7ed6c317-272d-0410-8ab0-49a3fbf2961c
java/com/xerox/amazonws/ec2/Jec2.java
added createImage, new registerImage in support of EBS images
<ide><path>ava/com/xerox/amazonws/ec2/Jec2.java <ide> <ide> import com.xerox.amazonws.common.AWSException; <ide> import com.xerox.amazonws.common.AWSQueryConnection; <add>import com.xerox.amazonws.typica.jaxb.AllocateAddressResponse; <add>import com.xerox.amazonws.typica.jaxb.AssociateAddressResponse; <add>import com.xerox.amazonws.typica.jaxb.AttachmentSetResponseType; <add>import com.xerox.amazonws.typica.jaxb.AttachmentSetItemResponseType; <add>import com.xerox.amazonws.typica.jaxb.AttachVolumeResponse; <add>import com.xerox.amazonws.typica.jaxb.AvailabilityZoneItemType; <add>import com.xerox.amazonws.typica.jaxb.AvailabilityZoneMessageType; <add>import com.xerox.amazonws.typica.jaxb.AvailabilityZoneSetType; <add>import com.xerox.amazonws.typica.jaxb.AuthorizeSecurityGroupIngressResponse; <add>import com.xerox.amazonws.typica.jaxb.BlockDeviceMappingType; <add>import com.xerox.amazonws.typica.jaxb.BlockDeviceMappingItemType; <add>import com.xerox.amazonws.typica.jaxb.BundleInstanceResponse; <add>import com.xerox.amazonws.typica.jaxb.BundleInstanceTaskType; <add>import com.xerox.amazonws.typica.jaxb.CancelBundleTaskResponse; <add>import com.xerox.amazonws.typica.jaxb.CreateImageResponse; <add>import com.xerox.amazonws.typica.jaxb.CreateKeyPairResponse; <add>import com.xerox.amazonws.typica.jaxb.CreateSnapshotResponse; <add>import com.xerox.amazonws.typica.jaxb.CreateVolumeResponse; <add>import com.xerox.amazonws.typica.jaxb.ConfirmProductInstanceResponse; <add>import com.xerox.amazonws.typica.jaxb.CreateSecurityGroupResponse; <add>import com.xerox.amazonws.typica.jaxb.DeleteKeyPairResponse; <add>import com.xerox.amazonws.typica.jaxb.DeleteSecurityGroupResponse; <add>import com.xerox.amazonws.typica.jaxb.DeleteSnapshotResponse; <add>import com.xerox.amazonws.typica.jaxb.DeleteVolumeResponse; <add>import com.xerox.amazonws.typica.jaxb.DeregisterImageResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeAddressesResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeAddressesResponseInfoType; <add>import com.xerox.amazonws.typica.jaxb.DescribeAddressesResponseItemType; <add>import com.xerox.amazonws.typica.jaxb.DescribeAvailabilityZonesResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeBundleTasksResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeBundleTasksItemType; <add>import com.xerox.amazonws.typica.jaxb.DescribeImageAttributeResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeImagesResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeImagesResponseInfoType; <add>import com.xerox.amazonws.typica.jaxb.DescribeImagesResponseItemType; <add>import com.xerox.amazonws.typica.jaxb.DescribeInstancesResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeReservedInstancesResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeReservedInstancesResponseSetItemType; <add>import com.xerox.amazonws.typica.jaxb.DescribeReservedInstancesOfferingsResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeReservedInstancesOfferingsResponseSetItemType; <add>import com.xerox.amazonws.typica.jaxb.DescribeSnapshotsResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeSnapshotsSetResponseType; <add>import com.xerox.amazonws.typica.jaxb.DescribeSnapshotsSetItemResponseType; <add>import com.xerox.amazonws.typica.jaxb.DescribeVolumesResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeVolumesSetResponseType; <add>import com.xerox.amazonws.typica.jaxb.DescribeVolumesSetItemResponseType; <add>import 
com.xerox.amazonws.typica.jaxb.DescribeKeyPairsResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeKeyPairsResponseInfoType; <add>import com.xerox.amazonws.typica.jaxb.DescribeKeyPairsResponseItemType; <add>import com.xerox.amazonws.typica.jaxb.DescribeRegionsResponse; <add>import com.xerox.amazonws.typica.jaxb.DescribeSecurityGroupsResponse; <add>import com.xerox.amazonws.typica.jaxb.DetachVolumeResponse; <add>import com.xerox.amazonws.typica.jaxb.DisassociateAddressResponse; <add>import com.xerox.amazonws.typica.jaxb.GetConsoleOutputResponse; <add>import com.xerox.amazonws.typica.jaxb.GetPasswordDataResponse; <add>import com.xerox.amazonws.typica.jaxb.GroupItemType; <add>import com.xerox.amazonws.typica.jaxb.GroupSetType; <add>import com.xerox.amazonws.typica.jaxb.InstanceStateChangeSetType; <add>import com.xerox.amazonws.typica.jaxb.InstanceStateChangeType; <ide> import com.xerox.amazonws.typica.jaxb.IpPermissionSetType; <ide> <ide> /** <ide> } <ide> <ide> /** <del> * Register the given AMI. <add> * Creates an AMI that uses an EBS root device. <add> * <add> * @param instanceId An instance's id ({@link com.xerox.amazonws.ec2.ReservationDescription.Instance#instanceId}. <add> * @param name a name to associate with the image <add> * @param description a descriptive string to attach to the image <add> * @param noReboot normally false; if set to true, instance is not shutdown first. <add> * NOTE: filesystem integrity isn't guaranteed when noReboot=true <add> * @return image ID <add> * @throws EC2Exception wraps checked exceptions <add> */ <add> public String createImage(String instanceId, String name, String description, <add> boolean noReboot) throws EC2Exception { <add> Map<String, String> params = new HashMap<String, String>(); <add> params.put("InstanceId", instanceId); <add> params.put("Name", name); <add> if (description != null && !description.trim().equals("")) { <add> params.put("Description", description); <add> } <add> if (noReboot) { <add> params.put("NoReboot", "true"); <add> } <add> GetMethod method = new GetMethod(); <add> try { <add> CreateImageResponse response = <add> makeRequestInt(method, "CreateImage", params, CreateImageResponse.class); <add> return response.getImageId(); <add> } finally { <add> method.releaseConnection(); <add> } <add> } <add> <add> /** <add> * Register an S3 based AMI. <ide> * <ide> * @param imageLocation An AMI path within S3. <ide> * @return A unique AMI ID that can be used to create and manage instances of this AMI. <ide> * TODO: need to return request id <ide> */ <ide> public String registerImage(String imageLocation) throws EC2Exception { <del> Map<String, String> params = new HashMap<String, String>(); <del> params.put("ImageLocation", imageLocation); <add> return registerImage(imageLocation, null, null, null, null, null, null, null); <add> } <add> <add> /** <add> * Register a snapshot as an EBS backed AMI <add> * <add> * @param imageLocation An AMI path within S3. <add> * @return A unique AMI ID that can be used to create and manage instances of this AMI. 
<add> * @throws EC2Exception wraps checked exceptions <add> * TODO: need to return request id <add> */ <add> public String registerImage(String name, <add> String description, String architecture, <add> String kernelId, String ramdiskId, String rootDeviceName, <add> List<BlockDeviceMapping> blockDeviceMappings) throws EC2Exception { <add> return registerImage(null, name, description, architecture, kernelId, ramdiskId, <add> rootDeviceName, blockDeviceMappings); <add> } <add> <add> protected String registerImage(String imageLocation, String name, <add> String description, String architecture, <add> String kernelId, String ramdiskId, String rootDeviceName, <add> List<BlockDeviceMapping> blockDeviceMappings) throws EC2Exception { <add> Map<String, String> params = new HashMap<String, String>(); <add> if (imageLocation != null && !imageLocation.trim().equals("")) { <add> params.put("ImageLocation", imageLocation); <add> } <add> if (name != null && !name.trim().equals("")) { <add> params.put("Name", name); <add> } <add> if (description != null && !description.trim().equals("")) { <add> params.put("Description", description); <add> } <add> if (architecture != null && !architecture.trim().equals("")) { <add> params.put("Architecture", architecture); <add> } <add> if (kernelId != null && !kernelId.trim().equals("")) { <add> params.put("KernelId", kernelId); <add> } <add> if (ramdiskId != null && !ramdiskId.trim().equals("")) { <add> params.put("RamdiskId", ramdiskId); <add> } <add> if (rootDeviceName != null && !rootDeviceName.trim().equals("")) { <add> params.put("RootDeviceName", rootDeviceName); <add> } <add> if (blockDeviceMappings != null) { <add> for(int i = 0; i < blockDeviceMappings.size(); i++) { <add> BlockDeviceMapping bdm = blockDeviceMappings.get(i); <add> params.put("BlockDeviceMapping." + (i + 1) + ".VirtualName", <add> bdm.getVirtualName()); <add> params.put("BlockDeviceMapping." + (i + 1) + ".DeviceName", <add> bdm.getDeviceName()); <add> } <add> } <ide> GetMethod method = new GetMethod(); <ide> try { <ide> RegisterImageResponse response =
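To illustrate the new call introduced by this diff, here is a short sketch of creating an EBS-backed image from a running instance. The instance id and names are made up, and the client is constructed as in the earlier sketch.

import com.xerox.amazonws.ec2.EC2Exception;
import com.xerox.amazonws.ec2.Jec2;

public class CreateImageSketch {
    public static void main(String[] args) throws EC2Exception {
        Jec2 ec2 = new Jec2("ACCESS_KEY_ID", "SECRET_ACCESS_KEY");
        // noReboot=false lets EC2 stop the instance before snapshotting, which
        // is the safe choice: the createImage javadoc warns that filesystem
        // integrity is not guaranteed when noReboot=true.
        String imageId = ec2.createImage("i-12345678", "app-server-backup",
                "nightly image of the app server", false);
        System.out.println("new AMI: " + imageId);
    }
}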
JavaScript
mit
24f94093c7b9955217a95ddfe19a5bcebd32783b
0
derek-boman/mx-react-components,mxenabled/mx-react-components
const React = require('react'); const Radium = require('radium'); const Spin = require('./Spin'); const StyleConstants = require('../constants/Style'); const Icon = require('../components/Icon'); const Button = React.createClass({ propTypes: { actionText: React.PropTypes.string, icon: React.PropTypes.string, isActive: React.PropTypes.bool, primaryColor: React.PropTypes.string, type: React.PropTypes.oneOf([ 'base', 'disabled', 'neutral', 'primary', 'primaryOutline', 'secondary' ]) }, getDefaultProps () { return { isActive: false, primaryColor: StyleConstants.Colors.PRIMARY, type: 'primary' }; }, _renderButtonContent () { const styles = this.styles(); if (this.props.isActive) { return ( <div> <Spin direction='counterclockwise'> <Icon size='20' style={[styles.icon, styles.spinnerStyles]} type='spinner' /> </Spin> {this.props.actionText ? <div style={styles.actionTextStyles}> {this.props.actionText} </div> : null } </div> ); } else { return this.props.children; } }, render () { const styles = this.styles(); return ( <div {...this.props} style={[styles.component, styles[this.props.type], this.props.style]}> {this.props.icon && !this.props.isActive ? <Icon size={20} style={styles.icon} type={this.props.icon} /> : null} {this._renderButtonContent()} </div> ); }, styles () { return { component: { borderRadius: 2, borderStyle: 'solid', borderWidth: 1, borderColor: 'transparent', display: 'inline-block', padding: '7px 14px', textAlign: 'center', fontSize: StyleConstants.FontSizes.MEDIUM, fontFamily: StyleConstants.Fonts.SEMIBOLD, cursor: 'pointer', transition: 'all .2s ease-in', minWidth: 50 }, primary: { backgroundColor: this.props.primaryColor, borderColor: this.props.primaryColor, color: StyleConstants.Colors.WHITE, fill: StyleConstants.Colors.WHITE, transition: 'all .2s ease-in', ':hover': { backgroundColor: StyleConstants.adjustColor(this.props.primaryColor, -30), borderColor: StyleConstants.adjustColor(this.props.primaryColor, -30), transition: 'all .2s ease-in' }, ':active': { backgroundColor: StyleConstants.adjustColor(this.props.primaryColor, -16), borderColor: StyleConstants.adjustColor(this.props.primaryColor, -16), transition: 'all .2s ease-in' } }, primaryOutline: { backgroundColor: 'transparent', borderColor: this.props.primaryColor, color: this.props.primaryColor, fill: this.props.primaryColor, transition: 'all .2s ease-in', ':hover': { backgroundColor: this.props.primaryColor, color: StyleConstants.Colors.WHITE, fill: StyleConstants.Colors.WHITE, transition: 'all .2s ease-in' }, ':active': { backgroundColor: StyleConstants.adjustColor(this.props.primaryColor, -16), borderColor: StyleConstants.adjustColor(this.props.primaryColor, -16), color: StyleConstants.Colors.WHITE, fill: StyleConstants.Colors.WHITE, transition: 'all .2s ease-in' } }, secondary: { backgroundColor: 'transparent', borderColor: StyleConstants.Colors.ASH, color: StyleConstants.Colors.ASH, fill: StyleConstants.Colors.ASH, transition: 'all .2s ease-in', ':hover': { backgroundColor: StyleConstants.Colors.ASH, borderColor: StyleConstants.Colors.ASH, color: StyleConstants.Colors.WHITE, fill: StyleConstants.Colors.WHITE, transition: 'all .2s ease-in' }, ':active': { backgroundColor: StyleConstants.adjustColor(StyleConstants.Colors.ASH, -10), borderColor: StyleConstants.adjustColor(StyleConstants.Colors.ASH, -10), color: StyleConstants.Colors.WHITE, fill: StyleConstants.Colors.WHITE, transition: 'all .2s ease-in' } }, base: { backgroundColor: 'transparent', color: this.props.primaryColor, fill: this.props.primaryColor, 
transition: 'all .2s ease-in', ':hover': { color: StyleConstants.adjustColor(this.props.primaryColor, -8), fill: StyleConstants.adjustColor(this.props.primaryColor, -8), transition: 'all .2s ease-in' }, ':active': { color: StyleConstants.adjustColor(this.props.primaryColor, -16), fill: StyleConstants.adjustColor(this.props.primaryColor, -16), transition: 'all .2s ease-in' } }, disabled: { backgroundColor: StyleConstants.Colors.PORCELAIN, borderColor: StyleConstants.Colors.PORCELAIN, color: StyleConstants.Colors.FOG, fill: StyleConstants.Colors.FOG }, icon: { marginTop: -6, marginBottom: -5, marginLeft: -5, marginRight: this.props.children ? 5 : -5 }, spinnerStyles: { verticalAlign: 'initial', marginRight: -5, padding: !this.props.actionText && this.props.children ? 3 : 0 }, actionTextStyles: { display: 'inline-block', paddingLeft: 10 } }; } }); module.exports = Radium(Button);
src/components/Button.js
const React = require('react'); const Radium = require('radium'); const Spin = require('./Spin'); const StyleConstants = require('../constants/Style'); const Icon = require('../components/Icon'); const Button = React.createClass({ propTypes: { actionText: React.PropTypes.string, icon: React.PropTypes.string, isActive: React.PropTypes.bool, primaryColor: React.PropTypes.string, type: React.PropTypes.oneOf([ 'base', 'disabled', 'neutral', 'primary', 'primaryOutline', 'secondary' ]) }, getDefaultProps () { return { isActive: false, primaryColor: StyleConstants.Colors.PRIMARY, type: 'primary' }; }, _renderButtonContent () { const spinnerStyles = { verticalAlign: 'initial', marginTop: -6, marginBottom: -5, marginLeft: -5, marginRight: -5, padding: !this.props.actionText && this.props.children ? 3 : 0 }; const actionTextStyles = { display: 'inline-block', paddingLeft: 10 }; if (this.props.isActive) { return ( <div> <Spin direction='counterclockwise'> <Icon size='20' style={spinnerStyles} type='spinner' /> </Spin> {this.props.actionText ? <div style={actionTextStyles}> {this.props.actionText} </div> : null } </div> ); } else { return this.props.children; } }, render () { const styles = { component: { borderRadius: 2, borderStyle: 'solid', borderWidth: 1, borderColor: 'transparent', display: 'inline-block', padding: '7px 14px', textAlign: 'center', fontSize: StyleConstants.FontSizes.MEDIUM, fontFamily: StyleConstants.Fonts.SEMIBOLD, cursor: 'pointer', transition: 'all .2s ease-in', minWidth: 50 }, primary: { backgroundColor: this.props.primaryColor, borderColor: this.props.primaryColor, color: StyleConstants.Colors.WHITE, fill: StyleConstants.Colors.WHITE, transition: 'all .2s ease-in', ':hover': { backgroundColor: StyleConstants.adjustColor(this.props.primaryColor, -30), borderColor: StyleConstants.adjustColor(this.props.primaryColor, -30), transition: 'all .2s ease-in' }, ':active': { backgroundColor: StyleConstants.adjustColor(this.props.primaryColor, -16), borderColor: StyleConstants.adjustColor(this.props.primaryColor, -16), transition: 'all .2s ease-in' } }, primaryOutline: { backgroundColor: 'transparent', borderColor: this.props.primaryColor, color: this.props.primaryColor, fill: this.props.primaryColor, transition: 'all .2s ease-in', ':hover': { backgroundColor: this.props.primaryColor, color: StyleConstants.Colors.WHITE, fill: StyleConstants.Colors.WHITE, transition: 'all .2s ease-in' }, ':active': { backgroundColor: StyleConstants.adjustColor(this.props.primaryColor, -16), borderColor: StyleConstants.adjustColor(this.props.primaryColor, -16), color: StyleConstants.Colors.WHITE, fill: StyleConstants.Colors.WHITE, transition: 'all .2s ease-in' } }, secondary: { backgroundColor: 'transparent', borderColor: StyleConstants.Colors.ASH, color: StyleConstants.Colors.ASH, fill: StyleConstants.Colors.ASH, transition: 'all .2s ease-in', ':hover': { backgroundColor: StyleConstants.Colors.ASH, borderColor: StyleConstants.Colors.ASH, color: StyleConstants.Colors.WHITE, fill: StyleConstants.Colors.WHITE, transition: 'all .2s ease-in' }, ':active': { backgroundColor: StyleConstants.adjustColor(StyleConstants.Colors.ASH, -10), borderColor: StyleConstants.adjustColor(StyleConstants.Colors.ASH, -10), color: StyleConstants.Colors.WHITE, fill: StyleConstants.Colors.WHITE, transition: 'all .2s ease-in' } }, base: { backgroundColor: 'transparent', color: this.props.primaryColor, fill: this.props.primaryColor, transition: 'all .2s ease-in', ':hover': { color: StyleConstants.adjustColor(this.props.primaryColor, 
-8), fill: StyleConstants.adjustColor(this.props.primaryColor, -8), transition: 'all .2s ease-in' }, ':active': { color: StyleConstants.adjustColor(this.props.primaryColor, -16), fill: StyleConstants.adjustColor(this.props.primaryColor, -16), transition: 'all .2s ease-in' } }, disabled: { backgroundColor: StyleConstants.Colors.PORCELAIN, borderColor: StyleConstants.Colors.PORCELAIN, color: StyleConstants.Colors.FOG, fill: StyleConstants.Colors.FOG }, icon: { marginTop: -6, marginBottom: -5, marginLeft: -5, marginRight: this.props.children ? 5 : -5 } }; return ( <div {...this.props} style={[styles.component, styles[this.props.type], this.props.style]}> {this.props.icon && !this.props.isActive ? <Icon size={20} style={styles.icon} type={this.props.icon} /> : null} {this._renderButtonContent()} </div> ); } }); module.exports = Radium(Button);
moves styles into a function
src/components/Button.js
moves styles into a function
<ide><path>rc/components/Button.js <ide> }, <ide> <ide> _renderButtonContent () { <del> const spinnerStyles = { <del> verticalAlign: 'initial', <del> marginTop: -6, <del> marginBottom: -5, <del> marginLeft: -5, <del> marginRight: -5, <del> padding: !this.props.actionText && this.props.children ? 3 : 0 <del> }; <del> <del> const actionTextStyles = { <del> display: 'inline-block', <del> paddingLeft: 10 <del> }; <add> const styles = this.styles(); <ide> <ide> if (this.props.isActive) { <ide> return ( <ide> <div> <ide> <Spin direction='counterclockwise'> <del> <Icon size='20' style={spinnerStyles} type='spinner' /> <add> <Icon size='20' style={[styles.icon, styles.spinnerStyles]} type='spinner' /> <ide> </Spin> <del> {this.props.actionText ? <div style={actionTextStyles}> {this.props.actionText} </div> : null } <add> {this.props.actionText ? <div style={styles.actionTextStyles}> {this.props.actionText} </div> : null } <ide> </div> <ide> ); <ide> } else { <ide> }, <ide> <ide> render () { <del> const styles = { <add> const styles = this.styles(); <add> <add> return ( <add> <div {...this.props} style={[styles.component, styles[this.props.type], this.props.style]}> <add> {this.props.icon && !this.props.isActive ? <Icon size={20} style={styles.icon} type={this.props.icon} /> : null} <add> {this._renderButtonContent()} <add> </div> <add> ); <add> }, <add> <add> styles () { <add> return { <ide> component: { <ide> borderRadius: 2, <ide> borderStyle: 'solid', <ide> marginBottom: -5, <ide> marginLeft: -5, <ide> marginRight: this.props.children ? 5 : -5 <add> }, <add> spinnerStyles: { <add> verticalAlign: 'initial', <add> marginRight: -5, <add> padding: !this.props.actionText && this.props.children ? 3 : 0 <add> }, <add> actionTextStyles: { <add> display: 'inline-block', <add> paddingLeft: 10 <ide> } <ide> }; <del> <del> return ( <del> <div {...this.props} style={[styles.component, styles[this.props.type], this.props.style]}> <del> {this.props.icon && !this.props.isActive ? <Icon size={20} style={styles.icon} type={this.props.icon} /> : null} <del> {this._renderButtonContent()} <del> </div> <del> ); <ide> } <ide> }); <ide>
Java
apache-2.0
error: pathspec 'enhanced/drlvm/trunk/vm/tests/kernel/java/lang/ClassTestGetAnnotation.java' did not match any file(s) known to git
ac7f8d00c882b5ac982aac206d8028cd24b23696
1
freeVM/freeVM,freeVM/freeVM,freeVM/freeVM,freeVM/freeVM,freeVM/freeVM
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * tested class: java.lang.Class
 * tested method: getAnnotation
 *
 */

package java.lang;

import junit.framework.TestCase;

/**
 * @tested class: java.lang.Class
 * @tested method: getAnnotation
 */
public class ClassTestGetAnnotation extends TestCase {

    /*
     * Regression test for HARMONY-886
     * [classlib][core][drlvm] compatibility: Harmony method
     * Class.getAnnotation(null) return null while RI throws NPE
     *
     */
    public void test_HARMONY_886() {
        boolean et = false;
        try {
            Object.class.getAnnotation(null);
        } catch (NullPointerException e) {
            et = true;
        }
        assertTrue("NullPointerException expected", et);
    }
}
enhanced/drlvm/trunk/vm/tests/kernel/java/lang/ClassTestGetAnnotation.java
Regression test for HARMONY-886 " [classlib][core][drlvm] compatibility: Harmony method Class.getAnnotation(null) return null while RI throws NPE" svn path=/harmony/; revision=487309
enhanced/drlvm/trunk/vm/tests/kernel/java/lang/ClassTestGetAnnotation.java
Regression test for HARMONY-886 " [classlib][core][drlvm] compatibility: Harmony method Class.getAnnotation(null) return null while RI throws NPE"
<ide><path>nhanced/drlvm/trunk/vm/tests/kernel/java/lang/ClassTestGetAnnotation.java
<add>/*
<add> * Licensed to the Apache Software Foundation (ASF) under one or more
<add> * contributor license agreements. See the NOTICE file distributed with
<add> * this work for additional information regarding copyright ownership.
<add> * The ASF licenses this file to You under the Apache License, Version 2.0
<add> * (the "License"); you may not use this file except in compliance with
<add> * the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>/**
<add> * tested class: java.lang.Class
<add> * tested method: getAnnotation
<add> *
<add> */
<add>
<add>package java.lang;
<add>
<add>import junit.framework.TestCase;
<add>
<add>/**
<add> * @tested class: java.lang.Class
<add> * @tested method: getAnnotation
<add> */
<add>public class ClassTestGetAnnotation extends TestCase {
<add>
<add>    /*
<add>     * Regression test for HARMONY-886
<add>     * [classlib][core][drlvm] compatibility: Harmony method
<add>     * Class.getAnnotation(null) return null while RI throws NPE
<add>     *
<add>     */
<add>    public void test_HARMONY_886() {
<add>        boolean et = false;
<add>        try {
<add>            Object.class.getAnnotation(null);
<add>        } catch (NullPointerException e) {
<add>            et = true;
<add>        }
<add>        assertTrue("NullPointerException expected", et);
<add>    }
<add>}
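The reference-implementation behaviour this regression test pins down can be reproduced with plain JDK classes. The sketch below is purely illustrative and mirrors the assertion in the test.

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

public class GetAnnotationDemo {
    @Retention(RetentionPolicy.RUNTIME)
    @interface Marker {}

    @Marker
    static class Annotated {}

    public static void main(String[] args) {
        // A normal lookup returns the annotation instance.
        System.out.println(Annotated.class.getAnnotation(Marker.class));
        // Per the reference implementation, a null argument must throw NPE.
        try {
            Object.class.getAnnotation(null);
            System.out.println("no exception (non-compliant)");
        } catch (NullPointerException expected) {
            System.out.println("NullPointerException (compliant)");
        }
    }
}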
Java
apache-2.0
30726e487fd75a626da5c19236a1abffc65d6bf3
0
consulo/consulo,consulo/consulo,consulo/consulo,consulo/consulo,ernestp/consulo,ernestp/consulo,ernestp/consulo,ernestp/consulo,consulo/consulo,ernestp/consulo,consulo/consulo,ernestp/consulo
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.uiDesigner; import com.intellij.ProjectTopics; import com.intellij.openapi.Disposable; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtil; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ModuleRootAdapter; import com.intellij.openapi.roots.ModuleRootEvent; import com.intellij.openapi.roots.OrderEnumerator; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileManager; import com.intellij.openapi.vfs.util.ArchiveVfsUtil; import com.intellij.uiDesigner.core.Spacer; import com.intellij.util.PathUtil; import com.intellij.util.containers.ConcurrentWeakHashMap; import com.intellij.util.lang.UrlClassLoader; import com.intellij.util.messages.MessageBusConnection; import org.jetbrains.annotations.NotNull; import javax.swing.*; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.*; /** * @author Anton Katilin * @author Vladimir Kondratyev */ public final class LoaderFactory { private final Project myProject; private final ConcurrentWeakHashMap<Module, ClassLoader> myModule2ClassLoader; private ClassLoader myProjectClassLoader = null; private final MessageBusConnection myConnection; public static LoaderFactory getInstance(final Project project) { return ServiceManager.getService(project, LoaderFactory.class); } public LoaderFactory(final Project project) { myProject = project; myModule2ClassLoader = new ConcurrentWeakHashMap<Module, ClassLoader>(); myConnection = myProject.getMessageBus().connect(); myConnection.subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootAdapter() { public void rootsChanged(final ModuleRootEvent event) { clearClassLoaderCache(); } }); Disposer.register(project, new Disposable() { public void dispose() { myConnection.disconnect(); myModule2ClassLoader.clear(); } }); } @NotNull public ClassLoader getLoader(final VirtualFile formFile) { final Module module = ModuleUtil.findModuleForFile(formFile, myProject); if (module == null) { return getClass().getClassLoader(); } return getLoader(module); } public ClassLoader getLoader(final Module module) { final ClassLoader cachedLoader = myModule2ClassLoader.get(module); if (cachedLoader != null) { return cachedLoader; } final String runClasspath = OrderEnumerator.orderEntries(module).recursively().getPathsList().getPathsString(); final ClassLoader classLoader = createClassLoader(runClasspath, module.getName()); myModule2ClassLoader.put(module, classLoader); return classLoader; } @NotNull public ClassLoader getProjectClassLoader() { if (myProjectClassLoader == null) { final String runClasspath = OrderEnumerator.orderEntries(myProject).withoutSdk().getPathsList().getPathsString(); myProjectClassLoader = createClassLoader(runClasspath, "<project>"); } return 
myProjectClassLoader; } private static ClassLoader createClassLoader(final String runClasspath, final String moduleName) { final ArrayList<URL> urls = new ArrayList<URL>(); final VirtualFileManager manager = VirtualFileManager.getInstance(); final StringTokenizer tokenizer = new StringTokenizer(runClasspath, File.pathSeparator); while (tokenizer.hasMoreTokens()) { final String s = tokenizer.nextToken(); try { VirtualFile vFile = manager.findFileByUrl(VfsUtil.pathToUrl(s)); VirtualFile archiveFile = ArchiveVfsUtil.getVirtualFileForJar(vFile); if(archiveFile != null) { urls.add(new File(archiveFile.getCanonicalPath()).toURI().toURL()); } else { urls.add(new File(s).toURI().toURL()); } } catch (Exception e) { // ignore ? } } try { urls.add(new File(PathUtil.getJarPathForClass(Spacer.class)).toURI().toURL()); } catch (MalformedURLException ignored) { // ignore } return new DesignTimeClassLoader(urls, LoaderFactory.class.getClassLoader(), moduleName); } public void clearClassLoaderCache() { // clear classes with invalid classloader from UIManager cache final UIDefaults uiDefaults = UIManager.getDefaults(); for (Iterator it = uiDefaults.keySet().iterator(); it.hasNext();) { Object key = it.next(); Object value = uiDefaults.get(key); if (value instanceof Class) { ClassLoader loader = ((Class)value).getClassLoader(); if (loader instanceof DesignTimeClassLoader) { it.remove(); } } } myModule2ClassLoader.clear(); myProjectClassLoader = null; } private static class DesignTimeClassLoader extends UrlClassLoader { private final String myModuleName; public DesignTimeClassLoader(final List<URL> urls, final ClassLoader parent, final String moduleName) { super(build().urls(urls).parent(parent)); myModuleName = moduleName; } @Override public String toString() { return "DesignTimeClassLoader:" + myModuleName; } } }
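Stripped of the IntelliJ VFS types, the classpath-to-classloader conversion in createClassLoader above boils down to a few JDK calls. A minimal standalone sketch, with a made-up classpath string:

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

public class ClasspathLoaderSketch {
    public static void main(String[] args) throws Exception {
        // Entries joined with the platform path separator, the same shape as
        // the runClasspath string built by OrderEnumerator above.
        String runClasspath = "/tmp/classes" + File.pathSeparator + "/tmp/lib/forms_rt.jar";
        List<URL> urls = new ArrayList<URL>();
        StringTokenizer tokenizer = new StringTokenizer(runClasspath, File.pathSeparator);
        while (tokenizer.hasMoreTokens()) {
            urls.add(new File(tokenizer.nextToken()).toURI().toURL());
        }
        ClassLoader loader = new URLClassLoader(urls.toArray(new URL[urls.size()]),
                ClasspathLoaderSketch.class.getClassLoader());
        System.out.println("loader with " + urls.size() + " roots: " + loader);
    }
}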
plugins/ui-designer/src/com/intellij/uiDesigner/LoaderFactory.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.uiDesigner; import com.intellij.ProjectTopics; import com.intellij.openapi.Disposable; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtil; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.*; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.vfs.*; import com.intellij.openapi.vfs.impl.jar.JarFileSystemImpl; import com.intellij.uiDesigner.core.Spacer; import com.intellij.util.PathUtil; import com.intellij.util.lang.UrlClassLoader; import com.intellij.util.messages.MessageBusConnection; import org.jetbrains.annotations.NotNull; import javax.swing.*; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.*; /** * @author Anton Katilin * @author Vladimir Kondratyev */ public final class LoaderFactory { private final Project myProject; private final WeakHashMap<Module, ClassLoader> myModule2ClassLoader; private ClassLoader myProjectClassLoader = null; private final MessageBusConnection myConnection; public static LoaderFactory getInstance(final Project project) { return ServiceManager.getService(project, LoaderFactory.class); } public LoaderFactory(final Project project) { myProject = project; myModule2ClassLoader = new WeakHashMap<Module, ClassLoader>(); myConnection = myProject.getMessageBus().connect(); myConnection.subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootAdapter() { public void rootsChanged(final ModuleRootEvent event) { clearClassLoaderCache(); } }); Disposer.register(project, new Disposable() { public void dispose() { myConnection.disconnect(); myModule2ClassLoader.clear(); } }); } @NotNull public ClassLoader getLoader(final VirtualFile formFile) { final Module module = ModuleUtil.findModuleForFile(formFile, myProject); if (module == null) { return getClass().getClassLoader(); } return getLoader(module); } public ClassLoader getLoader(final Module module) { final ClassLoader cachedLoader = myModule2ClassLoader.get(module); if (cachedLoader != null) { return cachedLoader; } final String runClasspath = OrderEnumerator.orderEntries(module).recursively().getPathsList().getPathsString(); final ClassLoader classLoader = createClassLoader(runClasspath, module.getName()); myModule2ClassLoader.put(module, classLoader); return classLoader; } @NotNull public ClassLoader getProjectClassLoader() { if (myProjectClassLoader == null) { final String runClasspath = OrderEnumerator.orderEntries(myProject).withoutSdk().getPathsList().getPathsString(); myProjectClassLoader = createClassLoader(runClasspath, "<project>"); } return myProjectClassLoader; } private static ClassLoader createClassLoader(final String runClasspath, final String moduleName) { final ArrayList<URL> urls = new ArrayList<URL>(); final VirtualFileManager manager = VirtualFileManager.getInstance(); final JarFileSystemImpl fileSystem = 
(JarFileSystemImpl)StandardFileSystems.jar(); final StringTokenizer tokenizer = new StringTokenizer(runClasspath, File.pathSeparator); while (tokenizer.hasMoreTokens()) { final String s = tokenizer.nextToken(); try { VirtualFile vFile = manager.findFileByUrl(VfsUtil.pathToUrl(s)); final File realFile = fileSystem.getMirroredFile(vFile); urls.add(realFile != null ? realFile.toURI().toURL() : new File(s).toURI().toURL()); } catch (Exception e) { // ignore ? } } try { urls.add(new File(PathUtil.getJarPathForClass(Spacer.class)).toURI().toURL()); } catch (MalformedURLException ignored) { // ignore } final URL[] _urls = urls.toArray(new URL[urls.size()]); return new DesignTimeClassLoader(Arrays.asList(_urls), LoaderFactory.class.getClassLoader(), moduleName); } public void clearClassLoaderCache() { // clear classes with invalid classloader from UIManager cache final UIDefaults uiDefaults = UIManager.getDefaults(); for (Iterator it = uiDefaults.keySet().iterator(); it.hasNext();) { Object key = it.next(); Object value = uiDefaults.get(key); if (value instanceof Class) { ClassLoader loader = ((Class)value).getClassLoader(); if (loader instanceof DesignTimeClassLoader) { it.remove(); } } } myModule2ClassLoader.clear(); myProjectClassLoader = null; } private static class DesignTimeClassLoader extends UrlClassLoader { private final String myModuleName; public DesignTimeClassLoader(final List<URL> urls, final ClassLoader parent, final String moduleName) { super(urls, parent); myModuleName = moduleName; } @Override public String toString() { return "DesignTimeClassLoader:" + myModuleName; } } }
don't use getMirroredFile()
plugins/ui-designer/src/com/intellij/uiDesigner/LoaderFactory.java
don't use getMirroredFile()
<ide><path>lugins/ui-designer/src/com/intellij/uiDesigner/LoaderFactory.java <ide> /* <del> * Copyright 2000-2009 JetBrains s.r.o. <add> * Copyright 2000-2013 JetBrains s.r.o. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> import com.intellij.openapi.module.Module; <ide> import com.intellij.openapi.module.ModuleUtil; <ide> import com.intellij.openapi.project.Project; <del>import com.intellij.openapi.roots.*; <add>import com.intellij.openapi.roots.ModuleRootAdapter; <add>import com.intellij.openapi.roots.ModuleRootEvent; <add>import com.intellij.openapi.roots.OrderEnumerator; <ide> import com.intellij.openapi.util.Disposer; <del>import com.intellij.openapi.vfs.*; <del>import com.intellij.openapi.vfs.impl.jar.JarFileSystemImpl; <add>import com.intellij.openapi.vfs.VfsUtil; <add>import com.intellij.openapi.vfs.VirtualFile; <add>import com.intellij.openapi.vfs.VirtualFileManager; <add>import com.intellij.openapi.vfs.util.ArchiveVfsUtil; <ide> import com.intellij.uiDesigner.core.Spacer; <ide> import com.intellij.util.PathUtil; <add>import com.intellij.util.containers.ConcurrentWeakHashMap; <ide> import com.intellij.util.lang.UrlClassLoader; <ide> import com.intellij.util.messages.MessageBusConnection; <ide> import org.jetbrains.annotations.NotNull; <ide> public final class LoaderFactory { <ide> private final Project myProject; <ide> <del> private final WeakHashMap<Module, ClassLoader> myModule2ClassLoader; <add> private final ConcurrentWeakHashMap<Module, ClassLoader> myModule2ClassLoader; <ide> private ClassLoader myProjectClassLoader = null; <ide> private final MessageBusConnection myConnection; <ide> <ide> public static LoaderFactory getInstance(final Project project) { <ide> return ServiceManager.getService(project, LoaderFactory.class); <ide> } <del> <add> <ide> public LoaderFactory(final Project project) { <ide> myProject = project; <del> myModule2ClassLoader = new WeakHashMap<Module, ClassLoader>(); <add> myModule2ClassLoader = new ConcurrentWeakHashMap<Module, ClassLoader>(); <ide> myConnection = myProject.getMessageBus().connect(); <ide> myConnection.subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootAdapter() { <ide> public void rootsChanged(final ModuleRootEvent event) { <ide> private static ClassLoader createClassLoader(final String runClasspath, final String moduleName) { <ide> final ArrayList<URL> urls = new ArrayList<URL>(); <ide> final VirtualFileManager manager = VirtualFileManager.getInstance(); <del> final JarFileSystemImpl fileSystem = (JarFileSystemImpl)StandardFileSystems.jar(); <ide> final StringTokenizer tokenizer = new StringTokenizer(runClasspath, File.pathSeparator); <ide> while (tokenizer.hasMoreTokens()) { <ide> final String s = tokenizer.nextToken(); <ide> try { <ide> VirtualFile vFile = manager.findFileByUrl(VfsUtil.pathToUrl(s)); <del> final File realFile = fileSystem.getMirroredFile(vFile); <del> urls.add(realFile != null ? realFile.toURI().toURL() : new File(s).toURI().toURL()); <add> <add> VirtualFile archiveFile = ArchiveVfsUtil.getVirtualFileForJar(vFile); <add> if(archiveFile != null) { <add> urls.add(new File(archiveFile.getCanonicalPath()).toURI().toURL()); <add> } <add> else { <add> urls.add(new File(s).toURI().toURL()); <add> } <ide> } <ide> catch (Exception e) { <ide> // ignore ? 
<ide> // ignore <ide> } <ide> <del> final URL[] _urls = urls.toArray(new URL[urls.size()]); <del> return new DesignTimeClassLoader(Arrays.asList(_urls), LoaderFactory.class.getClassLoader(), moduleName); <add> return new DesignTimeClassLoader(urls, LoaderFactory.class.getClassLoader(), moduleName); <ide> } <ide> <ide> public void clearClassLoaderCache() { <ide> private final String myModuleName; <ide> <ide> public DesignTimeClassLoader(final List<URL> urls, final ClassLoader parent, final String moduleName) { <del> super(urls, parent); <add> super(build().urls(urls).parent(parent)); <ide> myModuleName = moduleName; <ide> } <ide>
Java
bsd-3-clause
5fefff696eb5807b059a0a255ce91fbfd7c272c5
0
NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive
package gov.nih.nci.ncia.search; import java.util.*; public class APIURLHolder { private static String url; private static String wadoUrl; private static Map<String, String> userMap=new HashMap<String, String>(); public static String getUrl() { return url; } public static String addUser(String user) { try { for(Map.Entry<String, String> entry : userMap.entrySet()){ System.out.printf("Key : %s and Value: %s %n", entry.getKey(), entry.getValue()); if (entry.getValue().equals(user)){ return entry.getKey(); } } } catch (Exception e) { } UUID userKey = UUID.randomUUID(); userMap.put(userKey.toString(), user); return userKey.toString(); } public static String getUser(String key){ return userMap.get(key); } public static void setUrl(String urlIn){ url=urlIn; System.out.println("-------> api url is "+url); } public static String getWadoUrl() { return wadoUrl; } public static void main(String[] args) { setUrl("http://localhost:45210/nbia/home.jsf"); } }
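A quick round trip through the user map above. This demo class is not part of the original source; it assumes a class placed in the same gov.nih.nci.ncia.search package, and the user name is a placeholder.

package gov.nih.nci.ncia.search;

public class APIURLHolderDemo {
    public static void main(String[] args) {
        APIURLHolder.setUrl("http://localhost:45210/nbia");
        // addUser returns a generated UUID key; getUser resolves it back.
        String key = APIURLHolder.addUser("someUser");
        System.out.println(key + " -> " + APIURLHolder.getUser(key));
        // Adding the same user again finds and returns the existing key.
        System.out.println(key.equals(APIURLHolder.addUser("someUser")));
    }
}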
software/nbia-services-grid/src/gov/nih/nci/ncia/search/APIURLHolder.java
package gov.nih.nci.ncia.search; import java.util.*; public class APIURLHolder { private static String url; private static String wadoUrl; private static Map<String, String> userMap=new HashMap<String, String>(); public static String getUrl() { return url; } public static String addUser(String user) { try { for(Map.Entry<String, String> entry : userMap.entrySet()){ System.out.printf("Key : %s and Value: %s %n", entry.getKey(), entry.getValue()); if (entry.getValue().equals(user)){ return entry.getKey(); } } } catch (Exception e) { } UUID userKey = UUID.randomUUID(); userMap.put(userKey.toString(), user); return userKey.toString(); } public static String getUser(String key){ return userMap.get(key); } public static void setUrl(String urlIn){ wadoUrl=urlIn.substring(0, urlIn.lastIndexOf("/")+1)+"wado"; String temp = urlIn.substring(0, urlIn.lastIndexOf("/")-1); temp = temp.substring(0, temp.lastIndexOf("/")); url=temp; System.out.println("-------> api url is "+url); } public static String getWadoUrl() { return wadoUrl; } public static void main(String[] args) { setUrl("http://localhost:45210/nbia/home.jsf"); } }
change needed for apache proxy
software/nbia-services-grid/src/gov/nih/nci/ncia/search/APIURLHolder.java
change needed for apache proxy
<ide><path>oftware/nbia-services-grid/src/gov/nih/nci/ncia/search/APIURLHolder.java <ide> return userMap.get(key); <ide> } <ide> public static void setUrl(String urlIn){ <del> wadoUrl=urlIn.substring(0, urlIn.lastIndexOf("/")+1)+"wado"; <del> String temp = urlIn.substring(0, urlIn.lastIndexOf("/")-1); <del> temp = temp.substring(0, temp.lastIndexOf("/")); <del> url=temp; <add> url=urlIn; <ide> System.out.println("-------> api url is "+url); <ide> } <ide> public static String getWadoUrl()
Java
agpl-3.0
98be0a867d5b334c2d76a530d36e25897f3f14f8
0
sakazz/exchange,sakazz/exchange
/* * This file is part of Bisq. * * Bisq is free software: you can redistribute it and/or modify it * under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or (at * your option) any later version. * * Bisq is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public * License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Bisq. If not, see <http://www.gnu.org/licenses/>. */ package io.bisq.gui.util.validation; import io.bisq.common.locale.Res; import io.bisq.core.app.BisqEnvironment; import io.bisq.gui.util.validation.altcoins.ByteballAddressValidator; import io.bisq.gui.util.validation.altcoins.NxtReedSolomonValidator; import io.bisq.gui.util.validation.altcoins.OctocoinAddressValidator; import io.bisq.gui.util.validation.altcoins.PNCAddressValidator; import io.bisq.gui.util.validation.params.IOPParams; import io.bisq.gui.util.validation.params.OctocoinParams; import io.bisq.gui.util.validation.params.PNCParams; import io.bisq.gui.util.validation.params.PivxParams; import io.bisq.gui.util.validation.params.WACoinsParams; import io.bisq.gui.util.validation.params.btc.BtcMainNetParams; import lombok.extern.slf4j.Slf4j; import org.bitcoinj.core.Base58; import org.bitcoinj.core.Address; import org.bitcoinj.core.AddressFormatException; import org.bitcoinj.params.MainNetParams; import org.bitcoinj.params.RegTestParams; import org.bitcoinj.params.TestNet3Params; import org.jetbrains.annotations.NotNull; import org.libdohj.params.*; @Slf4j public final class AltCoinAddressValidator extends InputValidator { private String currencyCode; /////////////////////////////////////////////////////////////////////////////////////////// // Public methods /////////////////////////////////////////////////////////////////////////////////////////// public void setCurrencyCode(String currencyCode) { this.currencyCode = currencyCode; } @Override public ValidationResult validate(String input) { ValidationResult validationResult = super.validate(input); if (!validationResult.isValid || currencyCode == null) { return validationResult; } else { // Validation: // 1: With a regex checking the correct structure of an address // 2: If the address contains a checksum, verify the checksum ValidationResult wrongChecksum = new ValidationResult(false, Res.get("validation.altcoin.wrongChecksum")); ValidationResult regexTestFailed = new ValidationResult(false, Res.get("validation.altcoin.wrongStructure", currencyCode)); switch (currencyCode) { case "BTC": try { switch (BisqEnvironment.getBaseCurrencyNetwork()) { case BTC_MAINNET: Address.fromBase58(MainNetParams.get(), input); break; case BTC_TESTNET: Address.fromBase58(TestNet3Params.get(), input); break; case BTC_REGTEST: Address.fromBase58(RegTestParams.get(), input); break; case LTC_MAINNET: case LTC_TESTNET: case LTC_REGTEST: case DOGE_MAINNET: case DOGE_TESTNET: case DOGE_REGTEST: case DASH_MAINNET: case DASH_TESTNET: case DASH_REGTEST: Address.fromBase58(BtcMainNetParams.get(), input); return new ValidationResult(true); } return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } case "LTC": try { switch (BisqEnvironment.getBaseCurrencyNetwork()) { case BTC_MAINNET: case BTC_TESTNET: case BTC_REGTEST: case 
DOGE_MAINNET: case DOGE_TESTNET: case DOGE_REGTEST: case DASH_MAINNET: case DASH_TESTNET: case DASH_REGTEST: case LTC_MAINNET: Address.fromBase58(LitecoinMainNetParams.get(), input); break; case LTC_TESTNET: Address.fromBase58(LitecoinTestNet3Params.get(), input); break; case LTC_REGTEST: Address.fromBase58(LitecoinRegTestParams.get(), input); break; } return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } case "DOGE": try { switch (BisqEnvironment.getBaseCurrencyNetwork()) { case BTC_MAINNET: case BTC_TESTNET: case BTC_REGTEST: case LTC_MAINNET: case LTC_TESTNET: case LTC_REGTEST: case DASH_MAINNET: case DASH_TESTNET: case DASH_REGTEST: case DOGE_MAINNET: Address.fromBase58(DogecoinMainNetParams.get(), input); break; case DOGE_TESTNET: Address.fromBase58(DogecoinTestNet3Params.get(), input); break; case DOGE_REGTEST: Address.fromBase58(DogecoinRegTestParams.get(), input); break; } return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } case "DASH": try { switch (BisqEnvironment.getBaseCurrencyNetwork()) { case BTC_MAINNET: case BTC_TESTNET: case BTC_REGTEST: case LTC_MAINNET: case LTC_TESTNET: case LTC_REGTEST: case DOGE_MAINNET: case DOGE_TESTNET: case DOGE_REGTEST: case DASH_MAINNET: Address.fromBase58(DashMainNetParams.get(), input); break; case DASH_TESTNET: Address.fromBase58(DashTestNet3Params.get(), input); break; case DASH_REGTEST: Address.fromBase58(DashRegTestParams.get(), input); break; } return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } case "ETH": // https://github.com/ethereum/web3.js/blob/master/lib/utils/utils.js#L403 if (!input.matches("^(0x)?[0-9a-fA-F]{40}$")) return regexTestFailed; else return new ValidationResult(true); // Example for BTC, though for BTC we use the BitcoinJ library address check case "PIVX": if (input.matches("^[D][a-km-zA-HJ-NP-Z1-9]{25,34}$")) { //noinspection ConstantConditions if (verifyChecksum(input)) { try { Address.fromBase58(PivxParams.get(), input); return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } } else { return wrongChecksum; } } else { return regexTestFailed; } case "IOP": if (input.matches("^[p][a-km-zA-HJ-NP-Z1-9]{25,34}$")) { //noinspection ConstantConditions if (verifyChecksum(input)) { try { Address.fromBase58(IOPParams.get(), input); return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } } else { return wrongChecksum; } } else { return regexTestFailed; } case "888": if (input.matches("^[83][a-km-zA-HJ-NP-Z1-9]{25,34}$")) { if (OctocoinAddressValidator.ValidateAddress(input)) { try { Address.fromBase58(OctocoinParams.get(), input); return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } } else { return wrongChecksum; } } else { return regexTestFailed; } case "ZEC": // We only support t addresses (transparent transactions) if (input.startsWith("t")) return validationResult; else return new ValidationResult(false, Res.get("validation.altcoin.zAddressesNotSupported")); case "GBYTE": return ByteballAddressValidator.validate(input); case "NXT": if (!input.startsWith("NXT-") || !input.equals(input.toUpperCase())) { return regexTestFailed; } try { long accountId = 
NxtReedSolomonValidator.decode(input.substring(4)); return new ValidationResult(accountId != 0); } catch (NxtReedSolomonValidator.DecodeException e) { return wrongChecksum; } case "PNC": if (input.matches("^[P3][a-km-zA-HJ-NP-Z1-9]{25,34}$")) { if (PNCAddressValidator.ValidateAddress(input)) { try { Address.fromBase58(PNCParams.get(), input); return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } } else { return wrongChecksum; } } else { return regexTestFailed; } case "ZEN": try { // Get the non Base58 form of the address and the bytecode of the first two bytes byte [] byteAddress = Base58.decodeChecked(input); int version0 = byteAddress[0] & 0xFF; int version1 = byteAddress[1] & 0xFF; // We only support public ("zn" (0x20,0x89), "t1" (0x1C,0xB8)) // and multisig ("zs" (0x20,0x96), "t3" (0x1C,0xBD)) addresses // Fail for private addresses if (version0 == 0x16 && version1 == 0x9A) { // Address starts with "zc" return new ValidationResult(false, Res.get("validation.altcoin.zAddressesNotSupported")); } else if (version0 == 0x1C && (version1 == 0xB8 || version1 == 0xBD)) { // "t1" or "t3" address return new ValidationResult(true); } else if (version0 == 0x20 && (version1 == 0x89 || version1 == 0x96)) { // "zn" or "zs" address return new ValidationResult(true); } else { // Unknown Type return new ValidationResult(false); } } catch (AddressFormatException e) { // Unhandled Exception (probably a checksum error) return new ValidationResult(false); } case "WAC": try { Address.fromBase58(WACoinsParams.get(), input); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } return new ValidationResult(true); case "DCT": if (input.matches("^(?=.{5,63}$)([a-z][a-z0-9-]+[a-z0-9])(\\.[a-z][a-z0-9-]+[a-z0-9])*$")) return new ValidationResult(true); else return regexTestFailed; case "OGT": if (input.matches("^J[a-zA-Z0-9]{44,44}$")) return new ValidationResult(true); else return regexTestFailed; default: log.debug("Validation for AltCoinAddress not implemented yet. currencyCode: " + currencyCode); return validationResult; } } } @NotNull private String getErrorMessage(AddressFormatException e) { return Res.get("validation.altcoin.invalidAddress", currencyCode, e.getMessage()); } @SuppressWarnings({"UnusedParameters", "SameReturnValue"}) private boolean verifyChecksum(String input) { // TODO return true; } /////////////////////////////////////////////////////////////////////////////////////////// // Private methods /////////////////////////////////////////////////////////////////////////////////////////// }
gui/src/main/java/io/bisq/gui/util/validation/AltCoinAddressValidator.java
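The ZEN branch above classifies an address by the first two bytes of its Base58Check-decoded form. Below is a minimal standalone sketch of that classification, assuming the decoding step has already been done (the validator itself uses bitcoinj's Base58.decodeChecked); the byte pairs are copied from the code above.

public class ZenVersionBytesSketch {

    // Classify a Base58Check-decoded ZEN address by its first two version bytes.
    // The byte pairs come from the validator above; decoding is assumed done elsewhere.
    static boolean isSupported(byte[] decoded) {
        int version0 = decoded[0] & 0xFF;
        int version1 = decoded[1] & 0xFF;
        if (version0 == 0x16 && version1 == 0x9A)
            return false;                                             // "zc" private address, rejected
        if (version0 == 0x1C && (version1 == 0xB8 || version1 == 0xBD))
            return true;                                              // "t1" / "t3" public or multisig
        if (version0 == 0x20 && (version1 == 0x89 || version1 == 0x96))
            return true;                                              // "zn" / "zs" public or multisig
        return false;                                                 // unknown type
    }

    public static void main(String[] args) {
        System.out.println(isSupported(new byte[]{(byte) 0x1C, (byte) 0xB8})); // true  ("t1")
        System.out.println(isSupported(new byte[]{(byte) 0x16, (byte) 0x9A})); // false ("zc")
    }
}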
/* * This file is part of Bisq. * * Bisq is free software: you can redistribute it and/or modify it * under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or (at * your option) any later version. * * Bisq is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public * License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Bisq. If not, see <http://www.gnu.org/licenses/>. */ package io.bisq.gui.util.validation; import io.bisq.common.locale.Res; import io.bisq.core.app.BisqEnvironment; import io.bisq.gui.util.validation.altcoins.ByteballAddressValidator; import io.bisq.gui.util.validation.altcoins.NxtReedSolomonValidator; import io.bisq.gui.util.validation.altcoins.OctocoinAddressValidator; import io.bisq.gui.util.validation.altcoins.PNCAddressValidator; import io.bisq.gui.util.validation.params.IOPParams; import io.bisq.gui.util.validation.params.OctocoinParams; import io.bisq.gui.util.validation.params.PNCParams; import io.bisq.gui.util.validation.params.PivxParams; import io.bisq.gui.util.validation.params.WACoinsParams; import io.bisq.gui.util.validation.params.btc.BtcMainNetParams; import lombok.extern.slf4j.Slf4j; import org.bitcoinj.core.Base58; import org.bitcoinj.core.Address; import org.bitcoinj.core.AddressFormatException; import org.bitcoinj.params.MainNetParams; import org.bitcoinj.params.RegTestParams; import org.bitcoinj.params.TestNet3Params; import org.jetbrains.annotations.NotNull; import org.libdohj.params.*; @Slf4j public final class AltCoinAddressValidator extends InputValidator { private String currencyCode; /////////////////////////////////////////////////////////////////////////////////////////// // Public methods /////////////////////////////////////////////////////////////////////////////////////////// public void setCurrencyCode(String currencyCode) { this.currencyCode = currencyCode; } @Override public ValidationResult validate(String input) { ValidationResult validationResult = super.validate(input); if (!validationResult.isValid || currencyCode == null) { return validationResult; } else { // Validation: // 1: With a regex checking the correct structure of an address // 2: If the address contains a checksum, verify the checksum ValidationResult wrongChecksum = new ValidationResult(false, Res.get("validation.altcoin.wrongChecksum")); ValidationResult regexTestFailed = new ValidationResult(false, Res.get("validation.altcoin.wrongStructure", currencyCode)); switch (currencyCode) { case "BTC": try { switch (BisqEnvironment.getBaseCurrencyNetwork()) { case BTC_MAINNET: Address.fromBase58(MainNetParams.get(), input); break; case BTC_TESTNET: Address.fromBase58(TestNet3Params.get(), input); break; case BTC_REGTEST: Address.fromBase58(RegTestParams.get(), input); break; case LTC_MAINNET: case LTC_TESTNET: case LTC_REGTEST: case DOGE_MAINNET: case DOGE_TESTNET: case DOGE_REGTEST: case DASH_MAINNET: case DASH_TESTNET: case DASH_REGTEST: Address.fromBase58(BtcMainNetParams.get(), input); return new ValidationResult(true); } return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } case "LTC": try { switch (BisqEnvironment.getBaseCurrencyNetwork()) { case BTC_MAINNET: case BTC_TESTNET: case BTC_REGTEST: case 
DOGE_MAINNET: case DOGE_TESTNET: case DOGE_REGTEST: case DASH_MAINNET: case DASH_TESTNET: case DASH_REGTEST: case LTC_MAINNET: Address.fromBase58(LitecoinMainNetParams.get(), input); break; case LTC_TESTNET: Address.fromBase58(LitecoinTestNet3Params.get(), input); break; case LTC_REGTEST: Address.fromBase58(LitecoinRegTestParams.get(), input); break; } return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } case "DOGE": try { switch (BisqEnvironment.getBaseCurrencyNetwork()) { case BTC_MAINNET: case BTC_TESTNET: case BTC_REGTEST: case LTC_MAINNET: case LTC_TESTNET: case LTC_REGTEST: case DASH_MAINNET: case DASH_TESTNET: case DASH_REGTEST: case DOGE_MAINNET: Address.fromBase58(DogecoinMainNetParams.get(), input); break; case DOGE_TESTNET: Address.fromBase58(DogecoinTestNet3Params.get(), input); break; case DOGE_REGTEST: Address.fromBase58(DogecoinRegTestParams.get(), input); break; } return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } case "DASH": try { switch (BisqEnvironment.getBaseCurrencyNetwork()) { case BTC_MAINNET: case BTC_TESTNET: case BTC_REGTEST: case LTC_MAINNET: case LTC_TESTNET: case LTC_REGTEST: case DOGE_MAINNET: case DOGE_TESTNET: case DOGE_REGTEST: case DASH_MAINNET: Address.fromBase58(DashMainNetParams.get(), input); break; case DASH_TESTNET: Address.fromBase58(DashTestNet3Params.get(), input); break; case DASH_REGTEST: Address.fromBase58(DashRegTestParams.get(), input); break; } return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } case "ETH": // https://github.com/ethereum/web3.js/blob/master/lib/utils/utils.js#L403 if (!input.matches("^(0x)?[0-9a-fA-F]{40}$")) return regexTestFailed; else return new ValidationResult(true); // Example for BTC, though for BTC we use the BitcoinJ library address check case "PIVX": if (input.matches("^[D][a-km-zA-HJ-NP-Z1-9]{25,34}$")) { //noinspection ConstantConditions if (verifyChecksum(input)) { try { Address.fromBase58(PivxParams.get(), input); return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } } else { return wrongChecksum; } } else { return regexTestFailed; } case "IOP": if (input.matches("^[p][a-km-zA-HJ-NP-Z1-9]{25,34}$")) { //noinspection ConstantConditions if (verifyChecksum(input)) { try { Address.fromBase58(IOPParams.get(), input); return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } } else { return wrongChecksum; } } else { return regexTestFailed; } case "888": if (input.matches("^[83][a-km-zA-HJ-NP-Z1-9]{25,34}$")) { if (OctocoinAddressValidator.ValidateAddress(input)) { try { Address.fromBase58(OctocoinParams.get(), input); return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } } else { return wrongChecksum; } } else { return regexTestFailed; } case "ZEC": // We only support t addresses (transparent transactions) if (input.startsWith("t")) return validationResult; else return new ValidationResult(false, Res.get("validation.altcoin.zAddressesNotSupported")); case "GBYTE": return ByteballAddressValidator.validate(input); case "NXT": if (!input.startsWith("NXT-") || !input.equals(input.toUpperCase())) { return regexTestFailed; } try { long accountId = 
NxtReedSolomonValidator.decode(input.substring(4)); return new ValidationResult(accountId != 0); } catch (NxtReedSolomonValidator.DecodeException e) { return wrongChecksum; } case "PNC": if (input.matches("^[P3][a-km-zA-HJ-NP-Z1-9]{25,34}$")) { if (PNCAddressValidator.ValidateAddress(input)) { try { Address.fromBase58(PNCParams.get(), input); return new ValidationResult(true); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } } else { return wrongChecksum; } } else { return regexTestFailed; } case "ZEN": try { // Get the non Base58 form of the address and the bytecode of the first two bytes byte [] byteAddress = Base58.decodeChecked(input); int version0 = byteAddress[0] & 0xFF; int version1 = byteAddress[1] & 0xFF; // We only support public ("zn" (0x20,0x89), "t1" (0x1C,0xB8)) // and multisig ("zs" (0x20,0x96), "t3" (0x1C,0xBD)) addresses // Fail for private addresses if (version0 == 0x16 && version1 == 0x9A) { // Address starts with "zc" return new ValidationResult(false, Res.get("validation.altcoin.zAddressesNotSupported")); } else if (version0 == 0x1C && (version1 == 0xB8 || version1 == 0xBD)) { // "t1" or "t3" address return new ValidationResult(true); } else if (version0 == 0x20 && (version1 == 0x89 || version1 == 0x96)) { // "zn" or "zs" address return new ValidationResult(true); } else { // Unknown Type return new ValidationResult(false); } } catch (AddressFormatException e) { // Unhandled Exception (probably a checksum error) return new ValidationResult(false); } case "WAC": try { Address.fromBase58(WACoinsParams.get(), input); } catch (AddressFormatException e) { return new ValidationResult(false, getErrorMessage(e)); } return new ValidationResult(true); case "DCT": if (input.matches("^(?=.{5,63}$)([a-z][a-z0-9-]+[a-z0-9])(\\.[a-z][a-z0-9-]+[a-z0-9])*$")) return new ValidationResult(true); else return regexTestFailed; case "OGT": if (input.startsWith("J")&&(input.length()==45)) return new ValidationResult(true); else return regexTestFailed; default: log.debug("Validation for AltCoinAddress not implemented yet. currencyCode: " + currencyCode); return validationResult; } } } @NotNull private String getErrorMessage(AddressFormatException e) { return Res.get("validation.altcoin.invalidAddress", currencyCode, e.getMessage()); } @SuppressWarnings({"UnusedParameters", "SameReturnValue"}) private boolean verifyChecksum(String input) { // TODO return true; } /////////////////////////////////////////////////////////////////////////////////////////// // Private methods /////////////////////////////////////////////////////////////////////////////////////////// }
update AltCoinAddressValidator J[a-zA-Z0-9]{44,44}
gui/src/main/java/io/bisq/gui/util/validation/AltCoinAddressValidator.java
update AltCoinAddressValidator J[a-zA-Z0-9]{44,44}
<ide><path>gui/src/main/java/io/bisq/gui/util/validation/AltCoinAddressValidator.java <ide> return new ValidationResult(true); <ide> else <ide> return regexTestFailed; <del> case "OGT": <del> if (input.startsWith("J")&&(input.length()==45)) <add> case "OGT": <add> if (input.matches("^J[a-zA-Z0-9]{44,44}$")) <ide> return new ValidationResult(true); <ide> else <ide> return regexTestFailed;
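The diff above replaces a prefix-and-length check for OGT with the regex ^J[a-zA-Z0-9]{44,44}$ (equivalent to {44}), which additionally constrains the character set. A small sketch, using hypothetical 45-character sample values rather than real OGT addresses, showing an input the old check accepts but the new one rejects:

import java.util.regex.Pattern;

public class OgtRegexSketch {

    // Old check: prefix and total length only.
    static boolean oldCheck(String input) {
        return input.startsWith("J") && (input.length() == 45);
    }

    // New check: same prefix and length, plus an alphanumeric character set.
    private static final Pattern OGT = Pattern.compile("^J[a-zA-Z0-9]{44,44}$");

    static boolean newCheck(String input) {
        return OGT.matcher(input).matches();
    }

    public static void main(String[] args) {
        StringBuilder valid = new StringBuilder("J");
        StringBuilder invalid = new StringBuilder("J");
        for (int i = 0; i < 44; i++) {
            valid.append('a');
            invalid.append(i == 10 ? '!' : 'a'); // '!' is outside [a-zA-Z0-9]
        }
        System.out.println(oldCheck(valid.toString()) + " " + newCheck(valid.toString()));     // true true
        System.out.println(oldCheck(invalid.toString()) + " " + newCheck(invalid.toString())); // true false
    }
}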
Java
mit
41776dfb601be2318ea41a34efc5a178db525765
0
gabrielsson/Clac
package cl.sidan.clac.fragments; import android.app.ActivityManager; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.os.Build; import android.os.Debug; import android.os.Environment; import android.os.Looper; import android.os.StatFs; import android.util.Log; import java.io.File; import java.io.PrintWriter; import java.io.StringWriter; import java.io.Writer; import java.lang.Thread.UncaughtExceptionHandler; import java.util.Date; import java.util.Locale; public class MyExceptionHandler implements UncaughtExceptionHandler { private final String NL = "\n"; private static final String[] RECIPIENTS = new String[]{ "[email protected]", "[email protected]", "[email protected] "}; private Context context; private static Context context1; public MyExceptionHandler(Context ctx) { context = ctx; context1 = ctx; } private StatFs getStatFs() { File path = Environment.getDataDirectory(); return new StatFs(path.getPath()); } private long getAvailableInternalMemorySize(StatFs stat) { long blockSize = stat.getBlockSizeLong(); long availableBlocks = stat.getAvailableBlocksLong(); return availableBlocks * blockSize; } private long getTotalInternalMemorySize(StatFs stat) { long blockSize = stat.getBlockSizeLong(); long totalBlocks = stat.getBlockCountLong(); return totalBlocks * blockSize; } private void addHWInformation(StringBuilder message) { message.append("Phone Model: ").append(Build.MODEL).append(NL); message.append("Android Version: ").append(Build.VERSION.RELEASE).append(NL); message.append("Brand: ").append(Build.BRAND).append(NL); message.append("Device: ").append(Build.DEVICE).append(NL); message.append("ID: ").append(Build.ID).append(NL); message.append("Product: ").append(Build.PRODUCT).append(NL); message.append("Model: ").append(Build.MODEL).append(NL); message.append("Board: ").append(Build.BOARD).append(NL); message.append("Host: ").append(Build.HOST).append(NL); message.append("Type: ").append(Build.TYPE).append(NL); StatFs stat = getStatFs(); message.append("Total Internal memory: ").append( getTotalInternalMemorySize(stat)).append(NL); message.append("Available Internal memory: ").append( getAvailableInternalMemorySize(stat)).append(NL); } private void addSWInformation(StringBuilder message) { message.append("Locale: ").append(Locale.getDefault()).append(NL); try { PackageManager pm = context.getPackageManager(); PackageInfo pi = pm.getPackageInfo(context.getPackageName(), 0); message.append("Package: ").append(pi.packageName).append(NL); message.append("Version name: ").append(pi.versionName).append(NL); message.append("Version code: ").append(pi.versionCode).append(NL); message.append("Native Heap Size: ").append(Debug.getNativeHeapSize()).append(NL); message.append("Native Heap Free Size: ").append( Debug.getNativeHeapFreeSize()).append(NL); message.append("Native Heap Allocated Size: ").append( Debug.getNativeHeapAllocatedSize()).append(NL); } catch (Exception e) { Log.e("CustomExceptionHandler", "Error", e); message.append("Could not get Version information for ").append( context.getPackageName()); } message.append("SDK: ").append(Build.VERSION.SDK_INT).append(NL); message.append("Incremental: ").append(Build.VERSION.INCREMENTAL).append(NL); } @Override public final void uncaughtException(Thread t, Throwable e) { try { StringBuilder report = new StringBuilder(); Date curDate = new Date(); 
report.append("Error Report collected on : ").append(curDate.toString()).append(NL).append(NL); report.append("****** ADDITIONAL INFORMATION ******").append(NL); report.append("To be filled in by user....").append(NL).append(NL).append(NL); report.append("****** DEVICE INFORMATION ******").append(NL); addHWInformation(report); report.append(NL).append(NL); report.append("****** FIRMWARE ******").append(NL); addSWInformation(report); report.append(NL).append(NL); report.append("****** CAUSE OF ERROR ******").append(NL); report.append("Is user a monkey? ").append(ActivityManager.isUserAMonkey() ? "Yes" : "No") .append(NL).append(NL); final Writer result = new StringWriter(); final PrintWriter printWriter = new PrintWriter(result); e.printStackTrace(printWriter); report.append(result.toString()); printWriter.close(); report.append(NL).append(NL); // report.append("****** EXTENDED STACKTRACE ******").append(NL).append(NL); // TO BE ADDED //report.append(NL); report.append("****** END OF REPORT ******"); Log.e(MyExceptionHandler.class.getName(), "ErrorHandler will try to send mail with " + "error-report:"); for( String line : report.toString().split("\n") ) { Log.e(MyExceptionHandler.class.getName(), line); } sendErrorMail(report); } catch (Throwable ignore) { Log.e(MyExceptionHandler.class.getName(), "Error while sending error e-mail", ignore); } } public void sendErrorMail(final StringBuilder errorContent) { final AlertDialog.Builder builder = new AlertDialog.Builder(context); new Thread(){ @Override public void run() { Looper.prepare(); builder.setTitle("Taskigt läge!"); builder.create(); builder.setMessage("Grabbarna har kodat fel och du har upptäckt en bugg..."); builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { System.exit(1); } }); builder.setPositiveButton("Rapportera", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { Intent sendIntent = new Intent(Intent.ACTION_SEND); Date curDate = new Date(); String subject = "CLappen krashade " + curDate.toString(); sendIntent.setType("plain/text"); sendIntent.putExtra(Intent.EXTRA_EMAIL, RECIPIENTS); sendIntent.putExtra(Intent.EXTRA_TEXT, NL + NL + errorContent + NL + NL); sendIntent.putExtra(Intent.EXTRA_SUBJECT, subject); context1.startActivity(Intent.createChooser(sendIntent, "Error Report")); System.exit(0); } }); // XXX_TODO: // Vi har fått en felrapport på builder.show(): // android.view.WindowManager$BadTokenException: // Unable to add window -- token android.os.BinderProxy@3dd7bd7d is not valid; // is your activity running? // Detta beror på att vi försöker visa ett dialog meddelande i en asynkron tråd. // Flödet är följande: // 1. Tråd två kör igång och försöker göra arbete // 2. Tråd ett med aktiviteten dör // 3. Tråd två vill visa en dialog i aktiviteten. // Poff! builder.show(); Looper.loop(); } }.start(); } }
app/src/main/java/cl/sidan/clac/fragments/MyExceptionHandler.java
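The handler converts the caught Throwable's stack trace into a String by printing it into a StringWriter before appending it to the report. That pattern in isolation, as a runnable sketch:

import java.io.PrintWriter;
import java.io.StringWriter;

public class StackTraceToStringSketch {

    // Render a Throwable's stack trace into a String, as the handler above does.
    static String stackTraceToString(Throwable t) {
        final StringWriter result = new StringWriter();
        final PrintWriter printWriter = new PrintWriter(result);
        t.printStackTrace(printWriter);
        printWriter.close();
        return result.toString();
    }

    public static void main(String[] args) {
        System.out.println(stackTraceToString(new IllegalStateException("demo failure")));
    }
}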
package cl.sidan.clac.fragments; import android.app.ActivityManager; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.PackageInfo; import android.content.pm.PackageManager; import android.os.Build; import android.os.Debug; import android.os.Environment; import android.os.Looper; import android.os.StatFs; import android.util.Log; import java.io.File; import java.io.PrintWriter; import java.io.StringWriter; import java.io.Writer; import java.lang.Thread.UncaughtExceptionHandler; import java.util.Date; import java.util.Locale; public class MyExceptionHandler implements UncaughtExceptionHandler { private final String NL = "\n"; private static final String[] RECIPIENTS = new String[]{ "[email protected]", "[email protected]", "[email protected] "}; private Context context; private static Context context1; public MyExceptionHandler(Context ctx) { context = ctx; context1 = ctx; } private StatFs getStatFs() { File path = Environment.getDataDirectory(); return new StatFs(path.getPath()); } private long getAvailableInternalMemorySize(StatFs stat) { long blockSize = stat.getBlockSizeLong(); long availableBlocks = stat.getAvailableBlocksLong(); return availableBlocks * blockSize; } private long getTotalInternalMemorySize(StatFs stat) { long blockSize = stat.getBlockSizeLong(); long totalBlocks = stat.getBlockCountLong(); return totalBlocks * blockSize; } private void addHWInformation(StringBuilder message) { message.append("Phone Model: ").append(Build.MODEL).append(NL); message.append("Android Version: ").append(Build.VERSION.RELEASE).append(NL); message.append("Brand: ").append(Build.BRAND).append(NL); message.append("Device: ").append(Build.DEVICE).append(NL); message.append("ID: ").append(Build.ID).append(NL); message.append("Product: ").append(Build.PRODUCT).append(NL); message.append("Model: ").append(Build.MODEL).append(NL); message.append("Board: ").append(Build.BOARD).append(NL); message.append("Host: ").append(Build.HOST).append(NL); message.append("Type: ").append(Build.TYPE).append(NL); StatFs stat = getStatFs(); message.append("Total Internal memory: ").append( getTotalInternalMemorySize(stat)).append(NL); message.append("Available Internal memory: ").append( getAvailableInternalMemorySize(stat)).append(NL); } private void addSWInformation(StringBuilder message) { message.append("Locale: ").append(Locale.getDefault()).append(NL); try { PackageManager pm = context.getPackageManager(); PackageInfo pi = pm.getPackageInfo(context.getPackageName(), 0); message.append("Package: ").append(pi.packageName).append(NL); message.append("Version name: ").append(pi.versionName).append(NL); message.append("Version code: ").append(pi.versionCode).append(NL); message.append("Native Heap Size: ").append(Debug.getNativeHeapSize()).append(NL); message.append("Native Heap Free Size: ").append( Debug.getNativeHeapFreeSize()).append(NL); message.append("Native Heap Allocated Size: ").append( Debug.getNativeHeapAllocatedSize()).append(NL); } catch (Exception e) { Log.e("CustomExceptionHandler", "Error", e); message.append("Could not get Version information for ").append( context.getPackageName()); } message.append("SDK: ").append(Build.VERSION.SDK_INT).append(NL); message.append("Incremental: ").append(Build.VERSION.INCREMENTAL).append(NL); } @Override public final void uncaughtException(Thread t, Throwable e) { try { StringBuilder report = new StringBuilder(); Date curDate = new Date(); 
report.append("Error Report collected on : ").append(curDate.toString()).append(NL).append(NL); report.append("****** ADDITIONAL INFORMATION ******").append(NL); report.append("To be filled in by user....").append(NL).append(NL).append(NL); report.append("****** DEVICE INFORMATION ******").append(NL); addHWInformation(report); report.append(NL).append(NL); report.append("****** FIRMWARE ******").append(NL); addSWInformation(report); report.append(NL).append(NL); report.append("****** CAUSE OF ERROR ******").append(NL); report.append("Is user a monkey? ").append(ActivityManager.isUserAMonkey() ? "Yes" : "No") .append(NL).append(NL); final Writer result = new StringWriter(); final PrintWriter printWriter = new PrintWriter(result); e.printStackTrace(printWriter); report.append(result.toString()); printWriter.close(); report.append(NL).append(NL); // report.append("****** EXTENDED STACKTRACE ******").append(NL).append(NL); // TO BE ADDED //report.append(NL); report.append("****** END OF REPORT ******"); Log.e(MyExceptionHandler.class.getName(), "Error while sendErrorMail. " + report); sendErrorMail(report); } catch (Throwable ignore) { Log.e(MyExceptionHandler.class.getName(), "Error while sending error e-mail", ignore); } } public void sendErrorMail(final StringBuilder errorContent) { final AlertDialog.Builder builder = new AlertDialog.Builder(context); new Thread(){ @Override public void run() { Looper.prepare(); builder.setTitle("Taskigt läge!"); builder.create(); builder.setMessage("Grabbarna har kodat fel och du har upptäckt en bugg..."); builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { System.exit(1); } }); builder.setPositiveButton("Rapportera", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { Intent sendIntent = new Intent(Intent.ACTION_SEND); Date curDate = new Date(); String subject = "CLappen krashade " + curDate.toString(); sendIntent.setType("plain/text"); sendIntent.putExtra(Intent.EXTRA_EMAIL, RECIPIENTS); sendIntent.putExtra(Intent.EXTRA_TEXT, NL + NL + errorContent + NL + NL); sendIntent.putExtra(Intent.EXTRA_SUBJECT, subject); context1.startActivity(Intent.createChooser(sendIntent, "Error Report")); System.exit(0); } }); // XXX_TODO: // Vi har fått en felrapport på builder.show(): // android.view.WindowManager$BadTokenException: // Unable to add window -- token android.os.BinderProxy@3dd7bd7d is not valid; // is your activity running? // Detta beror på att vi försöker visa ett dialog meddelande i en asynkron tråd. // Flödet är följande: // 1. Tråd två kör igång och försöker göra arbete // 2. Tråd ett med aktiviteten dör // 3. Tråd två vill visa en dialog i aktiviteten. // Poff! builder.show(); Looper.loop(); } }.start(); } }
Fix MyExceptionHandler to output error locally
app/src/main/java/cl/sidan/clac/fragments/MyExceptionHandler.java
Fix MyExceptionHandler to output error locally
<ide><path>app/src/main/java/cl/sidan/clac/fragments/MyExceptionHandler.java <ide> <ide> report.append("****** END OF REPORT ******"); <ide> <del> Log.e(MyExceptionHandler.class.getName(), "Error while sendErrorMail. " + report); <add> Log.e(MyExceptionHandler.class.getName(), "ErrorHandler will try to send mail with " + <add> "error-report:"); <add> for( String line : report.toString().split("\n") ) { <add> Log.e(MyExceptionHandler.class.getName(), line); <add> } <ide> sendErrorMail(report); <ide> } catch (Throwable ignore) { <ide> Log.e(MyExceptionHandler.class.getName(), "Error while sending error e-mail", ignore);
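The diff above switches from logging the whole report in one Log.e call to logging it line by line. A plausible motivation, not stated in the commit message, is that logcat truncates very long messages (commonly cited at around 4 KB per call), so a multi-kilobyte report would otherwise be cut off. A sketch of the split-and-log pattern, using System.err as a stand-in for android.util.Log so it runs off-device:

public class LineLoggingSketch {

    // Stand-in for android.util.Log.e(tag, msg) so the sketch runs on plain Java.
    static void logE(String tag, String msg) {
        System.err.println("E/" + tag + ": " + msg);
    }

    // Log a multi-line report one line at a time, mirroring the change in the diff.
    static void logReport(String tag, String report) {
        for (String line : report.split("\n")) {
            logE(tag, line);
        }
    }

    public static void main(String[] args) {
        logReport("MyExceptionHandler",
                "****** DEVICE INFORMATION ******\nPhone Model: Example\n****** END OF REPORT ******");
    }
}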
Java
apache-2.0
6a4e1ed88c1249bdf7e48a8579686cd75d56be90
0
erdi/grails-core,clockworkorange/grails-core,clockworkorange/grails-core,clockworkorange/grails-core,erdi/grails-core,clockworkorange/grails-core,clockworkorange/grails-core,erdi/grails-core
/* * Copyright 2004-2005 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package grails.orm; import grails.util.ExtendProxy; import groovy.lang.Closure; import groovy.lang.GString; import groovy.lang.MissingMethodException; import groovy.lang.MissingPropertyException; import groovy.util.BuilderSupport; import groovy.util.Proxy; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import org.hibernate.Criteria; import org.hibernate.FetchMode; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.criterion.CriteriaSpecification; import org.hibernate.criterion.Criterion; import org.hibernate.criterion.Junction; import org.hibernate.criterion.Order; import org.hibernate.criterion.ProjectionList; import org.hibernate.criterion.Projections; import org.hibernate.criterion.Restrictions; import org.hibernate.transform.ResultTransformer; import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanWrapper; import org.springframework.beans.BeanWrapperImpl; import org.springframework.orm.hibernate3.SessionHolder; import org.springframework.transaction.support.TransactionSynchronizationManager; /** * <p>Wraps the Hibernate Criteria API in a builder. 
The builder can be retrieved through the "createCriteria()" dynamic static * method of Grails domain classes (Example in Groovy): * * <pre> * def c = Account.createCriteria() * def results = c { * projections { * groupProperty("branch") * } * like("holderFirstName", "Fred%") * and { * between("balance", 500, 1000) * eq("branch", "London") * } * maxResults(10) * order("holderLastName", "desc") * } * </pre> * * <p>The builder can also be instantiated standalone with a SessionFactory and persistent Class instance: * * <pre> * new HibernateCriteriaBuilder(clazz, sessionFactory).list { * eq("firstName", "Fred") * } * </pre> * * @author Graeme Rocher * @since Oct 10, 2005 */ public class HibernateCriteriaBuilder extends BuilderSupport { public static final String AND = "and"; // builder public static final String IS_NULL = "isNull"; // builder public static final String IS_NOT_NULL = "notNull"; // builder public static final String NOT = "not";// builder public static final String OR = "or"; // builder public static final String ID_EQUALS = "idEq"; // builder public static final String IS_EMPTY = "isEmpty"; //builder public static final String IS_NOT_EMPTY = "isNotEmpty"; //builder public static final String BETWEEN = "between";//method public static final String EQUALS = "eq";//method public static final String EQUALS_PROPERTY = "eqProperty";//method public static final String GREATER_THAN = "gt";//method public static final String GREATER_THAN_PROPERTY = "gtProperty";//method public static final String GREATER_THAN_OR_EQUAL = "ge";//method public static final String GREATER_THAN_OR_EQUAL_PROPERTY = "geProperty";//method public static final String ILIKE = "ilike";//method public static final String IN = "in";//method public static final String LESS_THAN = "lt"; //method public static final String LESS_THAN_PROPERTY = "ltProperty";//method public static final String LESS_THAN_OR_EQUAL = "le";//method public static final String LESS_THAN_OR_EQUAL_PROPERTY = "leProperty";//method public static final String LIKE = "like";//method public static final String NOT_EQUAL = "ne";//method public static final String NOT_EQUAL_PROPERTY = "neProperty";//method public static final String SIZE_EQUALS = "sizeEq"; //method public static final String ORDER_DESCENDING = "desc"; public static final String ORDER_ASCENDING = "asc"; private static final String ROOT_CALL = "doCall"; private static final String LIST_CALL = "list"; private static final String COUNT_CALL = "count"; private static final String GET_CALL = "get"; private static final String SCROLL_CALL = "scroll"; private static final String PROJECTIONS = "projections"; private SessionFactory sessionFactory; private Session session; private Class targetClass; private Criteria criteria; private boolean uniqueResult = false; private Proxy resultProxy = new ExtendProxy(); private Proxy criteriaProxy; private Object parent; private List logicalExpressions = new ArrayList(); private List logicalExpressionArgs = new ArrayList(); private boolean participate; private boolean scroll; private boolean count; private ProjectionList projectionList; private BeanWrapper targetBean; private List aliasStack = new ArrayList(); private static final String ALIAS = "_alias"; private ResultTransformer resultTransformer; public HibernateCriteriaBuilder(Class targetClass, SessionFactory sessionFactory) { super(); this.targetClass = targetClass; this.targetBean = new BeanWrapperImpl(BeanUtils.instantiateClass(targetClass)); this.sessionFactory = sessionFactory; } public 
HibernateCriteriaBuilder(Class targetClass, SessionFactory sessionFactory, boolean uniqueResult) { super(); this.targetClass = targetClass; this.sessionFactory = sessionFactory; this.uniqueResult = uniqueResult; } public void setUniqueResult(boolean uniqueResult) { this.uniqueResult = uniqueResult; } /** * A projection that selects a property name * @param propertyName The name of the property */ public void property(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [property] must be within a [projections] node")); } else { this.projectionList.add(Projections.property(propertyName)); } } /** * A projection that selects a distinct property name * @param propertyName The property name */ public void distinct(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [distinct] must be within a [projections] node")); } else { this.projectionList.add(Projections.distinct(Projections.property(propertyName))); } } /** * A distinct projection that takes a list * * @param propertyNames The list of distinct property names */ public void distinct(Collection propertyNames) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [distinct] must be within a [projections] node")); } else { ProjectionList list = Projections.projectionList(); for (Iterator i = propertyNames.iterator(); i.hasNext();) { Object o = i.next(); list.add(Projections.property(o.toString())); } this.projectionList.add(Projections.distinct(list)); } } /** * Adds a projection that allows the criteria to return the property average value * * @param propertyName The name of the property */ public void avg(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [avg] must be within a [projections] node")); } else { this.projectionList.add(Projections.avg(propertyName)); } } /** * Calculates the property name including any alias paths * * @param propertyName The property name * @return The calculated property name */ private String calculatePropertyName(String propertyName) { if(this.aliasStack.size()>0) { return this.aliasStack.get(this.aliasStack.size()-1).toString()+'.'+propertyName; } return propertyName; } /** * Calculates the property value, converting GStrings if necessary * * @param propertyValue The property value * @return The calculated property value */ private Object calculatePropertyValue(Object propertyValue) { if(propertyValue instanceof GString) { return propertyValue.toString(); } return propertyValue; } /** * Adds a projection that allows the criteria to return the property count * * @param propertyName The name of the property */ public void count(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [count] must be within a [projections] node")); } else { this.projectionList.add(Projections.count(propertyName)); } } /** * Adds a projection that allows the criteria to return the distinct property count * * @param propertyName The name of the property */ public void countDistinct(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [countDistinct] must be within a [projections] node")); } else { this.projectionList.add(Projections.countDistinct(propertyName)); } } /** * Adds a projection that allows the criteria's result to be grouped by a property * * @param
propertyName The name of the property */ public void groupProperty(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [groupProperty] must be within a [projections] node")); } else { this.projectionList.add(Projections.groupProperty(propertyName)); } } /** * Adds a projection that allows the criteria to retrieve a maximum property value * * @param propertyName The name of the property */ public void max(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [max] must be within a [projections] node")); } else { this.projectionList.add(Projections.max(propertyName)); } } /** * Adds a projection that allows the criteria to retrieve a minimum property value * * @param propertyName The name of the property */ public void min(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [min] must be within a [projections] node")); } else { this.projectionList.add(Projections.min(propertyName)); } } /** * Adds a projection that allows the criteria to return the row count * */ public void rowCount() { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [rowCount] must be within a [projections] node")); } else { this.projectionList.add(Projections.rowCount()); } } /** * Adds a projection that allows the criteria to retrieve the sum of the results of a property * * @param propertyName The name of the property */ public void sum(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [sum] must be within a [projections] node")); } else { this.projectionList.add(Projections.sum(propertyName)); } } /** * Sets the fetch mode of an associated path * * @param associationPath The name of the associated path * @param fetchMode The fetch mode to set */ public void fetchMode(String associationPath, FetchMode fetchMode) { if(criteria!=null) { criteria.setFetchMode(associationPath, fetchMode); } } /** * Creates a Criterion that compares two class properties for equality * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object eqProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [eqProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.") ); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = Restrictions.eqProperty( propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion that compares two class properties for inequality * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object neProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [neProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = Restrictions.neProperty(
propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion that tests if the first property is greater than the second property * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object gtProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [gtProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = Restrictions.gtProperty( propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion that tests if the first property is greater than or equal to the second property * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object geProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [geProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = Restrictions.geProperty( propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion that tests if the first property is less than the second property * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object ltProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [ltProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = Restrictions.ltProperty( propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion that tests if the first property is less than or equal to the second property * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object leProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [leProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = Restrictions.leProperty( propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a "greater than" Criterion based on the specified property name and value * @param propertyName The property name * 
@param propertyValue The property value * @return A Criterion instance */ public Object gt(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [gt] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.gt( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a "greater than or equal to" Criterion based on the specified property name and value * @param propertyName The property name * @param propertyValue The property value * @return A Criterion instance */ public Object ge(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [ge] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.ge( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a "less than" Criterion based on the specified property name and value * @param propertyName The property name * @param propertyValue The property value * @return A Criterion instance */ public Object lt(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [lt] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.lt( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a "less than or equal to" Criterion based on the specified property name and value * @param propertyName The property name * @param propertyValue The property value * @return A Criterion instance */ public Object le(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [le] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.le( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates an "equals" Criterion based on the specified property name and value * @param propertyName The property name * @param propertyValue The property value * * @return A Criterion instance */ public Object eq(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [eq] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.eq( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return 
c; } /** * Creates a Criterion from the specified property name and "like" expression * @param propertyName The property name * @param propertyValue The like value * * @return A Criterion instance */ public Object like(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [like] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.like( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion from the specified property name and "ilike" (a case-insensitive version of "like") expression * @param propertyName The property name * @param propertyValue The ilike value * * @return A Criterion instance */ public Object ilike(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [ilike] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.ilike( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Applies an "in" constraint on the specified property * @param propertyName The property name * @param values A collection of values * * @return A Criterion instance */ public Object in(String propertyName, Collection values) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [in] with propertyName ["+propertyName+"] and values ["+values+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); Criterion c = Restrictions.in( propertyName, values ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Delegates to in as in is a Groovy keyword **/ public Object inList(String propertyName, Collection values) { return in(propertyName, values); } /** * Delegates to in as in is a Groovy keyword **/ public Object inList(String propertyName, Object[] values) { return in(propertyName, values); } /** * Applies an "in" constraint on the specified property * @param propertyName The property name * @param values A collection of values * * @return A Criterion instance */ public Object in(String propertyName, Object[] values) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [in] with propertyName ["+propertyName+"] and values ["+values+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); Criterion c = Restrictions.in( propertyName, values ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Orders by the specified property name (defaults to ascending) * * @param propertyName The property name to order by * @return An Order instance */ public Object order(String propertyName) { if(this.criteria == null) throwRuntimeException( new IllegalArgumentException("Call to [order] with propertyName ["+propertyName+"] not allowed here.")); propertyName = calculatePropertyName(propertyName); Order o = Order.asc(propertyName); this.criteria.addOrder(o); return o; } /** 
* Orders by the specified property name and direction * * @param propertyName The property name to order by * @param direction Either "asc" for ascending or "desc" for descending * * @return An Order instance */ public Object order(String propertyName, String direction) { if(this.criteria == null) throwRuntimeException( new IllegalArgumentException("Call to [order] with propertyName ["+propertyName+"] not allowed here.")); propertyName = calculatePropertyName(propertyName); Order o; if(direction.equals( ORDER_DESCENDING )) { o = Order.desc(propertyName); } else { o = Order.asc(propertyName); } this.criteria.addOrder(o); return o; } /** * Creates a Criterion that constrains a collection property by size * * @param propertyName The property name * @param size The size to constrain by * * @return A Criterion instance */ public Object sizeEq(String propertyName, int size) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [sizeEq] with propertyName ["+propertyName+"] and size ["+size+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); Criterion c = Restrictions.sizeEq( propertyName, size ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a "not equal" Criterion based on the specified property name and value * @param propertyName The property name * @param propertyValue The property value * @return The criterion object */ public Object ne(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [ne] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.ne( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } public Object notEqual(String propertyName, Object propertyValue) { return ne(propertyName, propertyValue); } /** * Creates a "between" Criterion based on the property name and specified lo and hi values * @param propertyName The property name * @param lo The low value * @param hi The high value * @return A Criterion instance */ public Object between(String propertyName, Object lo, Object hi) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [between] with propertyName ["+propertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); Criterion c = Restrictions.between( propertyName, lo,hi); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } protected Object createNode(Object name) { return createNode( name, Collections.EMPTY_MAP ); } private boolean validateSimpleExpression() { if(this.criteria == null) return false; return !(!isInsideLogicalExpression() && !(this.parent instanceof Proxy) && this.parent != null && this.aliasStack.size() == 0); } private boolean isInsideLogicalExpression() { if(this.logicalExpressions.size() > 0) { String currentLogicalExpression = (String)this.logicalExpressions.get( this.logicalExpressions.size() - 1 ); if(currentLogicalExpression.equals( AND ) || currentLogicalExpression.equals( OR ) || currentLogicalExpression.equals( NOT )) return true; } return false; } protected Object createNode(Object name, Map attributes) { if(name.equals(ROOT_CALL) ||
name.equals(LIST_CALL) || name.equals(GET_CALL) || name.equals(COUNT_CALL) || name.equals(SCROLL_CALL)) { if(this.criteria != null) throwRuntimeException( new IllegalArgumentException("call to [" + name + "] not supported here")); if(name.equals(GET_CALL)) this.uniqueResult = true; if(name.equals(SCROLL_CALL)) { this.scroll = true; } else if(name.equals(COUNT_CALL)) { this.count = true; } if(TransactionSynchronizationManager.hasResource(sessionFactory)) { this.participate = true; this.session = ((SessionHolder)TransactionSynchronizationManager.getResource(sessionFactory)).getSession(); } else { this.session = sessionFactory.openSession(); } this.criteria = this.session.createCriteria(targetClass); this.criteriaProxy = new ExtendProxy(); this.criteriaProxy.setAdaptee(this.criteria); resultProxy = new ExtendProxy(); this.parent = resultProxy; return resultProxy; } else if(name.equals( AND ) || name.equals( OR ) || name.equals( NOT )) { if(this.criteria == null) throwRuntimeException( new IllegalArgumentException("call to [" + name + "] not supported here")); this.logicalExpressions.add(name); return name; } else if(name.equals( PROJECTIONS )) { if(this.criteria == null) throwRuntimeException( new IllegalArgumentException("call to [" + name + "] not supported here")); this.projectionList = Projections.projectionList(); return name; } else if(targetBean.isReadableProperty(name.toString())) { this.criteria.createAlias(name.toString(), name.toString()+ALIAS,CriteriaSpecification.LEFT_JOIN); this.aliasStack.add(name.toString()+ALIAS); return name; } closeSessionFollowingException(); throw new MissingMethodException((String) name, getClass(), new Object[] {}) ; } protected void nodeCompleted(Object parent, Object node) { if(node instanceof Proxy) { if(resultTransformer != null) { this.criteria.setResultTransformer(resultTransformer); } if(!uniqueResult) { if(scroll) { resultProxy.setAdaptee( this.criteria.scroll() ); } else if(count) { this.criteria.setProjection(Projections.rowCount()); resultProxy.setAdaptee( this.criteria.uniqueResult() ); } else { resultProxy.setAdaptee( this.criteria.list() ); } } else { resultProxy.setAdaptee( this.criteria.uniqueResult() ); } this.criteria = null; if(!this.participate) { this.session.close(); } } else if(node.equals( AND ) || node.equals( OR )) { Criterion c = null; if(logicalExpressionArgs.size() == 1 && node.equals(AND)) { c =(Criterion)logicalExpressionArgs.remove(0); } else if(logicalExpressionArgs.size() == 2) { Criterion lhs = (Criterion)logicalExpressionArgs.remove(0); Criterion rhs = (Criterion)logicalExpressionArgs.remove(0); if(node.equals(OR)) { c = Restrictions.or(lhs,rhs); } else { c = Restrictions.and(lhs,rhs); } } else if(logicalExpressionArgs.size() > 2) { if(node.equals(OR)) { c = Restrictions.disjunction(); } else { c = Restrictions.conjunction(); } for (Iterator i = logicalExpressionArgs.iterator(); i.hasNext();) { Criterion criterion = (Criterion) i.next(); ((Junction)c).add(criterion); } } if(c!=null) { if(parent instanceof Proxy) { addToCriteria( c ); } else if(parent.equals( AND ) || parent.equals( OR )) { this.logicalExpressionArgs.add(c ); this.logicalExpressions.remove(this.logicalExpressions.size() - 1); } } } else if(node.equals(NOT)) { if(this.logicalExpressionArgs.size() < 1) throwRuntimeException( new IllegalArgumentException("Logical expression [" + node +"] must contain at least 1 expression")); Criterion c = (Criterion)this.logicalExpressionArgs.remove(this.logicalExpressionArgs.size() - 1); if(parent instanceof Proxy) { 
addToCriteria( Restrictions.not( c ) ); } else if(parent.equals( AND ) || parent.equals( OR ) || parent.equals( NOT )) { this.logicalExpressionArgs.add( Restrictions.not( c ) ); this.logicalExpressions.remove(this.logicalExpressions.size() - 1); } } else if(node.equals(PROJECTIONS)) { if(this.projectionList != null && this.projectionList.getLength() > 0) { this.criteria.setProjection(this.projectionList); } } else if(targetBean.isReadableProperty(node.toString()) && aliasStack.size() > 0) { aliasStack.remove(aliasStack.size()-1); } super.nodeCompleted(parent, node); } /** * Throws a runtime exception where necessary to ensure the session gets closed */ private void throwRuntimeException(RuntimeException t) { closeSessionFollowingException(); throw t; } private void closeSessionFollowingException() { if(this.session != null && this.session.isOpen() && !this.participate) { this.session.close(); } if(this.criteria != null) { this.criteria = null; } } protected void setParent(Object parent, Object child) { this.parent = parent; } protected Object createNode(Object name, Object value) { return createNode(name, Collections.EMPTY_MAP, value); } protected Object createNode(Object name, Map attributes, Object value) { if(this.criteria == null) throwRuntimeException( new IllegalArgumentException("call to [" + name + "] not supported here")); Criterion c = null; if(name.equals(ID_EQUALS)) { c = Restrictions.idEq(value); } else { if( name.equals( IS_NULL ) || name.equals( IS_NOT_NULL ) || name.equals( IS_EMPTY ) || name.equals( IS_NOT_EMPTY )) { if(!(value instanceof String)) throwRuntimeException( new IllegalArgumentException("call to [" + name + "] with value ["+value+"] requires a String value.")); if(name.equals( IS_NULL )) { c = Restrictions.isNull( (String)value ) ; } else if(name.equals( IS_NOT_NULL )) { c = Restrictions.isNotNull( (String)value ); } else if(name.equals( IS_EMPTY )) { c = Restrictions.isEmpty( (String)value ); } else if(name.equals( IS_NOT_EMPTY )) { c = Restrictions.isNotEmpty( (String)value ); } } } if(c != null) { if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); } else { addToCriteria( c ); } return c; } else { String nameString = name.toString(); if(value instanceof Closure) { if(targetBean.isReadableProperty(nameString)) { this.criteria.createAlias(nameString, nameString+ALIAS,CriteriaSpecification.LEFT_JOIN); this.aliasStack.add(nameString+ALIAS); return name; } } else if(parent instanceof Proxy) { try { criteriaProxy.setProperty(nameString, value); return criteria; } catch(MissingPropertyException mpe) { throwRuntimeException( new MissingMethodException(nameString, getClass(), new Object[] {value}) ); } } throwRuntimeException( new MissingMethodException(nameString, getClass(), new Object[] {value})); } return c; } private void addToCriteria(Criterion c) { this.criteria.add(c); } }
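/*
 * A minimal usage sketch (Groovy, reusing the class javadoc's hypothetical "Account" domain
 * class — not part of this file) of the logical-expression nesting that createNode/nodeCompleted
 * implement above; "and"/"or"/"not" nodes collect Criterion arguments and fold them via
 * Restrictions and Junction:
 *
 *   def c = Account.createCriteria()
 *   def results = c.list {
 *       or {
 *           between("balance", 500, 1000)
 *           eq("branch", "London")
 *       }
 *       not { ilike("holderFirstName", "fred%") }
 *       order("holderLastName", "desc")
 *   }
 */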
src/commons/grails/orm/HibernateCriteriaBuilder.java
/* * Copyright 2004-2005 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package grails.orm; import grails.util.ExtendProxy; import groovy.lang.MissingMethodException; import groovy.lang.MissingPropertyException; import groovy.lang.Closure; import groovy.lang.GString; import groovy.util.BuilderSupport; import groovy.util.Proxy; import java.util.*; import org.hibernate.Criteria; import org.hibernate.FetchMode; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.transform.ResultTransformer; import org.hibernate.transform.AliasToEntityMapResultTransformer; import org.hibernate.criterion.*; import org.springframework.orm.hibernate3.SessionHolder; import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.beans.BeanWrapperImpl; import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanWrapper; /** * <p>Wraps the Hibernate Criteria API in a builder. The builder can be retrieved through the "createCriteria()" dynamic static * method of Grails domain classes (Example in Groovy): * * <pre> * def c = Account.createCriteria() * def results = c { * projections { * groupProperty("branch") * } * like("holderFirstName", "Fred%") * and { * between("balance", 500, 1000) * eq("branch", "London") * } * maxResults(10) * order("holderLastName", "desc") * } * </pre> * * <p>The builder can also be instantiated standalone with a SessionFactory and persistent Class instance: * * <pre> * new HibernateCriteriaBuilder(clazz, sessionFactory).list { * eq("firstName", "Fred") * } * </pre> * * @author Graeme Rocher * @since Oct 10, 2005 */ public class HibernateCriteriaBuilder extends BuilderSupport { public static final String AND = "and"; // builder public static final String IS_NULL = "isNull"; // builder public static final String IS_NOT_NULL = "notNull"; // builder public static final String NOT = "not";// builder public static final String OR = "or"; // builder public static final String ID_EQUALS = "idEq"; // builder public static final String IS_EMPTY = "isEmpty"; //builder public static final String IS_NOT_EMPTY = "isNotEmpty"; //builder public static final String BETWEEN = "between";//method public static final String EQUALS = "eq";//method public static final String EQUALS_PROPERTY = "eqProperty";//method public static final String GREATER_THAN = "gt";//method public static final String GREATER_THAN_PROPERTY = "gtProperty";//method public static final String GREATER_THAN_OR_EQUAL = "ge";//method public static final String GREATER_THAN_OR_EQUAL_PROPERTY = "geProperty";//method public static final String ILIKE = "ilike";//method public static final String IN = "in";//method public static final String LESS_THAN = "lt"; //method public static final String LESS_THAN_PROPERTY = "ltProperty";//method public static final String LESS_THAN_OR_EQUAL = "le";//method public static final String LESS_THAN_OR_EQUAL_PROPERTY = "leProperty";//method public static final String LIKE = "like";//method 
public static final String NOT_EQUAL = "ne";//method public static final String NOT_EQUAL_PROPERTY = "neProperty";//method public static final String SIZE_EQUALS = "sizeEq"; //method public static final String ORDER_DESCENDING = "desc"; public static final String ORDER_ASCENDING = "asc"; private static final String ROOT_CALL = "doCall"; private static final String LIST_CALL = "list"; private static final String COUNT_CALL = "count"; private static final String GET_CALL = "get"; private static final String SCROLL_CALL = "scroll"; private static final String PROJECTIONS = "projections"; private SessionFactory sessionFactory; private Session session; private Class targetClass; private Criteria criteria; private boolean uniqueResult = false; private Proxy resultProxy = new ExtendProxy(); private Proxy criteriaProxy; private Object parent; private List logicalExpressions = new ArrayList(); private List logicalExpressionArgs = new ArrayList(); private boolean participate; private boolean scroll; private boolean count; private ProjectionList projectionList; private BeanWrapper targetBean; private List aliasStack = new ArrayList(); private static final String ALIAS = "_alias"; private ResultTransformer resultTransformer; public HibernateCriteriaBuilder(Class targetClass, SessionFactory sessionFactory) { super(); this.targetClass = targetClass; this.targetBean = new BeanWrapperImpl(BeanUtils.instantiateClass(targetClass)); this.sessionFactory = sessionFactory; } public HibernateCriteriaBuilder(Class targetClass, SessionFactory sessionFactory, boolean uniqueResult) { super(); this.targetClass = targetClass; this.sessionFactory = sessionFactory; this.uniqueResult = uniqueResult; } public void setUniqueResult(boolean uniqueResult) { this.uniqueResult = uniqueResult; } /** * A projection that selects a distinct property name * @param propertyName The property name */ public void distinct(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [distinct] must be within a [projections] node")); } else { this.projectionList.add(Projections.distinct(Projections.property(propertyName))); } } /** * A distinct projection that takes a list * * @param propertyNames The list of distinct property names */ public void distinct(Collection propertyNames) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [distinct] must be within a [projections] node")); } else { ProjectionList list = Projections.projectionList(); for (Iterator i = propertyNames.iterator(); i.hasNext();) { Object o = i.next(); list.add(Projections.property(o.toString())); } this.projectionList.add(Projections.distinct(list)); } } /** * Adds a projection that allows the criteria to return the property average value * * @param propertyName The name of the property */ public void avg(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [avg] must be within a [projections] node")); } else { this.projectionList.add(Projections.avg(propertyName)); } } /** * Calculates the property name including any alias paths * * @param propertyName The property name * @return The calculated property name */ private String calculatePropertyName(String propertyName) { if(this.aliasStack.size()>0) { return this.aliasStack.get(this.aliasStack.size()-1).toString()+'.'+propertyName; } return propertyName; } /** * Calculates the property value, converting GStrings if necessary * * @param propertyValue The
property value * @return The calculated property value */ private Object calculatePropertyValue(Object propertyValue) { if(propertyValue instanceof GString) { return propertyValue.toString(); } return propertyValue; } /** * Adds a projection that allows the criteria to return the property count * * @param propertyName The name of the property */ public void count(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [count] must be within a [projections] node")); } else { this.projectionList.add(Projections.count(propertyName)); } } /** * Adds a projection that allows the criteria to return the distinct property count * * @param propertyName The name of the property */ public void countDistinct(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [countDistinct] must be within a [projections] node")); } else { this.projectionList.add(Projections.countDistinct(propertyName)); } } /** * Adds a projection that allows the criteria's result to be grouped by a property * * @param propertyName The name of the property */ public void groupProperty(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [groupProperty] must be within a [projections] node")); } else { this.projectionList.add(Projections.groupProperty(propertyName)); } } /** * Adds a projection that allows the criteria to retrieve a maximum property value * * @param propertyName The name of the property */ public void max(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [max] must be within a [projections] node")); } else { this.projectionList.add(Projections.max(propertyName)); } } /** * Adds a projection that allows the criteria to retrieve a minimum property value * * @param propertyName The name of the property */ public void min(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [min] must be within a [projections] node")); } else { this.projectionList.add(Projections.min(propertyName)); } } /** * Adds a projection that allows the criteria to return the row count * */ public void rowCount() { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [rowCount] must be within a [projections] node")); } else { this.projectionList.add(Projections.rowCount()); } } /** * Adds a projection that allows the criteria to retrieve the sum of the results of a property * * @param propertyName The name of the property */ public void sum(String propertyName) { if(this.projectionList == null) { throwRuntimeException( new IllegalArgumentException("call to [sum] must be within a [projections] node")); } else { this.projectionList.add(Projections.sum(propertyName)); } } /** * Sets the fetch mode of an associated path * * @param associationPath The name of the associated path * @param fetchMode The fetch mode to set */ public void fetchMode(String associationPath, FetchMode fetchMode) { if(criteria!=null) { criteria.setFetchMode(associationPath, fetchMode); } } /** * Creates a Criterion that compares to class properties for equality * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object eqProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( 
new IllegalArgumentException("Call to [eqProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.") ); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = Restrictions.eqProperty( propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion that compares to class properties for !equality * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object neProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [neProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = Restrictions.neProperty( propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion that tests if the first property is greater than the second property * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object gtProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [gtProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = Restrictions.gtProperty( propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion that tests if the first property is greater than or equal to the second property * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object geProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [geProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = Restrictions.geProperty( propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion that tests if the first property is less than the second property * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object ltProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [ltProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = 
Restrictions.ltProperty( propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion that tests if the first property is less than or equal to the second property * @param propertyName The first property name * @param otherPropertyName The second property name * @return A Criterion instance */ public Object leProperty(String propertyName, String otherPropertyName) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [leProperty] with propertyName ["+propertyName+"] and other property name ["+otherPropertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); otherPropertyName = calculatePropertyName(otherPropertyName); Criterion c = Restrictions.leProperty( propertyName, otherPropertyName ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a "greater than" Criterion based on the specified property name and value * @param propertyName The property name * @param propertyValue The property value * @return A Criterion instance */ public Object gt(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [gt] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.gt( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a "greater than or equal to" Criterion based on the specified property name and value * @param propertyName The property name * @param propertyValue The property value * @return A Criterion instance */ public Object ge(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [ge] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.ge( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a "less than" Criterion based on the specified property name and value * @param propertyName The property name * @param propertyValue The property value * @return A Criterion instance */ public Object lt(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [lt] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.lt( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a "less than or equal to" Criterion based on the specified property name and value * @param propertyName The property name * @param propertyValue The property value * @return A Criterion instance */ public Object le(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new 
IllegalArgumentException("Call to [le] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.le( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates an "equals" Criterion based on the specified property name and value * @param propertyName The property name * @param propertyValue The property value * * @return A Criterion instance */ public Object eq(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [eq] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.eq( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion with from the specified property name and "like" expression * @param propertyName The property name * @param propertyValue The like value * * @return A Criterion instance */ public Object like(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [like] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.like( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a Criterion with from the specified property name and "ilike" (a case sensitive version of "like") expression * @param propertyName The property name * @param propertyValue The ilike value * * @return A Criterion instance */ public Object ilike(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [ilike] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.ilike( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Applys a "in" contrain on the specified property * @param propertyName The property name * @param values A collection of values * * @return A Criterion instance */ public Object in(String propertyName, Collection values) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [in] with propertyName ["+propertyName+"] and values ["+values+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); Criterion c = Restrictions.in( propertyName, values ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Delegates to in as in is a Groovy keyword **/ public Object inList(String propertyName, Collection values) { return in(propertyName, values); } /** * Delegates to in as in is a Groovy keyword **/ public Object inList(String propertyName, Object[] values) { 
return in(propertyName, values); } /** * Applies an "in" constraint on the specified property * @param propertyName The property name * @param values A collection of values * * @return A Criterion instance */ public Object in(String propertyName, Object[] values) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [in] with propertyName ["+propertyName+"] and values ["+values+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); Criterion c = Restrictions.in( propertyName, values ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Orders by the specified property name (defaults to ascending) * * @param propertyName The property name to order by * @return An Order instance */ public Object order(String propertyName) { if(this.criteria == null) throwRuntimeException( new IllegalArgumentException("Call to [order] with propertyName ["+propertyName+"] not allowed here.")); propertyName = calculatePropertyName(propertyName); Order o = Order.asc(propertyName); this.criteria.addOrder(o); return o; } /** * Orders by the specified property name and direction * * @param propertyName The property name to order by * @param direction Either "asc" for ascending or "desc" for descending * * @return An Order instance */ public Object order(String propertyName, String direction) { if(this.criteria == null) throwRuntimeException( new IllegalArgumentException("Call to [order] with propertyName ["+propertyName+"] not allowed here.")); propertyName = calculatePropertyName(propertyName); Order o; if(direction.equals( ORDER_DESCENDING )) { o = Order.desc(propertyName); } else { o = Order.asc(propertyName); } this.criteria.addOrder(o); return o; } /** * Creates a Criterion that constrains a collection property by size * * @param propertyName The property name * @param size The size to constrain by * * @return A Criterion instance */ public Object sizeEq(String propertyName, int size) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [sizeEq] with propertyName ["+propertyName+"] and size ["+size+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); Criterion c = Restrictions.sizeEq( propertyName, size ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } /** * Creates a "not equal" Criterion based on the specified property name and value * @param propertyName The property name * @param propertyValue The property value * @return The criterion object */ public Object ne(String propertyName, Object propertyValue) { if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [ne] with propertyName ["+propertyName+"] and value ["+propertyValue+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); propertyValue = calculatePropertyValue(propertyValue); Criterion c = Restrictions.ne( propertyName, propertyValue ); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } public Object notEqual(String propertyName, Object propertyValue) { return ne(propertyName, propertyValue); } /** * Creates a "between" Criterion based on the property name and specified lo and hi values * @param propertyName The property name * @param lo The low value * @param hi The high value * @return A Criterion instance */ public Object between(String propertyName, Object lo, Object hi) {
if(!validateSimpleExpression()) { throwRuntimeException( new IllegalArgumentException("Call to [between] with propertyName ["+propertyName+"] not allowed here.")); } propertyName = calculatePropertyName(propertyName); Criterion c = Restrictions.between( propertyName, lo,hi); if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); }else { addToCriteria(c); } return c; } protected Object createNode(Object name) { return createNode( name, Collections.EMPTY_MAP ); } private boolean validateSimpleExpression() { if(this.criteria == null) return false; return !(!isInsideLogicalExpression() && !(this.parent instanceof Proxy) && this.parent != null && this.aliasStack.size() == 0); } private boolean isInsideLogicalExpression() { if(this.logicalExpressions.size() > 0) { String currentLogicalExpression = (String)this.logicalExpressions.get( this.logicalExpressions.size() - 1 ); if(currentLogicalExpression.equals( AND ) || currentLogicalExpression.equals( OR ) || currentLogicalExpression.equals( NOT )) return true; } return false; } protected Object createNode(Object name, Map attributes) { if(name.equals(ROOT_CALL) || name.equals(LIST_CALL) || name.equals(GET_CALL) || name.equals(COUNT_CALL) || name.equals(SCROLL_CALL)) { if(this.criteria != null) throwRuntimeException( new IllegalArgumentException("call to [" + name + "] not supported here")); if(name.equals(GET_CALL)) this.uniqueResult = true; if(name.equals(SCROLL_CALL)) { this.scroll = true; } else if(name.equals(COUNT_CALL)) { this.count = true; } if(TransactionSynchronizationManager.hasResource(sessionFactory)) { this.participate = true; this.session = ((SessionHolder)TransactionSynchronizationManager.getResource(sessionFactory)).getSession(); } else { this.session = sessionFactory.openSession(); } this.criteria = this.session.createCriteria(targetClass); this.criteriaProxy = new ExtendProxy(); this.criteriaProxy.setAdaptee(this.criteria); resultProxy = new ExtendProxy(); this.parent = resultProxy; return resultProxy; } else if(name.equals( AND ) || name.equals( OR ) || name.equals( NOT )) { if(this.criteria == null) throwRuntimeException( new IllegalArgumentException("call to [" + name + "] not supported here")); this.logicalExpressions.add(name); return name; } else if(name.equals( PROJECTIONS )) { if(this.criteria == null) throwRuntimeException( new IllegalArgumentException("call to [" + name + "] not supported here")); this.projectionList = Projections.projectionList(); return name; } else if(targetBean.isReadableProperty(name.toString())) { this.criteria.createAlias(name.toString(), name.toString()+ALIAS,CriteriaSpecification.LEFT_JOIN); this.aliasStack.add(name.toString()+ALIAS); return name; } closeSessionFollowingException(); throw new MissingMethodException((String) name, getClass(), new Object[] {}) ; } protected void nodeCompleted(Object parent, Object node) { if(node instanceof Proxy) { if(resultTransformer != null) { this.criteria.setResultTransformer(resultTransformer); } if(!uniqueResult) { if(scroll) { resultProxy.setAdaptee( this.criteria.scroll() ); } else if(count) { this.criteria.setProjection(Projections.rowCount()); resultProxy.setAdaptee( this.criteria.uniqueResult() ); } else { resultProxy.setAdaptee( this.criteria.list() ); } } else { resultProxy.setAdaptee( this.criteria.uniqueResult() ); } this.criteria = null; if(!this.participate) { this.session.close(); } } else if(node.equals( AND ) || node.equals( OR )) { Criterion c = null; if(logicalExpressionArgs.size() == 1 && node.equals(AND)) { c 
=(Criterion)logicalExpressionArgs.remove(0); } else if(logicalExpressionArgs.size() == 2) { Criterion lhs = (Criterion)logicalExpressionArgs.remove(0); Criterion rhs = (Criterion)logicalExpressionArgs.remove(0); if(node.equals(OR)) { c = Restrictions.or(lhs,rhs); } else { c = Restrictions.and(lhs,rhs); } } else if(logicalExpressionArgs.size() > 2) { if(node.equals(OR)) { c = Restrictions.disjunction(); } else { c = Restrictions.conjunction(); } for (Iterator i = logicalExpressionArgs.iterator(); i.hasNext();) { Criterion criterion = (Criterion) i.next(); ((Junction)c).add(criterion); } } if(c!=null) { if(parent instanceof Proxy) { addToCriteria( c ); } else if(parent.equals( AND ) || parent.equals( OR )) { this.logicalExpressionArgs.add(c ); this.logicalExpressions.remove(this.logicalExpressions.size() - 1); } } } else if(node.equals(NOT)) { if(this.logicalExpressionArgs.size() < 1) throwRuntimeException( new IllegalArgumentException("Logical expression [" + node +"] must contain at least 1 expression")); Criterion c = (Criterion)this.logicalExpressionArgs.remove(this.logicalExpressionArgs.size() - 1); if(parent instanceof Proxy) { addToCriteria( Restrictions.not( c ) ); } else if(parent.equals( AND ) || parent.equals( OR ) || parent.equals( NOT )) { this.logicalExpressionArgs.add( Restrictions.not( c ) ); this.logicalExpressions.remove(this.logicalExpressions.size() - 1); } } else if(node.equals(PROJECTIONS)) { if(this.projectionList != null && this.projectionList.getLength() > 0) { this.criteria.setProjection(this.projectionList); } } else if(targetBean.isReadableProperty(node.toString()) && aliasStack.size() > 0) { aliasStack.remove(aliasStack.size()-1); } super.nodeCompleted(parent, node); } /** * Throws a runtime exception where necessary to ensure the session gets closed */ private void throwRuntimeException(RuntimeException t) { closeSessionFollowingException(); throw t; } private void closeSessionFollowingException() { if(this.session != null && this.session.isOpen() && !this.participate) { this.session.close(); } if(this.criteria != null) { this.criteria = null; } } protected void setParent(Object parent, Object child) { this.parent = parent; } protected Object createNode(Object name, Object value) { return createNode(name, Collections.EMPTY_MAP, value); } protected Object createNode(Object name, Map attributes, Object value) { if(this.criteria == null) throwRuntimeException( new IllegalArgumentException("call to [" + name + "] not supported here")); Criterion c = null; if(name.equals(ID_EQUALS)) { c = Restrictions.idEq(value); } else { if( name.equals( IS_NULL ) || name.equals( IS_NOT_NULL ) || name.equals( IS_EMPTY ) || name.equals( IS_NOT_EMPTY )) { if(!(value instanceof String)) throwRuntimeException( new IllegalArgumentException("call to [" + name + "] with value ["+value+"] requires a String value.")); if(name.equals( IS_NULL )) { c = Restrictions.isNull( (String)value ) ; } else if(name.equals( IS_NOT_NULL )) { c = Restrictions.isNotNull( (String)value ); } else if(name.equals( IS_EMPTY )) { c = Restrictions.isEmpty( (String)value ); } else if(name.equals( IS_NOT_EMPTY )) { c = Restrictions.isNotEmpty( (String)value ); } } } if(c != null) { if(isInsideLogicalExpression()) { this.logicalExpressionArgs.add(c); } else { addToCriteria( c ); } return c; } else { String nameString = name.toString(); if(value instanceof Closure) { if(targetBean.isReadableProperty(nameString)) { this.criteria.createAlias(nameString, nameString+ALIAS,CriteriaSpecification.LEFT_JOIN); 
this.aliasStack.add(nameString+ALIAS); return name; } } else if(parent instanceof Proxy) { try { criteriaProxy.setProperty(nameString, value); return criteria; } catch(MissingPropertyException mpe) { throwRuntimeException( new MissingMethodException(nameString, getClass(), new Object[] {value}) ); } } throwRuntimeException( new MissingMethodException(nameString, getClass(), new Object[] {value})); } return c; } private void addToCriteria(Criterion c) { this.criteria.add(c); } }
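/*
 * A hedged sketch of the single-argument nodes handled by createNode(name, attributes, value)
 * above; "Account" and its properties follow the class javadoc's hypothetical example rather
 * than any real domain class:
 *
 *   def results = Account.createCriteria().list {
 *       idEq(1L)                  // ID_EQUALS   -> Restrictions.idEq
 *       isNull("holderFirstName") // IS_NULL     -> Restrictions.isNull
 *       notNull("branch")         // IS_NOT_NULL -> Restrictions.isNotNull
 *   }
 */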
fix for GRAILS-320 to allow projection constraint git-svn-id: 29aad96320b2a07b98332cd568fc1316025c072f@1369 1cfb16fd-6d17-0410-8ff1-b7e8e1e2867d
src/commons/grails/orm/HibernateCriteriaBuilder.java
fix for GRAILS-320 to allow projection constraint
<ide><path>rc/commons/grails/orm/HibernateCriteriaBuilder.java <ide> package grails.orm; <ide> <ide> import grails.util.ExtendProxy; <add>import groovy.lang.Closure; <add>import groovy.lang.GString; <ide> import groovy.lang.MissingMethodException; <ide> import groovy.lang.MissingPropertyException; <del>import groovy.lang.Closure; <del>import groovy.lang.GString; <ide> import groovy.util.BuilderSupport; <ide> import groovy.util.Proxy; <ide> <del>import java.util.*; <add>import java.util.ArrayList; <add>import java.util.Collection; <add>import java.util.Collections; <add>import java.util.Iterator; <add>import java.util.List; <add>import java.util.Map; <ide> <ide> import org.hibernate.Criteria; <ide> import org.hibernate.FetchMode; <ide> import org.hibernate.Session; <ide> import org.hibernate.SessionFactory; <add>import org.hibernate.criterion.CriteriaSpecification; <add>import org.hibernate.criterion.Criterion; <add>import org.hibernate.criterion.Junction; <add>import org.hibernate.criterion.Order; <add>import org.hibernate.criterion.ProjectionList; <add>import org.hibernate.criterion.Projections; <add>import org.hibernate.criterion.Restrictions; <ide> import org.hibernate.transform.ResultTransformer; <del>import org.hibernate.transform.AliasToEntityMapResultTransformer; <del>import org.hibernate.criterion.*; <add>import org.springframework.beans.BeanUtils; <add>import org.springframework.beans.BeanWrapper; <add>import org.springframework.beans.BeanWrapperImpl; <ide> import org.springframework.orm.hibernate3.SessionHolder; <ide> import org.springframework.transaction.support.TransactionSynchronizationManager; <del>import org.springframework.beans.BeanWrapperImpl; <del>import org.springframework.beans.BeanUtils; <del>import org.springframework.beans.BeanWrapper; <ide> <ide> /** <ide> * <p>Wraps the Hibernate Criteria API in a builder. The builder can be retrieved through the "createCriteria()" dynamic static <ide> } <ide> <ide> /** <add> * A projection that selects a property name <add> * @param propertyName The name of the property <add> */ <add> public void property(String propertyName) { <add> if(this.projectionList == null) { <add> throwRuntimeException( new IllegalArgumentException("call to [property] must be within a [projections] node")); <add> } <add> else { <add> this.projectionList.add(Projections.property(propertyName)); <add> } <add> } <add> <add> /** <ide> * A projection that selects a distince property name <ide> * @param propertyName The property name <ide> */
Java
mit
3efb02d362f4871b239a47f981ad20a62713beca
0
InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service
package com.worth.ifs.competition.controller; import com.google.common.collect.Sets; import com.worth.ifs.BaseControllerIntegrationTest; import com.worth.ifs.category.repository.CategoryLinkRepository; import com.worth.ifs.commons.rest.RestResult; import com.worth.ifs.competition.domain.Competition; import com.worth.ifs.competition.domain.Milestone; import com.worth.ifs.competition.repository.CompetitionRepository; import com.worth.ifs.competition.repository.MilestoneRepository; import com.worth.ifs.competition.resource.CompetitionCountResource; import com.worth.ifs.competition.resource.CompetitionResource; import com.worth.ifs.competition.resource.CompetitionSetupSection; import com.worth.ifs.competition.resource.MilestoneResource; import com.worth.ifs.util.fixtures.CompetitionCoFundersFixture; import org.junit.Before; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.annotation.Rollback; import org.springframework.transaction.annotation.Transactional; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; import java.util.Set; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; /** * Integration test for testing the REST services of the competition controller */ @Rollback @Transactional public class CompetitionControllerIntegrationTest extends BaseControllerIntegrationTest<CompetitionController> { public static final int EXISTING_CATEGORY_LINK_BEFORE_TEST = 2; @Autowired CategoryLinkRepository categoryLinkRepository; @Autowired CompetitionRepository competitionRepository; @Autowired MilestoneRepository milestoneRepository; public static final String COMPETITION_NAME_UPDATED = "Competition name updated"; public static final int INNOVATION_SECTOR_ID = 1; public static final String INNOVATION_SECTOR_NAME = "Health and life sciences"; public static final int INNOVATION_AREA_ID = 9; public static final int INNOVATION_AREA_ID_TWO = 10; public static final String INNOVATION_AREA_NAME = "User Experience"; public static final String EXISTING_COMPETITION_NAME = "Connected digital additive manufacturing"; final LocalDateTime now = LocalDateTime.now(); final LocalDateTime sixDaysAgo = now.minusDays(6); final LocalDateTime fiveDaysAgo = now.minusDays(5); final LocalDateTime fourDaysAgo = now.minusDays(4); final LocalDateTime threeDaysAgo = now.minusDays(3); final LocalDateTime twoDaysAgo = now.minusDays(2); final LocalDateTime oneDayAgo = now.minusDays(1); final LocalDateTime oneDayAhead = now.plusDays(1); final LocalDateTime twoDaysAhead = now.plusDays(2); final LocalDateTime threeDaysAhead = now.plusDays(3); final LocalDateTime fourDaysAhead = now.plusDays(4); final LocalDateTime fiveDaysAhead = now.plusDays(5); final LocalDateTime sixDaysAhead = now.plusDays(6); private static final Long COMPETITION_ID = 1L; @Override @Autowired protected void setControllerUnderTest(CompetitionController controller) { this.controller = controller; } @Before public void setLoggedInUserOnThread() { loginCompAdmin(); } @Rollback @Test public void testGetAllCompetitions() throws Exception { List<CompetitionResource> competitions = getAllCompetitions(2); checkExistingCompetition(competitions.get(0)); } @Rollback @Test public void testGetOneCompetitions() throws Exception { RestResult<CompetitionResource> competitionsResult = controller.getCompetitionById(COMPETITION_ID); assertTrue(competitionsResult.isSuccess());
CompetitionResource competition = competitionsResult.getSuccessObject(); checkExistingCompetition(competition); } @Rollback @Test public void testCreateCompetition() throws Exception { getAllCompetitions(2); createNewCompetition(); int expectedCompetitionCount = 3; List<CompetitionResource> competitions = getAllCompetitions(expectedCompetitionCount); checkExistingCompetition(competitions.get(0)); checkNewCompetition(competitions.get(1)); } @Rollback @Test public void testCompetitionCodeGeneration() throws Exception { // Setup test data getAllCompetitions(2); createNewCompetition(); createNewCompetition(); createNewCompetition(); List<CompetitionResource> competitions = getAllCompetitions(5); // Generate number 1 in this month year combination RestResult<String> generatedCode = controller.generateCompetitionCode(LocalDateTime.of(2016, 6, 5, 12, 00), competitions.get(0).getId()); assertTrue(generatedCode.isSuccess()); assertEquals("1606-1", generatedCode.getSuccessObject()); flushAndClearSession(); // Generate number 2 in this month year combination generatedCode = controller.generateCompetitionCode(LocalDateTime.of(2016, 6, 5, 12, 00), competitions.get(1).getId()); assertTrue(generatedCode.isSuccess()); assertEquals("1606-2", generatedCode.getSuccessObject()); flushAndClearSession(); // Generate number 3 in this month year combination generatedCode = controller.generateCompetitionCode(LocalDateTime.of(2016, 6, 5, 12, 00), competitions.get(2).getId()); assertTrue(generatedCode.isSuccess()); assertEquals("1606-3", generatedCode.getSuccessObject()); // if generated twice the first code should not be updated. generatedCode = controller.generateCompetitionCode(LocalDateTime.of(2020, 11, 11, 12, 00), competitions.get(2).getId()); assertTrue(generatedCode.isSuccess()); assertEquals("1606-3", generatedCode.getSuccessObject()); } private List<CompetitionResource> getAllCompetitions(int expectedCompetitionCount) { RestResult<List<CompetitionResource>> allCompetitionsResult = controller.findAll(); assertTrue(allCompetitionsResult.isSuccess()); List<CompetitionResource> competitions = allCompetitionsResult.getSuccessObject(); assertThat("Checking if the amount of competitions is what we expect.", competitions, hasSize(expectedCompetitionCount)); return competitions; } @Rollback @Test public void testUpdateCompetition() throws Exception { getAllCompetitions(2); // Create new competition CompetitionResource competition = createNewCompetition(); getAllCompetitions(3); // Update competition competition.setName(COMPETITION_NAME_UPDATED); RestResult<CompetitionResource> saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); getAllCompetitions(3); CompetitionResource savedCompetition = saveResult.getSuccessObject(); assertEquals(COMPETITION_NAME_UPDATED, savedCompetition.getName()); } @Rollback @Test public void testUpdateCompetitionCategories() throws Exception { getAllCompetitions(2); // Create new competition CompetitionResource competition = createNewCompetition(); getAllCompetitions(3); // Update competition competition.setName(COMPETITION_NAME_UPDATED); Long sectorId = Long.valueOf(INNOVATION_SECTOR_ID); Long areaId = Long.valueOf(INNOVATION_AREA_ID); competition.setInnovationSector(sectorId); competition.setInnovationArea(areaId); RestResult<CompetitionResource> saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); getAllCompetitions(3); 
CompetitionResource savedCompetition = saveResult.getSuccessObject(); checkUpdatedCompetitionCategories(savedCompetition); } @Rollback @Test public void testUpdateCompetitionCoFunders() throws Exception { getAllCompetitions(2); // Create new competition CompetitionResource competition = createNewCompetition(); getAllCompetitions(3); // Update competition competition.setName(COMPETITION_NAME_UPDATED); Long sectorId = Long.valueOf(INNOVATION_SECTOR_ID); Long areaId = Long.valueOf(INNOVATION_AREA_ID); competition.setInnovationSector(sectorId); competition.setInnovationArea(areaId); //With one co-funder competition.setCoFunders(CompetitionCoFundersFixture.getNewTestCoFundersResouces(1, competition.getId())); RestResult<CompetitionResource> saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); getAllCompetitions(3); CompetitionResource savedCompetition = saveResult.getSuccessObject(); assertEquals(1, savedCompetition.getCoFunders().size()); // Now re-insert with 2 co-funders competition.setCoFunders(CompetitionCoFundersFixture.getNewTestCoFundersResouces(2, competition.getId())); saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); savedCompetition = saveResult.getSuccessObject(); // we should expect in total two co-funders. assertEquals(2, savedCompetition.getCoFunders().size()); } @Rollback @Test public void testCompetitionCategorySaving() throws Exception { getAllCompetitions(2); // Create new competition CompetitionResource competition = createNewCompetition(); getAllCompetitions(3); assertEquals(EXISTING_CATEGORY_LINK_BEFORE_TEST, categoryLinkRepository.count()); // Update competition competition.setName(COMPETITION_NAME_UPDATED); Long sectorId = Long.valueOf(INNOVATION_SECTOR_ID); Long areaId = Long.valueOf(INNOVATION_AREA_ID); competition.setInnovationSector(sectorId); competition.setInnovationArea(areaId); // Check if the categorylink is only stored once. 
RestResult<CompetitionResource> saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); assertEquals(EXISTING_CATEGORY_LINK_BEFORE_TEST + 2, categoryLinkRepository.count()); CompetitionResource savedCompetition = saveResult.getSuccessObject(); checkUpdatedCompetitionCategories(savedCompetition); // check that the link is not duplicated saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); assertEquals(EXISTING_CATEGORY_LINK_BEFORE_TEST + 2, categoryLinkRepository.count()); // check that the link is removed competition.setInnovationSector(null); saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); assertEquals(EXISTING_CATEGORY_LINK_BEFORE_TEST + 1, categoryLinkRepository.count()); // check that the link is updated (or removed and added) competition.setInnovationArea(Long.valueOf(INNOVATION_AREA_ID_TWO)); saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); assertEquals(EXISTING_CATEGORY_LINK_BEFORE_TEST + 1, categoryLinkRepository.count()); getAllCompetitions(3); } @Rollback @Test public void testCompetitionCompletedSections() throws Exception { Long competitionId = 7L; RestResult<CompetitionResource> competitionsResult = controller.getCompetitionById(competitionId); assertEquals(0, competitionsResult.getSuccessObject().getSectionSetupStatus().size()); } @Rollback @Test public void testCompetitionCompleteSection() throws Exception { Long competitionId = 7L; controller.markSectionComplete(competitionId, CompetitionSetupSection.INITIAL_DETAILS); RestResult<CompetitionResource> competitionsResult = controller.getCompetitionById(competitionId); assertEquals(Boolean.TRUE, competitionsResult.getSuccessObject().getSectionSetupStatus().get(CompetitionSetupSection.INITIAL_DETAILS)); } @Rollback @Test public void testCompetitionInCompleteSection() throws Exception { Long competitionId = 7L; controller.markSectionInComplete(competitionId, CompetitionSetupSection.INITIAL_DETAILS); RestResult<CompetitionResource> competitionsResult = controller.getCompetitionById(competitionId); assertEquals(Boolean.FALSE, competitionsResult.getSuccessObject().getSectionSetupStatus().get(CompetitionSetupSection.INITIAL_DETAILS)); } @Rollback @Test public void testFindMethods() throws Exception { List<CompetitionResource> existingComps = getAllCompetitions(2); CompetitionResource notStartedCompetition = createWithDates(oneDayAhead, twoDaysAhead, threeDaysAhead, fourDaysAhead, fiveDaysAhead, sixDaysAhead); assertThat(notStartedCompetition.getCompetitionStatus(), equalTo(CompetitionResource.Status.NOT_STARTED)); CompetitionResource openCompetition = createWithDates(oneDayAgo, oneDayAhead, twoDaysAhead, threeDaysAhead, fourDaysAhead, fiveDaysAhead); assertThat(openCompetition.getCompetitionStatus(), equalTo(CompetitionResource.Status.OPEN)); CompetitionResource closedCompetition = createWithDates(twoDaysAgo, oneDayAgo, twoDaysAhead, threeDaysAhead, fourDaysAhead, fiveDaysAhead); assertThat(closedCompetition.getCompetitionStatus(), equalTo(CompetitionResource.Status.CLOSED)); CompetitionResource inAssessmentCompetition = createWithDates(threeDaysAgo, twoDaysAgo, oneDayAgo, threeDaysAhead, fourDaysAhead, fiveDaysAhead); assertThat(inAssessmentCompetition.getCompetitionStatus(), 
equalTo(CompetitionResource.Status.IN_ASSESSMENT)); CompetitionResource inPanelCompetition = createWithDates(fourDaysAgo, threeDaysAgo, twoDaysAgo, oneDayAgo, fourDaysAhead, fiveDaysAhead); assertThat(inPanelCompetition.getCompetitionStatus(), equalTo(CompetitionResource.Status.FUNDERS_PANEL)); CompetitionResource assessorFeedbackCompetition = createWithDates(fiveDaysAgo, fourDaysAgo, threeDaysAgo, twoDaysAgo, oneDayAgo, fiveDaysAhead); assertThat(assessorFeedbackCompetition.getCompetitionStatus(), equalTo(CompetitionResource.Status.ASSESSOR_FEEDBACK)); CompetitionResource projectSetup = createWithDates(sixDaysAgo, fiveDaysAgo, fourDaysAgo, threeDaysAgo, twoDaysAgo, oneDayAgo); assertThat(projectSetup.getCompetitionStatus(), equalTo(CompetitionResource.Status.PROJECT_SETUP)); CompetitionCountResource counts = controller.count().getSuccessObjectOrThrowException();; List<CompetitionResource> liveCompetitions = controller.live().getSuccessObjectOrThrowException(); Set<Long> liveCompetitionIds = Sets.newHashSet(openCompetition.getId(), closedCompetition.getId(), inAssessmentCompetition.getId(), inPanelCompetition.getId(), assessorFeedbackCompetition.getId()); Set<Long> notLiveCompetitionIds = Sets.newHashSet(notStartedCompetition.getId(), projectSetup.getId()); //Live competitions plus one the test data. assertThat(liveCompetitions.size(), equalTo(liveCompetitionIds.size() + 1)); assertThat(counts.getLiveCount(), equalTo((long) (liveCompetitionIds.size() + 1))); liveCompetitions.stream().forEach(competitionResource -> { //Existing competitions in the db should be ignored. if (!existingComps.get(0).getId().equals(competitionResource.getId()) && !existingComps.get(1).getId().equals(competitionResource.getId())) { assertTrue(liveCompetitionIds.contains(competitionResource.getId())); assertFalse(notLiveCompetitionIds.contains(competitionResource.getId())); } }); List<CompetitionResource> projectSetupCompetitions = controller.projectSetup().getSuccessObjectOrThrowException(); Set<Long> projectSetupCompetitionIds = Sets.newHashSet(projectSetup.getId()); Set<Long> notProjectSetupCompetitionIds = Sets.newHashSet(notStartedCompetition.getId(), openCompetition.getId(), closedCompetition.getId(), inAssessmentCompetition.getId(), inPanelCompetition.getId(), assessorFeedbackCompetition.getId()); assertThat(projectSetupCompetitions.size(), equalTo(projectSetupCompetitionIds.size())); assertThat(counts.getProjectSetupCount(), equalTo((long) projectSetupCompetitionIds.size())); projectSetupCompetitions.stream().forEach(competitionResource -> { assertTrue(projectSetupCompetitionIds.contains(competitionResource.getId())); assertFalse(notProjectSetupCompetitionIds.contains(competitionResource.getId())); }); List<CompetitionResource> upcomingCompetitions = controller.upcoming().getSuccessObjectOrThrowException(); //One existing comp is upcoming and the new one. assertThat(upcomingCompetitions.size(), equalTo(2)); assertThat(counts.getUpcomingCount(), equalTo(2L)); Set<Long> upcomingCompetitionIds = Sets.newHashSet(notStartedCompetition.getId()); Set<Long> notUpcomingCompetitionIds = Sets.newHashSet(projectSetup.getId(), openCompetition.getId(), closedCompetition.getId(), inAssessmentCompetition.getId(), inPanelCompetition.getId(), assessorFeedbackCompetition.getId()); upcomingCompetitions.stream().forEach(competitionResource -> { //Existing competitions in the db should be ignored. 
if (!existingComps.get(0).getId().equals(competitionResource.getId()) && !existingComps.get(1).getId().equals(competitionResource.getId())) { assertTrue(upcomingCompetitionIds.contains(competitionResource.getId())); assertFalse(notUpcomingCompetitionIds.contains(competitionResource.getId())); } }); } private CompetitionResource createWithDates(LocalDateTime startDate, LocalDateTime endDate, LocalDateTime assessmentStartDate, LocalDateTime assessmentEndDate, LocalDateTime fundersPanelEndDate, LocalDateTime assessorFeedbackDate) { CompetitionResource comp = controller.create().getSuccessObjectOrThrowException(); List<Milestone> milestone = createNewMilestones(comp, startDate, endDate, assessmentStartDate, assessmentEndDate, fundersPanelEndDate, assessorFeedbackDate); List<Long> milestonesIds = new ArrayList<>(); milestone.forEach(m -> { milestonesIds.add(m.getId()); milestoneRepository.save(m); }); controller.saveCompetition(comp, comp.getId()).getSuccessObjectOrThrowException(); //TODO replace with controller endpoint for competition setup finished Competition compEntity = competitionRepository.findById(comp.getId()); compEntity.setStatus(CompetitionResource.Status.COMPETITION_SETUP_FINISHED); competitionRepository.save(compEntity); flushAndClearSession(); return controller.getCompetitionById(comp.getId()).getSuccessObjectOrThrowException(); } private List<Milestone> createNewMilestones(CompetitionResource comp, LocalDateTime startDate, LocalDateTime endDate, LocalDateTime assessmentStartDate, LocalDateTime assessmentEndDate, LocalDateTime fundersPanelEndDate, LocalDateTime assessorFeedbackDate) { LocalDateTime milestoneDate = LocalDateTime.now(); List<MilestoneResource.MilestoneName> milestoneNames = populateMilestoneNames(); List<Milestone> milestones = new ArrayList<>(); Milestone milestone; for(MilestoneResource.MilestoneName milestoneName : milestoneNames) { milestone = new Milestone(); milestone.setName(milestoneName); milestone.setCompetition(assignCompetitionId(comp)); milestone.setDate(milestoneDate); if (milestone.getName().toString().equals("OPEN_DATE")){ milestone.setDate(startDate); } if (milestone.getName().toString().equals("SUBMISSION_DATE")) { milestone.setDate(endDate); } if (milestone.getName().toString().equals("ASSESSOR_ACCEPTS")) { milestone.setDate(assessmentStartDate); } if (milestone.getName().toString().equals("ASSESSOR_DEADLINE")) { milestone.setDate(assessorFeedbackDate); } if (milestone.getName().toString().equals("FUNDERS_PANEL")) { milestone.setDate(assessmentEndDate); } if (milestone.getName().toString().equals("NOTIFICATIONS")){ milestone.setDate(fundersPanelEndDate); } milestones.add(milestone); } return milestones; } private Competition assignCompetitionId(CompetitionResource competition) { Competition newComp = new Competition(); newComp.setId(competition.getId()); return newComp; } private CompetitionResource createNewCompetition() { RestResult<CompetitionResource> competitionsResult = controller.create(); assertTrue(competitionsResult.isSuccess()); CompetitionResource competition = competitionsResult.getSuccessObject(); assertThat(competition.getName(), isEmptyOrNullString()); return competition; } private List<MilestoneResource.MilestoneName> populateMilestoneNames() { return new ArrayList<MilestoneResource.MilestoneName>(EnumSet.allOf(MilestoneResource.MilestoneName.class)); } private void checkUpdatedCompetitionCategories(CompetitionResource savedCompetition) { assertEquals(COMPETITION_NAME_UPDATED, savedCompetition.getName()); 
assertEquals(INNOVATION_SECTOR_ID, (long) savedCompetition.getInnovationSector()); assertEquals(INNOVATION_SECTOR_NAME, savedCompetition.getInnovationSectorName()); assertEquals(INNOVATION_AREA_ID, (long) savedCompetition.getInnovationArea()); assertEquals(INNOVATION_AREA_NAME, savedCompetition.getInnovationAreaName()); } private void checkExistingCompetition(CompetitionResource competition) { assertThat(competition, notNullValue()); assertThat(competition.getName(), is(EXISTING_COMPETITION_NAME)); assertThat(competition.getCompetitionStatus(), is(CompetitionResource.Status.OPEN)); } private void checkNewCompetition(CompetitionResource competition) { assertThat(competition, notNullValue()); assertThat(competition.getName(), isEmptyOrNullString()); assertThat(competition.getCompetitionStatus(), is(CompetitionResource.Status.COMPETITION_SETUP)); } }
ifs-data-service/src/test/java/com/worth/ifs/competition/controller/CompetitionControllerIntegrationTest.java
package com.worth.ifs.competition.controller; import com.google.common.collect.Sets; import com.worth.ifs.BaseControllerIntegrationTest; import com.worth.ifs.category.repository.CategoryLinkRepository; import com.worth.ifs.commons.rest.RestResult; import com.worth.ifs.competition.domain.Competition; import com.worth.ifs.competition.domain.Milestone; import com.worth.ifs.competition.repository.CompetitionRepository; import com.worth.ifs.competition.repository.MilestoneRepository; import com.worth.ifs.competition.resource.CompetitionCountResource; import com.worth.ifs.competition.resource.CompetitionResource; import com.worth.ifs.competition.resource.CompetitionSetupSection; import com.worth.ifs.competition.resource.MilestoneResource; import com.worth.ifs.util.fixtures.CompetitionCoFundersFixture; import org.junit.Before; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.annotation.Rollback; import org.springframework.transaction.annotation.Transactional; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; import java.util.Set; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; /** * Integration test for testing the rest servcies of the competition controller */ @Rollback @Transactional public class CompetitionControllerIntegrationTest extends BaseControllerIntegrationTest<CompetitionController> { public static final int EXISTING_CATEGORY_LINK_BEFORE_TEST = 2; @Autowired CategoryLinkRepository categoryLinkRepository; @Autowired CompetitionRepository competitionRepository; @Autowired MilestoneRepository milestoneRepository; public static final String COMPETITION_NAME_UPDATED = "Competition name updated"; public static final int INNOVATION_SECTOR_ID = 1; public static final String INNOVATION_SECTOR_NAME = "Health and life sciences"; public static final int INNOVATION_AREA_ID = 9; public static final int INNOVATION_AREA_ID_TWO = 10; public static final String INNOVATION_AREA_NAME = "User Experience"; public static final String EXISTING_COMPETITION_NAME = "Connected digital additive manufacturing"; final LocalDateTime now = LocalDateTime.now(); final LocalDateTime sixDaysAgo = now.minusDays(6); final LocalDateTime fiveDaysAgo = now.minusDays(5); final LocalDateTime fourDaysAgo = now.minusDays(4); final LocalDateTime threeDaysAgo = now.minusDays(3); final LocalDateTime twoDaysAgo = now.minusDays(2); final LocalDateTime oneDayAgo = now.minusDays(1); final LocalDateTime oneDayAhead = now.plusDays(1); final LocalDateTime twoDaysAhead = now.plusDays(2); final LocalDateTime threeDaysAhead = now.plusDays(3); final LocalDateTime fourDaysAhead = now.plusDays(4); final LocalDateTime fiveDaysAhead = now.plusDays(5); final LocalDateTime sixDaysAhead = now.plusDays(6); private static final Long COMPETITION_ID = 1L; @Override @Autowired protected void setControllerUnderTest(CompetitionController controller) { this.controller = controller; } @Before public void setLoggedInUserOnThread() { loginCompAdmin(); } @Rollback @Test public void testGetAllCompetitions() throws Exception { List<CompetitionResource> competitions = getAllCompetitions(2); checkExistingCompetition(competitions.get(0)); } @Rollback @Test public void testGetOneCompetitions() throws Exception { RestResult<CompetitionResource> competitionsResult = controller.getCompetitionById(COMPETITION_ID); assertTrue(competitionsResult.isSuccess()); 
CompetitionResource competition = competitionsResult.getSuccessObject(); checkExistingCompetition(competition); } @Rollback @Test public void testCreateCompetition() throws Exception { getAllCompetitions(2); createNewCompetition(); int expectedCompetitionCount = 3; List<CompetitionResource> competitions = getAllCompetitions(expectedCompetitionCount); checkExistingCompetition(competitions.get(0)); checkNewCompetition(competitions.get(1)); } @Rollback @Test public void testCompetitionCodeGeneration() throws Exception { // Setup test data getAllCompetitions(2); createNewCompetition(); createNewCompetition(); createNewCompetition(); List<CompetitionResource> competitions = getAllCompetitions(5); // Generate number 1 in this month year combination RestResult<String> generatedCode = controller.generateCompetitionCode(LocalDateTime.of(2016, 6, 5, 12, 00), competitions.get(0).getId()); assertTrue(generatedCode.isSuccess()); assertEquals("1606-1", generatedCode.getSuccessObject()); flushAndClearSession(); // Generate number 2 in this month year combination generatedCode = controller.generateCompetitionCode(LocalDateTime.of(2016, 6, 5, 12, 00), competitions.get(1).getId()); assertTrue(generatedCode.isSuccess()); assertEquals("1606-2", generatedCode.getSuccessObject()); flushAndClearSession(); // Generate number 3 in this month year combination generatedCode = controller.generateCompetitionCode(LocalDateTime.of(2016, 6, 5, 12, 00), competitions.get(2).getId()); assertTrue(generatedCode.isSuccess()); assertEquals("1606-3", generatedCode.getSuccessObject()); // if generated twice the first code should not be updated. generatedCode = controller.generateCompetitionCode(LocalDateTime.of(2020, 11, 11, 12, 00), competitions.get(2).getId()); assertTrue(generatedCode.isSuccess()); assertEquals("1606-3", generatedCode.getSuccessObject()); } private List<CompetitionResource> getAllCompetitions(int expectedCompetitionCount) { RestResult<List<CompetitionResource>> allCompetitionsResult = controller.findAll(); assertTrue(allCompetitionsResult.isSuccess()); List<CompetitionResource> competitions = allCompetitionsResult.getSuccessObject(); assertThat("Checking if the amount of competitions is what we expect.", competitions, hasSize(expectedCompetitionCount)); return competitions; } @Rollback @Test public void testUpdateCompetition() throws Exception { getAllCompetitions(2); // Create new competition CompetitionResource competition = createNewCompetition(); getAllCompetitions(3); // Update competition competition.setName(COMPETITION_NAME_UPDATED); RestResult<CompetitionResource> saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); getAllCompetitions(3); CompetitionResource savedCompetition = saveResult.getSuccessObject(); assertEquals(COMPETITION_NAME_UPDATED, savedCompetition.getName()); } @Rollback @Test public void testUpdateCompetitionCategories() throws Exception { getAllCompetitions(2); // Create new competition CompetitionResource competition = createNewCompetition(); getAllCompetitions(3); // Update competition competition.setName(COMPETITION_NAME_UPDATED); Long sectorId = Long.valueOf(INNOVATION_SECTOR_ID); Long areaId = Long.valueOf(INNOVATION_AREA_ID); competition.setInnovationSector(sectorId); competition.setInnovationArea(areaId); RestResult<CompetitionResource> saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); getAllCompetitions(3); 
CompetitionResource savedCompetition = saveResult.getSuccessObject(); checkUpdatedCompetitionCategories(savedCompetition); } @Rollback @Test public void testUpdateCompetitionCoFunders() throws Exception { getAllCompetitions(2); // Create new competition CompetitionResource competition = createNewCompetition(); getAllCompetitions(3); // Update competition competition.setName(COMPETITION_NAME_UPDATED); Long sectorId = Long.valueOf(INNOVATION_SECTOR_ID); Long areaId = Long.valueOf(INNOVATION_AREA_ID); competition.setInnovationSector(sectorId); competition.setInnovationArea(areaId); //With one co-funder competition.setCoFunders(CompetitionCoFundersFixture.getNewTestCoFundersResouces(1, competition.getId())); RestResult<CompetitionResource> saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); getAllCompetitions(3); CompetitionResource savedCompetition = saveResult.getSuccessObject(); assertEquals(1, savedCompetition.getCoFunders().size()); // Now re-insert with 2 co-funders competition.setCoFunders(CompetitionCoFundersFixture.getNewTestCoFundersResouces(2, competition.getId())); saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); savedCompetition = saveResult.getSuccessObject(); // we should expect in total two co-funders. assertEquals(2, savedCompetition.getCoFunders().size()); } @Rollback @Test public void testCompetitionCategorySaving() throws Exception { getAllCompetitions(2); // Create new competition CompetitionResource competition = createNewCompetition(); getAllCompetitions(3); assertEquals(EXISTING_CATEGORY_LINK_BEFORE_TEST, categoryLinkRepository.count()); // Update competition competition.setName(COMPETITION_NAME_UPDATED); Long sectorId = Long.valueOf(INNOVATION_SECTOR_ID); Long areaId = Long.valueOf(INNOVATION_AREA_ID); competition.setInnovationSector(sectorId); competition.setInnovationArea(areaId); // Check if the categorylink is only stored once. 
RestResult<CompetitionResource> saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); assertEquals(EXISTING_CATEGORY_LINK_BEFORE_TEST + 2, categoryLinkRepository.count()); CompetitionResource savedCompetition = saveResult.getSuccessObject(); checkUpdatedCompetitionCategories(savedCompetition); // check that the link is not duplicated saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); assertEquals(EXISTING_CATEGORY_LINK_BEFORE_TEST + 2, categoryLinkRepository.count()); // check that the link is removed competition.setInnovationSector(null); saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); assertEquals(EXISTING_CATEGORY_LINK_BEFORE_TEST + 1, categoryLinkRepository.count()); // check that the link is updated (or removed and added) competition.setInnovationArea(Long.valueOf(INNOVATION_AREA_ID_TWO)); saveResult = controller.saveCompetition(competition, competition.getId()); assertTrue("Assert save is success", saveResult.isSuccess()); assertEquals(EXISTING_CATEGORY_LINK_BEFORE_TEST + 1, categoryLinkRepository.count()); getAllCompetitions(3); } @Rollback @Test public void testCompetitionCompletedSections() throws Exception { Long competitionId = 7L; RestResult<CompetitionResource> competitionsResult = controller.getCompetitionById(competitionId); assertEquals(0, competitionsResult.getSuccessObject().getSectionSetupStatus().size()); } @Rollback @Test public void testCompetitionCompleteSection() throws Exception { Long competitionId = 7L; controller.markSectionComplete(competitionId, CompetitionSetupSection.INITIAL_DETAILS); RestResult<CompetitionResource> competitionsResult = controller.getCompetitionById(competitionId); assertEquals(Boolean.TRUE, competitionsResult.getSuccessObject().getSectionSetupStatus().get(CompetitionSetupSection.INITIAL_DETAILS)); } @Rollback @Test public void testCompetitionInCompleteSection() throws Exception { Long competitionId = 7L; controller.markSectionInComplete(competitionId, CompetitionSetupSection.INITIAL_DETAILS); RestResult<CompetitionResource> competitionsResult = controller.getCompetitionById(competitionId); assertEquals(Boolean.FALSE, competitionsResult.getSuccessObject().getSectionSetupStatus().get(CompetitionSetupSection.INITIAL_DETAILS)); } @Rollback @Test public void testFindMethods() throws Exception { List<CompetitionResource> existingComps = getAllCompetitions(2); CompetitionResource notStartedCompetition = createWithDates(oneDayAhead, twoDaysAhead, threeDaysAhead, fourDaysAhead, fiveDaysAhead, sixDaysAhead); assertThat(notStartedCompetition.getCompetitionStatus(), equalTo(CompetitionResource.Status.NOT_STARTED)); CompetitionResource openCompetition = createWithDates(oneDayAgo, oneDayAhead, twoDaysAhead, threeDaysAhead, fourDaysAhead, fiveDaysAhead); assertThat(openCompetition.getCompetitionStatus(), equalTo(CompetitionResource.Status.OPEN)); CompetitionResource closedCompetition = createWithDates(twoDaysAgo, oneDayAgo, twoDaysAhead, threeDaysAhead, fourDaysAhead, fiveDaysAhead); assertThat(closedCompetition.getCompetitionStatus(), equalTo(CompetitionResource.Status.CLOSED)); CompetitionResource inAssessmentCompetition = createWithDates(threeDaysAgo, twoDaysAgo, oneDayAgo, threeDaysAhead, fourDaysAhead, fiveDaysAhead); assertThat(inAssessmentCompetition.getCompetitionStatus(), 
equalTo(CompetitionResource.Status.IN_ASSESSMENT)); CompetitionResource inPanelCompetition = createWithDates(fourDaysAgo, threeDaysAgo, twoDaysAgo, oneDayAgo, fourDaysAhead, fiveDaysAhead); assertThat(inPanelCompetition.getCompetitionStatus(), equalTo(CompetitionResource.Status.FUNDERS_PANEL)); CompetitionResource assessorFeedbackCompetition = createWithDates(fiveDaysAgo, fourDaysAgo, threeDaysAgo, twoDaysAgo, oneDayAgo, fiveDaysAhead); assertThat(assessorFeedbackCompetition.getCompetitionStatus(), equalTo(CompetitionResource.Status.ASSESSOR_FEEDBACK)); CompetitionResource projectSetup = createWithDates(sixDaysAgo, fiveDaysAgo, fourDaysAgo, threeDaysAgo, twoDaysAgo, oneDayAgo); assertThat(projectSetup.getCompetitionStatus(), equalTo(CompetitionResource.Status.PROJECT_SETUP)); CompetitionCountResource counts = null; try { counts = controller.count().getSuccessObjectOrThrowException(); } catch(Exception ex){ ex.printStackTrace(); } // CompetitionCountResource counts = controller.count().getSuccessObjectOrThrowException(); List<CompetitionResource> liveCompetitions = controller.live().getSuccessObjectOrThrowException(); Set<Long> liveCompetitionIds = Sets.newHashSet(openCompetition.getId(), closedCompetition.getId(), inAssessmentCompetition.getId(), inPanelCompetition.getId(), assessorFeedbackCompetition.getId()); Set<Long> notLiveCompetitionIds = Sets.newHashSet(notStartedCompetition.getId(), projectSetup.getId()); //Live competitions plus one the test data. assertThat(liveCompetitions.size(), equalTo(liveCompetitionIds.size() + 1)); assertThat(counts.getLiveCount(), equalTo((long) (liveCompetitionIds.size() + 1))); liveCompetitions.stream().forEach(competitionResource -> { //Existing competitions in the db should be ignored. if (!existingComps.get(0).getId().equals(competitionResource.getId()) && !existingComps.get(1).getId().equals(competitionResource.getId())) { assertTrue(liveCompetitionIds.contains(competitionResource.getId())); assertFalse(notLiveCompetitionIds.contains(competitionResource.getId())); } }); List<CompetitionResource> projectSetupCompetitions = controller.projectSetup().getSuccessObjectOrThrowException(); Set<Long> projectSetupCompetitionIds = Sets.newHashSet(projectSetup.getId()); Set<Long> notProjectSetupCompetitionIds = Sets.newHashSet(notStartedCompetition.getId(), openCompetition.getId(), closedCompetition.getId(), inAssessmentCompetition.getId(), inPanelCompetition.getId(), assessorFeedbackCompetition.getId()); assertThat(projectSetupCompetitions.size(), equalTo(projectSetupCompetitionIds.size())); assertThat(counts.getProjectSetupCount(), equalTo((long) projectSetupCompetitionIds.size())); projectSetupCompetitions.stream().forEach(competitionResource -> { assertTrue(projectSetupCompetitionIds.contains(competitionResource.getId())); assertFalse(notProjectSetupCompetitionIds.contains(competitionResource.getId())); }); List<CompetitionResource> upcomingCompetitions = controller.upcoming().getSuccessObjectOrThrowException(); //One existing comp is upcoming and the new one. 
assertThat(upcomingCompetitions.size(), equalTo(2)); assertThat(counts.getUpcomingCount(), equalTo(2L)); Set<Long> upcomingCompetitionIds = Sets.newHashSet(notStartedCompetition.getId()); Set<Long> notUpcomingCompetitionIds = Sets.newHashSet(projectSetup.getId(), openCompetition.getId(), closedCompetition.getId(), inAssessmentCompetition.getId(), inPanelCompetition.getId(), assessorFeedbackCompetition.getId()); upcomingCompetitions.stream().forEach(competitionResource -> { //Existing competitions in the db should be ignored. if (!existingComps.get(0).getId().equals(competitionResource.getId()) && !existingComps.get(1).getId().equals(competitionResource.getId())) { assertTrue(upcomingCompetitionIds.contains(competitionResource.getId())); assertFalse(notUpcomingCompetitionIds.contains(competitionResource.getId())); } }); } private CompetitionResource createWithDates(LocalDateTime startDate, LocalDateTime endDate, LocalDateTime assessmentStartDate, LocalDateTime assessmentEndDate, LocalDateTime fundersPanelEndDate, LocalDateTime assessorFeedbackDate) { CompetitionResource comp = controller.create().getSuccessObjectOrThrowException(); List<Milestone> milestone = createNewMilestones(comp, startDate, endDate, assessmentStartDate, assessmentEndDate, fundersPanelEndDate, assessorFeedbackDate); List<Long> milestonesIds = new ArrayList<>(); milestone.forEach(m -> { milestonesIds.add(m.getId()); milestoneRepository.save(m); }); controller.saveCompetition(comp, comp.getId()).getSuccessObjectOrThrowException(); //TODO replace with controller endpoint for competition setup finished Competition compEntity = competitionRepository.findById(comp.getId()); compEntity.setStatus(CompetitionResource.Status.COMPETITION_SETUP_FINISHED); competitionRepository.save(compEntity); flushAndClearSession(); return controller.getCompetitionById(comp.getId()).getSuccessObjectOrThrowException(); } private List<Milestone> createNewMilestones(CompetitionResource comp, LocalDateTime startDate, LocalDateTime endDate, LocalDateTime assessmentStartDate, LocalDateTime assessmentEndDate, LocalDateTime fundersPanelEndDate, LocalDateTime assessorFeedbackDate) { LocalDateTime milestoneDate = LocalDateTime.now(); List<MilestoneResource.MilestoneName> milestoneNames = populateMilestoneNames(); List<Milestone> milestones = new ArrayList<>(); Milestone milestone; for(MilestoneResource.MilestoneName milestoneName : milestoneNames) { milestone = new Milestone(); milestone.setName(milestoneName); milestone.setCompetition(assignCompetitionId(comp)); milestone.setDate(milestoneDate); if (milestone.getName().toString().equals("OPEN_DATE")){ milestone.setDate(startDate); } if (milestone.getName().toString().equals("SUBMISSION_DATE")) { milestone.setDate(endDate); } if (milestone.getName().toString().equals("ASSESSOR_ACCEPTS")) { milestone.setDate(assessmentStartDate); } if (milestone.getName().toString().equals("ASSESSOR_DEADLINE")) { milestone.setDate(assessorFeedbackDate); } if (milestone.getName().toString().equals("FUNDERS_PANEL")) { milestone.setDate(assessmentEndDate); } if (milestone.getName().toString().equals("NOTIFICATIONS")){ milestone.setDate(fundersPanelEndDate); } milestones.add(milestone); } return milestones; } private Competition assignCompetitionId(CompetitionResource competition) { Competition newComp = new Competition(); newComp.setId(competition.getId()); return newComp; } private CompetitionResource createNewCompetition() { RestResult<CompetitionResource> competitionsResult = controller.create(); 
assertTrue(competitionsResult.isSuccess()); CompetitionResource competition = competitionsResult.getSuccessObject(); assertThat(competition.getName(), isEmptyOrNullString()); return competition; } private List<MilestoneResource.MilestoneName> populateMilestoneNames() { return new ArrayList<MilestoneResource.MilestoneName>(EnumSet.allOf(MilestoneResource.MilestoneName.class)); } private void checkUpdatedCompetitionCategories(CompetitionResource savedCompetition) { assertEquals(COMPETITION_NAME_UPDATED, savedCompetition.getName()); assertEquals(INNOVATION_SECTOR_ID, (long) savedCompetition.getInnovationSector()); assertEquals(INNOVATION_SECTOR_NAME, savedCompetition.getInnovationSectorName()); assertEquals(INNOVATION_AREA_ID, (long) savedCompetition.getInnovationArea()); assertEquals(INNOVATION_AREA_NAME, savedCompetition.getInnovationAreaName()); } private void checkExistingCompetition(CompetitionResource competition) { assertThat(competition, notNullValue()); assertThat(competition.getName(), is(EXISTING_COMPETITION_NAME)); assertThat(competition.getCompetitionStatus(), is(CompetitionResource.Status.OPEN)); } private void checkNewCompetition(CompetitionResource competition) { assertThat(competition, notNullValue()); assertThat(competition.getName(), isEmptyOrNullString()); assertThat(competition.getCompetitionStatus(), is(CompetitionResource.Status.COMPETITION_SETUP)); } }
INFUND-2993-create-a-competition-step-4-milestones Small refactoring
ifs-data-service/src/test/java/com/worth/ifs/competition/controller/CompetitionControllerIntegrationTest.java
INFUND-2993-create-a-competition-step-4-milestones Small refactoring
<ide><path>ifs-data-service/src/test/java/com/worth/ifs/competition/controller/CompetitionControllerIntegrationTest.java
<ide> CompetitionResource projectSetup = createWithDates(sixDaysAgo, fiveDaysAgo, fourDaysAgo, threeDaysAgo, twoDaysAgo, oneDayAgo);
<ide> assertThat(projectSetup.getCompetitionStatus(), equalTo(CompetitionResource.Status.PROJECT_SETUP));
<ide>
<del> CompetitionCountResource counts = null;
<del> try {
<del>
<del> counts = controller.count().getSuccessObjectOrThrowException();
<del> }
<del> catch(Exception ex){
<del> ex.printStackTrace();
<del> }
<del>// CompetitionCountResource counts = controller.count().getSuccessObjectOrThrowException();
<add> CompetitionCountResource counts = controller.count().getSuccessObjectOrThrowException();
<ide>
<ide> List<CompetitionResource> liveCompetitions = controller.live().getSuccessObjectOrThrowException();
<ide>
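An editorial aside on the test helper in the commit above: createNewMilestones assigns each date by comparing milestone.getName().toString() against string literals, which the compiler cannot check. Below is a minimal sketch of an equivalent, type-safe mapping, assuming only the MilestoneResource.MilestoneName constants and Milestone setters that already appear in the test (java.util.EnumMap and java.util.Map would need to be imported); this is an illustration, not part of the commit.

    private List<Milestone> createNewMilestones(CompetitionResource comp, LocalDateTime startDate, LocalDateTime endDate,
            LocalDateTime assessmentStartDate, LocalDateTime assessmentEndDate, LocalDateTime fundersPanelEndDate,
            LocalDateTime assessorFeedbackDate) {
        // Pin each named milestone to its date once, up front.
        Map<MilestoneResource.MilestoneName, LocalDateTime> dates = new EnumMap<>(MilestoneResource.MilestoneName.class);
        dates.put(MilestoneResource.MilestoneName.OPEN_DATE, startDate);
        dates.put(MilestoneResource.MilestoneName.SUBMISSION_DATE, endDate);
        dates.put(MilestoneResource.MilestoneName.ASSESSOR_ACCEPTS, assessmentStartDate);
        dates.put(MilestoneResource.MilestoneName.ASSESSOR_DEADLINE, assessorFeedbackDate);
        dates.put(MilestoneResource.MilestoneName.FUNDERS_PANEL, assessmentEndDate);
        dates.put(MilestoneResource.MilestoneName.NOTIFICATIONS, fundersPanelEndDate);

        List<Milestone> milestones = new ArrayList<>();
        for (MilestoneResource.MilestoneName name : MilestoneResource.MilestoneName.values()) {
            Milestone milestone = new Milestone();
            milestone.setName(name);
            milestone.setCompetition(assignCompetitionId(comp));
            // Any milestone the test does not pin down falls back to "now",
            // mirroring the default in the original helper.
            milestone.setDate(dates.getOrDefault(name, LocalDateTime.now()));
            milestones.add(milestone);
        }
        return milestones;
    }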
Java
agpl-3.0
35e55b80703bf5ebf6cbc0bcffacd5f2f2891100
0
joval/jWSMV,joval/jWSMV,joval/jWSMV
// Copyright (C) 2012 jOVAL.org. All rights reserved. // This software is licensed under the AGPL 3.0 license available at http://www.joval.org/agpl_v3.txt package jwsmv.cim; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.NoSuchElementException; import javax.xml.namespace.QName; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.DocumentBuilder; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.dmtf.wsman.AttributableDuration; import org.dmtf.wsman.AttributableURI; import org.dmtf.wsman.OptionSet; import org.dmtf.wsman.OptionType; import jwsmv.Constants; import jwsmv.wsman.FaultException; import jwsmv.wsman.Port; /** * A WSMV-based implementation of the WMI StdRegProv class. * * @author David A. Solin * @version %I% %G% */ public class StdRegProv implements Constants { public static final long HKEY_CLASSES_ROOT = 0x80000000L; public static final long HKEY_CURRENT_USER = 0x80000001L; public static final long HKEY_LOCAL_MACHINE = 0x80000002L; public static final long HKEY_USERS = 0x80000003L; public static final long HKEY_CURRENT_CONFIG= 0x80000005L; public static final long HKEY_DYN_DATA = 0x80000006L; public static final int REG_NONE = 0; public static final int REG_SZ = 1; public static final int REG_EXPAND_SZ = 2; public static final int REG_BINARY = 3; public static final int REG_DWORD = 4; public static final int REG_MULTI_SZ = 7; public static final int REG_QWORD = 11; static final String CLASS_URI = "http://schemas.microsoft.com/wbem/wsman/1/wmi/root/cimv2/StdRegProv"; static final DocumentBuilder BUILDER; static { try { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); BUILDER = dbf.newDocumentBuilder(); } catch (Exception e) { throw new RuntimeException(e); } } private Port port; private Document doc; private String arch = null; /** * Create a new Registry, using the default provider architecture. */ public StdRegProv(Port port) { this.port = port; doc = BUILDER.newDocument(); } /** * Create a new Registry using the specified provider architecture. * * @param view Use 32 or 64. // // DAS: One cannot select the provider architecture via MS-WSMV, on account of Microsoft internal defect // ID #SR112120710065406, so for now, I have commented out this constructor. // public StdRegProv(Port port, int view) throws IllegalArgumentException { this(port); switch(view) { case 32: arch = "32"; break; case 64: arch = "64"; break; default: throw new IllegalArgumentException(Integer.toString(view)); } } */ /** * List all the subkeys under a registry key. 
* * @param hive one of the HKEY_* constants * @param subkey the path of the subkey to enumerate */ public String[] enumKey(long hive, String subkey) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element params = doc.createElementNS(CLASS_URI, "EnumKey_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); Object result = port.dispatch(CLASS_URI + "/EnumKey", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("EnumKey_OUTPUT".equals(elt.getLocalName())) { int hResult = (int)getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sNames"); int len = nodes.getLength(); ArrayList<String> subkeys = new ArrayList<String>(len); for (int i=0; i < len; i++) { subkeys.add(nodes.item(i).getTextContent()); } return subkeys.toArray(new String[len]); case 2: throw new NoSuchElementException(subkey); default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * List all the values (and their types) stored under a registry key. * * @param hive one of the HKEY_* constants * @param subkey the path of the subkey whose values will be enumerated */ public Value[] enumValues(long hive, String subkey) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element params = doc.createElementNS(CLASS_URI, "EnumValues_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); Object result = port.dispatch(CLASS_URI + "/EnumValues", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("EnumValues_OUTPUT".equals(elt.getLocalName())) { int hResult = (int)getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sNames"); int len = nodes.getLength(); ArrayList<Value> values = new ArrayList<Value>(len); for (int i=0; i < len; i++) { Value value = new Value(); value.name = nodes.item(i).getTextContent(); values.add(value); } nodes = elt.getElementsByTagNameNS(CLASS_URI, "Types"); for (int i=0; i < len; i++) { values.get(i).type = Integer.parseInt(nodes.item(i).getTextContent()); } return values.toArray(new Value[len]); default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_BINARY value from the registry. 
*/ public byte[] getBinaryValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetBinaryValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetBinaryValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetBinaryValue_OUTPUT".equals(elt.getLocalName())) { int hResult = (int)getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "uValue"); int len = nodes.getLength(); byte[] data = new byte[len]; for (int i=0; i < len; i++) { data[i] = (byte)(0xFF & Short.parseShort(nodes.item(i).getTextContent())); } return data; case 2: throw new NoSuchElementException(value); default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_DWORD value from the registry. */ public int getDwordValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetDWORDValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetDWORDValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetDWORDValue_OUTPUT".equals(elt.getLocalName())) { int hResult = (int)getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "uValue"); int len = nodes.getLength(); if (len == 1) { return Integer.parseInt(nodes.item(0).getTextContent()); } else { throw new Exception("Unexpected return value quantity: " + len); } case 2: throw new NoSuchElementException(value); default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_EXPAND_SZ value from the registry. The returned value will be expanded. 
*/ public String getExpandedStringValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetExpandedStringValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetExpandedStringValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetExpandedStringValue_OUTPUT".equals(elt.getLocalName())) { int hResult = (int)getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sValue"); int len = nodes.getLength(); if (len == 1) { return nodes.item(0).getTextContent(); } else { throw new Exception("Unexpected return value quantity: " + len); } case 2: throw new NoSuchElementException(value); default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_MULTI_SZ value from the registry. */ public String[] getMultiStringValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetMultiStringValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetMultiStringValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetMultiStringValue_OUTPUT".equals(elt.getLocalName())) { int hResult = (int)getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sValue"); int len = nodes.getLength(); if (len == 0) { return null; } else { String[] data = new String[len]; for (int i=0; i < len; i++) { data[i] = nodes.item(i).getTextContent(); } return data; } case 2: throw new NoSuchElementException(value); default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_SZ value from the registry. 
*/ public String getStringValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetStringValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetStringValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetStringValue_OUTPUT".equals(elt.getLocalName())) { int hResult = (int)getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sValue"); int len = nodes.getLength(); if (len == 1) { return nodes.item(0).getTextContent(); } else { throw new Exception("Unexpected return value quantity: " + len); } case 2: throw new NoSuchElementException(value); default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_QWORD value from the registry. */ public long getQwordValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetQWORDValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetQWORDValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetQWORDValue_OUTPUT".equals(elt.getLocalName())) { int hResult = (int)getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "uValue"); int len = nodes.getLength(); if (len == 1) { return Long.parseLong(nodes.item(0).getTextContent()); } else { throw new Exception("Unexpected return value quantity: " + len); } case 2: throw new NoSuchElementException(value); default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Container for information about a registry value. */ public class Value { private int type; private String name; private Value() {} public String getName() { return name; } /** * The REG_* constant corresponding to the type of this value. 
*/
        public int getType() {
            return type;
        }

        @Override
        public String toString() {
            String sType = null;
            switch(type) {
              case StdRegProv.REG_NONE:
                sType = "REG_NONE ";
                break;
              case StdRegProv.REG_DWORD:
                sType = "REG_DWORD ";
                break;
              case StdRegProv.REG_BINARY:
                sType = "REG_BINARY ";
                break;
              case StdRegProv.REG_SZ:
                sType = "REG_SZ ";
                break;
              case StdRegProv.REG_EXPAND_SZ:
                sType = "REG_EXPAND_SZ ";
                break;
              case StdRegProv.REG_MULTI_SZ:
                sType = "REG_MULTI_SZ ";
                break;
              case StdRegProv.REG_QWORD:
                sType = "REG_QWORD ";
                break;
              default:
                // Type codes with no REG_* constant above (e.g., 5, 6, 8-10) would
                // otherwise leave sType null and make toString throw an NPE.
                sType = "REG_UNKNOWN(" + type + ") ";
                break;
            }
            return new StringBuffer(sType).append(name).toString();
        }
    }

    // Private

    /**
     * Get the call result code from the element.
     */
    private long getHResult(Element elt) throws IllegalArgumentException {
        NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "ReturnValue");
        int len = nodes.getLength();
        if (len == 1) {
            return Long.parseLong(nodes.item(0).getTextContent());
        } else {
            throw new IllegalArgumentException("Unexpected return value quantity: " + len);
        }
    }

    /**
     * Get dispatch headers for invoking methods of the StdRegProv WMI class.
     */
    private List<Object> getDispatchHeaders() {
        List<Object> headers = new ArrayList<Object>();
        AttributableURI uri = Factories.WSMAN.createAttributableURI();
        uri.setValue(CLASS_URI);
        uri.getOtherAttributes().put(MUST_UNDERSTAND, "true");
        headers.add(Factories.WSMAN.createResourceURI(uri));

        //
        // Set the appropriate provider architecture using an OptionSet, if one was specified.
        //
        if (arch != null) {
            OptionSet options = Factories.WSMAN.createOptionSet();
            headers.add(options);
            OptionType architecture = Factories.WSMAN.createOptionType();
            architecture.setName("wmi:__ProviderArchitecture");
            architecture.setType(new QName(XMLNS, "int"));
            architecture.setValue(arch);
            options.getOption().add(architecture);
        }

        AttributableDuration duration = Factories.WSMAN.createAttributableDuration();
        duration.setValue(Factories.XMLDT.newDuration(60000));
        headers.add(Factories.WSMAN.createOperationTimeout(duration));
        return headers;
    }
}
src/jwsmv/cim/StdRegProv.java
// Copyright (C) 2012 jOVAL.org. All rights reserved. // This software is licensed under the AGPL 3.0 license available at http://www.joval.org/agpl_v3.txt package jwsmv.cim; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.NoSuchElementException; import javax.xml.namespace.QName; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.DocumentBuilder; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.dmtf.wsman.AttributableDuration; import org.dmtf.wsman.AttributableURI; import org.dmtf.wsman.OptionSet; import org.dmtf.wsman.OptionType; import jwsmv.Constants; import jwsmv.wsman.FaultException; import jwsmv.wsman.Port; /** * A WSMV-based implementation of the WMI StdRegProv class. * * @author David A. Solin * @version %I% %G% */ public class StdRegProv implements Constants { public static final long HKEY_CLASSES_ROOT = 0x80000000L; public static final long HKEY_CURRENT_USER = 0x80000001L; public static final long HKEY_LOCAL_MACHINE = 0x80000002L; public static final long HKEY_USERS = 0x80000003L; public static final long HKEY_CURRENT_CONFIG= 0x80000005L; public static final long HKEY_DYN_DATA = 0x80000006L; public static final int REG_NONE = 0; public static final int REG_DWORD = 1; public static final int REG_BINARY = 2; public static final int REG_SZ = 3; public static final int REG_EXPAND_SZ = 4; public static final int REG_MULTI_SZ = 5; public static final int REG_QWORD = 6; static final String CLASS_URI = "http://schemas.microsoft.com/wbem/wsman/1/wmi/root/cimv2/StdRegProv"; static final DocumentBuilder BUILDER; static { try { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); BUILDER = dbf.newDocumentBuilder(); } catch (Exception e) { throw new RuntimeException(e); } } private Port port; private Document doc; private String arch = null; /** * Create a new Registry, using the default provider architecture. */ public StdRegProv(Port port) { this.port = port; doc = BUILDER.newDocument(); } /** * Create a new Registry using the specified provider architecture. * * @param view Use 32 or 64. // // DAS: One cannot select the provider architecture via MS-WSMV, on account of Microsoft internal defect // ID #SR112120710065406, so for now, I have commented out this constructor. // public StdRegProv(Port port, int view) throws IllegalArgumentException { this(port); switch(view) { case 32: arch = "32"; break; case 64: arch = "64"; break; default: throw new IllegalArgumentException(Integer.toString(view)); } } */ /** * List all the subkeys under a registry key. 
* * @param hive one of the HKEY_* constants * @param subkey the path of the subkey to enumerate */ public String[] enumKey(long hive, String subkey) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element params = doc.createElementNS(CLASS_URI, "EnumKey_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); Object result = port.dispatch(CLASS_URI + "/EnumKey", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("EnumKey_OUTPUT".equals(elt.getLocalName())) { int hResult = getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sNames"); int len = nodes.getLength(); ArrayList<String> subkeys = new ArrayList<String>(len); for (int i=0; i < len; i++) { subkeys.add(nodes.item(i).getTextContent()); } return subkeys.toArray(new String[len]); default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * List all the values (and their types) stored under a registry key. * * @param hive one of the HKEY_* constants * @param subkey the path of the subkey whose values will be enumerated */ public Value[] enumValues(long hive, String subkey) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element params = doc.createElementNS(CLASS_URI, "EnumValues_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); Object result = port.dispatch(CLASS_URI + "/EnumValues", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("EnumValues_OUTPUT".equals(elt.getLocalName())) { int hResult = getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sNames"); int len = nodes.getLength(); ArrayList<Value> values = new ArrayList<Value>(len); for (int i=0; i < len; i++) { Value value = new Value(); value.name = nodes.item(i).getTextContent(); values.add(value); } nodes = elt.getElementsByTagNameNS(CLASS_URI, "Types"); for (int i=0; i < len; i++) { values.get(i).type = Integer.parseInt(nodes.item(i).getTextContent()); } return values.toArray(new Value[len]); default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_BINARY value from the registry. 
*/ public byte[] getBinaryValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetBinaryValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetBinaryValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetBinaryValue_OUTPUT".equals(elt.getLocalName())) { int hResult = getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "uValue"); int len = nodes.getLength(); byte[] data = new byte[len]; for (int i=0; i < len; i++) { data[i] = (byte)(0xFF & Short.parseShort(nodes.item(i).getTextContent())); } return data; default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_DWORD value from the registry. */ public int getDwordValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetDWORDValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetDWORDValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetDWORDValue_OUTPUT".equals(elt.getLocalName())) { int hResult = getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "uValue"); int len = nodes.getLength(); if (len == 1) { return Integer.parseInt(nodes.item(0).getTextContent()); } else { throw new Exception("Unexpected return value quantity: " + len); } default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_EXPAND_SZ value from the registry. The returned value will be expanded. 
*/ public String getExpandedStringValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetExpandedStringValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetExpandedStringValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetExpandedStringValue_OUTPUT".equals(elt.getLocalName())) { int hResult = getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sValue"); int len = nodes.getLength(); if (len == 1) { return nodes.item(0).getTextContent(); } else { throw new Exception("Unexpected return value quantity: " + len); } default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_MULTI_SZ value from the registry. */ public String[] getMultiStringValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetMultiStringValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetMultiStringValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetMultiStringValue_OUTPUT".equals(elt.getLocalName())) { int hResult = getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sValue"); int len = nodes.getLength(); if (len == 0) { return null; } else { String[] data = new String[len]; for (int i=0; i < len; i++) { data[i] = nodes.item(i).getTextContent(); } return data; } default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_SZ value from the registry. 
*/ public String getStringValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetStringValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetStringValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetStringValue_OUTPUT".equals(elt.getLocalName())) { int hResult = getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sValue"); int len = nodes.getLength(); if (len == 1) { return nodes.item(0).getTextContent(); } else { throw new Exception("Unexpected return value quantity: " + len); } default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Get a REG_QWORD value from the registry. */ public long getQwordValue(long hive, String subkey, String value) throws Exception { Element defKey = doc.createElementNS(CLASS_URI, "hDefKey"); defKey.setTextContent(Long.toString(hive)); Element subKeyName = doc.createElementNS(CLASS_URI, "sSubKeyName"); subKeyName.setTextContent(subkey); Element valueName = doc.createElementNS(CLASS_URI, "sValueName"); valueName.setTextContent(value); Element params = doc.createElementNS(CLASS_URI, "GetQWORDValue_INPUT"); params.appendChild(defKey); params.appendChild(subKeyName); params.appendChild(valueName); Object result = port.dispatch(CLASS_URI + "/GetQWORDValue", getDispatchHeaders(), params); if (result instanceof Element) { Element elt = (Element)result; if ("GetQWORDValue_OUTPUT".equals(elt.getLocalName())) { int hResult = getHResult(elt); switch(hResult) { case 0: NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "uValue"); int len = nodes.getLength(); if (len == 1) { return Long.parseLong(nodes.item(0).getTextContent()); } else { throw new Exception("Unexpected return value quantity: " + len); } default: throw new Exception("Unexpected result code: " + hResult); } } else { throw new Exception("Unexpected element: " + elt.getLocalName()); } } else { throw new Exception("Unexpected return type: " + result.getClass().getName()); } } /** * Container for information about a registry value. */ public class Value { private int type; private String name; private Value() {} public String getName() { return name; } /** * The REG_* constant corresponding to the type of this value. */ public int getType() { return type; } @Override public String toString() { String sType = null; switch(type) { case StdRegProv.REG_NONE: sType = "REG_NONE "; break; case StdRegProv.REG_DWORD: sType = "REG_DWORD "; break; case StdRegProv.REG_BINARY: sType = "REG_BINARY "; break; case StdRegProv.REG_SZ: sType = "REG_SZ "; break; case StdRegProv.REG_EXPAND_SZ: sType = "REG_EXPAND_SZ "; break; case StdRegProv.REG_MULTI_SZ: sType = "REG_MULTI_SZ "; break; case StdRegProv.REG_QWORD: sType = "REG_QWORD "; break; } return new StringBuffer(sType).append(name).toString(); } } // Private /** * Get the call result code from the element. 
*/ private int getHResult(Element elt) throws IllegalArgumentException { NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "ReturnValue"); int len = nodes.getLength(); if (len == 1) { return Integer.parseInt(nodes.item(0).getTextContent()); } else { throw new IllegalArgumentException("Unexpected return value quantity: " + len); } } /** * Get dispatch headers for invoking methods of the StdRegProv WMI class. */ private List<Object> getDispatchHeaders() { List<Object> headers = new ArrayList<Object>(); AttributableURI uri = Factories.WSMAN.createAttributableURI(); uri.setValue(CLASS_URI); uri.getOtherAttributes().put(MUST_UNDERSTAND, "true"); headers.add(Factories.WSMAN.createResourceURI(uri)); // // Set the appropriate provider architecture using an OptionSet, if one was specified. // if (arch != null) { OptionSet options = Factories.WSMAN.createOptionSet(); headers.add(options); OptionType architecture = Factories.WSMAN.createOptionType(); architecture.setName("wmi:__ProviderArchitecture"); architecture.setType(new QName(XMLNS, "int")); architecture.setValue(arch); options.getOption().add(architecture); } AttributableDuration duration = Factories.WSMAN.createAttributableDuration(); duration.setValue(Factories.XMLDT.newDuration(60000)); headers.add(Factories.WSMAN.createOperationTimeout(duration)); return headers; } }
REG_* int types were incorrect.
src/jwsmv/cim/StdRegProv.java
REG_* int types were incorrect.
<ide><path>src/jwsmv/cim/StdRegProv.java
<ide> public static final long HKEY_DYN_DATA = 0x80000006L;
<ide>
<ide> public static final int REG_NONE = 0;
<del> public static final int REG_DWORD = 1;
<del> public static final int REG_BINARY = 2;
<del> public static final int REG_SZ = 3;
<del> public static final int REG_EXPAND_SZ = 4;
<del> public static final int REG_MULTI_SZ = 5;
<del> public static final int REG_QWORD = 6;
<add> public static final int REG_SZ = 1;
<add> public static final int REG_EXPAND_SZ = 2;
<add> public static final int REG_BINARY = 3;
<add> public static final int REG_DWORD = 4;
<add> public static final int REG_MULTI_SZ = 7;
<add> public static final int REG_QWORD = 11;
<ide>
<ide> static final String CLASS_URI = "http://schemas.microsoft.com/wbem/wsman/1/wmi/root/cimv2/StdRegProv";
<ide> static final DocumentBuilder BUILDER;
<ide> if (result instanceof Element) {
<ide> Element elt = (Element)result;
<ide> if ("EnumKey_OUTPUT".equals(elt.getLocalName())) {
<del> int hResult = getHResult(elt);
<add> int hResult = (int)getHResult(elt);
<ide> switch(hResult) {
<ide> case 0:
<ide> NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sNames");
<ide> subkeys.add(nodes.item(i).getTextContent());
<ide> }
<ide> return subkeys.toArray(new String[len]);
<add>
<add> case 2:
<add> throw new NoSuchElementException(subkey);
<ide>
<ide> default:
<ide> throw new Exception("Unexpected result code: " + hResult);
<ide> if (result instanceof Element) {
<ide> Element elt = (Element)result;
<ide> if ("EnumValues_OUTPUT".equals(elt.getLocalName())) {
<del> int hResult = getHResult(elt);
<add> int hResult = (int)getHResult(elt);
<ide> switch(hResult) {
<ide> case 0:
<ide> NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sNames");
<ide> if (result instanceof Element) {
<ide> Element elt = (Element)result;
<ide> if ("GetBinaryValue_OUTPUT".equals(elt.getLocalName())) {
<del> int hResult = getHResult(elt);
<add> int hResult = (int)getHResult(elt);
<ide> switch(hResult) {
<ide> case 0:
<ide> NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "uValue");
<ide> }
<ide> return data;
<add>
<add> case 2:
<add> throw new NoSuchElementException(value);
<add>
<ide> default:
<ide> throw new Exception("Unexpected result code: " + hResult);
<ide> }
<ide> if (result instanceof Element) {
<ide> Element elt = (Element)result;
<ide> if ("GetDWORDValue_OUTPUT".equals(elt.getLocalName())) {
<del> int hResult = getHResult(elt);
<add> int hResult = (int)getHResult(elt);
<ide> switch(hResult) {
<ide> case 0:
<ide> NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "uValue");
<ide> throw new Exception("Unexpected return value quantity: " + len);
<ide> }
<add>
<add> case 2:
<add> throw new NoSuchElementException(value);
<add>
<ide> default:
<ide> throw new Exception("Unexpected result code: " + hResult);
<ide> }
<ide> if (result instanceof Element) {
<ide> Element elt = (Element)result;
<ide> if ("GetExpandedStringValue_OUTPUT".equals(elt.getLocalName())) {
<del> int hResult = getHResult(elt);
<add> int hResult = (int)getHResult(elt);
<ide> switch(hResult) {
<ide> case 0:
<ide> NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sValue");
<ide> throw new Exception("Unexpected return value quantity: " + len);
<ide> }
<add>
<add> case 2:
<add> throw new NoSuchElementException(value);
<add>
<ide> default:
<ide> throw new Exception("Unexpected result code: " + hResult);
<ide> }
<ide> if (result instanceof Element) {
<ide> Element elt = (Element)result;
<ide> if
("GetMultiStringValue_OUTPUT".equals(elt.getLocalName())) { <del> int hResult = getHResult(elt); <add> int hResult = (int)getHResult(elt); <ide> switch(hResult) { <ide> case 0: <ide> NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sValue"); <ide> return data; <ide> } <ide> <add> case 2: <add> throw new NoSuchElementException(value); <add> <ide> default: <ide> throw new Exception("Unexpected result code: " + hResult); <ide> } <ide> if (result instanceof Element) { <ide> Element elt = (Element)result; <ide> if ("GetStringValue_OUTPUT".equals(elt.getLocalName())) { <del> int hResult = getHResult(elt); <add> int hResult = (int)getHResult(elt); <ide> switch(hResult) { <ide> case 0: <ide> NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "sValue"); <ide> throw new Exception("Unexpected return value quantity: " + len); <ide> } <ide> <add> case 2: <add> throw new NoSuchElementException(value); <add> <ide> default: <ide> throw new Exception("Unexpected result code: " + hResult); <ide> } <ide> if (result instanceof Element) { <ide> Element elt = (Element)result; <ide> if ("GetQWORDValue_OUTPUT".equals(elt.getLocalName())) { <del> int hResult = getHResult(elt); <add> int hResult = (int)getHResult(elt); <ide> switch(hResult) { <ide> case 0: <ide> NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "uValue"); <ide> } else { <ide> throw new Exception("Unexpected return value quantity: " + len); <ide> } <add> <add> case 2: <add> throw new NoSuchElementException(value); <ide> <ide> default: <ide> throw new Exception("Unexpected result code: " + hResult); <ide> /** <ide> * Get the call result code from the element. <ide> */ <del> private int getHResult(Element elt) throws IllegalArgumentException { <add> private long getHResult(Element elt) throws IllegalArgumentException { <ide> NodeList nodes = elt.getElementsByTagNameNS(CLASS_URI, "ReturnValue"); <ide> int len = nodes.getLength(); <ide> if (len == 1) { <del> return Integer.parseInt(nodes.item(0).getTextContent()); <add> return Long.parseLong(nodes.item(0).getTextContent()); <ide> } else { <ide> throw new IllegalArgumentException("Unexpected return value quantity: " + len); <ide> }
Java
apache-2.0
error: pathspec 'ontology-rest/src/test/java/uk/ac/ebi/quickgo/ontology/controller/CoTermControllerFailedLoadIT.java' did not match any file(s) known to git
f07bea4ced456a39d174a379bd20d30f6bf59f5f
1
ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE
package uk.ac.ebi.quickgo.ontology.controller; import uk.ac.ebi.quickgo.ontology.OntologyREST; import java.util.Arrays; import java.util.stream.Collectors; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.SpringApplicationConfiguration; import org.springframework.test.context.TestPropertySource; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.web.WebAppConfiguration; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.ResultActions; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.web.context.WebApplicationContext; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * @author Tony Wardell * Date: 07/10/2016 * Time: 16:32 * Created with IntelliJ IDEA. */ @RunWith(SpringJUnit4ClassRunner.class) @SpringApplicationConfiguration(classes = {OntologyREST.class}) @WebAppConfiguration @TestPropertySource(properties = {"coterm.source.manual=fu", "coterm.source.all=bar"}) public class CoTermControllerFailedLoadIT { private static final String RESOURCE_URL = "/ontology/go/coterms"; private static final String VALID_GO_TERM = "GO:7777771"; @Autowired private WebApplicationContext webApplicationContext; private MockMvc mockMvc; @Before public void setup() { mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext) .build(); } @Test public void internalServerErrorIfCoTermFilesNotLoaded() throws Exception { ResultActions response = mockMvc.perform(get(buildPathToResource(VALID_GO_TERM))); response.andDo(print()) .andExpect(status().isInternalServerError()); } private String buildPathToResource(String id, String... args) { return RESOURCE_URL + "/" + id + Arrays.stream(args) .collect(Collectors.joining("&", "?", "")); } }
ontology-rest/src/test/java/uk/ac/ebi/quickgo/ontology/controller/CoTermControllerFailedLoadIT.java
Test calling findCoTerms if the data has not been loaded into the CoTerm repository.
ontology-rest/src/test/java/uk/ac/ebi/quickgo/ontology/controller/CoTermControllerFailedLoadIT.java
Test calling findCoTerms if the data has not been loaded into the CoTerm repository.
<ide><path>ntology-rest/src/test/java/uk/ac/ebi/quickgo/ontology/controller/CoTermControllerFailedLoadIT.java <add>package uk.ac.ebi.quickgo.ontology.controller; <add> <add>import uk.ac.ebi.quickgo.ontology.OntologyREST; <add> <add>import java.util.Arrays; <add>import java.util.stream.Collectors; <add>import org.junit.Before; <add>import org.junit.Test; <add>import org.junit.runner.RunWith; <add>import org.springframework.beans.factory.annotation.Autowired; <add>import org.springframework.boot.test.SpringApplicationConfiguration; <add>import org.springframework.test.context.TestPropertySource; <add>import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; <add>import org.springframework.test.context.web.WebAppConfiguration; <add>import org.springframework.test.web.servlet.MockMvc; <add>import org.springframework.test.web.servlet.ResultActions; <add>import org.springframework.test.web.servlet.setup.MockMvcBuilders; <add>import org.springframework.web.context.WebApplicationContext; <add> <add>import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; <add>import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; <add>import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; <add> <add>/** <add> * @author Tony Wardell <add> * Date: 07/10/2016 <add> * Time: 16:32 <add> * Created with IntelliJ IDEA. <add> */ <add>@RunWith(SpringJUnit4ClassRunner.class) <add>@SpringApplicationConfiguration(classes = {OntologyREST.class}) <add>@WebAppConfiguration <add>@TestPropertySource(properties = {"coterm.source.manual=fu", "coterm.source.all=bar"}) <add>public class CoTermControllerFailedLoadIT { <add> <add> private static final String RESOURCE_URL = "/ontology/go/coterms"; <add> private static final String VALID_GO_TERM = "GO:7777771"; <add> <add> @Autowired <add> private WebApplicationContext webApplicationContext; <add> private MockMvc mockMvc; <add> <add> @Before <add> public void setup() { <add> mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext) <add> .build(); <add> } <add> <add> @Test <add> public void internalServerErrorIfCoTermFilesNotLoaded() throws Exception { <add> ResultActions response = mockMvc.perform(get(buildPathToResource(VALID_GO_TERM))); <add> response.andDo(print()) <add> .andExpect(status().isInternalServerError()); <add> } <add> <add> private String buildPathToResource(String id, String... args) { <add> return RESOURCE_URL + "/" + id + Arrays.stream(args) <add> .collect(Collectors.joining("&", "?", "")); <add> } <add> <add>}
Java
apache-2.0
6bfc36d9c9e93f87e4316b10453dc94433b84b58
0
fabric8io/kubernetes-client,fabric8io/kubernetes-client,fabric8io/kubernetes-client
/** * Copyright 2018 The original authors. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package io.dekorate.crd.apt; import io.dekorate.Resources; import io.dekorate.crd.annotation.Autodetect; import io.dekorate.crd.annotation.Crd; import io.dekorate.crd.config.CustomResourceConfig; import io.dekorate.crd.config.CustomResourceConfigBuilder; import io.dekorate.crd.config.Keys; import io.dekorate.crd.config.Scope; import io.dekorate.crd.handler.CustomResourceHandler; import io.dekorate.crd.util.Types; import io.fabric8.kubernetes.model.annotation.Group; import io.fabric8.kubernetes.model.annotation.Kind; import io.fabric8.kubernetes.model.annotation.Plural; import io.fabric8.kubernetes.model.annotation.Singular; import io.fabric8.kubernetes.model.annotation.Version; import io.sundr.codegen.CodegenContext; import io.sundr.codegen.functions.ElementTo; import io.sundr.codegen.model.TypeDef; import io.sundr.codegen.utils.ModelUtils; import java.util.Arrays; import java.util.LinkedHashSet; import java.util.Optional; import java.util.Set; import javax.annotation.processing.AbstractProcessor; import javax.annotation.processing.RoundEnvironment; import javax.annotation.processing.SupportedAnnotationTypes; import javax.lang.model.element.Element; import javax.lang.model.element.TypeElement; import javax.lang.model.type.MirroredTypeException; @SupportedAnnotationTypes({ "io.fabric8.kubernetes.model.annotation.Group", "io.fabric8.kubernetes.model.annotation.Version"}) public class CustomResourceAnnotationProcessor extends AbstractProcessor { private final Resources resources = new Resources(); public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if (roundEnv.processingOver()) { // write files return true; } CodegenContext.create(processingEnv.getElementUtils(), processingEnv.getTypeUtils()); Set<TypeElement> annotatedTypes = new LinkedHashSet<>(); //Collect all annotated types. for (TypeElement annotation : annotations) { for (Element element : roundEnv.getElementsAnnotatedWith(annotation)) { if (element instanceof TypeElement) { annotatedTypes.add((TypeElement) element); } } } //Add annotated types for (TypeElement type : annotatedTypes) { add(type); } return false; } public static <T> T firstOf(Optional<T>... 
optionals) { return Arrays.stream(optionals).filter(Optional::isPresent).map(Optional::get).findFirst() .orElse(null); } public void add(Element element) { System.out.println("Generating " + element.getSimpleName()); Optional<Crd> crd = Optional.ofNullable(element.getAnnotation(Crd.class)); Optional<Group> group = Optional.ofNullable(element.getAnnotation(Group.class)); Optional<Version> version = Optional.ofNullable(element.getAnnotation(Version.class)); Optional<Kind> kind = Optional.ofNullable(element.getAnnotation(Kind.class)); Optional<Plural> plural = Optional.ofNullable(element.getAnnotation(Plural.class)); Optional<Singular> singular = Optional.ofNullable(element.getAnnotation(Singular.class)); String statusClassName; try { statusClassName = crd.map(Crd::status).map(Class::getCanonicalName) .orElse(Autodetect.class.getCanonicalName()); } catch (MirroredTypeException e) { statusClassName = e.getTypeMirror().toString(); } if (element instanceof TypeElement) { TypeDef definition = ElementTo.TYPEDEF.apply((TypeElement) element); String className = ModelUtils.getClassName(element); CustomResourceConfig config = new CustomResourceConfigBuilder() .withKind(firstOf(kind.map(Kind::value), crd.map(Crd::kind))) .withGroup(firstOf(group.map(Group::value), crd.map(Crd::group))) .withVersion(firstOf(version.map(Version::value), crd.map(Crd::version))) .withPlural(firstOf(plural.map(Plural::value), crd.map(Crd::plural))) .withName(firstOf(singular.map(Singular::value), crd.map(Crd::name))) .withScope(firstOf(crd.map(Crd::scope), Optional.of(Types.isNamespaced(definition) ? Scope.Namespaced : Scope.Cluster))) .withServed(firstOf(crd.map(Crd::served), Optional.of(true))) .withStorage(firstOf(crd.map(Crd::storage), Optional.of(false))) .withStatusClassName(statusClassName) .withNewScale() .endScale() // .accept(new AddClassNameConfigurator(className)) .addToAttributes(Keys.TYPE_DEFINITION, definition).build(); new CustomResourceHandler(resources).handle(config); } } }
crd-generator/src/main/java/io/dekorate/crd/apt/CustomResourceAnnotationProcessor.java
/** * Copyright 2018 The original authors. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package io.dekorate.crd.apt; import io.dekorate.Resources; import io.dekorate.crd.annotation.Autodetect; import io.dekorate.crd.annotation.Crd; import io.dekorate.crd.config.CustomResourceConfig; import io.dekorate.crd.config.CustomResourceConfigBuilder; import io.dekorate.crd.config.Keys; import io.dekorate.crd.config.Scope; import io.dekorate.crd.handler.CustomResourceHandler; import io.dekorate.crd.util.Types; import io.fabric8.kubernetes.model.annotation.Group; import io.fabric8.kubernetes.model.annotation.Kind; import io.fabric8.kubernetes.model.annotation.Plural; import io.fabric8.kubernetes.model.annotation.Singular; import io.fabric8.kubernetes.model.annotation.Version; import io.sundr.codegen.CodegenContext; import io.sundr.codegen.functions.ElementTo; import io.sundr.codegen.model.TypeDef; import io.sundr.codegen.utils.ModelUtils; import java.util.Arrays; import java.util.LinkedHashSet; import java.util.Optional; import java.util.Set; import javax.annotation.processing.AbstractProcessor; import javax.annotation.processing.RoundEnvironment; import javax.annotation.processing.SupportedAnnotationTypes; import javax.lang.model.element.Element; import javax.lang.model.element.TypeElement; import javax.lang.model.type.MirroredTypeException; @SupportedAnnotationTypes({ "io.fabric8.kubernetes.model.annotation.Group", "io.fabric8.kubernetes.model.annotation.Version"}) public class CustomResourceAnnotationProcessor extends AbstractProcessor { private final Resources resources = new Resources(); public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { if (roundEnv.processingOver()) { // write files return true; } CodegenContext.create(processingEnv.getElementUtils(), processingEnv.getTypeUtils()); Set<TypeElement> annotatedTypes = new LinkedHashSet<>(); //Collect all annotated types. for (TypeElement annotation : annotations) { for (Element element : roundEnv.getElementsAnnotatedWith(annotation)) { if (element instanceof TypeElement) { annotatedTypes.add((TypeElement) element); } } } //Add annotated types for (TypeElement type : annotatedTypes) { add(type); } return false; } public static <T> T firstOf(Optional<T>... 
optionals) { return Arrays.stream(optionals).filter(Optional::isPresent).map(Optional::get).findFirst() .orElse(null); } public void add(Element element) { Optional<Crd> crd = Optional.ofNullable(element.getAnnotation(Crd.class)); Optional<Group> group = Optional.ofNullable(element.getAnnotation(Group.class)); Optional<Version> version = Optional.ofNullable(element.getAnnotation(Version.class)); Optional<Kind> kind = Optional.ofNullable(element.getAnnotation(Kind.class)); Optional<Plural> plural = Optional.ofNullable(element.getAnnotation(Plural.class)); Optional<Singular> singular = Optional.ofNullable(element.getAnnotation(Singular.class)); String statusClassName; try { statusClassName = crd.map(Crd::status).map(Class::getCanonicalName) .orElse(Autodetect.class.getCanonicalName()); } catch (MirroredTypeException e) { statusClassName = e.getTypeMirror().toString(); } if (element instanceof TypeElement) { TypeDef definition = ElementTo.TYPEDEF.apply((TypeElement) element); String className = ModelUtils.getClassName(element); CustomResourceConfig config = new CustomResourceConfigBuilder() .withKind(firstOf(kind.map(Kind::value), crd.map(Crd::kind))) .withGroup(firstOf(group.map(Group::value), crd.map(Crd::group))) .withVersion(firstOf(version.map(Version::value), crd.map(Crd::version))) .withPlural(firstOf(plural.map(Plural::value), crd.map(Crd::plural))) .withName(firstOf(singular.map(Singular::value), crd.map(Crd::name))) .withScope(firstOf(crd.map(Crd::scope), Optional.of(Types.isNamespaced(definition) ? Scope.Namespaced : Scope.Cluster))) .withServed(firstOf(crd.map(Crd::served), Optional.of(true))) .withStorage(firstOf(crd.map(Crd::storage), Optional.of(false))) .withStatusClassName(statusClassName) .withNewScale() .endScale() // .accept(new AddClassNameConfigurator(className)) .addToAttributes(Keys.TYPE_DEFINITION, definition).build(); new CustomResourceHandler(resources).handle(config); } } }
feat: add output of currently generated element
crd-generator/src/main/java/io/dekorate/crd/apt/CustomResourceAnnotationProcessor.java
feat: add output of currently generated element
<ide><path>rd-generator/src/main/java/io/dekorate/crd/apt/CustomResourceAnnotationProcessor.java <ide> } <ide> <ide> public void add(Element element) { <add> System.out.println("Generating " + element.getSimpleName()); <ide> Optional<Crd> crd = Optional.ofNullable(element.getAnnotation(Crd.class)); <ide> Optional<Group> group = Optional.ofNullable(element.getAnnotation(Group.class)); <ide> Optional<Version> version = Optional.ofNullable(element.getAnnotation(Version.class));
Java
unlicense
89cffca36499eeb526545b056d65775a0816ee3d
0
bmhm/bukkit-plugin-example
/** * Bukkit-Plugin-Example by Benjamin Marwell. * * To the extent possible under law, the person who associated CC0 with * Bukkit-Plugin-Example has waived all copyright and related or neighboring rights * to Bukkit-Plugin-Example. * * You should have received a copy of the CC0 legalcode along with this * work. If not, see <http://creativecommons.org/publicdomain/zero/1.0/>. */ package de.bmarwell.bukkit.listener; import org.bukkit.Location; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.player.PlayerTeleportEvent; import org.bukkit.event.player.PlayerTeleportEvent.TeleportCause; /** * React on player teleport events. * * @since 0.0.1. */ public class PlayerTeleportListener implements Listener { @EventHandler public void onPlayerTeleport(PlayerTeleportEvent event) { // if this event was cancelled, why bother? if (event.isCancelled()) { return; } // If this is not an ender teleport, don't bother. if (!TeleportCause.ENDER_PEARL.equals(event.getCause())) { return; } Player teleportedPlayer = event.getPlayer(); Location origin = event.getFrom(); boolean teleportSuccess = teleportedPlayer.teleport(origin, TeleportCause.PLUGIN); // TODO: log teleportSuccess; } }
src/main/java/de/bmarwell/bukkit/listener/PlayerTeleportListener.java
/** * Bukkit-Plugin-Example by Benjamin Marwell. * * To the extent possible under law, the person who associated CC0 with * Bukkit-Plugin-Example has waived all copyright and related or neighboring rights * to Bukkit-Plugin-Example. * * You should have received a copy of the CC0 legalcode along with this * work. If not, see <http://creativecommons.org/publicdomain/zero/1.0/>. */ package de.bmarwell.bukkit.listener; import org.bukkit.Location; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.Listener; import org.bukkit.event.player.PlayerTeleportEvent; /** * React on player teleport events. * * @since 0.0.1. */ public class PlayerTeleportListener implements Listener { @EventHandler public void onPlayerTeleport(PlayerTeleportEvent event) { // if this event was cancelled, why bother? if (event.isCancelled()) { return; } // Undo all teleports manually. Player teleportedPlayer = event.getPlayer(); Location origin = event.getFrom(); event.getCause(); boolean teleportSuccess = teleportedPlayer.teleport(origin); // TODO: log teleportSuccess; } }
Only undo ender pearl teleport events, and set a proper cause.
src/main/java/de/bmarwell/bukkit/listener/PlayerTeleportListener.java
Only undo ender pearl teleport events, and set a proper cause.
<ide><path>rc/main/java/de/bmarwell/bukkit/listener/PlayerTeleportListener.java <ide> import org.bukkit.event.EventHandler; <ide> import org.bukkit.event.Listener; <ide> import org.bukkit.event.player.PlayerTeleportEvent; <add>import org.bukkit.event.player.PlayerTeleportEvent.TeleportCause; <ide> <ide> /** <ide> * React on player teleport events. <ide> return; <ide> } <ide> <del> // Undo all teleports manually. <add> // If this is not an ender teleport, don't bother. <add> if (!TeleportCause.ENDER_PEARL.equals(event.getCause())) { <add> return; <add> } <add> <ide> Player teleportedPlayer = event.getPlayer(); <ide> Location origin = event.getFrom(); <del> event.getCause(); <ide> <del> boolean teleportSuccess = teleportedPlayer.teleport(origin); <add> boolean teleportSuccess = teleportedPlayer.teleport(origin, TeleportCause.PLUGIN); <ide> // TODO: log teleportSuccess; <ide> } <ide>
Java
apache-2.0
error: pathspec 'src/main/java/info/u_team/u_team_core/util/io/NBTStreamUtil.java' did not match any file(s) known to git
758b9506bc00314c929e3c6ba2486a7fa9d69a69
1
MC-U-Team/U-Team-Core,MC-U-Team/U-Team-Core
package info.u_team.u_team_core.util.io; import java.io.*; import net.minecraft.nbt.*; public class NBTStreamUtil { public static NBTTagCompound readNBTFromFile(File file) throws IOException { return readNBTFromStream(new FileInputStream(file)); } public static NBTTagCompound readNBTFromStream(InputStream stream) throws IOException { NBTTagCompound tag; try { tag = CompressedStreamTools.readCompressed(stream); } finally { stream.close(); } return tag; } public static void writeNBTToFile(NBTTagCompound tag, File file) throws IOException { writeNBTToStream(tag, new FileOutputStream(file)); } public static void writeNBTToStream(NBTTagCompound tag, OutputStream stream) throws IOException { try { CompressedStreamTools.writeCompressed(tag, stream); } finally { stream.flush(); stream.close(); } } }
src/main/java/info/u_team/u_team_core/util/io/NBTStreamUtil.java
Added NBTStreamUtil
src/main/java/info/u_team/u_team_core/util/io/NBTStreamUtil.java
Added NBTStreamUtil
<ide><path>rc/main/java/info/u_team/u_team_core/util/io/NBTStreamUtil.java <add>package info.u_team.u_team_core.util.io; <add> <add>import java.io.*; <add> <add>import net.minecraft.nbt.*; <add> <add>public class NBTStreamUtil { <add> <add> public static NBTTagCompound readNBTFromFile(File file) throws IOException { <add> return readNBTFromStream(new FileInputStream(file)); <add> } <add> <add> public static NBTTagCompound readNBTFromStream(InputStream stream) throws IOException { <add> NBTTagCompound tag; <add> try { <add> tag = CompressedStreamTools.readCompressed(stream); <add> } finally { <add> stream.close(); <add> } <add> return tag; <add> } <add> <add> public static void writeNBTToFile(NBTTagCompound tag, File file) throws IOException { <add> writeNBTToStream(tag, new FileOutputStream(file)); <add> } <add> <add> public static void writeNBTToStream(NBTTagCompound tag, OutputStream stream) throws IOException { <add> try { <add> CompressedStreamTools.writeCompressed(tag, stream); <add> } finally { <add> stream.flush(); <add> stream.close(); <add> } <add> } <add> <add>}
JavaScript
apache-2.0
fce6b4e5eef8c2d5bd8305eec042294cec133df9
0
citruspay/open-javascript,citruspay/open-javascript
import {urlReEx} from '../constants'; const regEXMap = { url : urlReEx }; //todo: Check why is this done window.r2 = regEXMap.url; export const baseSchema = { 'mainObjectCheck':{ keysCheck: ['merchantTxnId', 'amount', 'currency', 'userDetails', 'returnUrl', 'notifyUrl', 'requestSignature', 'merchantAccessKey', 'customParameters', 'requestOrigin', 'mode', 'offerToken'] }, returnUrl: { presence: true, custFormat: { pattern: regEXMap.url, message: 'should be proper URL string' } }, notifyUrl: { custFormat: { pattern: regEXMap.url, message: 'should be proper URL string' } }, requestSignature: { presence: true }, merchantTxnId : { presence: true }, amount: { presence: true }, //todo: can write custom validator as - numOrStr: true //currency: {presence: true}, //merchantAccessKey: { presence: true }, userDetails: { presence: true, keysCheck: ['email', 'firstName', 'lastName', 'address', 'mobileNo'] }, 'userDetails.email': { //presence: true, email: true }, 'userDetails.address': { keysCheck: ['street1', 'street2', 'city', 'state', 'country', 'zip'] }, 'userDetails.mobileNo' : { length: {maximum: 15} } };
js/validation/validation-schema.js
import {urlReEx} from '../constants'; const regEXMap = { url : urlReEx }; //todo: Check why is this done window.r2 = regEXMap.url; export const baseSchema = { 'mainObjectCheck':{ keysCheck: ['merchantTxnId', 'amount', 'currency', 'userDetails', 'returnUrl', 'notifyUrl', 'requestSignature', 'merchantAccessKey', 'customParameters', 'requestOrigin', 'mode', 'offerToken'] }, returnUrl: { presence: true, custFormat: { pattern: regEXMap.url, message: 'should be proper URL string' } }, notifyUrl: { custFormat: { pattern: regEXMap.url, message: 'should be proper URL string' } }, requestSignature: { presence: true }, merchantTxnId : { presence: true }, amount: { presence: true }, //todo: can write custom validator as - numOrStr: true //currency: {presence: true}, //merchantAccessKey: { presence: true }, userDetails: { presence: true, keysCheck: ['email', 'firstName', 'lastName', 'address', 'mobileNo'] }, 'userDetails.email': { presence: true, email: true }, 'userDetails.address': { keysCheck: ['street1', 'street2', 'city', 'state', 'country', 'zip'] }, 'userDetails.mobileNo' : { length: {maximum: 15} } };
email and mobile made non-mandatory
js/validation/validation-schema.js
email and mobile made non-mandatory
<ide><path>s/validation/validation-schema.js <ide> presence: true, <ide> keysCheck: ['email', 'firstName', 'lastName', 'address', 'mobileNo'] <ide> }, <del> 'userDetails.email': { presence: true, email: true }, <add> 'userDetails.email': { //presence: true, <add> email: true }, <ide> 'userDetails.address': { <ide> keysCheck: ['street1', 'street2', 'city', 'state', 'country', 'zip'] <ide> },
Java
mit
error: pathspec 'maven-plugin/src/main/java/hudson/maven/settings/SettingsProviderUtils.java' did not match any file(s) known to git
e96e01c6391608297c7786b03e4f96f653c052f6
1
Jimilian/jenkins,ndeloof/jenkins,liorhson/jenkins,ns163/jenkins,seanlin816/jenkins,daniel-beck/jenkins,evernat/jenkins,amuniz/jenkins,ErikVerheul/jenkins,MichaelPranovich/jenkins_sc,olivergondza/jenkins,SenolOzer/jenkins,liupugong/jenkins,escoem/jenkins,christ66/jenkins,oleg-nenashev/jenkins,arcivanov/jenkins,seanlin816/jenkins,mrooney/jenkins,hashar/jenkins,v1v/jenkins,pjanouse/jenkins,patbos/jenkins,gitaccountforprashant/gittest,ns163/jenkins,rsandell/jenkins,verbitan/jenkins,albers/jenkins,amruthsoft9/Jenkis,azweb76/jenkins,varmenise/jenkins,seanlin816/jenkins,elkingtonmcb/jenkins,wangyikai/jenkins,h4ck3rm1k3/jenkins,gitaccountforprashant/gittest,rashmikanta-1984/jenkins,liupugong/jenkins,tastatur/jenkins,deadmoose/jenkins,hplatou/jenkins,ydubreuil/jenkins,abayer/jenkins,liorhson/jenkins,msrb/jenkins,mattclark/jenkins,batmat/jenkins,lvotypko/jenkins,synopsys-arc-oss/jenkins,jzjzjzj/jenkins,akshayabd/jenkins,keyurpatankar/hudson,Vlatombe/jenkins,lvotypko/jenkins3,aduprat/jenkins,fbelzunc/jenkins,paulwellnerbou/jenkins,lilyJi/jenkins,ajshastri/jenkins,wuwen5/jenkins,rsandell/jenkins,paulmillar/jenkins,MadsNielsen/jtemp,petermarcoen/jenkins,andresrc/jenkins,AustinKwang/jenkins,Wilfred/jenkins,KostyaSha/jenkins,ChrisA89/jenkins,SenolOzer/jenkins,ajshastri/jenkins,Wilfred/jenkins,guoxu0514/jenkins,SebastienGllmt/jenkins,gitaccountforprashant/gittest,escoem/jenkins,wangyikai/jenkins,tfennelly/jenkins,MadsNielsen/jtemp,ndeloof/jenkins,jk47/jenkins,scoheb/jenkins,patbos/jenkins,ajshastri/jenkins,samatdav/jenkins,paulmillar/jenkins,KostyaSha/jenkins,vjuranek/jenkins,mdonohue/jenkins,csimons/jenkins,albers/jenkins,rashmikanta-1984/jenkins,h4ck3rm1k3/jenkins,ydubreuil/jenkins,samatdav/jenkins,pjanouse/jenkins,ChrisA89/jenkins,godfath3r/jenkins,lordofthejars/jenkins,paulwellnerbou/jenkins,petermarcoen/jenkins,lvotypko/jenkins,akshayabd/jenkins,alvarolobato/jenkins,kzantow/jenkins,ajshastri/jenkins,rashmikanta-1984/jenkins,recena/jenkins,jcsirot/jenkins,patbos/jenkins,singh88/jenkins,bkmeneguello/jenkins,csimons/jenkins,iqstack/jenkins,nandan4/Jenkins,olivergondza/jenkins,sathiya-mit/jenkins,jk47/jenkins,dennisjlee/jenkins,varmenise/jenkins,aheritier/jenkins,verbitan/jenkins,DoctorQ/jenkins,lvotypko/jenkins,Vlatombe/jenkins,luoqii/jenkins,vlajos/jenkins,khmarbaise/jenkins,Ykus/jenkins,petermarcoen/jenkins,batmat/jenkins,pjanouse/jenkins,paulwellnerbou/jenkins,FTG-003/jenkins,rsandell/jenkins,lvotypko/jenkins2,MichaelPranovich/jenkins_sc,protazy/jenkins,jglick/jenkins,mpeltonen/jenkins,jk47/jenkins,seanlin816/jenkins,hplatou/jenkins,damianszczepanik/jenkins,morficus/jenkins,tangkun75/jenkins,ErikVerheul/jenkins,MarkEWaite/jenkins,azweb76/jenkins,kohsuke/hudson,aduprat/jenkins,dennisjlee/jenkins,bpzhang/jenkins,bpzhang/jenkins,Krasnyanskiy/jenkins,FarmGeek4Life/jenkins,paulwellnerbou/jenkins,aldaris/jenkins,brunocvcunha/jenkins,svanoort/jenkins,shahharsh/jenkins,everyonce/jenkins,scoheb/jenkins,soenter/jenkins,evernat/jenkins,tastatur/jenkins,h4ck3rm1k3/jenkins,evernat/jenkins,svanoort/jenkins,wuwen5/jenkins,DanielWeber/jenkins,6WIND/jenkins,ns163/jenkins,abayer/jenkins,vijayto/jenkins,christ66/jenkins,jzjzjzj/jenkins,varmenise/jenkins,vijayto/jenkins,nandan4/Jenkins,tangkun75/jenkins,deadmoose/jenkins,elkingtonmcb/jenkins,lilyJi/jenkins,mrobinet/jenkins,arcivanov/jenkins,svanoort/jenkins,azweb76/jenkins,ns163/jenkins,gusreiber/jenkins,synopsys-arc-oss/jenkins,christ66/jenkins,samatdav/jenkins,hashar/jenkins,deadmoose/jenkins,lindzh/jenkins,ChrisA89/jenkins,gusreiber/jenkins,thomassuckow/jenkins,hplato
u/jenkins,stephenc/jenkins,jenkinsci/jenkins,FTG-003/jenkins,viqueen/jenkins,jhoblitt/jenkins,soenter/jenkins,patbos/jenkins,paulmillar/jenkins,dariver/jenkins,godfath3r/jenkins,jtnord/jenkins,my7seven/jenkins,batmat/jenkins,wangyikai/jenkins,batmat/jenkins,oleg-nenashev/jenkins,jenkinsci/jenkins,ErikVerheul/jenkins,Krasnyanskiy/jenkins,jglick/jenkins,iqstack/jenkins,6WIND/jenkins,thomassuckow/jenkins,dennisjlee/jenkins,olivergondza/jenkins,jtnord/jenkins,daspilker/jenkins,hemantojhaa/jenkins,aquarellian/jenkins,ChrisA89/jenkins,arunsingh/jenkins,intelchen/jenkins,christ66/jenkins,github-api-test-org/jenkins,protazy/jenkins,aquarellian/jenkins,jenkinsci/jenkins,MichaelPranovich/jenkins_sc,tfennelly/jenkins,jcarrothers-sap/jenkins,6WIND/jenkins,pselle/jenkins,keyurpatankar/hudson,kohsuke/hudson,damianszczepanik/jenkins,my7seven/jenkins,msrb/jenkins,SenolOzer/jenkins,stephenc/jenkins,viqueen/jenkins,fbelzunc/jenkins,synopsys-arc-oss/jenkins,lvotypko/jenkins3,abayer/jenkins,ChrisA89/jenkins,292388900/jenkins,Wilfred/jenkins,soenter/jenkins,brunocvcunha/jenkins,wuwen5/jenkins,MarkEWaite/jenkins,duzifang/my-jenkins,recena/jenkins,tastatur/jenkins,jzjzjzj/jenkins,dbroady1/jenkins,wangyikai/jenkins,damianszczepanik/jenkins,github-api-test-org/jenkins,ajshastri/jenkins,rlugojr/jenkins,rsandell/jenkins,h4ck3rm1k3/jenkins,intelchen/jenkins,andresrc/jenkins,dariver/jenkins,Jochen-A-Fuerbacher/jenkins,nandan4/Jenkins,bpzhang/jenkins,dbroady1/jenkins,gorcz/jenkins,KostyaSha/jenkins,morficus/jenkins,KostyaSha/jenkins,gusreiber/jenkins,6WIND/jenkins,mrobinet/jenkins,mattclark/jenkins,AustinKwang/jenkins,arcivanov/jenkins,v1v/jenkins,jpbriend/jenkins,vjuranek/jenkins,lindzh/jenkins,hemantojhaa/jenkins,scoheb/jenkins,lordofthejars/jenkins,wangyikai/jenkins,ikedam/jenkins,ns163/jenkins,jpbriend/jenkins,tangkun75/jenkins,amuniz/jenkins,ajshastri/jenkins,gitaccountforprashant/gittest,AustinKwang/jenkins,FarmGeek4Life/jenkins,lordofthejars/jenkins,MichaelPranovich/jenkins_sc,arcivanov/jenkins,hashar/jenkins,dennisjlee/jenkins,jglick/jenkins,evernat/jenkins,paulmillar/jenkins,mrooney/jenkins,jenkinsci/jenkins,abayer/jenkins,DanielWeber/jenkins,samatdav/jenkins,varmenise/jenkins,jcarrothers-sap/jenkins,mrobinet/jenkins,maikeffi/hudson,paulmillar/jenkins,olivergondza/jenkins,viqueen/jenkins,Vlatombe/jenkins,kzantow/jenkins,yonglehou/jenkins,keyurpatankar/hudson,mrobinet/jenkins,lvotypko/jenkins2,scoheb/jenkins,jzjzjzj/jenkins,mrooney/jenkins,Jimilian/jenkins,jpbriend/jenkins,MadsNielsen/jtemp,my7seven/jenkins,arunsingh/jenkins,MarkEWaite/jenkins,seanlin816/jenkins,csimons/jenkins,recena/jenkins,bkmeneguello/jenkins,rashmikanta-1984/jenkins,stephenc/jenkins,h4ck3rm1k3/jenkins,Krasnyanskiy/jenkins,fbelzunc/jenkins,MarkEWaite/jenkins,morficus/jenkins,goldchang/jenkins,khmarbaise/jenkins,viqueen/jenkins,jpederzolli/jenkins-1,NehemiahMi/jenkins,alvarolobato/jenkins,1and1/jenkins,hashar/jenkins,liupugong/jenkins,liorhson/jenkins,rlugojr/jenkins,tangkun75/jenkins,ikedam/jenkins,liupugong/jenkins,intelchen/jenkins,SebastienGllmt/jenkins,ns163/jenkins,tastatur/jenkins,shahharsh/jenkins,samatdav/jenkins,pselle/jenkins,pjanouse/jenkins,elkingtonmcb/jenkins,jglick/jenkins,292388900/jenkins,godfath3r/jenkins,kohsuke/hudson,tfennelly/jenkins,DoctorQ/jenkins,chbiel/jenkins,arunsingh/jenkins,FTG-003/jenkins,ErikVerheul/jenkins,shahharsh/jenkins,FarmGeek4Life/jenkins,svanoort/jenkins,fbelzunc/jenkins,luoqii/jenkins,andresrc/jenkins,sathiya-mit/jenkins,pselle/jenkins,christ66/jenkins,luoqii/jenkins,kohsuke/hudson,duzifang/my-jenkin
s,NehemiahMi/jenkins,ErikVerheul/jenkins,NehemiahMi/jenkins,tfennelly/jenkins,lvotypko/jenkins2,kzantow/jenkins,dennisjlee/jenkins,noikiy/jenkins,vlajos/jenkins,alvarolobato/jenkins,thomassuckow/jenkins,dbroady1/jenkins,luoqii/jenkins,dariver/jenkins,lilyJi/jenkins,bpzhang/jenkins,khmarbaise/jenkins,jk47/jenkins,escoem/jenkins,ydubreuil/jenkins,andresrc/jenkins,synopsys-arc-oss/jenkins,jcsirot/jenkins,paulmillar/jenkins,amuniz/jenkins,godfath3r/jenkins,jtnord/jenkins,kzantow/jenkins,lvotypko/jenkins2,brunocvcunha/jenkins,vijayto/jenkins,vlajos/jenkins,mpeltonen/jenkins,ErikVerheul/jenkins,6WIND/jenkins,liorhson/jenkins,akshayabd/jenkins,jcsirot/jenkins,thomassuckow/jenkins,Jochen-A-Fuerbacher/jenkins,gusreiber/jenkins,jhoblitt/jenkins,Jimilian/jenkins,recena/jenkins,huybrechts/hudson,csimons/jenkins,evernat/jenkins,mattclark/jenkins,scoheb/jenkins,kzantow/jenkins,daspilker/jenkins,kohsuke/hudson,CodeShane/jenkins,viqueen/jenkins,hplatou/jenkins,lvotypko/jenkins2,aquarellian/jenkins,mcanthony/jenkins,FTG-003/jenkins,gorcz/jenkins,Ykus/jenkins,bpzhang/jenkins,gitaccountforprashant/gittest,jglick/jenkins,vvv444/jenkins,Jochen-A-Fuerbacher/jenkins,lvotypko/jenkins3,wuwen5/jenkins,viqueen/jenkins,viqueen/jenkins,SenolOzer/jenkins,mdonohue/jenkins,lordofthejars/jenkins,verbitan/jenkins,lilyJi/jenkins,lvotypko/jenkins3,godfath3r/jenkins,alvarolobato/jenkins,lilyJi/jenkins,guoxu0514/jenkins,daniel-beck/jenkins,petermarcoen/jenkins,aldaris/jenkins,jpederzolli/jenkins-1,hplatou/jenkins,vjuranek/jenkins,escoem/jenkins,csimons/jenkins,292388900/jenkins,tfennelly/jenkins,brunocvcunha/jenkins,pjanouse/jenkins,intelchen/jenkins,daspilker/jenkins,MarkEWaite/jenkins,tastatur/jenkins,patbos/jenkins,ikedam/jenkins,arunsingh/jenkins,huybrechts/hudson,gusreiber/jenkins,guoxu0514/jenkins,vlajos/jenkins,gorcz/jenkins,alvarolobato/jenkins,elkingtonmcb/jenkins,SebastienGllmt/jenkins,MadsNielsen/jtemp,duzifang/my-jenkins,sathiya-mit/jenkins,mcanthony/jenkins,MarkEWaite/jenkins,oleg-nenashev/jenkins,github-api-test-org/jenkins,akshayabd/jenkins,keyurpatankar/hudson,wuwen5/jenkins,jzjzjzj/jenkins,AustinKwang/jenkins,samatdav/jenkins,jtnord/jenkins,seanlin816/jenkins,batmat/jenkins,duzifang/my-jenkins,aquarellian/jenkins,shahharsh/jenkins,jcsirot/jenkins,jpbriend/jenkins,Jochen-A-Fuerbacher/jenkins,jzjzjzj/jenkins,maikeffi/hudson,arunsingh/jenkins,duzifang/my-jenkins,1and1/jenkins,khmarbaise/jenkins,morficus/jenkins,verbitan/jenkins,goldchang/jenkins,hplatou/jenkins,intelchen/jenkins,vvv444/jenkins,vijayto/jenkins,ydubreuil/jenkins,singh88/jenkins,sathiya-mit/jenkins,guoxu0514/jenkins,KostyaSha/jenkins,lvotypko/jenkins,DoctorQ/jenkins,SenolOzer/jenkins,ikedam/jenkins,mcanthony/jenkins,soenter/jenkins,aduprat/jenkins,akshayabd/jenkins,1and1/jenkins,gorcz/jenkins,goldchang/jenkins,duzifang/my-jenkins,daniel-beck/jenkins,FarmGeek4Life/jenkins,NehemiahMi/jenkins,oleg-nenashev/jenkins,azweb76/jenkins,FTG-003/jenkins,noikiy/jenkins,noikiy/jenkins,CodeShane/jenkins,jcarrothers-sap/jenkins,mdonohue/jenkins,github-api-test-org/jenkins,DoctorQ/jenkins,aldaris/jenkins,noikiy/jenkins,Jimilian/jenkins,gitaccountforprashant/gittest,albers/jenkins,stephenc/jenkins,292388900/jenkins,my7seven/jenkins,tastatur/jenkins,nandan4/Jenkins,vijayto/jenkins,vjuranek/jenkins,MadsNielsen/jtemp,alvarolobato/jenkins,akshayabd/jenkins,KostyaSha/jenkins,dbroady1/jenkins,rsandell/jenkins,FarmGeek4Life/jenkins,amruthsoft9/Jenkis,wuwen5/jenkins,DanielWeber/jenkins,protazy/jenkins,paulmillar/jenkins,brunocvcunha/jenkins,olivergondza/jenkins,tfennelly/jen
kins,scoheb/jenkins,vjuranek/jenkins,SenolOzer/jenkins,MarkEWaite/jenkins,amruthsoft9/Jenkis,1and1/jenkins,ydubreuil/jenkins,andresrc/jenkins,MarkEWaite/jenkins,my7seven/jenkins,292388900/jenkins,rashmikanta-1984/jenkins,albers/jenkins,pselle/jenkins,aldaris/jenkins,kzantow/jenkins,ndeloof/jenkins,KostyaSha/jenkins,lvotypko/jenkins3,noikiy/jenkins,mcanthony/jenkins,msrb/jenkins,rashmikanta-1984/jenkins,CodeShane/jenkins,bkmeneguello/jenkins,jpbriend/jenkins,evernat/jenkins,liorhson/jenkins,Wilfred/jenkins,Jochen-A-Fuerbacher/jenkins,KostyaSha/jenkins,mdonohue/jenkins,arcivanov/jenkins,godfath3r/jenkins,jk47/jenkins,tangkun75/jenkins,lordofthejars/jenkins,1and1/jenkins,v1v/jenkins,patbos/jenkins,svanoort/jenkins,aheritier/jenkins,escoem/jenkins,batmat/jenkins,Ykus/jenkins,elkingtonmcb/jenkins,Ykus/jenkins,luoqii/jenkins,thomassuckow/jenkins,albers/jenkins,sathiya-mit/jenkins,dariver/jenkins,Vlatombe/jenkins,github-api-test-org/jenkins,rlugojr/jenkins,guoxu0514/jenkins,SebastienGllmt/jenkins,oleg-nenashev/jenkins,singh88/jenkins,azweb76/jenkins,msrb/jenkins,MichaelPranovich/jenkins_sc,fbelzunc/jenkins,duzifang/my-jenkins,guoxu0514/jenkins,AustinKwang/jenkins,292388900/jenkins,jtnord/jenkins,6WIND/jenkins,daniel-beck/jenkins,dennisjlee/jenkins,gusreiber/jenkins,batmat/jenkins,everyonce/jenkins,pselle/jenkins,chbiel/jenkins,yonglehou/jenkins,mattclark/jenkins,jcarrothers-sap/jenkins,hemantojhaa/jenkins,mrooney/jenkins,oleg-nenashev/jenkins,liupugong/jenkins,lilyJi/jenkins,vvv444/jenkins,svanoort/jenkins,alvarolobato/jenkins,verbitan/jenkins,tangkun75/jenkins,thomassuckow/jenkins,verbitan/jenkins,goldchang/jenkins,iqstack/jenkins,aheritier/jenkins,ErikVerheul/jenkins,aheritier/jenkins,wangyikai/jenkins,pselle/jenkins,292388900/jenkins,mdonohue/jenkins,goldchang/jenkins,ChrisA89/jenkins,ndeloof/jenkins,maikeffi/hudson,scoheb/jenkins,aldaris/jenkins,Jimilian/jenkins,vlajos/jenkins,vijayto/jenkins,synopsys-arc-oss/jenkins,fbelzunc/jenkins,ydubreuil/jenkins,chbiel/jenkins,synopsys-arc-oss/jenkins,aldaris/jenkins,dariver/jenkins,thomassuckow/jenkins,Jimilian/jenkins,hemantojhaa/jenkins,rashmikanta-1984/jenkins,bkmeneguello/jenkins,damianszczepanik/jenkins,Vlatombe/jenkins,liorhson/jenkins,olivergondza/jenkins,Wilfred/jenkins,1and1/jenkins,lilyJi/jenkins,everyonce/jenkins,MichaelPranovich/jenkins_sc,jhoblitt/jenkins,lindzh/jenkins,msrb/jenkins,petermarcoen/jenkins,CodeShane/jenkins,DoctorQ/jenkins,everyonce/jenkins,lvotypko/jenkins3,NehemiahMi/jenkins,maikeffi/hudson,tastatur/jenkins,vvv444/jenkins,DanielWeber/jenkins,amruthsoft9/Jenkis,sathiya-mit/jenkins,h4ck3rm1k3/jenkins,goldchang/jenkins,huybrechts/hudson,gorcz/jenkins,vjuranek/jenkins,dennisjlee/jenkins,mcanthony/jenkins,lvotypko/jenkins,bkmeneguello/jenkins,NehemiahMi/jenkins,chbiel/jenkins,CodeShane/jenkins,DoctorQ/jenkins,daspilker/jenkins,lvotypko/jenkins3,lindzh/jenkins,DanielWeber/jenkins,yonglehou/jenkins,mcanthony/jenkins,dbroady1/jenkins,seanlin816/jenkins,jhoblitt/jenkins,iqstack/jenkins,daniel-beck/jenkins,vlajos/jenkins,Ykus/jenkins,huybrechts/hudson,kohsuke/hudson,pselle/jenkins,damianszczepanik/jenkins,arunsingh/jenkins,aduprat/jenkins,Jimilian/jenkins,vjuranek/jenkins,aduprat/jenkins,jhoblitt/jenkins,MadsNielsen/jtemp,github-api-test-org/jenkins,morficus/jenkins,nandan4/Jenkins,aheritier/jenkins,keyurpatankar/hudson,mattclark/jenkins,hemantojhaa/jenkins,singh88/jenkins,mdonohue/jenkins,dariver/jenkins,aduprat/jenkins,aduprat/jenkins,aldaris/jenkins,v1v/jenkins,mpeltonen/jenkins,SebastienGllmt/jenkins,escoem/jenkins,arcivanov/jenki
ns,damianszczepanik/jenkins,liupugong/jenkins,MichaelPranovich/jenkins_sc,aheritier/jenkins,iqstack/jenkins,dariver/jenkins,SenolOzer/jenkins,csimons/jenkins,iqstack/jenkins,my7seven/jenkins,jcarrothers-sap/jenkins,dbroady1/jenkins,rsandell/jenkins,v1v/jenkins,chbiel/jenkins,hplatou/jenkins,khmarbaise/jenkins,csimons/jenkins,Jochen-A-Fuerbacher/jenkins,patbos/jenkins,wuwen5/jenkins,varmenise/jenkins,huybrechts/hudson,morficus/jenkins,ndeloof/jenkins,amruthsoft9/Jenkis,hemantojhaa/jenkins,jhoblitt/jenkins,mrobinet/jenkins,varmenise/jenkins,vvv444/jenkins,mpeltonen/jenkins,keyurpatankar/hudson,DanielWeber/jenkins,oleg-nenashev/jenkins,Jochen-A-Fuerbacher/jenkins,1and1/jenkins,everyonce/jenkins,ikedam/jenkins,varmenise/jenkins,recena/jenkins,chbiel/jenkins,synopsys-arc-oss/jenkins,6WIND/jenkins,jcsirot/jenkins,ndeloof/jenkins,protazy/jenkins,intelchen/jenkins,msrb/jenkins,damianszczepanik/jenkins,stephenc/jenkins,khmarbaise/jenkins,amruthsoft9/Jenkis,yonglehou/jenkins,everyonce/jenkins,jpederzolli/jenkins-1,goldchang/jenkins,daspilker/jenkins,chbiel/jenkins,lvotypko/jenkins,ikedam/jenkins,svanoort/jenkins,jenkinsci/jenkins,gitaccountforprashant/gittest,khmarbaise/jenkins,kohsuke/hudson,damianszczepanik/jenkins,godfath3r/jenkins,ikedam/jenkins,elkingtonmcb/jenkins,tfennelly/jenkins,liupugong/jenkins,mattclark/jenkins,lvotypko/jenkins2,shahharsh/jenkins,mrobinet/jenkins,jcsirot/jenkins,sathiya-mit/jenkins,AustinKwang/jenkins,huybrechts/hudson,msrb/jenkins,abayer/jenkins,protazy/jenkins,liorhson/jenkins,daniel-beck/jenkins,intelchen/jenkins,evernat/jenkins,recena/jenkins,ajshastri/jenkins,fbelzunc/jenkins,abayer/jenkins,DanielWeber/jenkins,everyonce/jenkins,bpzhang/jenkins,nandan4/Jenkins,Krasnyanskiy/jenkins,mpeltonen/jenkins,elkingtonmcb/jenkins,deadmoose/jenkins,FTG-003/jenkins,brunocvcunha/jenkins,stephenc/jenkins,jpederzolli/jenkins-1,bpzhang/jenkins,FarmGeek4Life/jenkins,dbroady1/jenkins,samatdav/jenkins,singh88/jenkins,gorcz/jenkins,yonglehou/jenkins,shahharsh/jenkins,v1v/jenkins,akshayabd/jenkins,paulwellnerbou/jenkins,albers/jenkins,wangyikai/jenkins,christ66/jenkins,jzjzjzj/jenkins,daniel-beck/jenkins,keyurpatankar/hudson,nandan4/Jenkins,stephenc/jenkins,arunsingh/jenkins,jcarrothers-sap/jenkins,ChrisA89/jenkins,Vlatombe/jenkins,recena/jenkins,soenter/jenkins,kzantow/jenkins,amuniz/jenkins,jenkinsci/jenkins,jpederzolli/jenkins-1,bkmeneguello/jenkins,mattclark/jenkins,lindzh/jenkins,rlugojr/jenkins,mrobinet/jenkins,ikedam/jenkins,NehemiahMi/jenkins,arcivanov/jenkins,goldchang/jenkins,jenkinsci/jenkins,ndeloof/jenkins,amuniz/jenkins,DoctorQ/jenkins,escoem/jenkins,vijayto/jenkins,mpeltonen/jenkins,Wilfred/jenkins,iqstack/jenkins,rsandell/jenkins,jtnord/jenkins,mrooney/jenkins,azweb76/jenkins,aheritier/jenkins,amruthsoft9/Jenkis,shahharsh/jenkins,lordofthejars/jenkins,hemantojhaa/jenkins,maikeffi/hudson,ns163/jenkins,hashar/jenkins,noikiy/jenkins,hashar/jenkins,soenter/jenkins,deadmoose/jenkins,maikeffi/hudson,vvv444/jenkins,andresrc/jenkins,github-api-test-org/jenkins,mrooney/jenkins,christ66/jenkins,singh88/jenkins,maikeffi/hudson,jk47/jenkins,pjanouse/jenkins,amuniz/jenkins,yonglehou/jenkins,paulwellnerbou/jenkins,MadsNielsen/jtemp,jglick/jenkins,Ykus/jenkins,SebastienGllmt/jenkins,jtnord/jenkins,jzjzjzj/jenkins,aquarellian/jenkins,luoqii/jenkins,petermarcoen/jenkins,albers/jenkins,jcarrothers-sap/jenkins,jk47/jenkins,AustinKwang/jenkins,Ykus/jenkins,brunocvcunha/jenkins,huybrechts/hudson,daspilker/jenkins,petermarcoen/jenkins,Vlatombe/jenkins,guoxu0514/jenkins,my7seven/jenkins,lordofthe
jars/jenkins,rlugojr/jenkins,jcarrothers-sap/jenkins,hashar/jenkins,daniel-beck/jenkins,lindzh/jenkins,abayer/jenkins,jenkinsci/jenkins,mcanthony/jenkins,rsandell/jenkins,singh88/jenkins,pjanouse/jenkins,v1v/jenkins,aquarellian/jenkins,azweb76/jenkins,Wilfred/jenkins,jpbriend/jenkins,gorcz/jenkins,lvotypko/jenkins,verbitan/jenkins,protazy/jenkins,deadmoose/jenkins,luoqii/jenkins,Krasnyanskiy/jenkins,jpederzolli/jenkins-1,soenter/jenkins,vvv444/jenkins,olivergondza/jenkins,rlugojr/jenkins,maikeffi/hudson,paulwellnerbou/jenkins,SebastienGllmt/jenkins,jglick/jenkins,Krasnyanskiy/jenkins,gorcz/jenkins,bkmeneguello/jenkins,h4ck3rm1k3/jenkins,morficus/jenkins,CodeShane/jenkins,mrooney/jenkins,noikiy/jenkins,github-api-test-org/jenkins,vlajos/jenkins,aquarellian/jenkins,daspilker/jenkins,jpbriend/jenkins,deadmoose/jenkins,andresrc/jenkins,jcsirot/jenkins,mdonohue/jenkins,shahharsh/jenkins,CodeShane/jenkins,mpeltonen/jenkins,protazy/jenkins,gusreiber/jenkins,jpederzolli/jenkins-1,FarmGeek4Life/jenkins,jhoblitt/jenkins,amuniz/jenkins,yonglehou/jenkins,Krasnyanskiy/jenkins,tangkun75/jenkins,DoctorQ/jenkins,ydubreuil/jenkins,FTG-003/jenkins,rlugojr/jenkins,keyurpatankar/hudson,kohsuke/hudson,lvotypko/jenkins2,lindzh/jenkins
/* * Copyright 20011 Talend, Olivier Lamy * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package hudson.maven.settings; import hudson.ExtensionList; import hudson.FilePath; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.jenkinsci.lib.configprovider.ConfigProvider; import org.jenkinsci.lib.configprovider.model.Config; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; /** * @author Olivier Lamy * @since 1.426 */ public class SettingsProviderUtils { /** * utility method to retrieve Config of type (MavenSettingsProvider etc..) * @param settingsConfigId * @param type * @return Config */ public static Config findConfig(String settingsConfigId, Class<?> type) { ExtensionList<ConfigProvider> configProviders = ConfigProvider.all(); if (configProviders != null && configProviders.size() > 0) { for (ConfigProvider configProvider : configProviders) { if (type.isAssignableFrom( configProvider.getClass() ) ) { if ( configProvider.isResponsibleFor( settingsConfigId ) ) { return configProvider.getConfigById( settingsConfigId ); } } } } return null; } /** * * @param config * @param workspace */ public static FilePath copyConfigContentToFilePath(Config config, FilePath workspace) throws IOException, InterruptedException { File tmpContentFile = null; ByteArrayInputStream bs = null; try { tmpContentFile = File.createTempFile( "config", "tmp" ); FilePath filePath = new FilePath( workspace, tmpContentFile.getName() ); bs = new ByteArrayInputStream(config.content.getBytes()); filePath.copyFrom(bs); return filePath; } finally { FileUtils.deleteQuietly( tmpContentFile ); IOUtils.closeQuietly( bs ); } } /** * * @return a temp file which must be deleted after use */ public static File copyConfigContentToFile(Config config) throws IOException{ File tmpContentFile = File.createTempFile( "config", "tmp" ); FileUtils.writeStringToFile( tmpContentFile, config.content ); return tmpContentFile; } }
maven-plugin/src/main/java/hudson/maven/settings/SettingsProviderUtils.java
Add a file that was missed.
maven-plugin/src/main/java/hudson/maven/settings/SettingsProviderUtils.java
Add a file that was missed.
<ide><path>aven-plugin/src/main/java/hudson/maven/settings/SettingsProviderUtils.java <add>/* <add> * Copyright 20011 Talend, Olivier Lamy <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. <add> */ <add>package hudson.maven.settings; <add> <add>import hudson.ExtensionList; <add>import hudson.FilePath; <add>import org.apache.commons.io.FileUtils; <add>import org.apache.commons.io.IOUtils; <add>import org.jenkinsci.lib.configprovider.ConfigProvider; <add>import org.jenkinsci.lib.configprovider.model.Config; <add> <add>import java.io.ByteArrayInputStream; <add>import java.io.File; <add>import java.io.IOException; <add> <add>/** <add> * @author Olivier Lamy <add> * @since 1.426 <add> */ <add>public class SettingsProviderUtils { <add> <add> /** <add> * utility method to retrieve Config of type (MavenSettingsProvider etc..) <add> * @param settingsConfigId <add> * @param type <add> * @return Config <add> */ <add> public static Config findConfig(String settingsConfigId, Class<?> type) { <add> ExtensionList<ConfigProvider> configProviders = ConfigProvider.all(); <add> if (configProviders != null && configProviders.size() > 0) { <add> for (ConfigProvider configProvider : configProviders) { <add> if (type.isAssignableFrom( configProvider.getClass() ) ) { <add> if ( configProvider.isResponsibleFor( settingsConfigId ) ) { <add> return configProvider.getConfigById( settingsConfigId ); <add> } <add> } <add> } <add> } <add> return null; <add> } <add> <add> /** <add> * <add> * @param config <add> * @param workspace <add> */ <add> public static FilePath copyConfigContentToFilePath(Config config, FilePath workspace) throws IOException, InterruptedException { <add> File tmpContentFile = null; <add> ByteArrayInputStream bs = null; <add> <add> try { <add> tmpContentFile = File.createTempFile( "config", "tmp" ); <add> FilePath filePath = new FilePath( workspace, tmpContentFile.getName() ); <add> bs = new ByteArrayInputStream(config.content.getBytes()); <add> filePath.copyFrom(bs); <add> return filePath; <add> } finally { <add> FileUtils.deleteQuietly( tmpContentFile ); <add> IOUtils.closeQuietly( bs ); <add> } <add> } <add> <add> /** <add> * <add> * @return a temp file which must be deleted after use <add> */ <add> public static File copyConfigContentToFile(Config config) throws IOException{ <add> <add> File tmpContentFile = File.createTempFile( "config", "tmp" ); <add> FileUtils.writeStringToFile( tmpContentFile, config.content ); <add> return tmpContentFile; <add> } <add>}
Java
apache-2.0
25990ea15f87d2d5c25c6a38325e3ebbf0653d22
0
jerome79/OG-Platform,jeorme/OG-Platform,nssales/OG-Platform,jeorme/OG-Platform,nssales/OG-Platform,DevStreet/FinanceAnalytics,ChinaQuants/OG-Platform,jerome79/OG-Platform,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,ChinaQuants/OG-Platform,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,jerome79/OG-Platform,jeorme/OG-Platform,nssales/OG-Platform,DevStreet/FinanceAnalytics,nssales/OG-Platform,McLeodMoores/starling,jeorme/OG-Platform,codeaudit/OG-Platform,codeaudit/OG-Platform,McLeodMoores/starling,McLeodMoores/starling,jerome79/OG-Platform,ChinaQuants/OG-Platform,McLeodMoores/starling,ChinaQuants/OG-Platform
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.master.config.impl; import static com.google.common.collect.Maps.newHashMap; import static com.opengamma.util.functional.Functional.functional; import java.util.Collection; import java.util.List; import java.util.Map; import com.opengamma.core.change.ChangeManager; import com.opengamma.core.config.ConfigSource; import com.opengamma.core.config.impl.ConfigItem; import com.opengamma.id.ObjectId; import com.opengamma.id.UniqueId; import com.opengamma.id.VersionCorrection; import com.opengamma.master.VersionedSource; import com.opengamma.master.config.ConfigDocument; import com.opengamma.master.config.ConfigMaster; import com.opengamma.master.config.ConfigSearchRequest; import com.opengamma.master.config.ConfigSearchResult; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.PublicSPI; /** * A {@code ConfigSource} implemented using an underlying {@code ConfigMaster}. * <p> * The {@link ConfigSource} interface provides securities to the engine via a narrow API. * This class provides the source on top of a standard {@link ConfigMaster}. * <p> * This implementation supports the concept of fixing the version. * This allows the version to be set in the constructor, and applied automatically to the methods. * Some methods on {@code ConfigSource} specify their own version requirements, which are respected. */ @PublicSPI public class MasterConfigSource implements ConfigSource, VersionedSource { /** * The config master. */ private final ConfigMaster _configMaster; /** * The version-correction locator to search at, null to not override versions. */ private volatile VersionCorrection _versionCorrection; /** * Creates an instance with an underlying config master which does not override versions. * * @param configMaster the config master, not null */ public MasterConfigSource(final ConfigMaster configMaster) { this(configMaster, null); } /** * Creates an instance with an underlying config master optionally overriding the requested version. * * @param configMaster the config master, not null * @param versionCorrection the version-correction locator to search at, null to not override versions */ public MasterConfigSource(final ConfigMaster configMaster, VersionCorrection versionCorrection) { ArgumentChecker.notNull(configMaster, "configMaster"); _configMaster = configMaster; _versionCorrection = versionCorrection; } //------------------------------------------------------------------------- /** * Gets the underlying config master. * * @return the config master, not null */ public ConfigMaster getMaster() { return _configMaster; } /** * Gets the version-correction locator to search at. * * @return the version-correction locator to search at, null if not overriding versions */ public VersionCorrection getVersionCorrection() { return _versionCorrection; } /** * Gets the change manager. * * @return the change manager, not null */ @Override public ChangeManager changeManager() { return getMaster().changeManager(); } /** * Sets the version-correction locator to search at. * * @param versionCorrection the version-correction locator to search at, null to not override versions */ @Override public void setVersionCorrection(final VersionCorrection versionCorrection) { _versionCorrection = versionCorrection; } //------------------------------------------------------------------------- /** * Search for configuration elements using a request object. 
* * @param <R> the type of configuration element * @param request the request object with value for search fields, not null * @return all configuration elements matching the request, not null */ public <R> List<ConfigItem<R>> search(final ConfigSearchRequest<R> request) { ArgumentChecker.notNull(request, "request"); ArgumentChecker.notNull(request.getType(), "request.type"); request.setVersionCorrection(getVersionCorrection()); ConfigSearchResult<R> searchResult = getMaster().search(request); return searchResult.getValues(); } //------------------------------------------------------------------------- @Override @SuppressWarnings("unchecked") public <R> R getConfig(Class<R> clazz, UniqueId uniqueId) { ConfigItem<?> item = getMaster().get(uniqueId).getConfig(); if (clazz.isAssignableFrom(item.getType())) { return (R) item.getValue(); } else { return null; } } @Override public ConfigItem<?> get(ObjectId objectId, VersionCorrection versionCorrection) { return getMaster().get(objectId, versionCorrection).getConfig(); } @Override public ConfigItem<?> get(UniqueId uniqueId) { return getMaster().get(uniqueId).getConfig(); } @Override public <R> R getConfig(Class<R> clazz, String configName, VersionCorrection versionCorrection) { ConfigItem<R> result = get(clazz, configName, versionCorrection); if (result != null) { return result.getValue(); } return null; } @SuppressWarnings("unchecked") @Override public <R> R getConfig(Class<R> clazz, ObjectId objectId, VersionCorrection versionCorrection) { ConfigItem<?> item = getMaster().get(objectId, versionCorrection).getConfig(); if (clazz.isAssignableFrom(item.getType())) { return (R) item.getValue(); } else { return null; } } @Override public <R> ConfigItem<R> get(Class<R> clazz, String configName, VersionCorrection versionCorrection) { ConfigSearchRequest<R> searchRequest = new ConfigSearchRequest<R>(clazz); searchRequest.setName(configName); searchRequest.setVersionCorrection(versionCorrection); return functional(getMaster().search(searchRequest).getValues()).first(); } @Override public <R> Collection<ConfigItem<R>> getAll(Class<R> clazz, VersionCorrection versionCorrection) { ConfigSearchRequest<R> searchRequest = new ConfigSearchRequest<R>(clazz); searchRequest.setType(clazz); searchRequest.setVersionCorrection(versionCorrection); return getMaster().search(searchRequest).getValues(); } @Override public <R> R getLatestByName(Class<R> clazz, String name) { return getConfig(clazz, name, VersionCorrection.LATEST); } @Override public Map<UniqueId, ConfigItem<?>> get(Collection<UniqueId> uniqueIds) { Map<UniqueId, ConfigDocument> result = getMaster().get(uniqueIds); Map<UniqueId, ConfigItem<?>> map = newHashMap(); for (UniqueId uid : result.keySet()) { map.put(uid, result.get(uid).getConfig()); } return map; } //------------------------------------------------------------------------- @Override public String toString() { String str = "MasterConfigSource[" + getMaster(); if (getVersionCorrection() != null) { str += ",versionCorrection=" + getVersionCorrection(); } return str + "]"; } }
projects/OG-Master/src/main/java/com/opengamma/master/config/impl/MasterConfigSource.java
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.master.config.impl; import static com.google.common.collect.Maps.newHashMap; import static com.opengamma.util.functional.Functional.functional; import java.util.Collection; import java.util.List; import java.util.Map; import com.opengamma.core.change.BasicChangeManager; import com.opengamma.core.change.ChangeManager; import com.opengamma.core.config.ConfigSource; import com.opengamma.core.config.impl.ConfigItem; import com.opengamma.id.ObjectId; import com.opengamma.id.UniqueId; import com.opengamma.id.VersionCorrection; import com.opengamma.master.VersionedSource; import com.opengamma.master.config.ConfigDocument; import com.opengamma.master.config.ConfigMaster; import com.opengamma.master.config.ConfigSearchRequest; import com.opengamma.master.config.ConfigSearchResult; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.PublicSPI; /** * A {@code ConfigSource} implemented using an underlying {@code ConfigMaster}. * <p> * The {@link ConfigSource} interface provides securities to the engine via a narrow API. * This class provides the source on top of a standard {@link ConfigMaster}. * <p> * This implementation supports the concept of fixing the version. * This allows the version to be set in the constructor, and applied automatically to the methods. * Some methods on {@code ConfigSource} specify their own version requirements, which are respected. */ @PublicSPI public class MasterConfigSource implements ConfigSource, VersionedSource { /** * The config master. */ private final ConfigMaster _configMaster; /** * The version-correction locator to search at, null to not override versions. */ private volatile VersionCorrection _versionCorrection; /** * The change manager. */ private ChangeManager _changeManager = new BasicChangeManager(); /** * Creates an instance with an underlying config master which does not override versions. * * @param configMaster the config master, not null */ public MasterConfigSource(final ConfigMaster configMaster) { this(configMaster, null); } /** * Creates an instance with an underlying config master optionally overriding the requested version. * * @param configMaster the config master, not null * @param versionCorrection the version-correction locator to search at, null to not override versions */ public MasterConfigSource(final ConfigMaster configMaster, VersionCorrection versionCorrection) { ArgumentChecker.notNull(configMaster, "configMaster"); _configMaster = configMaster; _versionCorrection = versionCorrection; } //------------------------------------------------------------------------- /** * Gets the underlying config master. * * @return the config master, not null */ public ConfigMaster getMaster() { return _configMaster; } /** * Gets the version-correction locator to search at. * * @return the version-correction locator to search at, null if not overriding versions */ public VersionCorrection getVersionCorrection() { return _versionCorrection; } /** * Gets the change manager. * * @return the change manager, not null */ @Override public ChangeManager changeManager() { return _changeManager; } /** * Sets the change manager. * * @param changeManager the change manager, not null */ public void setChangeManager(final ChangeManager changeManager) { ArgumentChecker.notNull(changeManager, "changeManager"); _changeManager = changeManager; } /** * Sets the version-correction locator to search at. 
* * @param versionCorrection the version-correction locator to search at, null to not override versions */ @Override public void setVersionCorrection(final VersionCorrection versionCorrection) { _versionCorrection = versionCorrection; } //------------------------------------------------------------------------- /** * Search for configuration elements using a request object. * * @param <R> the type of configuration element * @param request the request object with value for search fields, not null * @return all configuration elements matching the request, not null */ public <R> List<ConfigItem<R>> search(final ConfigSearchRequest<R> request) { ArgumentChecker.notNull(request, "request"); ArgumentChecker.notNull(request.getType(), "request.type"); request.setVersionCorrection(getVersionCorrection()); ConfigSearchResult<R> searchResult = getMaster().search(request); return searchResult.getValues(); } //------------------------------------------------------------------------- @Override @SuppressWarnings("unchecked") public <R> R getConfig(Class<R> clazz, UniqueId uniqueId) { ConfigItem<?> item = getMaster().get(uniqueId).getConfig(); if (clazz.isAssignableFrom(item.getType())) { return (R) item.getValue(); } else { return null; } } @Override public ConfigItem<?> get(ObjectId objectId, VersionCorrection versionCorrection) { return getMaster().get(objectId, versionCorrection).getConfig(); } @Override public ConfigItem<?> get(UniqueId uniqueId) { return getMaster().get(uniqueId).getConfig(); } @Override public <R> R getConfig(Class<R> clazz, String configName, VersionCorrection versionCorrection) { ConfigItem<R> result = get(clazz, configName, versionCorrection); if (result != null) { return result.getValue(); } return null; } @SuppressWarnings("unchecked") @Override public <R> R getConfig(Class<R> clazz, ObjectId objectId, VersionCorrection versionCorrection) { ConfigItem<?> item = getMaster().get(objectId, versionCorrection).getConfig(); if (clazz.isAssignableFrom(item.getType())) { return (R) item.getValue(); } else { return null; } } @Override public <R> ConfigItem<R> get(Class<R> clazz, String configName, VersionCorrection versionCorrection) { ConfigSearchRequest<R> searchRequest = new ConfigSearchRequest<R>(clazz); searchRequest.setName(configName); searchRequest.setVersionCorrection(versionCorrection); return functional(getMaster().search(searchRequest).getValues()).first(); } @Override public <R> Collection<ConfigItem<R>> getAll(Class<R> clazz, VersionCorrection versionCorrection) { ConfigSearchRequest<R> searchRequest = new ConfigSearchRequest<R>(clazz); searchRequest.setType(clazz); searchRequest.setVersionCorrection(versionCorrection); return getMaster().search(searchRequest).getValues(); } @Override public <R> R getLatestByName(Class<R> clazz, String name) { return getConfig(clazz, name, VersionCorrection.LATEST); } @Override public Map<UniqueId, ConfigItem<?>> get(Collection<UniqueId> uniqueIds) { Map<UniqueId, ConfigDocument> result = getMaster().get(uniqueIds); Map<UniqueId, ConfigItem<?>> map = newHashMap(); for (UniqueId uid : result.keySet()) { map.put(uid, result.get(uid).getConfig()); } return map; } //------------------------------------------------------------------------- @Override public String toString() { String str = "MasterConfigSource[" + getMaster(); if (getVersionCorrection() != null) { str += ",versionCorrection=" + getVersionCorrection(); } return str + "]"; } }
[PLAT-2783] Pass through to the change manager from the underlying config master.
projects/OG-Master/src/main/java/com/opengamma/master/config/impl/MasterConfigSource.java
[PLAT-2783] Pass through to the change manager from the underlying config master.
<ide><path>projects/OG-Master/src/main/java/com/opengamma/master/config/impl/MasterConfigSource.java <ide> import java.util.List; <ide> import java.util.Map; <ide> <del>import com.opengamma.core.change.BasicChangeManager; <ide> import com.opengamma.core.change.ChangeManager; <ide> import com.opengamma.core.config.ConfigSource; <ide> import com.opengamma.core.config.impl.ConfigItem; <ide> private volatile VersionCorrection _versionCorrection; <ide> <ide> /** <del> * The change manager. <del> */ <del> private ChangeManager _changeManager = new BasicChangeManager(); <del> <del> /** <ide> * Creates an instance with an underlying config master which does not override versions. <ide> * <ide> * @param configMaster the config master, not null <ide> */ <ide> @Override <ide> public ChangeManager changeManager() { <del> return _changeManager; <del> } <del> <del> /** <del> * Sets the change manager. <del> * <del> * @param changeManager the change manager, not null <del> */ <del> public void setChangeManager(final ChangeManager changeManager) { <del> ArgumentChecker.notNull(changeManager, "changeManager"); <del> _changeManager = changeManager; <add> return getMaster().changeManager(); <ide> } <ide> <ide> /**
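The diff above removes the source's private BasicChangeManager, which no code ever fired events into, and makes changeManager() delegate to the underlying master, so entity-change events raised by the master now reach listeners registered against the source. A minimal sketch of the effect, assuming the OpenGamma change API (ChangeManager.addChangeListener, ChangeListener.entityChanged, ChangeEvent) and the InMemoryConfigMaster implementation, none of which are shown in this record:

import com.opengamma.core.change.ChangeEvent;
import com.opengamma.core.change.ChangeListener;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.impl.InMemoryConfigMaster;
import com.opengamma.master.config.impl.MasterConfigSource;

public class ChangeManagerPassThroughSketch {
  public static void main(String[] args) {
    // Assumed in-memory master; it fires change events through its own ChangeManager.
    ConfigMaster master = new InMemoryConfigMaster();
    MasterConfigSource source = new MasterConfigSource(master);

    // With the pass-through, this listener is registered on the master's change
    // manager, so it observes documents added or updated through the master.
    // Before the change it would have been attached to a private BasicChangeManager
    // that the master never fired into.
    source.changeManager().addChangeListener(new ChangeListener() {
      @Override
      public void entityChanged(ChangeEvent event) {
        System.out.println("Config changed: " + event);
      }
    });
  }
}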
Java
bsd-3-clause
58636a0f65972582e50980a0baffe8121d448a80
0
xs2maverick/javacc-svn,javacc/javacc,javacc/javacc,javacc/javacc,amremam2004/javacc,pandaforks/javacc,amremam2004/javacc,pandaforks/javacc,pandaforks/javacc,xs2maverick/javacc-svn,amremam2004/javacc,xs2maverick/javacc-svn,pandaforks/javacc,xs2maverick/javacc-svn
// Copyright 2011 Google Inc. All Rights Reserved. // Author: [email protected] (Sreeni Viswanadha) package org.javacc.parser; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import static org.javacc.parser.JavaCCGlobals.*; /** * Generate the parser. */ public class ParseGenCPP extends ParseGen { public void start() throws MetaParseException { Token t = null; if (JavaCCErrors.get_error_count() != 0) throw new MetaParseException(); List tn = new ArrayList(toolNames); tn.add(toolName); switchToStaticsFile(); boolean implementsExists = false; switchToIncludeFile(); //standard includes genCodeLine("#include \"JavaCC.h\""); genCodeLine("#include \"CharStream.h\""); genCodeLine("#include \"Token.h\""); //genCodeLine("#include \"TokenMgrError.h\""); //genCodeLine("#include \"ParseException.h\""); genCodeLine("#include \"TokenManager.h\""); genCodeLine("#include \"" + cu_name + "TokenManager.h\""); if (Options.stringValue(Options.USEROPTION__CPP_PARSER_INCLUDES).length() > 0) { genCodeLine("#include \"" + Options.stringValue(Options.USEROPTION__CPP_PARSER_INCLUDES) + "\"\n"); } genCodeLine("#include \"" + cu_name + "Constants.h\""); if (jjtreeGenerated) { genCodeLine("#include \"JJT" + cu_name + "State.h\""); } genCodeLine("#include \"ErrorHandler.h\""); if (jjtreeGenerated) { genCodeLine("#include \"" + cu_name + "Tree.h\""); } if (Options.stringValue(Options.USEROPTION_CPP_NAMESPACE).length() > 0) { genCodeLine("namespace " + Options.stringValue("NAMESPACE_OPEN")); } genCodeLine(" struct JJCalls {"); genCodeLine(" int gen;"); genCodeLine(" int arg;"); genCodeLine(" Token* first;"); genCodeLine(" JJCalls* next;"); genCodeLine(" ~JJCalls() { if (next) delete next; }"); genCodeLine(" JJCalls() { next = NULL; arg = 0; gen = -1; first = NULL; }"); genCodeLine(" };"); genCodeLine(""); String superClass = Options.stringValue(Options.USEROPTION__PARSER_SUPER_CLASS); genClassStart("", cu_name, new String[]{}, superClass == null ? new String[0] : new String[] { "public " + superClass}); switchToMainFile(); if (cu_to_insertion_point_2.size() != 0) { printTokenSetup((Token)(cu_to_insertion_point_2.get(0))); for (Iterator it = cu_to_insertion_point_2.iterator(); it.hasNext();) { t = (Token)it.next(); printToken(t); } } switchToMainFile(); /*genCodeLine("typedef class _LookaheadSuccess { } *LookaheadSuccess; // Dummy class"); genCodeLine(" static LookaheadSuccess jj_ls = new _LookaheadSuccess();");*/ genCodeLine(""); genCodeLine(""); new ParseEngine().build(this); switchToIncludeFile(); genCodeLine(" public: TokenManager *token_source;"); genCodeLine(" public: CharStream *jj_input_stream;"); genCodeLine(" /** Current token. */"); genCodeLine(" public: Token *token;"); genCodeLine(" /** Next token. */"); genCodeLine(" public: Token *jj_nt;"); genCodeLine(" private: int jj_ntk;"); genCodeLine(" private: JJCalls jj_2_rtns[" + (jj2index + 1) + "];"); genCodeLine(" private: bool jj_rescan;"); genCodeLine(" private: int jj_gc;"); genCodeLine(" private: Token *jj_scanpos, *jj_lastpos;"); genCodeLine(" private: int jj_la;"); genCodeLine(" /** Whether we are looking ahead. 
*/"); genCodeLine(" private: bool jj_lookingAhead;"); genCodeLine(" private: bool jj_semLA;"); genCodeLine(" private: int jj_gen;"); genCodeLine(" private: int jj_la1[" + (maskindex + 1) + "];"); genCodeLine(" private: ErrorHandler *errorHandler;"); genCodeLine(" private: bool errorHandlerCreated;"); genCodeLine(" protected: bool hasError;"); genCodeLine(" public: void setErrorHandler(ErrorHandler *eh) {"); genCodeLine(" if (errorHandlerCreated) delete errorHandler;"); genCodeLine(" errorHandler = eh;"); genCodeLine(" errorHandlerCreated = false;"); genCodeLine(" }"); int tokenMaskSize = (tokenCount-1)/32 + 1; if (Options.getErrorReporting() && tokenMaskSize > 0) { switchToStaticsFile(); for (int i = 0; i < tokenMaskSize; i++) { if (maskVals.size() > 0) { genCodeLine(" unsigned int jj_la1_" + i + "[] = {"); for (Iterator it = maskVals.iterator(); it.hasNext();) { int[] tokenMask = (int[])(it.next()); genCode("0x" + Integer.toHexString(tokenMask[i]) + ","); } genCodeLine("};"); } } } genCodeLine(""); genCodeLine(" /** Constructor with user supplied TokenManager. */"); switchToIncludeFile(); // TEMP genCodeLine(" Token *head; "); genCodeLine(" public: "); generateMethodDefHeader("", cu_name, cu_name + "(TokenManager *tm)"); if (superClass != null) { genCodeLine(" : " + superClass + "()"); } genCodeLine("{"); genCodeLine(" head = NULL;"); genCodeLine(" errorHandlerCreated = false;"); genCodeLine(" ReInit(tm);"); genCodeLine("}"); switchToIncludeFile(); genCodeLine(" public: virtual ~" + cu_name + "();"); switchToMainFile(); genCodeLine(" " + cu_name + "::~" +cu_name + "()"); genCodeLine("{"); genCodeLine(" clear();"); genCodeLine("}"); generateMethodDefHeader("void", cu_name, "ReInit(TokenManager *tm)"); genCodeLine("{"); genCodeLine(" clear();"); genCodeLine(" errorHandler = new ErrorHandler();"); genCodeLine(" errorHandlerCreated = true;"); genCodeLine(" hasError = false;"); genCodeLine(" token_source = tm;"); genCodeLine(" head = token = new Token();"); genCodeLine(" token->kind = 0;"); genCodeLine(" token->next = NULL;"); genCodeLine(" jj_lookingAhead = false;"); genCodeLine(" jj_rescan = false;"); genCodeLine(" jj_done = false;"); genCodeLine(" jj_scanpos = jj_lastpos = NULL;"); genCodeLine(" jj_gc = 0;"); genCodeLine(" jj_kind = -1;"); genCodeLine(" trace_indent = 0;"); genCodeLine(" trace_enabled = " + Options.getDebugParser() + ";"); if (Options.getCacheTokens()) { genCodeLine(" token->next = jj_nt = token_source->getNextToken();"); } else { genCodeLine(" jj_ntk = -1;"); } if (jjtreeGenerated) { genCodeLine(" jjtree.reset();"); } if (Options.getErrorReporting()) { genCodeLine(" jj_gen = 0;"); if (maskindex > 0) { genCodeLine(" for (int i = 0; i < " + maskindex + "; i++) jj_la1[i] = -1;"); } } genCodeLine(" }"); genCodeLine(""); //Add clear function for deconstructor and ReInit generateMethodDefHeader("void", cu_name, "clear()"); genCodeLine("{"); genCodeLine(" //Since token manager was generate from outside,"); genCodeLine(" //parser should not take care of deleting"); genCodeLine(" //if (token_source) delete token_source;"); genCodeLine(" if (head) {"); genCodeLine(" Token *next, *t = head;"); genCodeLine(" while (t) {"); genCodeLine(" next = t->next;"); genCodeLine(" delete t;"); genCodeLine(" t = next;"); genCodeLine(" }"); genCodeLine(" }"); genCodeLine(" if (errorHandlerCreated) {"); genCodeLine(" delete errorHandler;"); genCodeLine(" }"); genCodeLine("}"); genCodeLine(""); generateMethodDefHeader("Token *", cu_name, "jj_consume_token(int kind)", "ParseException"); genCodeLine(" 
{"); if (Options.getCacheTokens()) { genCodeLine(" Token *oldToken = token;"); genCodeLine(" if ((token = jj_nt)->next != NULL) jj_nt = jj_nt->next;"); genCodeLine(" else jj_nt = jj_nt->next = token_source->getNextToken();"); } else { genCodeLine(" Token *oldToken;"); genCodeLine(" if ((oldToken = token)->next != NULL) token = token->next;"); genCodeLine(" else token = token->next = token_source->getNextToken();"); genCodeLine(" jj_ntk = -1;"); } genCodeLine(" if (token->kind == kind) {"); if (Options.getErrorReporting()) { genCodeLine(" jj_gen++;"); if (jj2index != 0) { genCodeLine(" if (++jj_gc > 100) {"); genCodeLine(" jj_gc = 0;"); genCodeLine(" for (int i = 0; i < " + jj2index + "; i++) {"); genCodeLine(" JJCalls *c = &jj_2_rtns[i];"); genCodeLine(" while (c != NULL) {"); genCodeLine(" if (c->gen < jj_gen) c->first = NULL;"); genCodeLine(" c = c->next;"); genCodeLine(" }"); genCodeLine(" }"); genCodeLine(" }"); } } if (Options.getDebugParser()) { genCodeLine(" trace_token(token, \"\");"); } genCodeLine(" return token;"); genCodeLine(" }"); if (Options.getCacheTokens()) { genCodeLine(" jj_nt = token;"); } genCodeLine(" token = oldToken;"); if (Options.getErrorReporting()) { genCodeLine(" jj_kind = kind;"); } //genCodeLine(" throw generateParseException();"); genCodeLine(" JAVACC_STRING_TYPE image = kind >= 0 ? tokenImage[kind] : tokenImage[0];"); genCodeLine(" errorHandler->handleUnexpectedToken(kind, image.substr(1, image.size() - 2), getToken(1), this), hasError = true;"); genCodeLine(" return token;"); genCodeLine(" }"); genCodeLine(""); if (jj2index != 0) { switchToMainFile(); generateMethodDefHeader("bool ", cu_name, "jj_scan_token(int kind)"); genCodeLine("{"); genCodeLine(" if (jj_scanpos == jj_lastpos) {"); genCodeLine(" jj_la--;"); genCodeLine(" if (jj_scanpos->next == NULL) {"); genCodeLine(" jj_lastpos = jj_scanpos = jj_scanpos->next = token_source->getNextToken();"); genCodeLine(" } else {"); genCodeLine(" jj_lastpos = jj_scanpos = jj_scanpos->next;"); genCodeLine(" }"); genCodeLine(" } else {"); genCodeLine(" jj_scanpos = jj_scanpos->next;"); genCodeLine(" }"); if (Options.getErrorReporting()) { genCodeLine(" if (jj_rescan) {"); genCodeLine(" int i = 0; Token *tok = token;"); genCodeLine(" while (tok != NULL && tok != jj_scanpos) { i++; tok = tok->next; }"); genCodeLine(" if (tok != NULL) jj_add_error_token(kind, i);"); if (Options.getDebugLookahead()) { genCodeLine(" } else {"); genCodeLine(" trace_scan(jj_scanpos, kind);"); } genCodeLine(" }"); } else if (Options.getDebugLookahead()) { genCodeLine(" trace_scan(jj_scanpos, kind);"); } genCodeLine(" if (jj_scanpos->kind != kind) return true;"); //genCodeLine(" if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;"); genCodeLine(" if (jj_la == 0 && jj_scanpos == jj_lastpos) { return jj_done = true; }"); genCodeLine(" return false;"); genCodeLine(" }"); genCodeLine(""); } genCodeLine(""); genCodeLine("/** Get the next Token. 
*/"); generateMethodDefHeader("Token *", cu_name, "getNextToken()"); genCodeLine("{"); if (Options.getCacheTokens()) { genCodeLine(" if ((token = jj_nt)->next != NULL) jj_nt = jj_nt->next;"); genCodeLine(" else jj_nt = jj_nt->next = token_source->getNextToken();"); } else { genCodeLine(" if (token->next != NULL) token = token->next;"); genCodeLine(" else token = token->next = token_source->getNextToken();"); genCodeLine(" jj_ntk = -1;"); } if (Options.getErrorReporting()) { genCodeLine(" jj_gen++;"); } if (Options.getDebugParser()) { genCodeLine(" trace_token(token, \" (in getNextToken)\");"); } genCodeLine(" return token;"); genCodeLine(" }"); genCodeLine(""); genCodeLine("/** Get the specific Token. */"); generateMethodDefHeader("Token *", cu_name, "getToken(int index)"); genCodeLine("{"); if (lookaheadNeeded) { genCodeLine(" Token *t = jj_lookingAhead ? jj_scanpos : token;"); } else { genCodeLine(" Token *t = token;"); } genCodeLine(" for (int i = 0; i < index; i++) {"); genCodeLine(" if (t->next != NULL) t = t->next;"); genCodeLine(" else t = t->next = token_source->getNextToken();"); genCodeLine(" }"); genCodeLine(" return t;"); genCodeLine(" }"); genCodeLine(""); if (!Options.getCacheTokens()) { generateMethodDefHeader("int", cu_name, "jj_ntk_f()"); genCodeLine("{"); genCodeLine(" if ((jj_nt=token->next) == NULL)"); genCodeLine(" return (jj_ntk = (token->next=token_source->getNextToken())->kind);"); genCodeLine(" else"); genCodeLine(" return (jj_ntk = jj_nt->kind);"); genCodeLine(" }"); genCodeLine(""); } switchToIncludeFile(); genCodeLine(" private: int jj_kind;"); if (Options.getErrorReporting()) { genCodeLine(" int **jj_expentries;"); genCodeLine(" int *jj_expentry;"); if (jj2index != 0) { switchToStaticsFile(); // For now we don't support ERROR_REPORTING in the C++ version. //genCodeLine(" static int *jj_lasttokens = new int[100];"); //genCodeLine(" static int jj_endpos;"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "jj_add_error_token(int kind, int pos)"); genCodeLine(" {"); // For now we don't support ERROR_REPORTING in the C++ version. //genCodeLine(" if (pos >= 100) return;"); //genCodeLine(" if (pos == jj_endpos + 1) {"); //genCodeLine(" jj_lasttokens[jj_endpos++] = kind;"); //genCodeLine(" } else if (jj_endpos != 0) {"); //genCodeLine(" jj_expentry = new int[jj_endpos];"); //genCodeLine(" for (int i = 0; i < jj_endpos; i++) {"); //genCodeLine(" jj_expentry[i] = jj_lasttokens[i];"); //genCodeLine(" }"); //genCodeLine(" jj_entries_loop: for (java.util.Iterator it = jj_expentries.iterator(); it.hasNext();) {"); //genCodeLine(" int[] oldentry = (int[])(it->next());"); //genCodeLine(" if (oldentry.length == jj_expentry.length) {"); //genCodeLine(" for (int i = 0; i < jj_expentry.length; i++) {"); //genCodeLine(" if (oldentry[i] != jj_expentry[i]) {"); //genCodeLine(" continue jj_entries_loop;"); //genCodeLine(" }"); //genCodeLine(" }"); //genCodeLine(" jj_expentries.add(jj_expentry);"); //genCodeLine(" break jj_entries_loop;"); //genCodeLine(" }"); //genCodeLine(" }"); //genCodeLine(" if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;"); //genCodeLine(" }"); genCodeLine(" }"); } genCodeLine(""); genCodeLine(" /** Generate ParseException. 
*/"); generateMethodDefHeader("protected: virtual void ", cu_name, "parseError()"); genCodeLine(" {"); genCodeLine(" fprintf(stderr, \"Parse error at: %d:%d, after token: %s encountered: %s\\n\", token->beginLine, token->beginColumn, addUnicodeEscapes(token->image).c_str(), addUnicodeEscapes(getToken(1)->image).c_str());"); genCodeLine(" }"); /*generateMethodDefHeader("ParseException", cu_name, "generateParseException()"); genCodeLine(" {"); //genCodeLine(" jj_expentries.clear();"); //genCodeLine(" bool[] la1tokens = new boolean[" + tokenCount + "];"); //genCodeLine(" if (jj_kind >= 0) {"); //genCodeLine(" la1tokens[jj_kind] = true;"); //genCodeLine(" jj_kind = -1;"); //genCodeLine(" }"); //genCodeLine(" for (int i = 0; i < " + maskindex + "; i++) {"); //genCodeLine(" if (jj_la1[i] == jj_gen) {"); //genCodeLine(" for (int j = 0; j < 32; j++) {"); //for (int i = 0; i < (tokenCount-1)/32 + 1; i++) { //genCodeLine(" if ((jj_la1_" + i + "[i] & (1<<j)) != 0) {"); //genCode(" la1tokens["); //if (i != 0) { //genCode((32*i) + "+"); //} //genCodeLine("j] = true;"); //genCodeLine(" }"); //} //genCodeLine(" }"); //genCodeLine(" }"); //genCodeLine(" }"); //genCodeLine(" for (int i = 0; i < " + tokenCount + "; i++) {"); //genCodeLine(" if (la1tokens[i]) {"); //genCodeLine(" jj_expentry = new int[1];"); //genCodeLine(" jj_expentry[0] = i;"); //genCodeLine(" jj_expentries.add(jj_expentry);"); //genCodeLine(" }"); //genCodeLine(" }"); //if (jj2index != 0) { //genCodeLine(" jj_endpos = 0;"); //genCodeLine(" jj_rescan_token();"); //genCodeLine(" jj_add_error_token(0, 0);"); //} //genCodeLine(" int exptokseq[][1] = new int[1];"); //genCodeLine(" for (int i = 0; i < jj_expentries.size(); i++) {"); //if (!Options.getGenerateGenerics()) //genCodeLine(" exptokseq[i] = (int[])jj_expentries.get(i);"); //else //genCodeLine(" exptokseq[i] = jj_expentries.get(i);"); //genCodeLine(" }"); genCodeLine(" return new _ParseException();");//token, NULL, tokenImage);"); genCodeLine(" }"); */ } else { genCodeLine(" /** Generate ParseException. */"); generateMethodDefHeader("protected: virtual void ", cu_name, "parseError()"); genCodeLine(" {"); genCodeLine(" fprintf(stderr, \"Parse error at: %d:%d, after token: %s encountered: %s\\n\", token->beginLine, token->beginColumn, addUnicodeEscapes(token->image).c_str(), addUnicodeEscapes(getToken(1)->image).c_str());"); genCodeLine(" }"); /*generateMethodDefHeader("ParseException", cu_name, "generateParseException()"); genCodeLine(" {"); genCodeLine(" Token *errortok = token->next;"); if (Options.getKeepLineColumn()) genCodeLine(" int line = errortok.beginLine, column = errortok.beginColumn;"); genCodeLine(" JAVACC_STRING_TYPE mess = (errortok->kind == 0) ? tokenImage[0] : errortok->image;"); if (Options.getKeepLineColumn()) genCodeLine(" return new _ParseException();");// + //"\"Parse error at line \" + line + \", column \" + column + \". " + //"Encountered: \" + mess);"); else genCodeLine(" return new _ParseException();");//\"Parse error at <unknown location>. " + //"Encountered: \" + mess);"); genCodeLine(" }");*/ } genCodeLine(""); switchToIncludeFile(); genCodeLine(" private: int trace_indent;"); genCodeLine(" private: bool trace_enabled;"); if (Options.getDebugParser()) { genCodeLine(""); genCodeLine("/** Enable tracing. */"); generateMethodDefHeader("public: virtual void", cu_name, "enable_tracing()"); genCodeLine(" {"); genCodeLine(" trace_enabled = true;"); genCodeLine(" }"); genCodeLine(""); genCodeLine("/** Disable tracing. 
*/"); generateMethodDefHeader("public: virtual void", cu_name, "disable_tracing()"); genCodeLine(" {"); genCodeLine(" trace_enabled = false;"); genCodeLine(" }"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "trace_call(const char *s)"); genCodeLine(" {"); genCodeLine(" if (trace_enabled) {"); genCodeLine(" for (int i = 0; i < trace_indent; i++) { printf(\" \"); }"); genCodeLine(" printf(\"Call: %s\\n\", s);"); genCodeLine(" }"); genCodeLine(" trace_indent = trace_indent + 2;"); genCodeLine(" }"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "trace_return(const char *s)"); genCodeLine(" {"); genCodeLine(" trace_indent = trace_indent - 2;"); genCodeLine(" if (trace_enabled) {"); genCodeLine(" for (int i = 0; i < trace_indent; i++) { printf(\" \"); }"); genCodeLine(" printf(\"Return: %s\\n\", s);"); genCodeLine(" }"); genCodeLine(" }"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "trace_token(Token *t, const char *where)"); genCodeLine(" {"); genCodeLine(" if (trace_enabled) {"); genCodeLine(" for (int i = 0; i < trace_indent; i++) { printf(\" \"); }"); genCodeLine(" printf(\"Consumed token: <kind: %d(%s), \\\"%s\\\"\", t->kind, addUnicodeEscapes(tokenImage[t->kind]).c_str(), addUnicodeEscapes(t->image).c_str());"); //genCodeLine(" if (t->kind != 0 && !tokenImage[t->kind].equals(\"\\\"\" + t->image + \"\\\"\")) {"); //genCodeLine(" System.out.print(\": \\\"\" + t->image + \"\\\"\");"); //genCodeLine(" }"); genCodeLine(" printf(\" at line %d column %d> %s\\n\", t->beginLine, t->beginColumn, where);"); genCodeLine(" }"); genCodeLine(" }"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "trace_scan(Token *t1, int t2)"); genCodeLine(" {"); genCodeLine(" if (trace_enabled) {"); genCodeLine(" for (int i = 0; i < trace_indent; i++) { printf(\" \"); }"); genCodeLine(" printf(\"Visited token: <Kind: %d(%s), \\\"%s\\\"\", t1->kind, addUnicodeEscapes(tokenImage[t1->kind]).c_str(), addUnicodeEscapes(t1->image).c_str());"); //genCodeLine(" if (t1->kind != 0 && !tokenImage[t1->kind].equals(\"\\\"\" + t1->image + \"\\\"\")) {"); //genCodeLine(" System.out.print(\": \\\"\" + t1->image + \"\\\"\");"); //genCodeLine(" }"); genCodeLine(" printf(\" at line %d column %d>; Expected token: %s\\n\", t1->beginLine, t1->beginColumn, addUnicodeEscapes(tokenImage[t2]).c_str());"); genCodeLine(" }"); genCodeLine(" }"); genCodeLine(""); } else { genCodeLine(" /** Enable tracing. */"); generateMethodDefHeader("public: void", cu_name, "enable_tracing()"); genCodeLine(" {"); genCodeLine(" }"); genCodeLine(""); genCodeLine(" /** Disable tracing. 
*/"); generateMethodDefHeader("public: void", cu_name, "disable_tracing()"); genCodeLine(" {"); genCodeLine(" }"); genCodeLine(""); } if (jj2index != 0 && Options.getErrorReporting()) { generateMethodDefHeader("void", cu_name, "jj_rescan_token()"); genCodeLine("{"); genCodeLine(" jj_rescan = true;"); genCodeLine(" for (int i = 0; i < " + jj2index + "; i++) {"); //genCodeLine(" try {"); genCodeLine(" JJCalls *p = &jj_2_rtns[i];"); genCodeLine(" do {"); genCodeLine(" if (p->gen > jj_gen) {"); genCodeLine(" jj_la = p->arg; jj_lastpos = jj_scanpos = p->first;"); genCodeLine(" switch (i) {"); for (int i = 0; i < jj2index; i++) { genCodeLine(" case " + i + ": jj_3_" + (i+1) + "(); break;"); } genCodeLine(" }"); genCodeLine(" }"); genCodeLine(" p = p->next;"); genCodeLine(" } while (p != NULL);"); //genCodeLine(" } catch(LookaheadSuccess ls) { }"); genCodeLine(" }"); genCodeLine(" jj_rescan = false;"); genCodeLine(" }"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "jj_save(int index, int xla)"); genCodeLine("{"); genCodeLine(" JJCalls *p = &jj_2_rtns[index];"); genCodeLine(" while (p->gen > jj_gen) {"); genCodeLine(" if (p->next == NULL) { p = p->next = new JJCalls(); break; }"); genCodeLine(" p = p->next;"); genCodeLine(" }"); genCodeLine(" p->gen = jj_gen + xla - jj_la; p->first = token; p->arg = xla;"); genCodeLine(" }"); genCodeLine(""); } if (cu_from_insertion_point_2.size() != 0) { printTokenSetup((Token)(cu_from_insertion_point_2.get(0))); ccol = 1; for (Iterator it = cu_from_insertion_point_2.iterator(); it.hasNext();) { t = (Token)it.next(); printToken(t); } printTrailingComments(t); } genCodeLine(""); // in the include file close the class signature switchToIncludeFile(); // copy other stuff Token t1 = JavaCCGlobals.otherLanguageDeclTokenBeg; Token t2 = JavaCCGlobals.otherLanguageDeclTokenEnd; while(t1 != t2) { printToken(t1); t1 = t1.next; } genCodeLine("\n"); if (jjtreeGenerated) { genCodeLine("JJT" + cu_name + "State jjtree;"); genCodeLine("\n"); } genCodeLine("private: bool jj_done;"); genCodeLine(/*{*/ "\n};"); saveOutput(Options.getOutputDirectory() + File.separator + cu_name + getFileExtension(Options.getOutputLanguage())); } public static void reInit() { lookaheadNeeded = false; } }
src/main/java/org/javacc/parser/ParseGenCPP.java
// Copyright 2011 Google Inc. All Rights Reserved. // Author: [email protected] (Sreeni Viswanadha) package org.javacc.parser; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import static org.javacc.parser.JavaCCGlobals.*; /** * Generate the parser. */ public class ParseGenCPP extends ParseGen { public void start() throws MetaParseException { Token t = null; if (JavaCCErrors.get_error_count() != 0) throw new MetaParseException(); List tn = new ArrayList(toolNames); tn.add(toolName); switchToStaticsFile(); boolean implementsExists = false; switchToIncludeFile(); //standard includes genCodeLine("#include \"JavaCC.h\""); genCodeLine("#include \"CharStream.h\""); genCodeLine("#include \"Token.h\""); //genCodeLine("#include \"TokenMgrError.h\""); //genCodeLine("#include \"ParseException.h\""); genCodeLine("#include \"TokenManager.h\""); genCodeLine("#include \"" + cu_name + "TokenManager.h\""); if (Options.stringValue(Options.USEROPTION__CPP_PARSER_INCLUDES).length() > 0) { genCodeLine("#include \"" + Options.stringValue(Options.USEROPTION__CPP_PARSER_INCLUDES) + "\"\n"); } genCodeLine("#include \"" + cu_name + "Constants.h\""); if (jjtreeGenerated) { genCodeLine("#include \"JJT" + cu_name + "State.h\""); } genCodeLine("#include \"ErrorHandler.h\""); if (jjtreeGenerated) { genCodeLine("#include \"" + cu_name + "Tree.h\""); } if (Options.stringValue(Options.USEROPTION_CPP_NAMESPACE).length() > 0) { genCodeLine("namespace " + Options.stringValue("NAMESPACE_OPEN")); } genCodeLine(" struct JJCalls {"); genCodeLine(" int gen;"); genCodeLine(" Token *first;"); genCodeLine(" int arg;"); genCodeLine(" JJCalls *next;"); genCodeLine(" ~JJCalls() { if (next) delete next; }"); genCodeLine(" JJCalls() { next = NULL; arg = 0; gen = -1; first = NULL; }"); genCodeLine(" };"); genCodeLine(""); String superClass = Options.stringValue(Options.USEROPTION__PARSER_SUPER_CLASS); genClassStart("", cu_name, new String[]{}, superClass == null ? new String[0] : new String[] { "public " + superClass}); switchToMainFile(); if (cu_to_insertion_point_2.size() != 0) { printTokenSetup((Token)(cu_to_insertion_point_2.get(0))); for (Iterator it = cu_to_insertion_point_2.iterator(); it.hasNext();) { t = (Token)it.next(); printToken(t); } } switchToMainFile(); /*genCodeLine("typedef class _LookaheadSuccess { } *LookaheadSuccess; // Dummy class"); genCodeLine(" static LookaheadSuccess jj_ls = new _LookaheadSuccess();");*/ genCodeLine(""); genCodeLine(""); new ParseEngine().build(this); switchToIncludeFile(); genCodeLine(" public: TokenManager *token_source;"); genCodeLine(" public: CharStream *jj_input_stream;"); genCodeLine(" /** Current token. */"); genCodeLine(" public: Token *token;"); genCodeLine(" /** Next token. */"); genCodeLine(" public: Token *jj_nt;"); genCodeLine(" private: int jj_ntk;"); genCodeLine(" private: JJCalls jj_2_rtns[" + (jj2index + 1) + "];"); genCodeLine(" private: bool jj_rescan;"); genCodeLine(" private: int jj_gc;"); genCodeLine(" private: Token *jj_scanpos, *jj_lastpos;"); genCodeLine(" private: int jj_la;"); genCodeLine(" /** Whether we are looking ahead. 
*/"); genCodeLine(" private: bool jj_lookingAhead;"); genCodeLine(" private: bool jj_semLA;"); genCodeLine(" private: int jj_gen;"); genCodeLine(" private: int jj_la1[" + (maskindex + 1) + "];"); genCodeLine(" private: ErrorHandler *errorHandler;"); genCodeLine(" private: bool errorHandlerCreated;"); genCodeLine(" protected: bool hasError;"); genCodeLine(" public: void setErrorHandler(ErrorHandler *eh) {"); genCodeLine(" if (errorHandlerCreated) delete errorHandler;"); genCodeLine(" errorHandler = eh;"); genCodeLine(" errorHandlerCreated = false;"); genCodeLine(" }"); int tokenMaskSize = (tokenCount-1)/32 + 1; if (Options.getErrorReporting() && tokenMaskSize > 0) { switchToStaticsFile(); for (int i = 0; i < tokenMaskSize; i++) { if (maskVals.size() > 0) { genCodeLine(" unsigned int jj_la1_" + i + "[] = {"); for (Iterator it = maskVals.iterator(); it.hasNext();) { int[] tokenMask = (int[])(it.next()); genCode("0x" + Integer.toHexString(tokenMask[i]) + ","); } genCodeLine("};"); } } } genCodeLine(""); genCodeLine(" /** Constructor with user supplied TokenManager. */"); switchToIncludeFile(); // TEMP genCodeLine(" Token *head; "); genCodeLine(" public: "); generateMethodDefHeader("", cu_name, cu_name + "(TokenManager *tm)"); if (superClass != null) { genCodeLine(" : " + superClass + "()"); } genCodeLine("{"); genCodeLine(" head = NULL;"); genCodeLine(" errorHandlerCreated = false;"); genCodeLine(" ReInit(tm);"); genCodeLine("}"); switchToIncludeFile(); genCodeLine(" public: virtual ~" + cu_name + "();"); switchToMainFile(); genCodeLine(" " + cu_name + "::~" +cu_name + "()"); genCodeLine("{"); genCodeLine(" clear();"); genCodeLine("}"); generateMethodDefHeader("void", cu_name, "ReInit(TokenManager *tm)"); genCodeLine("{"); genCodeLine(" clear();"); genCodeLine(" errorHandler = new ErrorHandler();"); genCodeLine(" errorHandlerCreated = true;"); genCodeLine(" hasError = false;"); genCodeLine(" token_source = tm;"); genCodeLine(" head = token = new Token();"); genCodeLine(" token->kind = 0;"); genCodeLine(" token->next = NULL;"); genCodeLine(" jj_lookingAhead = false;"); genCodeLine(" jj_rescan = false;"); genCodeLine(" jj_done = false;"); genCodeLine(" jj_scanpos = jj_lastpos = NULL;"); genCodeLine(" jj_gc = 0;"); genCodeLine(" jj_kind = -1;"); genCodeLine(" trace_indent = 0;"); genCodeLine(" trace_enabled = " + Options.getDebugParser() + ";"); if (Options.getCacheTokens()) { genCodeLine(" token->next = jj_nt = token_source->getNextToken();"); } else { genCodeLine(" jj_ntk = -1;"); } if (jjtreeGenerated) { genCodeLine(" jjtree.reset();"); } if (Options.getErrorReporting()) { genCodeLine(" jj_gen = 0;"); if (maskindex > 0) { genCodeLine(" for (int i = 0; i < " + maskindex + "; i++) jj_la1[i] = -1;"); } } genCodeLine(" }"); genCodeLine(""); //Add clear function for deconstructor and ReInit generateMethodDefHeader("void", cu_name, "clear()"); genCodeLine("{"); genCodeLine(" //Since token manager was generate from outside,"); genCodeLine(" //parser should not take care of deleting"); genCodeLine(" //if (token_source) delete token_source;"); genCodeLine(" if (head) {"); genCodeLine(" Token *next, *t = head;"); genCodeLine(" while (t) {"); genCodeLine(" next = t->next;"); genCodeLine(" delete t;"); genCodeLine(" t = next;"); genCodeLine(" }"); genCodeLine(" }"); genCodeLine(" if (errorHandlerCreated) {"); genCodeLine(" delete errorHandler;"); genCodeLine(" }"); genCodeLine("}"); genCodeLine(""); generateMethodDefHeader("Token *", cu_name, "jj_consume_token(int kind)", "ParseException"); genCodeLine(" 
{"); if (Options.getCacheTokens()) { genCodeLine(" Token *oldToken = token;"); genCodeLine(" if ((token = jj_nt)->next != NULL) jj_nt = jj_nt->next;"); genCodeLine(" else jj_nt = jj_nt->next = token_source->getNextToken();"); } else { genCodeLine(" Token *oldToken;"); genCodeLine(" if ((oldToken = token)->next != NULL) token = token->next;"); genCodeLine(" else token = token->next = token_source->getNextToken();"); genCodeLine(" jj_ntk = -1;"); } genCodeLine(" if (token->kind == kind) {"); if (Options.getErrorReporting()) { genCodeLine(" jj_gen++;"); if (jj2index != 0) { genCodeLine(" if (++jj_gc > 100) {"); genCodeLine(" jj_gc = 0;"); genCodeLine(" for (int i = 0; i < " + jj2index + "; i++) {"); genCodeLine(" JJCalls *c = &jj_2_rtns[i];"); genCodeLine(" while (c != NULL) {"); genCodeLine(" if (c->gen < jj_gen) c->first = NULL;"); genCodeLine(" c = c->next;"); genCodeLine(" }"); genCodeLine(" }"); genCodeLine(" }"); } } if (Options.getDebugParser()) { genCodeLine(" trace_token(token, \"\");"); } genCodeLine(" return token;"); genCodeLine(" }"); if (Options.getCacheTokens()) { genCodeLine(" jj_nt = token;"); } genCodeLine(" token = oldToken;"); if (Options.getErrorReporting()) { genCodeLine(" jj_kind = kind;"); } //genCodeLine(" throw generateParseException();"); genCodeLine(" JAVACC_STRING_TYPE image = kind >= 0 ? tokenImage[kind] : tokenImage[0];"); genCodeLine(" errorHandler->handleUnexpectedToken(kind, image.substr(1, image.size() - 2), getToken(1), this), hasError = true;"); genCodeLine(" return token;"); genCodeLine(" }"); genCodeLine(""); if (jj2index != 0) { switchToMainFile(); generateMethodDefHeader("bool ", cu_name, "jj_scan_token(int kind)"); genCodeLine("{"); genCodeLine(" if (jj_scanpos == jj_lastpos) {"); genCodeLine(" jj_la--;"); genCodeLine(" if (jj_scanpos->next == NULL) {"); genCodeLine(" jj_lastpos = jj_scanpos = jj_scanpos->next = token_source->getNextToken();"); genCodeLine(" } else {"); genCodeLine(" jj_lastpos = jj_scanpos = jj_scanpos->next;"); genCodeLine(" }"); genCodeLine(" } else {"); genCodeLine(" jj_scanpos = jj_scanpos->next;"); genCodeLine(" }"); if (Options.getErrorReporting()) { genCodeLine(" if (jj_rescan) {"); genCodeLine(" int i = 0; Token *tok = token;"); genCodeLine(" while (tok != NULL && tok != jj_scanpos) { i++; tok = tok->next; }"); genCodeLine(" if (tok != NULL) jj_add_error_token(kind, i);"); if (Options.getDebugLookahead()) { genCodeLine(" } else {"); genCodeLine(" trace_scan(jj_scanpos, kind);"); } genCodeLine(" }"); } else if (Options.getDebugLookahead()) { genCodeLine(" trace_scan(jj_scanpos, kind);"); } genCodeLine(" if (jj_scanpos->kind != kind) return true;"); //genCodeLine(" if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;"); genCodeLine(" if (jj_la == 0 && jj_scanpos == jj_lastpos) { return jj_done = true; }"); genCodeLine(" return false;"); genCodeLine(" }"); genCodeLine(""); } genCodeLine(""); genCodeLine("/** Get the next Token. 
*/"); generateMethodDefHeader("Token *", cu_name, "getNextToken()"); genCodeLine("{"); if (Options.getCacheTokens()) { genCodeLine(" if ((token = jj_nt)->next != NULL) jj_nt = jj_nt->next;"); genCodeLine(" else jj_nt = jj_nt->next = token_source->getNextToken();"); } else { genCodeLine(" if (token->next != NULL) token = token->next;"); genCodeLine(" else token = token->next = token_source->getNextToken();"); genCodeLine(" jj_ntk = -1;"); } if (Options.getErrorReporting()) { genCodeLine(" jj_gen++;"); } if (Options.getDebugParser()) { genCodeLine(" trace_token(token, \" (in getNextToken)\");"); } genCodeLine(" return token;"); genCodeLine(" }"); genCodeLine(""); genCodeLine("/** Get the specific Token. */"); generateMethodDefHeader("Token *", cu_name, "getToken(int index)"); genCodeLine("{"); if (lookaheadNeeded) { genCodeLine(" Token *t = jj_lookingAhead ? jj_scanpos : token;"); } else { genCodeLine(" Token *t = token;"); } genCodeLine(" for (int i = 0; i < index; i++) {"); genCodeLine(" if (t->next != NULL) t = t->next;"); genCodeLine(" else t = t->next = token_source->getNextToken();"); genCodeLine(" }"); genCodeLine(" return t;"); genCodeLine(" }"); genCodeLine(""); if (!Options.getCacheTokens()) { generateMethodDefHeader("int", cu_name, "jj_ntk_f()"); genCodeLine("{"); genCodeLine(" if ((jj_nt=token->next) == NULL)"); genCodeLine(" return (jj_ntk = (token->next=token_source->getNextToken())->kind);"); genCodeLine(" else"); genCodeLine(" return (jj_ntk = jj_nt->kind);"); genCodeLine(" }"); genCodeLine(""); } switchToIncludeFile(); genCodeLine(" private: int jj_kind;"); if (Options.getErrorReporting()) { genCodeLine(" int **jj_expentries;"); genCodeLine(" int *jj_expentry;"); if (jj2index != 0) { switchToStaticsFile(); // For now we don't support ERROR_REPORTING in the C++ version. //genCodeLine(" static int *jj_lasttokens = new int[100];"); //genCodeLine(" static int jj_endpos;"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "jj_add_error_token(int kind, int pos)"); genCodeLine(" {"); // For now we don't support ERROR_REPORTING in the C++ version. //genCodeLine(" if (pos >= 100) return;"); //genCodeLine(" if (pos == jj_endpos + 1) {"); //genCodeLine(" jj_lasttokens[jj_endpos++] = kind;"); //genCodeLine(" } else if (jj_endpos != 0) {"); //genCodeLine(" jj_expentry = new int[jj_endpos];"); //genCodeLine(" for (int i = 0; i < jj_endpos; i++) {"); //genCodeLine(" jj_expentry[i] = jj_lasttokens[i];"); //genCodeLine(" }"); //genCodeLine(" jj_entries_loop: for (java.util.Iterator it = jj_expentries.iterator(); it.hasNext();) {"); //genCodeLine(" int[] oldentry = (int[])(it->next());"); //genCodeLine(" if (oldentry.length == jj_expentry.length) {"); //genCodeLine(" for (int i = 0; i < jj_expentry.length; i++) {"); //genCodeLine(" if (oldentry[i] != jj_expentry[i]) {"); //genCodeLine(" continue jj_entries_loop;"); //genCodeLine(" }"); //genCodeLine(" }"); //genCodeLine(" jj_expentries.add(jj_expentry);"); //genCodeLine(" break jj_entries_loop;"); //genCodeLine(" }"); //genCodeLine(" }"); //genCodeLine(" if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;"); //genCodeLine(" }"); genCodeLine(" }"); } genCodeLine(""); genCodeLine(" /** Generate ParseException. 
*/"); generateMethodDefHeader("protected: virtual void ", cu_name, "parseError()"); genCodeLine(" {"); genCodeLine(" fprintf(stderr, \"Parse error at: %d:%d, after token: %s encountered: %s\\n\", token->beginLine, token->beginColumn, addUnicodeEscapes(token->image).c_str(), addUnicodeEscapes(getToken(1)->image).c_str());"); genCodeLine(" }"); /*generateMethodDefHeader("ParseException", cu_name, "generateParseException()"); genCodeLine(" {"); //genCodeLine(" jj_expentries.clear();"); //genCodeLine(" bool[] la1tokens = new boolean[" + tokenCount + "];"); //genCodeLine(" if (jj_kind >= 0) {"); //genCodeLine(" la1tokens[jj_kind] = true;"); //genCodeLine(" jj_kind = -1;"); //genCodeLine(" }"); //genCodeLine(" for (int i = 0; i < " + maskindex + "; i++) {"); //genCodeLine(" if (jj_la1[i] == jj_gen) {"); //genCodeLine(" for (int j = 0; j < 32; j++) {"); //for (int i = 0; i < (tokenCount-1)/32 + 1; i++) { //genCodeLine(" if ((jj_la1_" + i + "[i] & (1<<j)) != 0) {"); //genCode(" la1tokens["); //if (i != 0) { //genCode((32*i) + "+"); //} //genCodeLine("j] = true;"); //genCodeLine(" }"); //} //genCodeLine(" }"); //genCodeLine(" }"); //genCodeLine(" }"); //genCodeLine(" for (int i = 0; i < " + tokenCount + "; i++) {"); //genCodeLine(" if (la1tokens[i]) {"); //genCodeLine(" jj_expentry = new int[1];"); //genCodeLine(" jj_expentry[0] = i;"); //genCodeLine(" jj_expentries.add(jj_expentry);"); //genCodeLine(" }"); //genCodeLine(" }"); //if (jj2index != 0) { //genCodeLine(" jj_endpos = 0;"); //genCodeLine(" jj_rescan_token();"); //genCodeLine(" jj_add_error_token(0, 0);"); //} //genCodeLine(" int exptokseq[][1] = new int[1];"); //genCodeLine(" for (int i = 0; i < jj_expentries.size(); i++) {"); //if (!Options.getGenerateGenerics()) //genCodeLine(" exptokseq[i] = (int[])jj_expentries.get(i);"); //else //genCodeLine(" exptokseq[i] = jj_expentries.get(i);"); //genCodeLine(" }"); genCodeLine(" return new _ParseException();");//token, NULL, tokenImage);"); genCodeLine(" }"); */ } else { genCodeLine(" /** Generate ParseException. */"); generateMethodDefHeader("protected: virtual void ", cu_name, "parseError()"); genCodeLine(" {"); genCodeLine(" fprintf(stderr, \"Parse error at: %d:%d, after token: %s encountered: %s\\n\", token->beginLine, token->beginColumn, addUnicodeEscapes(token->image).c_str(), addUnicodeEscapes(getToken(1)->image).c_str());"); genCodeLine(" }"); /*generateMethodDefHeader("ParseException", cu_name, "generateParseException()"); genCodeLine(" {"); genCodeLine(" Token *errortok = token->next;"); if (Options.getKeepLineColumn()) genCodeLine(" int line = errortok.beginLine, column = errortok.beginColumn;"); genCodeLine(" JAVACC_STRING_TYPE mess = (errortok->kind == 0) ? tokenImage[0] : errortok->image;"); if (Options.getKeepLineColumn()) genCodeLine(" return new _ParseException();");// + //"\"Parse error at line \" + line + \", column \" + column + \". " + //"Encountered: \" + mess);"); else genCodeLine(" return new _ParseException();");//\"Parse error at <unknown location>. " + //"Encountered: \" + mess);"); genCodeLine(" }");*/ } genCodeLine(""); switchToIncludeFile(); genCodeLine(" private: int trace_indent;"); genCodeLine(" private: bool trace_enabled;"); if (Options.getDebugParser()) { genCodeLine(""); genCodeLine("/** Enable tracing. */"); generateMethodDefHeader("public: virtual void", cu_name, "enable_tracing()"); genCodeLine(" {"); genCodeLine(" trace_enabled = true;"); genCodeLine(" }"); genCodeLine(""); genCodeLine("/** Disable tracing. 
*/"); generateMethodDefHeader("public: virtual void", cu_name, "disable_tracing()"); genCodeLine(" {"); genCodeLine(" trace_enabled = false;"); genCodeLine(" }"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "trace_call(const char *s)"); genCodeLine(" {"); genCodeLine(" if (trace_enabled) {"); genCodeLine(" for (int i = 0; i < trace_indent; i++) { printf(\" \"); }"); genCodeLine(" printf(\"Call: %s\\n\", s);"); genCodeLine(" }"); genCodeLine(" trace_indent = trace_indent + 2;"); genCodeLine(" }"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "trace_return(const char *s)"); genCodeLine(" {"); genCodeLine(" trace_indent = trace_indent - 2;"); genCodeLine(" if (trace_enabled) {"); genCodeLine(" for (int i = 0; i < trace_indent; i++) { printf(\" \"); }"); genCodeLine(" printf(\"Return: %s\\n\", s);"); genCodeLine(" }"); genCodeLine(" }"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "trace_token(Token *t, const char *where)"); genCodeLine(" {"); genCodeLine(" if (trace_enabled) {"); genCodeLine(" for (int i = 0; i < trace_indent; i++) { printf(\" \"); }"); genCodeLine(" printf(\"Consumed token: <kind: %d(%s), \\\"%s\\\"\", t->kind, addUnicodeEscapes(tokenImage[t->kind]).c_str(), addUnicodeEscapes(t->image).c_str());"); //genCodeLine(" if (t->kind != 0 && !tokenImage[t->kind].equals(\"\\\"\" + t->image + \"\\\"\")) {"); //genCodeLine(" System.out.print(\": \\\"\" + t->image + \"\\\"\");"); //genCodeLine(" }"); genCodeLine(" printf(\" at line %d column %d> %s\\n\", t->beginLine, t->beginColumn, where);"); genCodeLine(" }"); genCodeLine(" }"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "trace_scan(Token *t1, int t2)"); genCodeLine(" {"); genCodeLine(" if (trace_enabled) {"); genCodeLine(" for (int i = 0; i < trace_indent; i++) { printf(\" \"); }"); genCodeLine(" printf(\"Visited token: <Kind: %d(%s), \\\"%s\\\"\", t1->kind, addUnicodeEscapes(tokenImage[t1->kind]).c_str(), addUnicodeEscapes(t1->image).c_str());"); //genCodeLine(" if (t1->kind != 0 && !tokenImage[t1->kind].equals(\"\\\"\" + t1->image + \"\\\"\")) {"); //genCodeLine(" System.out.print(\": \\\"\" + t1->image + \"\\\"\");"); //genCodeLine(" }"); genCodeLine(" printf(\" at line %d column %d>; Expected token: %s\\n\", t1->beginLine, t1->beginColumn, addUnicodeEscapes(tokenImage[t2]).c_str());"); genCodeLine(" }"); genCodeLine(" }"); genCodeLine(""); } else { genCodeLine(" /** Enable tracing. */"); generateMethodDefHeader("public: void", cu_name, "enable_tracing()"); genCodeLine(" {"); genCodeLine(" }"); genCodeLine(""); genCodeLine(" /** Disable tracing. 
*/"); generateMethodDefHeader("public: void", cu_name, "disable_tracing()"); genCodeLine(" {"); genCodeLine(" }"); genCodeLine(""); } if (jj2index != 0 && Options.getErrorReporting()) { generateMethodDefHeader("void", cu_name, "jj_rescan_token()"); genCodeLine("{"); genCodeLine(" jj_rescan = true;"); genCodeLine(" for (int i = 0; i < " + jj2index + "; i++) {"); //genCodeLine(" try {"); genCodeLine(" JJCalls *p = &jj_2_rtns[i];"); genCodeLine(" do {"); genCodeLine(" if (p->gen > jj_gen) {"); genCodeLine(" jj_la = p->arg; jj_lastpos = jj_scanpos = p->first;"); genCodeLine(" switch (i) {"); for (int i = 0; i < jj2index; i++) { genCodeLine(" case " + i + ": jj_3_" + (i+1) + "(); break;"); } genCodeLine(" }"); genCodeLine(" }"); genCodeLine(" p = p->next;"); genCodeLine(" } while (p != NULL);"); //genCodeLine(" } catch(LookaheadSuccess ls) { }"); genCodeLine(" }"); genCodeLine(" jj_rescan = false;"); genCodeLine(" }"); genCodeLine(""); generateMethodDefHeader("void", cu_name, "jj_save(int index, int xla)"); genCodeLine("{"); genCodeLine(" JJCalls *p = &jj_2_rtns[index];"); genCodeLine(" while (p->gen > jj_gen) {"); genCodeLine(" if (p->next == NULL) { p = p->next = new JJCalls(); break; }"); genCodeLine(" p = p->next;"); genCodeLine(" }"); genCodeLine(" p->gen = jj_gen + xla - jj_la; p->first = token; p->arg = xla;"); genCodeLine(" }"); genCodeLine(""); } if (cu_from_insertion_point_2.size() != 0) { printTokenSetup((Token)(cu_from_insertion_point_2.get(0))); ccol = 1; for (Iterator it = cu_from_insertion_point_2.iterator(); it.hasNext();) { t = (Token)it.next(); printToken(t); } printTrailingComments(t); } genCodeLine(""); // in the include file close the class signature switchToIncludeFile(); // copy other stuff Token t1 = JavaCCGlobals.otherLanguageDeclTokenBeg; Token t2 = JavaCCGlobals.otherLanguageDeclTokenEnd; while(t1 != t2) { printToken(t1); t1 = t1.next; } if (jjtreeGenerated) { genCodeLine("JJT" + cu_name + "State jjtree;"); } genCodeLine("private: bool jj_done;"); genCodeLine(/*{*/ "\n};"); saveOutput(Options.getOutputDirectory() + File.separator + cu_name + getFileExtension(Options.getOutputLanguage())); } public static void reInit() { lookaheadNeeded = false; } }
A small beautification
src/main/java/org/javacc/parser/ParseGenCPP.java
A small beautification
<ide><path>src/main/java/org/javacc/parser/ParseGenCPP.java <ide> } <ide> <ide> genCodeLine(" struct JJCalls {"); <del> genCodeLine(" int gen;"); <del> genCodeLine(" Token *first;"); <del> genCodeLine(" int arg;"); <del> genCodeLine(" JJCalls *next;"); <add> genCodeLine(" int gen;"); <add> genCodeLine(" int arg;"); <add> genCodeLine(" Token* first;"); <add> genCodeLine(" JJCalls* next;"); <ide> genCodeLine(" ~JJCalls() { if (next) delete next; }"); <del> genCodeLine(" JJCalls() { next = NULL; arg = 0; gen = -1; first = NULL; }"); <add> genCodeLine(" JJCalls() { next = NULL; arg = 0; gen = -1; first = NULL; }"); <ide> genCodeLine(" };"); <ide> genCodeLine(""); <ide> <ide> printToken(t1); <ide> t1 = t1.next; <ide> } <del> <add> genCodeLine("\n"); <ide> if (jjtreeGenerated) { <ide> genCodeLine("JJT" + cu_name + "State jjtree;"); <add> genCodeLine("\n"); <ide> } <ide> genCodeLine("private: bool jj_done;"); <ide>
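The reordering above is purely cosmetic, but the JJCalls structure it touches is the bookkeeping record behind JavaCC's jj_2_XXX lookahead cache: one singly linked list per lookahead routine, recorded by jj_save, pruned by generation number in jj_consume_token, and replayed in jj_rescan_token. The Java back end keeps an equivalent class in its generated parsers; the sketch below is recalled from typical JavaCC-generated Java code rather than taken from this file, so treat the exact shape as an assumption (the Token stub is added only so the fragment compiles, and Java's garbage collector makes the recursive C++ destructor unnecessary):

public class JJCallsSketch {
  // Minimal stand-in for the generated Token class, just so this sketch compiles.
  static class Token { Token next; int kind; }

  // Java counterpart of the generated C++ JJCalls struct above.
  static private final class JJCalls {
    int gen;       // parser generation at which jj_save recorded this entry
    Token first;   // first token of the speculative scan
    int arg;       // lookahead limit (xla) recorded by jj_save
    JJCalls next;  // next cached call for the same routine; collected by GC, so no destructor
  }
}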
Java
apache-2.0
c2b20fb6a53808322994e2bf8fa9df839cfc18d9
0
Fiware/cloud.PaaS,hmunfru/fiware-paas,Fiware/cloud.PaaS,telefonicaid/fiware-paas,telefonicaid/fiware-paas,hmunfru/fiware-paas,hmunfru/fiware-paas,Fiware/cloud.PaaS,telefonicaid/fiware-paas
/** * (c) Copyright 2013 Telefonica, I+D. Printed in Spain (Europe). All Rights Reserved.<br> * The copyright to the software program(s) is property of Telefonica I+D. The program(s) may be used and or copied only * with the express written consent of Telefonica I+D or in accordance with the terms and conditions stipulated in the * agreement/contract under which the program(s) have been supplied. */ package com.telefonica.euro_iaas.paasmanager.claudia.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.ws.rs.core.MediaType; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.apache.log4j.Logger; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.WebResource.Builder; import com.telefonica.euro_iaas.commons.dao.EntityNotFoundException; import com.telefonica.euro_iaas.paasmanager.claudia.FirewallingClient; import com.telefonica.euro_iaas.paasmanager.exception.InfrastructureException; import com.telefonica.euro_iaas.paasmanager.exception.OpenStackException; import com.telefonica.euro_iaas.paasmanager.model.ClaudiaData; import com.telefonica.euro_iaas.paasmanager.model.Rule; import com.telefonica.euro_iaas.paasmanager.model.SecurityGroup; import com.telefonica.euro_iaas.paasmanager.model.dto.PaasManagerUser; import com.telefonica.euro_iaas.paasmanager.util.SystemPropertiesProvider; /** * @author henar muñoz */ public class OpenstackFirewallingClientImpl implements FirewallingClient { private SystemPropertiesProvider systemPropertiesProvider; private static Logger log = Logger.getLogger(OpenstackFirewallingClientImpl.class); /** * Deploy a rule in the security group. * @param claudiaData * @param rule * */ public String deployRule(ClaudiaData claudiaData, Rule rule) throws InfrastructureException { log.debug("Creating security rule " + rule.getFromPort()); String url = systemPropertiesProvider.getProperty("openstack.nova.url") + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() + "/os-security-group-rules"; log.debug("actionUri: " + url); String payload = rule.toJSON(); log.debug(payload); try { Client client = new Client(); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type( MediaType.APPLICATION_JSON).entity(payload); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.post(ClientResponse.class); String result = response.getEntity(String.class); log.debug("Status " + response.getStatus()); if (response.getStatus() == 200 || response.getStatus() == 201 || response.getStatus() == 204) { log.debug("Operation ok result " + result); org.json.JSONObject network = new org.json.JSONObject(result); String id = network.getJSONObject("security_group_rule").getString("id"); log.debug("Operation ok result " + id); rule.setIdRule(id); return id; } else { log.error("Error to create a security rule " + result); throw new InfrastructureException( "Error to create a security rule " + result); } } catch (Exception e) { String errorMessage = "Error performing post on the resource: " + url + " with payload: " + payload + " " + e.getMessage(); log.error(errorMessage); e.printStackTrace(); throw new InfrastructureException(errorMessage); } } /** * Creating the security group in Openstack. 
* * @param claudiaData * @param securityGroup * @return * @throws InfrastructureException */ public String deploySecurityGroup(ClaudiaData claudiaData, SecurityGroup securityGroup) throws InfrastructureException { log.debug("Creating security group " + securityGroup.getName()); String url = systemPropertiesProvider.getProperty("openstack.nova.url") + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() + "/os-security-groups"; log.debug("actionUri: " + url); String payload = securityGroup.toJSON(); log.debug(payload); try { Client client = new Client(); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON).entity(payload); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.post(ClientResponse.class); String result = response.getEntity(String.class); log.debug("Status " + response.getStatus()); if (response.getStatus() == 200 || response.getStatus() == 201 || response.getStatus() == 204) { log.debug("Operation ok result " + result); org.json.JSONObject network = new org.json.JSONObject(result); String id; id = network.getJSONObject("security_group").getString("id"); log.debug("Operation ok id " + id); return id; } else { log.error("Error to create a security group " + result); throw new InfrastructureException("Error to create a security group " + result); } } catch (Exception e) { String errorMessage = "Error performing post on the resource: " + url + " with payload: " + payload + " " + e.getMessage(); log.error(errorMessage); throw new InfrastructureException(errorMessage); } } /** * Destroy the rule in the security group. * @param claudiaData * @param rule */ public void destroyRule(ClaudiaData claudiaData, Rule rule) throws InfrastructureException { log.debug("Destroy security rule " + rule.getFromPort()); String url = systemPropertiesProvider.getProperty("openstack.nova.url") + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() + "/os-security-group-rules/" + rule.getIdRule(); log.debug("actionUri: " + url); try { Client client = new Client(); log.debug("url: " + url); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.delete(ClientResponse.class); String result = response.getEntity(String.class); log.debug("Status " + response.getStatus()); if (response.getStatus() == 200 || response.getStatus() == 202 || response.getStatus() == 201 || response.getStatus() == 204) { log.debug("Operation ok result " + result); } else { log.error("Error to delete a security rule " + rule.getIdRule() + ": " + result); throw new InfrastructureException("Error to delete a security rule " + rule.getIdRule() + ": " + result); } } catch (Exception e) { String errorMessage = "Error performing delete on the resource: " + url; e.printStackTrace(); throw new InfrastructureException(errorMessage); } } /** * It destroys a security group. 
*/ public void destroySecurityGroup(ClaudiaData claudiaData, SecurityGroup securityGroup) throws InfrastructureException { log.debug("Destroy security group " + securityGroup.getName()); String url = systemPropertiesProvider.getProperty("openstack.nova.url") + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() + "/os-security-groups/" + securityGroup.getIdSecurityGroup(); log.debug("actionUri: " + url); try { Client client = new Client(); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.delete(ClientResponse.class); String result = response.getEntity(String.class); log.debug("Status " + response.getStatus()); if (response.getStatus() == 200 || response.getStatus() == 202 || response.getStatus() == 201 || response.getStatus() == 204) { log.debug("Operation ok result " + result); } else { log.error("Error to delete a security group " + securityGroup + " : " + result); throw new InfrastructureException("Error to delete a security group " + securityGroup + " : " + result); } } catch (Exception e) { String errorMessage = "Error performing delete on the resource: " + url; e.printStackTrace(); throw new InfrastructureException(errorMessage); } } /** * Converting from a string (list of secGroups in json) to a list of SecurityGroups. * * @param jsonSecGroups * @return */ private List<SecurityGroup> fromStringToSecGroups(JSONObject jsonSecGroups) { List<SecurityGroup> secGroups = new ArrayList<SecurityGroup>(); JSONArray jsonSecGroupsList = jsonSecGroups.getJSONArray("security_groups"); for (Object o : jsonSecGroupsList) { SecurityGroup secGroup = new SecurityGroup(); JSONObject jsonSecGroup = (JSONObject) o; secGroup.fromJson(jsonSecGroup); secGroups.add(secGroup); } return secGroups; } private Map<String, String> getHeaders(PaasManagerUser claudiaData) { Map<String, String> headers = new HashMap<String, String>(); headers.put("X-Auth-Token", claudiaData.getToken()); headers.put("X-Auth-Project-Id", claudiaData.getTenantId()); return headers; } /** * It loads all the security groups. */ public List<SecurityGroup> loadAllSecurityGroups(ClaudiaData claudiaData) throws OpenStackException { String url = systemPropertiesProvider.getProperty(SystemPropertiesProvider.URL_NOVA_PROPERTY) + systemPropertiesProvider.getProperty(SystemPropertiesProvider.VERSION_PROPERTY) + claudiaData.getVdc() + "/os-security-groups"; log.debug("actionUri: " + url); Client client = new Client(); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.get(ClientResponse.class); if (response.getStatus() != 200) { String message = "Error calling OpenStack to recover all secGroups.
" + "Status " + response.getStatus(); throw new OpenStackException(message); } String stringAllSecurityGroup = response.getEntity(String.class); log.debug("Status " + response.getStatus()); JSONObject jsonNode = JSONObject.fromObject(stringAllSecurityGroup); List<SecurityGroup> securityGroups = fromStringToSecGroups(jsonNode); return securityGroups; } /** * Load a security group. */ public SecurityGroup loadSecurityGroup(ClaudiaData claudiaData, String securityGroupId) throws EntityNotFoundException { log.debug("Loading security group " + securityGroupId); String url = systemPropertiesProvider.getProperty("openstack.nova.url") + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() + "/os-security-groups/" + securityGroupId; log.debug("actionUri: " + url); try { Client client = new Client(); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.get(ClientResponse.class); String stringSecurityGroup = response.getEntity(String.class); JSONObject jsonsecurityGroup = JSONObject.fromObject(stringSecurityGroup); String jsonSecGroup = jsonsecurityGroup.getString("security_group"); JSONObject jsonSecGroupRaw = JSONObject.fromObject(jsonSecGroup); log.debug("Status " + response.getStatus()); if (response.getStatus() == 404) { String errorMessage = "Error loading SecurityGroup : " + securityGroupId; log.info(errorMessage); throw new EntityNotFoundException(SecurityGroup.class, securityGroupId, errorMessage); } SecurityGroup secGroup = new SecurityGroup(); secGroup.fromJson(jsonSecGroupRaw); return secGroup; } catch (Exception e) { String errorMessage = "Error loading SecurityGroup : " + securityGroupId + " " + e.getMessage(); log.error(errorMessage); throw new EntityNotFoundException(SecurityGroup.class, securityGroupId, errorMessage); } } /** * @param systemPropertiesProvider */ public void setSystemPropertiesProvider(SystemPropertiesProvider systemPropertiesProvider) { this.systemPropertiesProvider = systemPropertiesProvider; } }
core/src/main/java/com/telefonica/euro_iaas/paasmanager/claudia/impl/OpenstackFirewallingClientImpl.java
/** * (c) Copyright 2013 Telefonica, I+D. Printed in Spain (Europe). All Rights Reserved.<br> * The copyright to the software program(s) is property of Telefonica I+D. The program(s) may be used and or copied only * with the express written consent of Telefonica I+D or in accordance with the terms and conditions stipulated in the * agreement/contract under which the program(s) have been supplied. */ package com.telefonica.euro_iaas.paasmanager.claudia.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.ws.rs.core.MediaType; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.apache.log4j.Logger; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.WebResource.Builder; import com.telefonica.euro_iaas.commons.dao.EntityNotFoundException; import com.telefonica.euro_iaas.paasmanager.claudia.FirewallingClient; import com.telefonica.euro_iaas.paasmanager.exception.InfrastructureException; import com.telefonica.euro_iaas.paasmanager.exception.OpenStackException; import com.telefonica.euro_iaas.paasmanager.model.ClaudiaData; import com.telefonica.euro_iaas.paasmanager.model.Rule; import com.telefonica.euro_iaas.paasmanager.model.SecurityGroup; import com.telefonica.euro_iaas.paasmanager.model.dto.PaasManagerUser; import com.telefonica.euro_iaas.paasmanager.util.SystemPropertiesProvider; /** * @author henar muñoz */ public class OpenstackFirewallingClientImpl implements FirewallingClient { private SystemPropertiesProvider systemPropertiesProvider; private static Logger log = Logger.getLogger(OpenstackFirewallingClientImpl.class); /** * Deploy a rule in the security group. * @param claudiaData * @param rule * */ public String deployRule(ClaudiaData claudiaData, Rule rule) throws InfrastructureException { log.debug("Creating security rule " + rule.getFromPort()); String url = systemPropertiesProvider.getProperty("openstack.nova.url") + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() + "/os-security-group-rules"; log.debug("actionUri: " + url); String payload = rule.toJSON(); log.debug(payload); try { Client client = new Client(); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type( MediaType.APPLICATION_JSON).entity(payload); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.post(ClientResponse.class); String result = response.getEntity(String.class); log.debug("Status " + response.getStatus()); if (response.getStatus() == 200 || response.getStatus() == 201 || response.getStatus() == 204) { log.debug("Operation ok result " + result); org.json.JSONObject network = new org.json.JSONObject(result); String id = network.getJSONObject("security_group_rule").getString("id"); log.debug("Operation ok result " + id); rule.setIdRule(id); return id; } else { log.error("Error to create a security rule " + result); throw new InfrastructureException( "Error to create a security rule " + result); } } catch (Exception e) { String errorMessage = "Error performing post on the resource: " + url + " with payload: " + payload + " " + e.getMessage(); log.error(errorMessage); e.printStackTrace(); throw new InfrastructureException(errorMessage); } } /** * Creating the security group in Openstack.
* * @param claudiaData * @param securityGroup * @return * @throws InfrastructureException */ public String deploySecurityGroup(ClaudiaData claudiaData, SecurityGroup securityGroup) throws InfrastructureException { log.debug("Creating security group " + securityGroup.getName()); String url = systemPropertiesProvider.getProperty("openstack.nova.url") + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() + "/os-security-groups"; log.debug("actionUri: " + url); String payload = securityGroup.toJSON(); log.debug(payload); try { Client client = new Client(); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON).entity(payload); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.post(ClientResponse.class); String result = response.getEntity(String.class); log.debug("Status " + response.getStatus()); if (response.getStatus() == 200 || response.getStatus() == 201 || response.getStatus() == 204) { log.debug("Operation ok result " + result); org.json.JSONObject network = new org.json.JSONObject(result); String id; id = network.getJSONObject("security_group").getString("id"); log.debug("Operation ok id " + id); return id; } else { log.error("Error to create a security group " + result); throw new InfrastructureException("Error to create a security group " + result); } } catch (Exception e) { String errorMessage = "Error performing post on the resource: " + url + " with payload: " + payload + " " + e.getMessage(); log.error(errorMessage); throw new InfrastructureException(errorMessage); } } /** * Destroy the rule in the security group. 
* @param claudiaData * @param rule */ public void destroyRule(ClaudiaData claudiaData, Rule rule) throws InfrastructureException { log.debug("Destroy security rule " + rule.getFromPort()); String url = systemPropertiesProvider.getProperty("openstack.nova.url") + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() + "/os-security-group-rules/" + rule.getIdRule(); log.debug("actionUri: " + url); try { Client client = new Client(); log.debug("url: " + url); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.delete(ClientResponse.class); String result = response.getEntity(String.class); log.debug("Status " + response.getStatus()); if (response.getStatus() == 200 || response.getStatus() == 202 || response.getStatus() == 201 || response.getStatus() == 204) { log.debug("Operation ok result " + result); } else { log.error("Error to delete a security rule " + rule.getIdRule() + ": " + result); throw new InfrastructureException("Error to delete a security rule " + rule.getIdRule() + ": " + result); } } catch (Exception e) { String errorMessage = "Error performing delete on the resource: " + url; e.printStackTrace(); throw new InfrastructureException(errorMessage); } } public void destroySecurityGroup(ClaudiaData claudiaData, SecurityGroup securityGroup) throws InfrastructureException { log.debug("Destroy security group " + securityGroup.getName()); String url = systemPropertiesProvider.getProperty("openstack.nova.url") + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() + "/os-security-groups/" + securityGroup.getIdSecurityGroup(); log.debug("actionUri: " + url); try { Client client = new Client(); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.delete(ClientResponse.class); String result = response.getEntity(String.class); log.debug("Status " + response.getStatus()); if (response.getStatus() == 200 || response.getStatus() == 202 || response.getStatus() == 201 || response.getStatus() == 204) { log.debug("Operation ok result " + result); } else { log.error("Error to delete a security group " + securityGroup + " : " + result); throw new InfrastructureException("Error to delete a security group " + securityGroup + " : " + result); } } catch (Exception e) { String errorMessage = "Error performing delete on the resource: " + url; e.printStackTrace(); throw new InfrastructureException(errorMessage); } } /** * Converting from a string (list of secGroups in json) to a list of SecurityGroups * * @param jsonSecGroups * @return */ private List<SecurityGroup> fromStringToSecGroups(JSONObject jsonSecGroups) { List<SecurityGroup> secGroups = new ArrayList<SecurityGroup>(); JSONArray jsonSecGroupsList = jsonSecGroups.getJSONArray("security_groups"); for (Object o : jsonSecGroupsList) { SecurityGroup secGroup = new SecurityGroup(); JSONObject jsonSecGroup = (JSONObject) o; secGroup.fromJson(jsonSecGroup); secGroups.add(secGroup); } return secGroups; } private Map<String, String> getHeaders(PaasManagerUser
claudiaData) { Map<String, String> headers = new HashMap<String, String>(); headers.put("X-Auth-Token", claudiaData.getToken()); headers.put("X-Auth-Project-Id", claudiaData.getTenantId()); return headers; } /* * (non-Javadoc) * @see * com.telefonica.euro_iaas.paasmanager.claudia.FirewallingClient#loadAllSecurityGroups(com.telefonica.euro_iaas * .paasmanager.model.ClaudiaData) */ public List<SecurityGroup> loadAllSecurityGroups(ClaudiaData claudiaData) throws OpenStackException { String url = systemPropertiesProvider.getProperty(SystemPropertiesProvider.URL_NOVA_PROPERTY) + systemPropertiesProvider.getProperty(SystemPropertiesProvider.VERSION_PROPERTY) + claudiaData.getVdc() + "/os-security-groups"; log.debug("actionUri: " + url); Client client = new Client(); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.get(ClientResponse.class); if (response.getStatus() != 200) { String message = "Error calling OpenStack to recover all secGroups. " + "Status " + response.getStatus(); throw new OpenStackException(message); } String stringAllSecurityGroup = response.getEntity(String.class); log.debug("Status " + response.getStatus()); JSONObject jsonNode = JSONObject.fromObject(stringAllSecurityGroup); List<SecurityGroup> securityGroups = fromStringToSecGroups(jsonNode); return securityGroups; } /** * Load a security group. */ public SecurityGroup loadSecurityGroup(ClaudiaData claudiaData, String securityGroupId) throws EntityNotFoundException { log.debug("Loading security group " + securityGroupId); String url = systemPropertiesProvider.getProperty("openstack.nova.url") + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() + "/os-security-groups/" + securityGroupId; log.debug("actionUri: " + url); try { Client client = new Client(); ClientResponse response = null; WebResource wr = client.resource(url); Builder builder = wr.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON); Map<String, String> header = getHeaders(claudiaData.getUser()); for (String key : header.keySet()) { builder = builder.header(key, header.get(key)); } response = builder.get(ClientResponse.class); String stringSecurityGroup = response.getEntity(String.class); JSONObject jsonsecurityGroup = JSONObject.fromObject(stringSecurityGroup); String jsonSecGroup = jsonsecurityGroup.getString("security_group"); JSONObject jsonSecGroupRaw = JSONObject.fromObject(jsonSecGroup); log.debug("Status " + response.getStatus()); if (response.getStatus() == 404) { String errorMessage = "Error loading SecurityGroup : " + securityGroupId; log.info(errorMessage); throw new EntityNotFoundException(SecurityGroup.class, securityGroupId, errorMessage); } SecurityGroup secGroup = new SecurityGroup(); secGroup.fromJson(jsonSecGroupRaw); return secGroup; } catch (Exception e) { String errorMessage = "Error loading SecurityGroup : " + securityGroupId + " " + e.getMessage(); log.error(errorMessage); throw new EntityNotFoundException(SecurityGroup.class, securityGroupId, errorMessage); } } /** * @param systemPropertiesProvider */ public void setSystemPropertiesProvider(SystemPropertiesProvider systemPropertiesProvider) { this.systemPropertiesProvider = systemPropertiesProvider; } }
style in openstackfirewall
core/src/main/java/com/telefonica/euro_iaas/paasmanager/claudia/impl/OpenstackFirewallingClientImpl.java
style in openstackfirewall
<ide><path>ore/src/main/java/com/telefonica/euro_iaas/paasmanager/claudia/impl/OpenstackFirewallingClientImpl.java <ide> public String deployRule(ClaudiaData claudiaData, Rule rule) throws InfrastructureException { <ide> log.debug("Creating security rule " + rule.getFromPort()); <ide> String url = systemPropertiesProvider.getProperty("openstack.nova.url") <del> + systemPropertiesProvider.getProperty("openstack.version") <del> + claudiaData.getVdc() + "/os-security-group-rules"; <add> + systemPropertiesProvider.getProperty("openstack.version") <add> + claudiaData.getVdc() + "/os-security-group-rules"; <ide> log.debug("actionUri: " + url); <ide> <ide> String payload = rule.toJSON(); <ide> * @throws InfrastructureException <ide> */ <ide> public String deploySecurityGroup(ClaudiaData claudiaData, SecurityGroup securityGroup) <del> throws InfrastructureException { <add> throws InfrastructureException { <ide> <ide> log.debug("Creating security group " + securityGroup.getName()); <ide> String url = systemPropertiesProvider.getProperty("openstack.nova.url") <del> + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() <del> + "/os-security-groups"; <add> + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() <add> + "/os-security-groups"; <ide> log.debug("actionUri: " + url); <ide> <ide> String payload = securityGroup.toJSON(); <ide> } catch (Exception e) { <ide> <ide> String errorMessage = "Error performing post on the resource: " + url + " with payload: " + payload + " " <del> + e.getMessage(); <add> + e.getMessage(); <ide> log.error(errorMessage); <ide> <ide> throw new InfrastructureException(errorMessage); <ide> * @param rule <ide> */ <ide> public void destroyRule(ClaudiaData claudiaData, Rule rule) <del> throws InfrastructureException { <add> throws InfrastructureException { <ide> <ide> log.debug("Destroy security rule " + rule.getFromPort()); <ide> String url = systemPropertiesProvider.getProperty("openstack.nova.url") <del> + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() <del> + "/os-security-group-rules/" + rule.getIdRule(); <add> + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() <add> + "/os-security-group-rules/" + rule.getIdRule(); <ide> log.debug("actionUri: " + url); <ide> <ide> try { <ide> log.debug("Operation ok result " + result); <ide> } else { <ide> log.error("Error to delete a security rule " + rule.getIdRule() + ": " + result); <del> throw new InfrastructureException("Error to delete a security rule " + rule.getIdRule() + ": " + result); <add> throw new InfrastructureException("Error to delete a security rule " + rule.getIdRule() <add> + ": " + result); <ide> } <ide> <ide> } catch (Exception e) { <ide> <ide> } <ide> <add> /** <add> * It destroys a security group. 
<add> */ <ide> public void destroySecurityGroup(ClaudiaData claudiaData, SecurityGroup securityGroup) <del> throws InfrastructureException { <add> throws InfrastructureException { <ide> log.debug("Destroy security group " + securityGroup.getName()); <ide> String url = systemPropertiesProvider.getProperty("openstack.nova.url") <del> + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() <del> + "/os-security-groups/" + securityGroup.getIdSecurityGroup(); <add> + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() <add> + "/os-security-groups/" + securityGroup.getIdSecurityGroup(); <ide> log.debug("actionUri: " + url); <ide> <ide> try { <ide> } <ide> <ide> /** <del> * Converting from a string (list of secGrous in json) to a list of SecurityGroups <add> * Converting from a string (list of secGrous in json) to a list of SecurityGroups. <ide> * <ide> * @param jsonSecGroups <ide> * @return <ide> <ide> } <ide> <del> /* <del> * (non-Javadoc) <del> * @see <del> * com.telefonica.euro_iaas.paasmanager.claudia.FirewallingClient#loadAllSecurityGroups(com.telefonica.euro_iaas <del> * .paasmanager.model.ClaudiaData) <add> /** <add> * It loads all the security groups. <ide> */ <ide> public List<SecurityGroup> loadAllSecurityGroups(ClaudiaData claudiaData) throws OpenStackException { <ide> <ide> String url = systemPropertiesProvider.getProperty(SystemPropertiesProvider.URL_NOVA_PROPERTY) <del> + systemPropertiesProvider.getProperty(SystemPropertiesProvider.VERSION_PROPERTY) <del> + claudiaData.getVdc() + "/os-security-groups"; <add> + systemPropertiesProvider.getProperty(SystemPropertiesProvider.VERSION_PROPERTY) <add> + claudiaData.getVdc() + "/os-security-groups"; <ide> log.debug("actionUri: " + url); <ide> <ide> Client client = new Client(); <ide> <ide> <ide> public SecurityGroup loadSecurityGroup(ClaudiaData claudiaData, String securityGroupId) <del> throws EntityNotFoundException { <add> throws EntityNotFoundException { <ide> <ide> log.debug("Loading security group " + securityGroupId); <ide> String url = systemPropertiesProvider.getProperty("openstack.nova.url") <del> + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() <del> + "/os-security-groups/" + securityGroupId; <add> + systemPropertiesProvider.getProperty("openstack.version") + claudiaData.getVdc() <add> + "/os-security-groups/" + securityGroupId; <ide> log.debug("actionUri: " + url); <ide> <ide> try {
Java
mit
bfba5d620e8e72ab0efd2a6120c4d9ce17827584
0
CS2103AUG2016-F11-C1/main,CS2103AUG2016-F11-C1/main
package seedu.todo.guitests; import static org.junit.Assert.assertEquals; import java.time.LocalDateTime; import org.junit.Before; import org.junit.Test; import seedu.todo.commons.util.DateUtil; import seedu.todo.controllers.TagController; import seedu.todo.controllers.UntagController; import seedu.todo.controllers.concerns.Renderer; import seedu.todo.models.Event; import seedu.todo.models.Task; /** * @@author A0093907W */ public class TagUntagCommandTest extends GuiTest { private final LocalDateTime oneDayFromNow = LocalDateTime.now().plusDays(1); private final String oneDayFromNowString = DateUtil.formatDate(oneDayFromNow); private final String oneDayFromNowIsoString = DateUtil.formatIsoDate(oneDayFromNow); private final LocalDateTime twoDaysFromNow = LocalDateTime.now().plusDays(2); private final String twoDaysFromNowString = DateUtil.formatDate(twoDaysFromNow); private final String twoDaysFromNowIsoString = DateUtil.formatIsoDate(twoDaysFromNow); private final LocalDateTime oneDayToNow = LocalDateTime.now().minusDays(1); private final String oneDayToNowString = DateUtil.formatDate(oneDayToNow); private final String oneDayToNowIsoString = DateUtil.formatIsoDate(oneDayToNow); private String commandAdd1 = String.format("add task Buy KOI by \"%s 8pm\"", oneDayToNowString); private Task task1 = new Task(); private String commandAdd2 = String.format("add task Buy Milk by \"%s 9pm\"", oneDayFromNowString); private Task task2 = new Task(); private String commandAdd3 = String.format("add event Some Event from \"%s 4pm\" to \"%s 5pm\"", twoDaysFromNowString, twoDaysFromNowString); private Event event3 = new Event(); @Before public void initFixtures() { // Need to re-initialize these on each test because we are modifying // them on tags. task1 = new Task(); task2 = new Task(); event3 = new Event(); task1.setName("Buy KOI"); task1.setDueDate(DateUtil.parseDateTime( String.format("%s 20:00:00", oneDayToNowIsoString))); task2.setName("Buy Milk"); task2.setDueDate(DateUtil.parseDateTime( String.format("%s 21:00:00", oneDayFromNowIsoString))); event3.setName("Some Event"); event3.setStartDate(DateUtil.parseDateTime( String.format("%s 16:00:00", twoDaysFromNowIsoString))); event3.setEndDate(DateUtil.parseDateTime( String.format("%s 17:00:00", twoDaysFromNowIsoString))); console.runCommand("clear"); console.runCommand(commandAdd1); console.runCommand(commandAdd2); console.runCommand(commandAdd3); } @Test public void tag_task_success() { console.runCommand("tag 1 argh"); task1.addTag("argh"); assertTaskVisible(task1); } @Test public void tag_event_success() { console.runCommand("tag 3 zzz"); event3.addTag("zzz"); assertEventVisible(event3); } @Test public void untag_task_success() { console.runCommand("tag 1 bugs"); console.runCommand("untag 1 bugs"); assertTaskVisible(task1); } @Test public void untag_event_success() { console.runCommand("tag 3 errors"); console.runCommand("untag 3 errors"); assertEventVisible(event3); } @Test public void tag_missingIndex_fail() { console.runCommand("tag"); String consoleMessage = Renderer.MESSAGE_DISAMBIGUATE + "\n\n" + TagController.MESSAGE_MISSING_INDEX_AND_TAG_NAME; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void tag_missingTag_fail() { console.runCommand("tag 1"); String consoleMessage = Renderer.MESSAGE_DISAMBIGUATE + "\n\n" + TagController.MESSAGE_TAG_NAME_NOT_FOUND; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void untag_missingIndex_fail() { console.runCommand("untag"); String consoleMessage = 
Renderer.MESSAGE_DISAMBIGUATE + "\n\n" + UntagController.MESSAGE_MISSING_INDEX_AND_TAG_NAME; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void untag_missingTag_fail() { console.runCommand("untag 1"); String consoleMessage = Renderer.MESSAGE_DISAMBIGUATE + "\n\n" + UntagController.MESSAGE_TAG_NAME_NOT_FOUND; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void untag_tagNotExist_fail() { console.runCommand("untag 1 sucks"); String consoleMessage = Renderer.MESSAGE_DISAMBIGUATE + "\n\n" + UntagController.MESSAGE_TAG_NAME_DOES_NOT_EXIST; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void tag_taskWithinLimit_success() { for (int i = 0; i < Task.MAX_TAG_LIST_SIZE; i++) { String tag = String.format("zz%s", i + 1); console.runCommand(String.format("tag 1 %s", tag)); task1.addTag(tag); } assertTaskVisible(task1); assertEquals(TagController.MESSAGE_TAG_SUCCESS, console.getConsoleTextArea()); } @Test public void tag_taskExceedLimit_fail() { for (int i = 0; i < Task.MAX_TAG_LIST_SIZE + 1; i++) { String tag = String.format("zz%s", i + 1); console.runCommand(String.format("tag 1 %s", tag)); task1.addTag(tag); } assertTaskVisible(task1); String consoleMessage = Renderer.MESSAGE_DISAMBIGUATE + "\n\n" + TagController.MESSAGE_EXCEED_TAG_SIZE; assertEquals(consoleMessage, console.getConsoleTextArea()); } }
src/test/java/seedu/todo/guitests/TagUntagCommandTest.java
package seedu.todo.guitests; import static org.junit.Assert.assertEquals; import java.time.LocalDateTime; import org.junit.Before; import org.junit.Test; import seedu.todo.commons.util.DateUtil; import seedu.todo.controllers.TagController; import seedu.todo.controllers.UntagController; import seedu.todo.controllers.concerns.Renderer; import seedu.todo.models.Event; import seedu.todo.models.Task; /** * @@author A0093907W */ public class TagUntagCommandTest extends GuiTest { private final LocalDateTime oneDayFromNow = LocalDateTime.now().plusDays(1); private final String oneDayFromNowString = DateUtil.formatDate(oneDayFromNow); private final String oneDayFromNowIsoString = DateUtil.formatIsoDate(oneDayFromNow); private final LocalDateTime twoDaysFromNow = LocalDateTime.now().plusDays(2); private final String twoDaysFromNowString = DateUtil.formatDate(twoDaysFromNow); private final String twoDaysFromNowIsoString = DateUtil.formatIsoDate(twoDaysFromNow); private final LocalDateTime oneDayToNow = LocalDateTime.now().minusDays(1); private final String oneDayToNowString = DateUtil.formatDate(oneDayToNow); private final String oneDayToNowIsoString = DateUtil.formatIsoDate(oneDayToNow); private String commandAdd1 = String.format("add task Buy KOI by \"%s 8pm\"", oneDayToNowString); private Task task1 = new Task(); private String commandAdd2 = String.format("add task Buy Milk by \"%s 9pm\"", oneDayFromNowString); private Task task2 = new Task(); private String commandAdd3 = String.format("add event Some Event from \"%s 4pm\" to \"%s 5pm\"", twoDaysFromNowString, twoDaysFromNowString); private Event event3 = new Event(); @Before public void initFixtures() { // Need to re-initialize these on each test because we are modifying // them on tags. task1 = new Task(); task2 = new Task(); event3 = new Event(); task1.setName("Buy KOI"); task1.setDueDate(DateUtil.parseDateTime( String.format("%s 20:00:00", oneDayToNowIsoString))); task2.setName("Buy Milk"); task2.setDueDate(DateUtil.parseDateTime( String.format("%s 21:00:00", oneDayFromNowIsoString))); event3.setName("Some Event"); event3.setStartDate(DateUtil.parseDateTime( String.format("%s 16:00:00", twoDaysFromNowIsoString))); event3.setEndDate(DateUtil.parseDateTime( String.format("%s 17:00:00", twoDaysFromNowIsoString))); console.runCommand("clear"); console.runCommand(commandAdd1); console.runCommand(commandAdd2); console.runCommand(commandAdd3); } @Test public void tag_task_success() { console.runCommand("tag 1 argh"); task1.addTag("argh"); assertTaskVisible(task1); } @Test public void tag_event_success() { console.runCommand("tag 3 zzz"); event3.addTag("zzz"); assertEventVisible(event3); } @Test public void untag_task_success() { console.runCommand("tag 1 bugs"); console.runCommand("untag 1 bugs"); assertTaskVisible(task1); } @Test public void untag_event_success() { console.runCommand("tag 3 errors"); console.runCommand("untag 3 errors"); assertEventVisible(event3); } @Test public void tag_missingIndex_fail() { console.runCommand("tag"); String consoleMessage = Renderer.MESSAGE_DISAMBIGUATE + "\n\n" + TagController.MESSAGE_MISSING_INDEX_AND_TAG_NAME; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void tag_missingTag_fail() { console.runCommand("tag 1"); String consoleMessage = Renderer.MESSAGE_DISAMBIGUATE + "\n\n" + TagController.MESSAGE_TAG_NAME_NOT_FOUND; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void untag_missingIndex_fail() { console.runCommand("untag"); String consoleMessage = 
Renderer.MESSAGE_DISAMBIGUATE + "\n\n" + UntagController.MESSAGE_MISSING_INDEX_AND_TAG_NAME; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void untag_missingTag_fail() { console.runCommand("untag 1"); String consoleMessage = Renderer.MESSAGE_DISAMBIGUATE + "\n\n" + UntagController.MESSAGE_TAG_NAME_NOT_FOUND; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void untag_tagNotExist_fail() { console.runCommand("untag 1 sucks"); String consoleMessage = Renderer.MESSAGE_DISAMBIGUATE + "\n\n" + UntagController.MESSAGE_TAG_NAME_DOES_NOT_EXIST; assertEquals(consoleMessage, console.getConsoleTextArea()); } }
Test maximum limits
src/test/java/seedu/todo/guitests/TagUntagCommandTest.java
Test maximum limits
<ide><path>rc/test/java/seedu/todo/guitests/TagUntagCommandTest.java <ide> assertEquals(consoleMessage, console.getConsoleTextArea()); <ide> } <ide> <add> @Test <add> public void tag_taskWithinLimit_success() { <add> for (int i = 0; i < Task.MAX_TAG_LIST_SIZE; i++) { <add> String tag = String.format("zz%s", i + 1); <add> console.runCommand(String.format("tag 1 %s", tag)); <add> task1.addTag(tag); <add> } <add> assertTaskVisible(task1); <add> assertEquals(TagController.MESSAGE_TAG_SUCCESS, console.getConsoleTextArea()); <add> } <add> <add> @Test <add> public void tag_taskExceedLimit_fail() { <add> for (int i = 0; i < Task.MAX_TAG_LIST_SIZE + 1; i++) { <add> String tag = String.format("zz%s", i + 1); <add> console.runCommand(String.format("tag 1 %s", tag)); <add> task1.addTag(tag); <add> } <add> assertTaskVisible(task1); <add> String consoleMessage = Renderer.MESSAGE_DISAMBIGUATE + "\n\n" <add> + TagController.MESSAGE_EXCEED_TAG_SIZE; <add> assertEquals(consoleMessage, console.getConsoleTextArea()); <add> } <ide> }
JavaScript
mpl-2.0
fcc4c96280f84fffc5f608bf619235d2242e977d
0
slidewiki/user-service,slidewiki/user-service
/* Handles the requests by executing stuff and replying to the client. Uses promises to get stuff done. */ 'use strict'; const boom = require('boom'), //Boom gives us some predefined http codes and proper responses co = require('../common'), userCtrl = require('../database/user'), usergroupCtrl = require('../database/usergroup'), config = require('../configuration'), jwt = require('./jwt'), Joi = require('joi'), util = require('./util'), request = require('request'), PLATFORM_INFORMATION_URL = require('../configs/microservices').platform.uri + '', queueAPI = require('../queue/api.js'); module.exports = { register: (req, res) => { let user = { surname: util.parseAPIParameter(req.payload.surname), forename: util.parseAPIParameter(req.payload.forename), username: util.parseAPIParameter(req.payload.username).replace(/\s/g,''), email: util.parseAPIParameter(req.payload.email).toLowerCase(), password: co.hashPassword(util.parseAPIParameter(req.payload.password), config.SALT), frontendLanguage: util.parseAPIParameter(req.payload.language), country: '', picture: '', description: '', organization: util.parseAPIParameter(req.payload.organization), registered: (new Date()).toISOString(), providers: [], activate_secret: require('crypto').randomBytes(64).toString('hex'), authorised: false }; //check if username already exists return util.isIdentityAssigned(user.email, user.username) .then((result) => { console.log('identity already taken: ', user.email, user.username, result); if (result.assigned === false) { //Send email before creating the user return util.sendEMail(user.email, 'Your new account on SlideWiki', 'Dear '+user.forename+' '+user.surname+',\nHappy welcome to SlideWiki! You have registered your account with the username '+user.username+'. In order to activate your account please use the following link: <a href="https://'+req.info.host+'/user/activate/'+user.email+'/'+user.activate_secret+'">Activate Account</a>\nGreetings,\nThe SlideWiki team') .then(() => { return userCtrl.create(user) .then((result) => { // console.log('register: user create result: ', result); if (result[0] !== undefined && result[0] !== null) { //Error return res(boom.badData('registration failed because data is wrong: ', co.parseAjvValidationErrors(result))); } if (result.insertedCount === 1) { //success return res({ userid: result.insertedId, secret: user.activate_secret }); } res(boom.badImplementation()); }) .catch((error) => { console.log('Error on creating a user:', error); res(boom.badImplementation('Error', error)); }); }) .catch((error) => { console.log('Error sending the email:', error); return res(boom.badImplementation('Error', error)); }); } else { let message = 'The username and email is already taken'; if (result.email === false) message = 'The username is already taken'; if (result.username === false) message = 'The email is already taken'; return res(boom.conflict(message)); } }) .catch((error) => { delete user.password; console.log('Error:', error, 'with user:', user); res(boom.badImplementation('Error', error)); }); }, activateUser: (req, res) => { const email = util.parseAPIParameter(req.params.email), secret = util.parseAPIParameter(req.params.secret); const query = { email: email, activate_secret: secret, authorised: false }; console.log('trying to activate ', email); return userCtrl.partlyUpdate(query, { $set: { authorised: true } }) .then((result) => { // console.log(result.result); if (result.result.ok === 1 && result.result.n === 1) { //success return res() .redirect(PLATFORM_INFORMATION_URL) 
.temporary(true); } return res(boom.forbidden('Wrong credentials were used')); }) .catch((error) => { console.log('Error:', error); return res(boom.badImplementation()); }); }, login: (req, res) => { const query = { email: req.payload.email.toLowerCase(), password: co.hashPassword(decodeURI(req.payload.password), config.SALT) }; console.log('try logging in with email', query.email); return userCtrl.find(query) .then((cursor) => cursor.toArray()) .then((result) => { switch (result.length) { case 0: res(boom.notFound('The credentials are wrong', '{"email":"", "password": ""}')); break; case 1: console.log('login: user object:', result[0]._id, result[0].username, result[0].registered); //TODO: call authorization service for OAuth2 token if (result[0].deactivated === true) { res(boom.locked('This user is deactivated.')); break; } //check if authorised if (result[0].authorised === false) { res(boom.locked('User is not authorised yet.')); break; } //check if SPAM if (result[0].suspended === true) { res(boom.forbidden('The user is marked as SPAM.')); break; } res({ userid: result[0]._id, username: result[0].username, picture: result[0].picture, access_token: 'dummy', expires_in: 0 }) .header(config.JWT.HEADER, jwt.createToken({ userid: result[0]._id, username: result[0].username, isReviewer: result[0].isReviewer })); break; default: res(boom.badImplementation('Found multiple users')); break; } }) .catch((error) => { console.log('Error: ', error); res(boom.badImplementation(error)); }); }, getUser: (req, res) => { //check if the request comes from the right user (have the right JWT data) const isUseridMatching = util.isJWTValidForTheGivenUserId(req); if (!isUseridMatching) { return res(boom.forbidden('You cannot get detailed information about another user')); } return userCtrl.read(util.parseStringToInteger(req.params.id)) .then((user) => { //console.log('getUser: got user:', user); if (user !== undefined && user !== null && user.username !== undefined) { if (user.deactivated === true) { return res(boom.locked('This user is deactivated.')); } //get groups of a user return usergroupCtrl.readGroupsOfUser(req.params.id) .then((array) => { user.groups = array; return res(prepareDetailedUserData(user)); }); } else { return res(boom.notFound()); } }) .catch((error) => { console.log('Error while getting user with id '+req.params.id+':', error); res(boom.notFound('Wrong user id', error)); }); }, //add attribute "deactivated" to user document deleteUser: (req, res) => { let userid = util.parseStringToInteger(req.params.id); //check if the user which should be deleted have the right JWT data const isUseridMatching = util.isJWTValidForTheGivenUserId(req); if (!isUseridMatching) { return res(boom.forbidden('You cannot delete another user')); } const findQuery = { _id: userid }; const updateQuery = { $set: { deactivated: true } }; return userCtrl.partlyUpdate(findQuery, updateQuery) .then((result) => { // console.log('deleteUser: delete with', userid, 'results in', result.result); if (result.result.ok === 1 && result.result.n === 1) { //success return res(); } res(boom.notFound('Deletion failed - no matched id')); }) .catch((error) => { console.log('Error while deleting user with id '+userid+':', error); return res(boom.badImplementation('Deletion failed', error)); }); }, //User profile updateUserPasswd: (req, res) => { let oldPassword = co.hashPassword(req.payload.oldPassword, config.SALT); let newPassword = co.hashPassword(req.payload.newPassword, config.SALT); const user__id = 
util.parseStringToInteger(req.params.id); //check if the user which should be updated have the right JWT data const isUseridMatching = util.isJWTValidForTheGivenUserId(req); if (!isUseridMatching) { return res(boom.forbidden('You cannot change the password of another user')); } //check if old password is correct return userCtrl.find({ _id: user__id, password: oldPassword }) .then((cursor) => cursor.count()) .then((count) => { switch (count) { case 0: res(boom.notFound('There is no user with this Id and password')); break; case 1: const findQuery = { _id: user__id, password: oldPassword }, updateQuery = { $set: { password: newPassword } }; return userCtrl.partlyUpdate(findQuery, updateQuery) .then((result) => { console.log('handler: updateUserPasswd:', result.result); if (result.result.ok === 1 && result.result.n === 1) { //success return res(); } res(boom.badImplementation()); }) .catch((error) => { console.log('Error while updating password of user with id '+user__id+':', error); res(boom.badImplementation('Update password failed', error)); }); break; default: //should not happen console.log('BIG PROBLEM: multiple users in the database have the same id and password!'); res(boom.badImplementation('Found multiple users')); break; } }) .catch((error) => { console.log('Error while updating password of user with id '+user__id+':', error); return res(boom.badImplementation('Update password failed', error)); }); }, updateUserProfile: (req, res) => { let email = req.payload.email.toLowerCase(); let user = req.payload; user.email = email; user._id = util.parseStringToInteger(req.params.id); //check if the user which should be updated have the right JWT data const isUseridMatching = util.isJWTValidForTheGivenUserId(req); if (!isUseridMatching) { return res(boom.forbidden('You cannot change the user profile of another user')); } let updateCall = function() { const findQuery = { _id: user._id }, updateQuery = { $set: { email: email, username: util.parseAPIParameter(req.payload.username), surname: util.parseAPIParameter(req.payload.surname), forename: util.parseAPIParameter(req.payload.forename), frontendLanguage: util.parseAPIParameter(req.payload.language), country: util.parseAPIParameter(req.payload.country), picture: util.parseAPIParameter(req.payload.picture), description: util.parseAPIParameter(req.payload.description), organization: util.parseAPIParameter(req.payload.organization) } }; return userCtrl.partlyUpdate(findQuery, updateQuery) .then((result) => { if (result.result.ok === 1 && result.result.n === 1) { //success return res(); } console.log('Update query failed with query and result:', updateQuery, result.result); return res(boom.badImplementation()); }) .catch((error) => { console.log('Update query failed with query and error:', updateQuery, error); return res(boom.notFound('Profile update failed', error)); }); }; //find user and check if username has changed return userCtrl.find({_id: user._id}) .then((cursor) => cursor.project({username: 1, email: 1})) .then((cursor2) => cursor2.next()) .then((document) => { // console.log('handler: updateUserProfile: got user as document', document); if (document === null) return res(boom.notFound('No user with the given id')); const oldUsername = document.username, oldEMail = document.email; if (decodeURI(req.payload.username).toLowerCase() !== oldUsername.toLowerCase()) { return res(boom.notAcceptable('It is impossible to change the username!')); } if (email === oldEMail) { return updateCall(); } else { //check if email already exists return 
isEMailAlreadyTaken(email) .then((isTaken) => { if (isTaken === false) { return updateCall(); } else { return res(boom.conflict('The email is already taken')); } }); } }) .catch((error1) => { console.log('handler: updateUserProfile: Error while getting user', error1, 'the user is:', user); const error = boom.badImplementation('Unknown error'); error.output.payload.custom = error1; return res(error); }); }, getPublicUser: (req, res) => { let identifier = decodeURI(req.params.identifier).replace(/\s/g,''); let query = {}; //validate identifier if its an integer or a username const integerSchema = Joi.number().integer(); const validationResult = Joi.validate(identifier, integerSchema); if (validationResult.error === null) { query._id = validationResult.value; } else { // console.log('no integer try reading as username'); let schema = Joi.string().regex(/^[\w\-.~]*$/); let valid = Joi.validate(identifier, schema); if (valid.error === null) { query.username = valid.value; } else { console.log('username is invalid:', identifier, valid.error); return res(boom.notFound()); } } // check for static user first let staticUser = userCtrl.findStaticUser(query); if (staticUser) { return res(preparePublicUserData(staticUser)); } //if no static user and username is given then use regex case insensitive if (query.username) query.username = new RegExp('^' + query.username + '$', 'i'); console.log(query); return userCtrl.find(query) .then((cursor) => cursor.toArray()) .then((array) => { console.log('handler: getPublicUser: ', query, array); if (array.length === 0) return res(boom.notFound()); if (array.length > 1) return res(boom.badImplementation()); if (array[0].deactivated === true) { return res(boom.locked('This user is deactivated.')); } //check if authorised if (array[0].authorised === false) { return res(boom.locked('User is not authorised yet.')); } //check if SPAM if (array[0].suspended === true) { return res(boom.forbidden('The user is marked as SPAM.')); } res(preparePublicUserData(array[0])); }) .catch((error) => { console.log('handler: getPublicUser: Error', error); res(boom.notFound('Wrong user identifier?', error)); }); }, checkUsername: (req, res) => { // console.log(req.params); let username = decodeURI(req.params.username).replace(/\s/g,''); let schema = Joi.string().regex(/^[\w\-.~]*$/); let valid = Joi.validate(username, schema); if (valid.error === null) { username = valid.value; } else { console.log('username is invalid:', username, valid.error); return res({taken: true, alsoTaken: []}); } return userCtrl.find({ username: new RegExp('^' + username + '$', 'i') }) .then((cursor) => cursor.count()) .then((count) => { //console.log('checkUsername: username:', username, ' cursor.count():', count); // init this here because we may have to include a static user name let staticUserNames = []; if (count === 0) { // also check if it's in static users let staticUser = userCtrl.findStaticUserByName(username); if (staticUser) { staticUserNames.push(staticUser.username); } else { // not found as before return res({taken: false, alsoTaken: []}); } } const query = { username: new RegExp(username + '*', 'i') }; return userCtrl.find(query) .then((cursor1) => cursor1.project({username: 1})) .then((cursor2) => cursor2.limit(40)) .then((cursor3) => cursor3.toArray()) .then((array) => { //console.log('handler: checkUsername: similar usernames', array); let alreadyTaken = array.reduce((prev, curr) => { prev.push(curr.username); return prev; }, staticUserNames); return res({taken: true, alsoTaken: 
alreadyTaken}); }); }) .catch((error) => { console.log('handler: checkUsername: error', error); res(boom.badImplementation(error)); }); }, searchUser: (req, res) => { let username = decodeURI(req.params.username).replace(/\s/g,''); let schema = Joi.string().regex(/^[\w\-.~]*$/); let valid = Joi.validate(username, schema); if (valid.error === null) { username = valid.value + '+'; } else { if (username === '') { //search random users - a* matches everyone username = 'a*'; } else { console.log('username is invalid:', username, valid.error); return res({success: false, results: []}); } } const query = { username: new RegExp(username, 'i'), deactivated: { $not: { $eq: true } }, suspended: { $not: { $eq: true } }, authorised: { $not: { $eq: false } } }; if (username === undefined || username === null || username === '') { query.username = new RegExp('\w*', 'i'); } console.log('query:', query); return userCtrl.find(query) .then((cursor1) => cursor1.project({username: 1, _id: 1, picture: 1, country: 1, organization: 1})) .then((cursor2) => cursor2.limit(8)) .then((cursor3) => cursor3.toArray()) .then((array) => { // console.log('handler: searchUser: similar usernames', array); let data = array.reduce((prev, curr) => { let description = curr.username; if (curr.organization) description = description + ', ' + curr.organization; if (curr.country) description = description + ' (' + curr.country + ')'; prev.push({ name: description, value: encodeURIComponent(JSON.stringify({ userid: curr._id, picture: curr.picture, country: curr.country, organization: curr.organization, username: curr.username })) }); return prev; }, []); return res({success: true, results: data}); }) .catch((error) => { console.log('handler: searchUser: error', error, 'with query:', query); res({success: false, results: []}); }); }, checkEmail: (req, res) => { const email = decodeURI(req.params.email).replace(/\s/g,'').toLowerCase(); return userCtrl.find({ email: email }) .then((cursor) => cursor.count()) .then((count) => { //console.log('checkEmail: username:', username, ' cursor.count():', count); if (count === 0) { return res({taken: false}); } return res({taken: true}); }) .catch((error) => { console.log('handler: checkEmail: error', error); res(boom.badImplementation(error)); }); }, resetPassword: (req, res) => { const email = req.payload.email.toLowerCase(); const APIKey = req.payload.APIKey; const salt = req.payload.salt; if (APIKey !== config.SMTP.APIKey) { return res(boom.forbidden('Wrong APIKey was used')); } return isEMailAlreadyTaken(email) .then((isTaken) => { console.log('resetPassword: email taken:', isTaken); if (!isTaken) { return res(boom.notFound('EMail adress is not taken.')); } const newPassword = require('crypto').randomBytes(9).toString('hex'); /* The password is hashed one time at the client site (inner hash and optional) and one time at server-side. As we currently only have one salt, it must be the same for slidewiki-platform and the user-service. In case this is splitted, the user-service must know both salts in order to be able to generate a valid password for resetPassword.*/ let hashedPassword = co.hashPassword(newPassword, config.SALT); if (salt && salt.length > 0) hashedPassword = co.hashPassword(co.hashPassword(newPassword, salt), config.SALT); console.log('resetPassword: email is in use thus we connect to the SMTP server'); let connectionPromise = util.sendEMail(email, 'Password reset on SlideWiki', 'Dear SlideWiki user, \nA request has been made to reset your password. 
Your new password is: ' + newPassword + ' Please login with this password and then go to My Settings>Accounts to change it. Passwords should have 8 characters or more. \nThanks, The SlideWiki team'); return connectionPromise .then((data) => { console.log('connectionPromise returned', data); //change password in the database const findQuery = { email: email }; const updateQuery = { $set: { password: hashedPassword } }; return userCtrl.partlyUpdate(findQuery, updateQuery) .then((result) => { console.log('handler: resetPassword:', result.result); if (result.result.ok === 1 && result.result.n === 1) { //success return res(data.message); } return res(boom.badImplementation()); }) .catch((error) => { res(boom.notFound('Update of user password failed', error)); }); }) .catch((error) => { console.log('Error:', error); return res(boom.badImplementation(error)); }); }); }, deleteUsergroup: (req, res) => { //first check if user is creator return usergroupCtrl.read(req.params.groupid) .then((document) => { if (document === undefined || document === null) { return res(boom.notFound()); } let creator = document.creator.userid || document.creator; if (creator !== req.auth.credentials.userid) { return res(boom.unauthorized()); } //now delete return usergroupCtrl.delete(req.params.groupid) .then((result) => { // console.log('deleteUsergroup: deleted', result.result); if (result.result.ok !== 1) { return res(boom.badImplementation()); } if (result.result.n !== 1) { return res(boom.notFound()); } if (document.members.length < 1) return res(); //notify users let promises = []; document.members.forEach((member) => { promises.push(notifiyUser({ id: creator, name: document.creator.username || 'Group leader' }, member.userid, 'left', document, true)); }); return Promise.all(promises).then(() => { return res(); }).catch((error) => { console.log('Error while processing notification of users:', error); //reply(boom.badImplementation()); //for now always succeed return res(); }); }); }) .catch((error) => { console.log('error while reading or deleting the usergroup '+req.params.groupid+':', error); res(boom.badImplementation(error)); }); }, createOrUpdateUsergroup: (req, res) => { const userid = req.auth.credentials.userid; let group = req.payload; group.creator = { userid: userid, username: req.auth.credentials.username }; let referenceDateTime = util.parseAPIParameter(req.payload.referenceDateTime) || (new Date()).toISOString(); delete group.referenceDateTime; group.description = util.parseAPIParameter(group.description); group.name = util.parseAPIParameter(group.name); group.timestamp = util.parseAPIParameter(group.timestamp); if (group.timestamp === undefined || group.timestamp === null || group.timestamp === '') group.timestamp = referenceDateTime; if (group.isActive !== false) group.isActive = true; if (group.members === undefined || group.members === null || group.members.length < 0) group.members = []; //add joined attribute if not given group.members = group.members.reduce((array, user) => { if (user.joined === undefined || user.joined === '') user.joined = referenceDateTime; array.push(user); return array; }, []); if (group.id === undefined || group.id === null) { //create console.log('create group', group.name); return usergroupCtrl.create(group) .then((result) => { // console.log('createOrUpdateUsergroup: created group', result.result || result); if (result[0] !== undefined && result[0] !== null) { //Error return res(boom.badData('Wrong data: ', co.parseAjvValidationErrors(result))); } if 
(result.insertedCount === 1) { //success group.id = result.insertedId; if (group.members.length < 1) return res(group); //notify users console.log('Notify '+group.members.length+' users...'); let promises = []; group.members.forEach((member) => { promises.push(notifiyUser({ id: group.creator.userid || group.creator, name: group.creator.username || 'Group leader' }, member.userid, 'joined', group, true)); }); return Promise.all(promises).then(() => { return res(group); }).catch((error) => { console.log('Error while processing notification of users:', error); //reply(boom.badImplementation()); //for now always succeed return res(group); }); } res(boom.badImplementation()); }) .catch((error) => { console.log('Error while creating group:', error, group); res(boom.badImplementation(error)); }); } else if (group.id < 1) { //error return res(boom.badData()); } else { //update console.log('update group', group.id); //first check if user is creator return usergroupCtrl.read(group.id) .then((document) => { if (document === undefined || document === null) { return res(boom.notFound()); } let dCreator = document.creator.userid || document.creator; if (dCreator !== group.creator.userid) { return res(boom.unauthorized()); } //some attribute should be unchangeable group.timestamp = document.timestamp; group._id = document._id; return usergroupCtrl.update(group) .then((result) => { // console.log('createOrUpdateUsergroup: updated group', result.result || result); if (result[0] !== undefined && result[0] !== null) { //Error return res(boom.badData('Wrong data: ', co.parseAjvValidationErrors(result))); } if (result.result.ok === 1) { if (group.members.length < 1 && document.members.length < 1) return res(group); //notify users let wasUserAMember = (userid) => { let result = false; document.members.forEach((member) => { if (member.userid === userid) result = true; }); return result; }; let wasUserDeleted = (userid) => { let result = true; group.members.forEach((member) => { if (member.userid === userid) result = false; }); return result; }; let promises = []; group.members.forEach((member) => { if (!wasUserAMember(member.userid)) promises.push(notifiyUser({ id: group.creator.userid, name: group.creator.username || 'Group leader' }, member.userid, 'joined', group, true)); }); document.members.forEach((member) => { if (wasUserDeleted(member.userid)) promises.push(notifiyUser({ id: dCreator, name: document.creator.username || 'Group leader' }, member.userid, 'left', document, true)); }); console.log('Notify '+promises.length+' users...'); return Promise.all(promises).then(() => { return res(group); }).catch((error) => { console.log('Error while processing notification of users:', error); //reply(boom.badImplementation()); //for now always succeed return res(group); }); } console.log('Failed updating group '+group._id+' and got result:', result.result, group); return res(boom.badImplementation()); }); }) .catch((error) => { console.log('Error while reading group '+group.id+':', error); res(boom.badImplementation(error)); }); } }, getUsergroups: (req, res) => { if (req.payload === undefined || req.payload.length < 1) return res(boom.badData()); let selectors = req.payload.reduce((q, element) => { q.push({_id: element}); return q; }, []); let query = { $or: selectors }; console.log('getUsergroups:', query); return usergroupCtrl.find(query) .then((cursor) => cursor.toArray()) .then((array) => { if (array === undefined || array === null || array.length < 1) { return res([]); } let enrichedGroups_promises = 
array.reduce((prev, curr) => { prev.push(enrichGroupMembers(curr)); return prev; }, []); return Promise.all(enrichedGroups_promises) .then((enrichedGroups) => { return res(enrichedGroups); }); }) .catch((error) => { console.log('Error while reading groups:', error); res(boom.badImplementation(error)); }); }, leaveUsergroup: (req, res) => { return usergroupCtrl.partlyUpdate({ _id: req.params.groupid }, { $pull: { members: { userid: req.auth.credentials.userid } } }). then((result) => { console.log('leaveUsergroup: ', result.result); if (result.result.ok !== 1) return res(boom.notFound()); if (result.result.nModified !== 1) return res(boom.unauthorized()); return res(); }); }, // getUserdata: (req, res) => { return usergroupCtrl.readGroupsOfUser(req.auth.credentials.userid) .then((array) => { if (array === undefined || array === null) return res(boom.notFound()); return res({ id: req.auth.credentials.userid, username: req.auth.credentials.username, groups: array }); }) .catch((error) => { console.log('getUserdata('+req.auth.credentials.userid+') error:', error); res(boom.notFound('Wrong user id', error)); }); }, getUsers: (req, res) => { if (req.payload === undefined || req.payload.length < 1) return res(boom.badData()); // keep initial result for static users let staticUsers = userCtrl.findStaticUsersByIds(req.payload); let selectors = req.payload.reduce((q, element) => { q.push({_id: element}); return q; }, []); let query = { $or: selectors }; console.log('getUsers:', query); return userCtrl.find(query) .then((cursor) => cursor.toArray()) .then((array) => { if (array === undefined || array === null || array.length < 1) { return res(staticUsers); } let publicUsers = array.reduce((array, user) => { array.push(preparePublicUserData(user)); return array; }, staticUsers); return res(publicUsers); }); }, getReviewableUsers: (req, res) => { let query = { authorised: { $not: { $eq: false } }, deactivated: { $not: { $eq: true } }, reviewed: { $not: { $eq: true } } }; return userCtrl.find(query) .then((cursor) => cursor.project({_id: 1, registered: 1, username: 1})) .then((cursor2) => cursor2.toArray()) .then((array) => { if (array.length < 1) return res([]); // console.log('filter users', array.length); let startTime = (new Date('2017-07-19')).getTime(); let userids = array.reduce((arr, curr) => { if ((new Date(curr.registered)).getTime() > startTime) arr.push(curr._id); return arr; }, []); if (userids.length < 1) return res([]); //now call service const options = { url: require('../configs/microservices').deck.uri + '/deckOwners?user=' + userids.reduce((a, b) => {let r = a === '' ? b : a + ',' + b; return r;}, ''), method: 'GET', json: true, body: { userids: userids } }; function callback(error, response, body) { // console.log('getReviewableUsers: ', error, response.statusCode, body); if (!error && (response.statusCode === 200)) { let result = body.reduce((arr, curr) => { curr.decks = curr.decksCount; curr.userid = curr._id; curr.username = array.find((u) => {return u._id === curr.userid;}).username; arr.push(curr); return arr; }, []); return res(result); } else { console.log('Error', (response) ? 
response.statusCode : undefined, error, body); return res([]); } } // console.log('now calling the service'); if (process.env.NODE_ENV === 'test') { callback(null, {statusCode: 200}, userids.reduce((arr, curr) => {arr.push({_id: curr, decksCount: 3}); return arr;}, [])); } else request(options, callback); }) .catch((error) => { console.log('Error', error); res([]); }); }, suspendUser: (req, res) => { return reviewUser(req, res, true); }, approveUser: (req, res) => { return reviewUser(req, res, false); }, getNextReviewableUser: (req, res) => { let secret = (req.query !== undefined && req.query.secret !== undefined) ? req.query.secret : undefined; if (secret === undefined || secret !== process.env.SECRET_REVIEW_KEY || !req.auth.credentials.isReviewer) return res(boom.unauthorized()); console.log('getNextReviewableUser'); return queueAPI.get() .then((user) => { console.log('got user', user); return res() .redirect(PLATFORM_INFORMATION_URL + '/Sfn87Pfew9Af09aM/user/' + user.username) .temporary(true); }) .catch((error) => { console.log('Error', error); res(boom.badImplementation()); }); }, addToQueue: (req, res) => { let secret = (req.query !== undefined && req.query.secret !== undefined) ? req.query.secret : undefined; if (secret === undefined || secret !== process.env.SECRET_REVIEW_KEY || !req.auth.credentials.isReviewer) return res(boom.unauthorized()); const reviewerid = req.auth.credentials.userid; const userid = req.params.id; return userCtrl.read(userid) .then((user) => { if (!user) return res(boom.notFound()); if (user.deactivated || user.authorised === false) return res(boom.locked()); if (user.reviewed || user.suspended) return res(boom.forbidden()); return queueAPI.getAll() .then((users) => { if (users.findIndex((u) => {return u.userid === user._id;}) !== -1) return res();//user is already in queue let queueUser = queueAPI.getEmptyElement(); queueUser.userid = user._id; queueUser.username = user.username; queueUser.decks = req.query.decks || 0; queueUser.addedByReviewer = reviewerid; return queueAPI.add(queueUser) .then((success) => { success ? res() : res(boom.badImplementation()); return; }) .catch((error) => { console.log('Error', error); res(boom.badImplementation(error)); }); }) .catch((error) => { console.log('Error', error); res(boom.badImplementation(error)); }); }) .catch((error) => { console.log('Error', error); res(boom.badImplementation(error)); }); } }; function reviewUser(req, res, suspended) { let secret = (req.query !== undefined && req.query.secret !== undefined) ? req.query.secret : undefined; if (secret === undefined || secret !== process.env.SECRET_REVIEW_KEY || !req.auth.credentials.isReviewer) return res(boom.unauthorized()); const reviewerid = req.auth.credentials.userid; const userid = req.params.id; let query = { _id: userid, authorised: { $not: { $eq: false } }, deactivated: { $not: { $eq: true } }, reviewed: { $not: { $eq: true } } }; let update = { $set: { reviewed: true, suspended: suspended, lastReviewDoneBy: reviewerid } }; return userCtrl.partlyUpdate(query, update) .then((result) => { if (result.result.ok === 1 && result.result.n === 1) { //found user and got updated if (!suspended) return res(); //now archive all the decks of the user const options = { url: require('../configs/microservices').deck.uri + '/alldecks/'+userid, method: 'GET', json: true }; function callback(error, response, body) { console.log('alldecks: ', (response) ? 
response.statusCode : undefined, error, body); if (!error && (response.statusCode === 200)) { //now archive all decks (one request per deck) let promises = body.reduce((arr, curr) => { arr.push(archiveDeck(userid, curr._id)); return arr; }, []); return Promise.all(promises) .then(() => { return res(); }) .catch((error) => { console.log('Error', error); return res(); }); } else { console.log('Error', (response) ? response.statusCode : undefined, error, body); return res(); } } if (process.env.NODE_ENV === 'test') { callback(null, {statusCode: 200}, []); } else request(options, callback); } else return res(boom.notFound()); }) .catch((error) => { console.log('Error', error); res(boom.badImplementation()); }); } function archiveDeck(userid, deckid) { let myPromise = new Promise((resolve, reject) => { const options = { url: require('../configs/microservices').deck.uri + '/decktree/'+deckid+'/archive', method: 'POST', json: true, body: { user: userid } }; function callback(error, response, body) { console.log('archiveDeck: ', (response) ? response.statusCode : undefined, error, body); if (!error && (response.statusCode === 200)) { resolve(); } else { console.log('Error', (response) ? response.statusCode : undefined, error, body); return reject(error); } } if (process.env.NODE_ENV === 'test') { callback(null, {statusCode: 200}, null); } else request(options, callback); }); return myPromise; } function isUsernameAlreadyTaken(username) { let myPromise = new Promise((resolve, reject) => { return userCtrl.find({ username: username }) .then((cursor) => cursor.count()) .then((count) => { console.log('isUsernameAlreadyTaken: cursor.count():', count); if (count > 0) { resolve(true); } else { resolve(false); } }) .catch((error) => { reject(error); }); }); return myPromise; } function isEMailAlreadyTaken(email) { let myPromise = new Promise((resolve, reject) => { return userCtrl.find({ email: email }) .then((cursor) => cursor.count()) .then((count) => { console.log('isEMailAlreadyTaken: cursor.count():', count); if (count > 0) { resolve(true); } else { resolve(false); } }) .catch((error) => { reject(error); }); }); return myPromise; } //Remove attributes of the user data object which should not be transmitted function prepareDetailedUserData(user) { const hiddenKeys = ['password']; let minimizedUser = {}; let key; for (key in user) { const found = hiddenKeys.find((hiddenKey) => { if (key === hiddenKey) return true; return false; }); if (found === undefined) { minimizedUser[key] = user[key]; } } //map attributes for better API minimizedUser.language = minimizedUser.frontendLanguage; minimizedUser.frontendLanguage = undefined; //add data for social provider stuff minimizedUser.hasPassword = true; if (user.password === undefined || user.password === null || user.password === '') minimizedUser.hasPassword = false; minimizedUser.providers = (user.providers || []).reduce((prev, cur) => { if (prev.indexOf(cur.provider) === -1) { prev.push(cur.provider); return prev; } return prev; }, []); return minimizedUser; } //Remove attributes of the user data object which should not be transmitted for the user profile function preparePublicUserData(user) { const shownKeys = ['_id', 'username', 'organization', 'picture', 'description', 'country']; let minimizedUser = {}; let key; for (key in user) { const found = shownKeys.find((shownkey) => { if (key === shownkey) return true; return false; }); if (found !== undefined) { minimizedUser[key] = user[key]; } } return minimizedUser; } function notifiyUser(actor, receiver, type, 
group, isActiveAction = false) { let promise = new Promise((resolve, reject) => { let message = actor.name + ': Has ' + type + ' the group ' + group.name; if (isActiveAction) message = 'You ' + type + ' the group ' + group.name; const options = { url: require('../configs/microservices').activities.uri + '/activity/new', method: 'POST', json: true, body: { activity_type: type, user_id: actor.id.toString(), content_id: group._id.toString(), content_kind: 'group', content_name: message, content_owner_id: receiver.toString() } }; function callback(error, response, body) { // console.log('notifiyUser: ', error, response.statusCode, body); if (!error && (response.statusCode === 200)) { return resolve(body); } else { return reject(error); } } request(options, callback); }); return promise; } //Uses userids of creator and members in order to add username and picture function enrichGroupMembers(group) { let userids = group.members.reduce((prev, curr) => { prev.push(curr.userid); return prev; }, []); userids.push(group.creator.userid); console.log('enrichGroupMembers: group, userids', group, userids); let query = { _id: { $in: userids } }; return userCtrl.find(query) .then((cursor) => cursor.project({_id: 1, username: 1, picture: 1, country: 1, organization: 1})) .then((cursor2) => cursor2.toArray()) .then((array) => { array = array.reduce((prev, curr) => { if (curr._id) { curr.userid = curr._id; delete curr._id; } prev.push(curr); return prev; }, []); let creator = array.filter((user) => { return user.userid === group.creator.userid; }); let members = array.filter((user) => { return user.userid !== group.creator.userid; }); console.log('enrichGroupMembers: got creator and users (amount)', {id: creator[0]._id, name: creator[0].username, email: creator[0].email}, members.concat(group.members).length); //add joined attribute to members members = (members.concat(group.members)).reduce((prev, curr) => { if (prev[curr.userid] === undefined) prev[curr.userid] = {}; if (curr.joined === undefined) { prev[curr.userid].userid = curr.userid; prev[curr.userid].username = curr.username; prev[curr.userid].picture = curr.picture; prev[curr.userid].country = curr.country; prev[curr.userid].organization = curr.organization; } else prev[curr.userid].joined = curr.joined; return prev; }, {}); members = Object.keys(members).map((key) => { return members[key]; }).filter((member) => {return member.joined && member.userid && member.username;}); group.creator = creator[0]; group.members = members; console.log('enrichGroupMembers: got new members (after reading from database, adding joined attribute and cleanup), amount:', members.length); return group; }); }
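The resetPassword handler above must reproduce the platform's optional client-side hashing step on the server, otherwise the plaintext password mailed to the user would never match the stored hash at login. A condensed sketch of that double-hash rule follows; hashPassword stands in for co.hashPassword, and both salt parameter names are invented for illustration:

const crypto = require('crypto');

// Stand-in for co.hashPassword; the real implementation may differ.
function hashPassword(value, salt) {
    return crypto.createHmac('sha512', salt).update(value).digest('hex');
}

// serverSalt corresponds to config.SALT; clientSalt is the optional salt the
// platform applies in the browser. Both parameter names are assumptions.
function hashForStorage(plainPassword, clientSalt, serverSalt) {
    // The server-side hash is always applied.
    let hashed = hashPassword(plainPassword, serverSalt);
    // If the client normally pre-hashes before sending, reproduce that inner
    // hash first so the stored value matches what a later login will compute.
    if (clientSalt && clientSalt.length > 0) {
        hashed = hashPassword(hashPassword(plainPassword, clientSalt), serverSalt);
    }
    return hashed;
}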
application/controllers/handler.js
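enrichGroupMembers in the file above merges freshly loaded user records with the stored membership entries by reducing both lists into a plain object keyed by userid. The same merge, restated with a Map as an illustrative sketch; the helper name is invented, the field names follow the handler:

// Illustrative restatement of the member merge in enrichGroupMembers.
// Each userid collects profile fields from the database record and the
// joined date from the stored group document; entries missing a joined
// date, a userid or a username are dropped, as in the original filter.
function mergeMembers(dbUsers, storedMembers) {
    const byId = new Map();
    dbUsers.concat(storedMembers).forEach((entry) => {
        const merged = byId.get(entry.userid) || {};
        Object.assign(merged, entry);
        byId.set(entry.userid, merged);
    });
    return Array.from(byId.values())
        .filter((m) => m.joined && m.userid && m.username);
}

// mergeMembers([{userid: 7, username: 'ada', picture: ''}],
//              [{userid: 7, joined: '2017-01-01T00:00:00.000Z'}])
// -> [{userid: 7, username: 'ada', picture: '', joined: '2017-01-01T00:00:00.000Z'}]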
/* Handles the requests by executing stuff and replying to the client. Uses promises to get stuff done. */ 'use strict'; const boom = require('boom'), //Boom gives us some predefined http codes and proper responses co = require('../common'), userCtrl = require('../database/user'), usergroupCtrl = require('../database/usergroup'), config = require('../configuration'), jwt = require('./jwt'), Joi = require('joi'), util = require('./util'), request = require('request'), PLATFORM_INFORMATION_URL = require('../configs/microservices').platform.uri + '', queueAPI = require('../queue/api.js'); module.exports = { register: (req, res) => { let user = { surname: util.parseAPIParameter(req.payload.surname), forename: util.parseAPIParameter(req.payload.forename), username: util.parseAPIParameter(req.payload.username).replace(/\s/g,''), email: util.parseAPIParameter(req.payload.email).toLowerCase(), password: co.hashPassword(util.parseAPIParameter(req.payload.password), config.SALT), frontendLanguage: util.parseAPIParameter(req.payload.language), country: '', picture: '', description: '', organization: util.parseAPIParameter(req.payload.organization), registered: (new Date()).toISOString(), providers: [], activate_secret: require('crypto').randomBytes(64).toString('hex'), authorised: false }; //check if username already exists return util.isIdentityAssigned(user.email, user.username) .then((result) => { console.log('identity already taken: ', user.email, user.username, result); if (result.assigned === false) { //Send email before creating the user return util.sendEMail(user.email, 'Your new account on SlideWiki', 'Dear '+user.forename+' '+user.surname+',\nHappy welcome to SlideWiki! You have registered your account with the username '+user.username+'. In order to activate your account please use the following link: <a href="https://'+req.info.host+'/user/activate/'+user.email+'/'+user.activate_secret+'">Activate Account</a>\nGreetings,\nThe SlideWiki team') .then(() => { return userCtrl.create(user) .then((result) => { // console.log('register: user create result: ', result); if (result[0] !== undefined && result[0] !== null) { //Error return res(boom.badData('registration failed because data is wrong: ', co.parseAjvValidationErrors(result))); } if (result.insertedCount === 1) { //success return res({ userid: result.insertedId, secret: user.activate_secret }); } res(boom.badImplementation()); }) .catch((error) => { console.log('Error on creating a user:', error); res(boom.badImplementation('Error', error)); }); }) .catch((error) => { console.log('Error sending the email:', error); return res(boom.badImplementation('Error', error)); }); } else { let message = 'The username and email is already taken'; if (result.email === false) message = 'The username is already taken'; if (result.username === false) message = 'The email is already taken'; return res(boom.conflict(message)); } }) .catch((error) => { delete user.password; console.log('Error:', error, 'with user:', user); res(boom.badImplementation('Error', error)); }); }, activateUser: (req, res) => { const email = util.parseAPIParameter(req.params.email), secret = util.parseAPIParameter(req.params.secret); const query = { email: email, activate_secret: secret, authorised: false }; console.log('trying to activate ', email); return userCtrl.partlyUpdate(query, { $set: { authorised: true } }) .then((result) => { // console.log(result.result); if (result.result.ok === 1 && result.result.n === 1) { //success return res() .redirect(PLATFORM_INFORMATION_URL) 
.temporary(true); } return res(boom.forbidden('Wrong credentials were used')); }) .catch((error) => { console.log('Error:', error); return res(boom.badImplementation()); }); }, login: (req, res) => { const query = { email: req.payload.email.toLowerCase(), password: co.hashPassword(decodeURI(req.payload.password), config.SALT) }; console.log('try logging in with email', query.email); return userCtrl.find(query) .then((cursor) => cursor.toArray()) .then((result) => { switch (result.length) { case 0: res(boom.notFound('The credentials are wrong', '{"email":"", "password": ""}')); break; case 1: console.log('login: user object:', result[0]._id, result[0].username, result[0].registered); //TODO: call authorization service for OAuth2 token if (result[0].deactivated === true) { res(boom.locked('This user is deactivated.')); break; } //check if authorised if (result[0].authorised === false) { res(boom.locked('User is not authorised yet.')); break; } //check if SPAM if (result[0].suspended === true) { res(boom.forbidden('The user is marked as SPAM.')); break; } res({ userid: result[0]._id, username: result[0].username, picture: result[0].picture, access_token: 'dummy', expires_in: 0 }) .header(config.JWT.HEADER, jwt.createToken({ userid: result[0]._id, username: result[0].username, isReviewer: result[0].isReviewer })); break; default: res(boom.badImplementation('Found multiple users')); break; } }) .catch((error) => { console.log('Error: ', error); res(boom.badImplementation(error)); }); }, getUser: (req, res) => { //check if the request comes from the right user (have the right JWT data) const isUseridMatching = util.isJWTValidForTheGivenUserId(req); if (!isUseridMatching) { return res(boom.forbidden('You cannot get detailed information about another user')); } return userCtrl.read(util.parseStringToInteger(req.params.id)) .then((user) => { //console.log('getUser: got user:', user); if (user !== undefined && user !== null && user.username !== undefined) { if (user.deactivated === true) { return res(boom.locked('This user is deactivated.')); } //get groups of a user return usergroupCtrl.readGroupsOfUser(req.params.id) .then((array) => { user.groups = array; return res(prepareDetailedUserData(user)); }); } else { return res(boom.notFound()); } }) .catch((error) => { console.log('Error while getting user with id '+req.params.id+':', error); res(boom.notFound('Wrong user id', error)); }); }, //add attribute "deactivated" to user document deleteUser: (req, res) => { let userid = util.parseStringToInteger(req.params.id); //check if the user which should be deleted have the right JWT data const isUseridMatching = util.isJWTValidForTheGivenUserId(req); if (!isUseridMatching) { return res(boom.forbidden('You cannot delete another user')); } const findQuery = { _id: userid }; const updateQuery = { $set: { deactivated: true } }; return userCtrl.partlyUpdate(findQuery, updateQuery) .then((result) => { // console.log('deleteUser: delete with', userid, 'results in', result.result); if (result.result.ok === 1 && result.result.n === 1) { //success return res(); } res(boom.notFound('Deletion failed - no matched id')); }) .catch((error) => { console.log('Error while deleting user with id '+userid+':', error); return res(boom.badImplementation('Deletion failed', error)); }); }, //User profile updateUserPasswd: (req, res) => { let oldPassword = co.hashPassword(req.payload.oldPassword, config.SALT); let newPassword = co.hashPassword(req.payload.newPassword, config.SALT); const user__id = 
util.parseStringToInteger(req.params.id); //check if the user which should be updated have the right JWT data const isUseridMatching = util.isJWTValidForTheGivenUserId(req); if (!isUseridMatching) { return res(boom.forbidden('You cannot change the password of another user')); } //check if old password is correct return userCtrl.find({ _id: user__id, password: oldPassword }) .then((cursor) => cursor.count()) .then((count) => { switch (count) { case 0: res(boom.notFound('There is no user with this Id and password')); break; case 1: const findQuery = { _id: user__id, password: oldPassword }, updateQuery = { $set: { password: newPassword } }; return userCtrl.partlyUpdate(findQuery, updateQuery) .then((result) => { console.log('handler: updateUserPasswd:', result.result); if (result.result.ok === 1 && result.result.n === 1) { //success return res(); } res(boom.badImplementation()); }) .catch((error) => { console.log('Error while updating password of user with id '+user__id+':', error); res(boom.badImplementation('Update password failed', error)); }); break; default: //should not happen console.log('BIG PROBLEM: multiple users in the database have the same id and password!'); res(boom.badImplementation('Found multiple users')); break; } }) .catch((error) => { console.log('Error while updating password of user with id '+user__id+':', error); return res(boom.badImplementation('Update password failed', error)); }); }, updateUserProfile: (req, res) => { let email = req.payload.email.toLowerCase(); let user = req.payload; user.email = email; user._id = util.parseStringToInteger(req.params.id); //check if the user which should be updated have the right JWT data const isUseridMatching = util.isJWTValidForTheGivenUserId(req); if (!isUseridMatching) { return res(boom.forbidden('You cannot change the user profile of another user')); } let updateCall = function() { const findQuery = { _id: user._id }, updateQuery = { $set: { email: email, username: util.parseAPIParameter(req.payload.username), surname: util.parseAPIParameter(req.payload.surname), forename: util.parseAPIParameter(req.payload.forename), frontendLanguage: util.parseAPIParameter(req.payload.language), country: util.parseAPIParameter(req.payload.country), picture: util.parseAPIParameter(req.payload.picture), description: util.parseAPIParameter(req.payload.description), organization: util.parseAPIParameter(req.payload.organization) } }; return userCtrl.partlyUpdate(findQuery, updateQuery) .then((result) => { if (result.result.ok === 1 && result.result.n === 1) { //success return res(); } console.log('Update query failed with query and result:', updateQuery, result.result); return res(boom.badImplementation()); }) .catch((error) => { console.log('Update query failed with query and error:', updateQuery, error); return res(boom.notFound('Profile update failed', error)); }); }; //find user and check if username has changed return userCtrl.find({_id: user._id}) .then((cursor) => cursor.project({username: 1, email: 1})) .then((cursor2) => cursor2.next()) .then((document) => { // console.log('handler: updateUserProfile: got user as document', document); if (document === null) return res(boom.notFound('No user with the given id')); const oldUsername = document.username, oldEMail = document.email; if (decodeURI(req.payload.username).toLowerCase() !== oldUsername.toLowerCase()) { return res(boom.notAcceptable('It is impossible to change the username!')); } if (email === oldEMail) { return updateCall(); } else { //check if email already exists return 
isEMailAlreadyTaken(email) .then((isTaken) => { if (isTaken === false) { return updateCall(); } else { return res(boom.conflict('The email is already taken')); } }); } }) .catch((error1) => { console.log('handler: updateUserProfile: Error while getting user', error1, 'the user is:', user); const error = boom.badImplementation('Unknown error'); error.output.payload.custom = error1; return res(error); }); }, getPublicUser: (req, res) => { let identifier = decodeURI(req.params.identifier).replace(/\s/g,''); let query = {}; //validate identifier if its an integer or a username const integerSchema = Joi.number().integer(); const validationResult = Joi.validate(identifier, integerSchema); if (validationResult.error === null) { query._id = validationResult.value; } else { // console.log('no integer try reading as username'); let schema = Joi.string().regex(/^[\w\-.~]*$/); let valid = Joi.validate(identifier, schema); if (valid.error === null) { query.username = valid.value; } else { console.log('username is invalid:', identifier, valid.error); return res(boom.notFound()); } } // check for static user first let staticUser = userCtrl.findStaticUser(query); if (staticUser) { return res(preparePublicUserData(staticUser)); } //if no static user and username is given then use regex case insensitive if (query.username) query.username = new RegExp('^' + query.username + '$', 'i'); console.log(query); return userCtrl.find(query) .then((cursor) => cursor.toArray()) .then((array) => { console.log('handler: getPublicUser: ', query, array); if (array.length === 0) return res(boom.notFound()); if (array.length > 1) return res(boom.badImplementation()); if (array[0].deactivated === true) { return res(boom.locked('This user is deactivated.')); } //check if authorised if (array[0].authorised === false) { return res(boom.locked('User is not authorised yet.')); } //check if SPAM if (array[0].suspended === true) { return res(boom.forbidden('The user is marked as SPAM.')); } res(preparePublicUserData(array[0])); }) .catch((error) => { console.log('handler: getPublicUser: Error', error); res(boom.notFound('Wrong user identifier?', error)); }); }, checkUsername: (req, res) => { // console.log(req.params); let username = decodeURI(req.params.username).replace(/\s/g,''); let schema = Joi.string().regex(/^[\w\-.~]*$/); let valid = Joi.validate(username, schema); if (valid.error === null) { username = valid.value; } else { console.log('username is invalid:', username, valid.error); return res({taken: true, alsoTaken: []}); } return userCtrl.find({ username: new RegExp('^' + username + '$', 'i') }) .then((cursor) => cursor.count()) .then((count) => { //console.log('checkUsername: username:', username, ' cursor.count():', count); // init this here because we may have to include a static user name let staticUserNames = []; if (count === 0) { // also check if it's in static users let staticUser = userCtrl.findStaticUserByName(username); if (staticUser) { staticUserNames.push(staticUser.username); } else { // not found as before return res({taken: false, alsoTaken: []}); } } const query = { username: new RegExp(username + '*', 'i') }; return userCtrl.find(query) .then((cursor1) => cursor1.project({username: 1})) .then((cursor2) => cursor2.limit(40)) .then((cursor3) => cursor3.toArray()) .then((array) => { //console.log('handler: checkUsername: similar usernames', array); let alreadyTaken = array.reduce((prev, curr) => { prev.push(curr.username); return prev; }, staticUserNames); return res({taken: true, alsoTaken: 
alreadyTaken}); }); }) .catch((error) => { console.log('handler: checkUsername: error', error); res(boom.badImplementation(error)); }); }, searchUser: (req, res) => { let username = decodeURI(req.params.username).replace(/\s/g,''); let schema = Joi.string().regex(/^[\w\-.~]*$/); let valid = Joi.validate(username, schema); if (valid.error === null) { username = valid.value + '+'; } else { if (username === '') { //search random users - a* matches everyone username = 'a*'; } else { console.log('username is invalid:', username, valid.error); return res({success: false, results: []}); } } const query = { username: new RegExp(username, 'i'), deactivated: { $not: { $eq: true } }, suspended: { $not: { $eq: true } }, authorised: { $not: { $eq: false } } }; if (username === undefined || username === null || username === '') { query.username = new RegExp('\w*', 'i'); } console.log('query:', query); return userCtrl.find(query) .then((cursor1) => cursor1.project({username: 1, _id: 1, picture: 1, country: 1, organization: 1})) .then((cursor2) => cursor2.limit(8)) .then((cursor3) => cursor3.toArray()) .then((array) => { // console.log('handler: searchUser: similar usernames', array); let data = array.reduce((prev, curr) => { let description = curr.username; if (curr.organization) description = description + ', ' + curr.organization; if (curr.country) description = description + ' (' + curr.country + ')'; prev.push({ name: description, value: encodeURIComponent(JSON.stringify({ userid: curr._id, picture: curr.picture, country: curr.country, organization: curr.organization, username: curr.username })) }); return prev; }, []); return res({success: true, results: data}); }) .catch((error) => { console.log('handler: searchUser: error', error, 'with query:', query); res({success: false, results: []}); }); }, checkEmail: (req, res) => { const email = decodeURI(req.params.email).replace(/\s/g,'').toLowerCase(); return userCtrl.find({ email: email }) .then((cursor) => cursor.count()) .then((count) => { //console.log('checkEmail: username:', username, ' cursor.count():', count); if (count === 0) { return res({taken: false}); } return res({taken: true}); }) .catch((error) => { console.log('handler: checkEmail: error', error); res(boom.badImplementation(error)); }); }, resetPassword: (req, res) => { const email = req.payload.email.toLowerCase(); const APIKey = req.payload.APIKey; const salt = req.payload.salt; if (APIKey !== config.SMTP.APIKey) { return res(boom.forbidden('Wrong APIKey was used')); } return isEMailAlreadyTaken(email) .then((isTaken) => { console.log('resetPassword: email taken:', isTaken); if (!isTaken) { return res(boom.notFound('EMail adress is not taken.')); } const newPassword = require('crypto').randomBytes(9).toString('hex'); /* The password is hashed one time at the client site (inner hash and optional) and one time at server-side. As we currently only have one salt, it must be the same for slidewiki-platform and the user-service. In case this is splitted, the user-service must know both salts in order to be able to generate a valid password for resetPassword.*/ let hashedPassword = co.hashPassword(newPassword, config.SALT); if (salt && salt.length > 0) hashedPassword = co.hashPassword(co.hashPassword(newPassword, salt), config.SALT); console.log('resetPassword: email is in use thus we connect to the SMTP server'); let connectionPromise = util.sendEMail(email, 'Password reset on SlideWiki', 'Dear SlideWiki user, \nA request has been made to reset your password. 
Your new password is: ' + newPassword + ' Please login with this password and then go to My Settings>Accounts to change it. Passwords should have 8 characters or more. \nThanks, The SlideWiki team'); return connectionPromise .then((data) => { console.log('connectionPromise returned', data); //change password in the database const findQuery = { email: email }; const updateQuery = { $set: { password: hashedPassword } }; return userCtrl.partlyUpdate(findQuery, updateQuery) .then((result) => { console.log('handler: resetPassword:', result.result); if (result.result.ok === 1 && result.result.n === 1) { //success return res(data.message); } return res(boom.badImplementation()); }) .catch((error) => { res(boom.notFound('Update of user password failed', error)); }); }) .catch((error) => { console.log('Error:', error); return res(boom.badImplementation(error)); }); }); }, deleteUsergroup: (req, res) => { //first check if user is creator return usergroupCtrl.read(req.params.groupid) .then((document) => { if (document === undefined || document === null) { return res(boom.notFound()); } let creator = document.creator.userid || document.creator; if (creator !== req.auth.credentials.userid) { return res(boom.unauthorized()); } //now delete return usergroupCtrl.delete(req.params.groupid) .then((result) => { // console.log('deleteUsergroup: deleted', result.result); if (result.result.ok !== 1) { return res(boom.badImplementation()); } if (result.result.n !== 1) { return res(boom.notFound()); } if (document.members.length < 1) return res(); //notify users let promises = []; document.members.forEach((member) => { promises.push(notifiyUser({ id: creator, name: document.creator.username || 'Group leader' }, member.userid, 'left', document, true)); }); return Promise.all(promises).then(() => { return res(); }).catch((error) => { console.log('Error while processing notification of users:', error); //reply(boom.badImplementation()); //for now always succeed return res(); }); }); }) .catch((error) => { console.log('error while reading or deleting the usergroup '+req.params.groupid+':', error); res(boom.badImplementation(error)); }); }, createOrUpdateUsergroup: (req, res) => { const userid = req.auth.credentials.userid; let group = req.payload; group.creator = { userid: userid, username: req.auth.credentials.username }; let referenceDateTime = util.parseAPIParameter(req.payload.referenceDateTime) || (new Date()).toISOString(); delete group.referenceDateTime; group.description = util.parseAPIParameter(group.description); group.name = util.parseAPIParameter(group.name); group.timestamp = util.parseAPIParameter(group.timestamp); if (group.timestamp === undefined || group.timestamp === null || group.timestamp === '') group.timestamp = referenceDateTime; if (group.isActive !== false) group.isActive = true; if (group.members === undefined || group.members === null || group.members.length < 0) group.members = []; //add joined attribute if not given group.members = group.members.reduce((array, user) => { if (user.joined === undefined || user.joined === '') user.joined = referenceDateTime; array.push(user); return array; }, []); if (group.id === undefined || group.id === null) { //create console.log('create group', group.name); return usergroupCtrl.create(group) .then((result) => { // console.log('createOrUpdateUsergroup: created group', result.result || result); if (result[0] !== undefined && result[0] !== null) { //Error return res(boom.badData('Wrong data: ', co.parseAjvValidationErrors(result))); } if 
(result.insertedCount === 1) { //success group.id = result.insertedId; if (group.members.length < 1) return res(group); //notify users console.log('Notify '+group.members.length+' users...'); let promises = []; group.members.forEach((member) => { promises.push(notifiyUser({ id: group.creator.userid || group.creator, name: group.creator.username || 'Group leader' }, member.userid, 'joined', group, true)); }); return Promise.all(promises).then(() => { return res(group); }).catch((error) => { console.log('Error while processing notification of users:', error); //reply(boom.badImplementation()); //for now always succeed return res(group); }); } res(boom.badImplementation()); }) .catch((error) => { console.log('Error while creating group:', error, group); res(boom.badImplementation(error)); }); } else if (group.id < 1) { //error return res(boom.badData()); } else { //update console.log('update group', group.id); //first check if user is creator return usergroupCtrl.read(group.id) .then((document) => { if (document === undefined || document === null) { return res(boom.notFound()); } let dCreator = document.creator.userid || document.creator; if (dCreator !== group.creator.userid) { return res(boom.unauthorized()); } //some attribute should be unchangeable group.timestamp = document.timestamp; group._id = document._id; return usergroupCtrl.update(group) .then((result) => { // console.log('createOrUpdateUsergroup: updated group', result.result || result); if (result[0] !== undefined && result[0] !== null) { //Error return res(boom.badData('Wrong data: ', co.parseAjvValidationErrors(result))); } if (result.result.ok === 1) { if (group.members.length < 1 && document.members.length < 1) return res(group); //notify users let wasUserAMember = (userid) => { let result = false; document.members.forEach((member) => { if (member.userid === userid) result = true; }); return result; }; let wasUserDeleted = (userid) => { let result = true; group.members.forEach((member) => { if (member.userid === userid) result = false; }); return result; }; let promises = []; group.members.forEach((member) => { if (!wasUserAMember(member.userid)) promises.push(notifiyUser({ id: group.creator.userid, name: group.creator.username || 'Group leader' }, member.userid, 'joined', group, true)); }); document.members.forEach((member) => { if (wasUserDeleted(member.userid)) promises.push(notifiyUser({ id: dCreator, name: document.creator.username || 'Group leader' }, member.userid, 'left', document, true)); }); console.log('Notify '+promises.length+' users...'); return Promise.all(promises).then(() => { return res(group); }).catch((error) => { console.log('Error while processing notification of users:', error); //reply(boom.badImplementation()); //for now always succeed return res(group); }); } console.log('Failed updating group '+group._id+' and got result:', result.result, group); return res(boom.badImplementation()); }); }) .catch((error) => { console.log('Error while reading group '+group.id+':', error); res(boom.badImplementation(error)); }); } }, getUsergroups: (req, res) => { if (req.payload === undefined || req.payload.length < 1) return res(boom.badData()); let selectors = req.payload.reduce((q, element) => { q.push({_id: element}); return q; }, []); let query = { $or: selectors }; console.log('getUsergroups:', query); return usergroupCtrl.find(query) .then((cursor) => cursor.toArray()) .then((array) => { if (array === undefined || array === null || array.length < 1) { return res([]); } let enrichedGroups_promises = 
array.reduce((prev, curr) => { prev.push(enrichGroupMembers(curr)); return prev; }, []); return Promise.all(enrichedGroups_promises) .then((enrichedGroups) => { return res(enrichedGroups); }); }) .catch((error) => { console.log('Error while reading groups:', error); res(boom.badImplementation(error)); }); }, leaveUsergroup: (req, res) => { return usergroupCtrl.partlyUpdate({ _id: req.params.groupid }, { $pull: { members: { userid: req.auth.credentials.userid } } }). then((result) => { console.log('leaveUsergroup: ', result.result); if (result.result.ok !== 1) return res(boom.notFound()); if (result.result.nModified !== 1) return res(boom.unauthorized()); return res(); }); }, // getUserdata: (req, res) => { return usergroupCtrl.readGroupsOfUser(req.auth.credentials.userid) .then((array) => { if (array === undefined || array === null) return res(boom.notFound()); return res({ id: req.auth.credentials.userid, username: req.auth.credentials.username, groups: array }); }) .catch((error) => { console.log('getUserdata('+req.auth.credentials.userid+') error:', error); res(boom.notFound('Wrong user id', error)); }); }, getUsers: (req, res) => { if (req.payload === undefined || req.payload.length < 1) return res(boom.badData()); // keep initial result for static users let staticUsers = userCtrl.findStaticUsersByIds(req.payload); let selectors = req.payload.reduce((q, element) => { q.push({_id: element}); return q; }, []); let query = { $or: selectors }; console.log('getUsers:', query); return userCtrl.find(query) .then((cursor) => cursor.toArray()) .then((array) => { if (array === undefined || array === null || array.length < 1) { return res(staticUsers); } let publicUsers = array.reduce((array, user) => { array.push(preparePublicUserData(user)); return array; }, staticUsers); return res(publicUsers); }); }, getReviewableUsers: (req, res) => { let query = { authorised: { $not: { $eq: false } }, deactivated: { $not: { $eq: true } }, reviewed: { $not: { $eq: true } } }; return userCtrl.find(query) .then((cursor) => cursor.project({_id: 1, registered: 1, username: 1})) .then((cursor2) => cursor2.toArray()) .then((array) => { if (array.length < 1) return res([]); // console.log('filter users', array.length); let startTime = (new Date('2017-07-19')).getTime(); let userids = array.reduce((arr, curr) => { if ((new Date(curr.registered)).getTime() > startTime) arr.push(curr._id); return arr; }, []); if (userids.length < 1) return res([]); //now call service const options = { url: require('../configs/microservices').deck.uri + '/deckOwners?user=' + userids.reduce((a, b) => {let r = a === '' ? b : a + ',' + b; return r;}, ''), method: 'GET', json: true, body: { userids: userids } }; function callback(error, response, body) { // console.log('getReviewableUsers: ', error, response.statusCode, body); if (!error && (response.statusCode === 200)) { let result = body.reduce((arr, curr) => { curr.decks = curr.decksCount; curr.userid = curr._id; curr.username = array.find((u) => {return u._id === curr.userid;}).username; arr.push(curr); return arr; }, []); return res(result); } else { console.log('Error', (response) ? 
response.statusCode : undefined, error, body); return res([]); } } // console.log('now calling the service'); if (process.env.NODE_ENV === 'test') { callback(null, {statusCode: 200}, userids.reduce((arr, curr) => {arr.push({_id: curr, decksCount: 3}); return arr;}, [])); } else request(options, callback); }) .catch((error) => { console.log('Error', error); res([]); }); }, suspendUser: (req, res) => { return reviewUser(req, res, true); }, approveUser: (req, res) => { return reviewUser(req, res, false); }, getNextReviewableUser: (req, res) => { let secret = (req.query !== undefined && req.query.secret !== undefined) ? req.query.secret : undefined; if (secret === undefined || secret !== process.env.SECRET_REVIEW_KEY || !req.auth.credentials.isReviewer) return res(boom.unauthorized()); console.log('getNextReviewableUser'); return queueAPI.get() .then((user) => { console.log('got user', user); return res() .redirect(PLATFORM_INFORMATION_URL + '/Sfn87Pfew9Af09aM/user/' + user.username) .temporary(true); }) .catch((error) => { console.log('Error', error); res(boom.badImplementation()); }); }, addToQueue: (req, res) => { let secret = (req.query !== undefined && req.query.secret !== undefined) ? req.query.secret : undefined; if (secret === undefined || secret !== process.env.SECRET_REVIEW_KEY || !req.auth.credentials.isReviewer) return res(boom.unauthorized()); const reviewerid = req.auth.credentials.userid; const userid = req.params.id; return userCtrl.read(userid) .then((user) => { if (!user) return res(boom.notFound()); if (user.deactivated || user.authorised === false) return res(boom.locked()); if (user.reviewed || user.suspended) return res(boom.forbidden()); return queueAPI.getAll() .then((users) => { if (users.findIndex((u) => {return u.userid === user._id;}) !== -1) return res();//user is already in queue let queueUser = queueAPI.getEmptyElement(); queueUser.userid = user._id; queueUser.username = user.username; queueUser.decks = req.query.decks || 0; queueUser.addedByReviewer = reviewerid; return queueAPI.add(queueUser) .then((success) => { success ? res() : res(boom.badImplementation()); return; }) .catch((error) => { console.log('Error', error); res(boom.badImplementation(error)); }); }) .catch((error) => { console.log('Error', error); res(boom.badImplementation(error)); }); }) .catch((error) => { console.log('Error', error); res(boom.badImplementation(error)); }); } }; function reviewUser(req, res, suspended) { let secret = (req.query !== undefined && req.query.secret !== undefined) ? 
req.query.secret : undefined; if (secret === undefined || secret !== process.env.SECRET_REVIEW_KEY || !req.auth.credentials.isReviewer) return res(boom.unauthorized()); const reviewerid = req.auth.credentials.userid; const userid = req.params.id; let query = { _id: userid, authorised: { $not: { $eq: false } }, deactivated: { $not: { $eq: true } }, reviewed: { $not: { $eq: true } } }; let update = { $set: { reviewed: true, suspended: suspended, lastReviewDoneBy: reviewerid } }; return userCtrl.partlyUpdate(query, update) .then((result) => { if (result.result.ok === 1 && result.result.n === 1) { //found user and got updated return res(); } return res(boom.notFound()); }) .catch((error) => { console.log('Error', error); res(boom.badImplementation()); }); } function isUsernameAlreadyTaken(username) { let myPromise = new Promise((resolve, reject) => { return userCtrl.find({ username: username }) .then((cursor) => cursor.count()) .then((count) => { console.log('isUsernameAlreadyTaken: cursor.count():', count); if (count > 0) { resolve(true); } else { resolve(false); } }) .catch((error) => { reject(error); }); }); return myPromise; } function isEMailAlreadyTaken(email) { let myPromise = new Promise((resolve, reject) => { return userCtrl.find({ email: email }) .then((cursor) => cursor.count()) .then((count) => { console.log('isEMailAlreadyTaken: cursor.count():', count); if (count > 0) { resolve(true); } else { resolve(false); } }) .catch((error) => { reject(error); }); }); return myPromise; } //Remove attributes of the user data object which should not be transmitted function prepareDetailedUserData(user) { const hiddenKeys = ['password']; let minimizedUser = {}; let key; for (key in user) { const found = hiddenKeys.find((hiddenKey) => { if (key === hiddenKey) return true; return false; }); if (found === undefined) { minimizedUser[key] = user[key]; } } //map attributes for better API minimizedUser.language = minimizedUser.frontendLanguage; minimizedUser.frontendLanguage = undefined; //add data for social provider stuff minimizedUser.hasPassword = true; if (user.password === undefined || user.password === null || user.password === '') minimizedUser.hasPassword = false; minimizedUser.providers = (user.providers || []).reduce((prev, cur) => { if (prev.indexOf(cur.provider) === -1) { prev.push(cur.provider); return prev; } return prev; }, []); return minimizedUser; } //Remove attributes of the user data object which should not be transmitted for the user profile function preparePublicUserData(user) { const shownKeys = ['_id', 'username', 'organization', 'picture', 'description', 'country']; let minimizedUser = {}; let key; for (key in user) { const found = shownKeys.find((shownkey) => { if (key === shownkey) return true; return false; }); if (found !== undefined) { minimizedUser[key] = user[key]; } } return minimizedUser; } function notifiyUser(actor, receiver, type, group, isActiveAction = false) { let promise = new Promise((resolve, reject) => { let message = actor.name + ': Has ' + type + ' the group ' + group.name; if (isActiveAction) message = 'You ' + type + ' the group ' + group.name; const options = { url: require('../configs/microservices').activities.uri + '/activity/new', method: 'POST', json: true, body: { activity_type: type, user_id: actor.id.toString(), content_id: group._id.toString(), content_kind: 'group', content_name: message, content_owner_id: receiver.toString() } }; function callback(error, response, body) { // console.log('notifiyUser: ', error, response.statusCode, body); if 
(!error && (response.statusCode === 200)) { return resolve(body); } else { return reject(error); } } request(options, callback); }); return promise; } //Uses userids of creator and members in order to add username and picture function enrichGroupMembers(group) { let userids = group.members.reduce((prev, curr) => { prev.push(curr.userid); return prev; }, []); userids.push(group.creator.userid); console.log('enrichGroupMembers: group, userids', group, userids); let query = { _id: { $in: userids } }; return userCtrl.find(query) .then((cursor) => cursor.project({_id: 1, username: 1, picture: 1, country: 1, organization: 1})) .then((cursor2) => cursor2.toArray()) .then((array) => { array = array.reduce((prev, curr) => { if (curr._id) { curr.userid = curr._id; delete curr._id; } prev.push(curr); return prev; }, []); let creator = array.filter((user) => { return user.userid === group.creator.userid; }); let members = array.filter((user) => { return user.userid !== group.creator.userid; }); console.log('enrichGroupMembers: got creator and users (amount)', {id: creator[0]._id, name: creator[0].username, email: creator[0].email}, members.concat(group.members).length); //add joined attribute to members members = (members.concat(group.members)).reduce((prev, curr) => { if (prev[curr.userid] === undefined) prev[curr.userid] = {}; if (curr.joined === undefined) { prev[curr.userid].userid = curr.userid; prev[curr.userid].username = curr.username; prev[curr.userid].picture = curr.picture; prev[curr.userid].country = curr.country; prev[curr.userid].organization = curr.organization; } else prev[curr.userid].joined = curr.joined; return prev; }, {}); members = Object.keys(members).map((key) => { return members[key]; }).filter((member) => {return member.joined && member.userid && member.username;}); group.creator = creator[0]; group.members = members; console.log('enrichGroupMembers: got new members (after reading from database, adding joined attribute and cleanup), amount:', members.length); return group; }); }
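Both versions of the file repeat the same boilerplate for every microservice call: build an options object, define a node-style callback, and short-circuit with a fake 200 response when NODE_ENV is 'test'. A possible refactor sketch that promisifies the pattern once; the helper name and the injectable fake body are assumptions, not part of either version:

'use strict';

const request = require('request');

// Sketch only: one promise-returning wrapper for the repeated pattern.
// testBody mimics the fake bodies the handlers pass to callback() when
// NODE_ENV === 'test'.
function requestPromise(options, testBody = null) {
    if (process.env.NODE_ENV === 'test') {
        return Promise.resolve(testBody);
    }
    return new Promise((resolve, reject) => {
        request(options, (error, response, body) => {
            if (!error && response && response.statusCode === 200) {
                resolve(body);
            } else {
                reject(error || new Error('Unexpected status ' +
                    (response ? response.statusCode : 'none')));
            }
        });
    });
}

// archiveDeck could then shrink to a single expression, for example:
// const archiveDeck = (userid, deckid) => requestPromise({
//     url: deckServiceUri + '/decktree/' + deckid + '/archive',
//     method: 'POST', json: true, body: { user: userid }
// });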
[SWIK-1617_new_route__suspendUser_connected_with_deckservice] Call the deck-service when suspending a user, in order to archive all of the user's decks
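A rough test sketch for the behaviour this commit introduces. It assumes a pre-v17 hapi server exposing server.inject, a PUT /user/{id}/suspend route, and the NODE_ENV === 'test' stubbing that reviewUser and archiveDeck already contain; every one of those details is an assumption about the surrounding application, not something this file guarantees:

const chai = require('chai');
const expect = chai.expect;
// Hypothetical module path, for illustration only.
const server = require('../application/server');

describe('suspendUser', () => {
    it('suspends a reviewable user and archives the stubbed decks', () => {
        const options = {
            method: 'PUT',
            url: '/user/42/suspend?secret=' + process.env.SECRET_REVIEW_KEY,
            credentials: { userid: 1, username: 'reviewer', isReviewer: true }
        };
        // With NODE_ENV === 'test' the deck-service round-trips are stubbed
        // inside the handler, so only the HTTP outcome is asserted here.
        return server.inject(options).then((response) => {
            expect(response.statusCode).to.equal(200);
        });
    });
});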
<ide><path>application/controllers/handler.js
<ide>         .then((result) => {
<ide>             if (result.result.ok === 1 && result.result.n === 1) {
<ide>                 //found user and got updated
<del>                 return res();
<del>             }
<del>
<del>             return res(boom.notFound());
<add>
<add>                 if (!suspended)
<add>                     return res();
<add>
<add>                 //now archive all the decks of the user
<add>                 const options = {
<add>                     url: require('../configs/microservices').deck.uri + '/alldecks/'+userid,
<add>                     method: 'GET',
<add>                     json: true
<add>                 };
<add>
<add>                 function callback(error, response, body) {
<add>                     console.log('alldecks: ', (response) ? response.statusCode : undefined, error, body);
<add>
<add>                     if (!error && (response.statusCode === 200)) {
<add>                         //now archive all decks (one request per deck)
<add>                         let promises = body.reduce((arr, curr) => {
<add>                             arr.push(archiveDeck(userid, curr._id));
<add>                             return arr;
<add>                         }, []);
<add>
<add>                         return Promise.all(promises)
<add>                             .then(() => {
<add>                                 return res();
<add>                             })
<add>                             .catch((error) => {
<add>                                 console.log('Error', error);
<add>                                 return res();
<add>                             });
<add>                     } else {
<add>                         console.log('Error', (response) ? response.statusCode : undefined, error, body);
<add>                         return res();
<add>                     }
<add>                 }
<add>
<add>                 if (process.env.NODE_ENV === 'test') {
<add>                     callback(null, {statusCode: 200}, []);
<add>                 }
<add>                 else
<add>                     request(options, callback);
<add>             }
<add>             else
<add>                 return res(boom.notFound());
<ide>         })
<ide>         .catch((error) => {
<ide>             console.log('Error', error);
<ide>             res(boom.badImplementation());
<ide>         });
<add>}
<add>
<add>function archiveDeck(userid, deckid) {
<add>    let myPromise = new Promise((resolve, reject) => {
<add>        const options = {
<add>            url: require('../configs/microservices').deck.uri + '/decktree/'+deckid+'/archive',
<add>            method: 'POST',
<add>            json: true,
<add>            body: {
<add>                user: userid
<add>            }
<add>        };
<add>
<add>        function callback(error, response, body) {
<add>            console.log('archiveDeck: ', (response) ? response.statusCode : undefined, error, body);
<add>
<add>            if (!error && (response.statusCode === 200)) {
<add>                resolve();
<add>            } else {
<add>                console.log('Error', (response) ? response.statusCode : undefined, error, body);
<add>                return reject(error);
<add>            }
<add>        }
<add>
<add>        if (process.env.NODE_ENV === 'test') {
<add>            callback(null, {statusCode: 200}, null);
<add>        }
<add>        else
<add>            request(options, callback);
<add>    });
<add>    return myPromise;
<ide>}
<ide>
<ide>function isUsernameAlreadyTaken(username) {
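One consequence of the diff above: archiving fires one POST per deck and awaits them all at once via Promise.all, so a user with many decks fans out that many parallel requests to the deck-service. Purely as an illustration, the same work split into fixed-size sequential batches; archiveDeck is the function the commit adds, and the batch size is an arbitrary choice:

// Illustrative alternative, not part of the commit: run the archive
// requests in small batches instead of all in parallel.
function archiveAllDecksBatched(userid, deckIds, batchSize = 5) {
    let chain = Promise.resolve();
    for (let i = 0; i < deckIds.length; i += batchSize) {
        const batch = deckIds.slice(i, i + batchSize);
        // Each batch runs in parallel; batches run one after another.
        chain = chain.then(() => Promise.all(
            batch.map((deckid) => archiveDeck(userid, deckid))
        ));
    }
    return chain;
}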
Java
apache-2.0
b900461e7730e0d43bf90617c659179d15539e86
0
alexryndin/ambari,sekikn/ambari,radicalbit/ambari,arenadata/ambari
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.bootstrap; import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.util.ArrayList; import java.util.List; import org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.bootstrap.BSResponse.BSRunStat; import org.apache.ambari.server.configuration.Configuration; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.google.inject.Inject; import com.google.inject.Singleton; import org.apache.ambari.server.controller.AmbariServer; @Singleton public class BootStrapImpl { public static final String DEV_VERSION = "${ambariVersion}"; private File bootStrapDir; private String bootScript; private String bootSetupAgentScript; private String bootSetupAgentPassword; private BSRunner bsRunner; private String masterHostname; long timeout; private static Log LOG = LogFactory.getLog(BootStrapImpl.class); /* Monotonically increasing requestid for the bootstrap api to query on */ int requestId = 0; private FifoLinkedHashMap<Long, BootStrapStatus> bsStatus; private final String clusterOsType; private final String clusterOsFamily; private String projectVersion; private int serverPort; @Inject public BootStrapImpl(Configuration conf, AmbariMetaInfo ambariMetaInfo) throws IOException { this.bootStrapDir = conf.getBootStrapDir(); this.bootScript = conf.getBootStrapScript(); this.bootSetupAgentScript = conf.getBootSetupAgentScript(); this.bootSetupAgentPassword = conf.getBootSetupAgentPassword(); this.bsStatus = new FifoLinkedHashMap<Long, BootStrapStatus>(); this.masterHostname = conf.getMasterHostname( InetAddress.getLocalHost().getCanonicalHostName()); this.clusterOsType = conf.getServerOsType(); this.clusterOsFamily = conf.getServerOsFamily(); this.projectVersion = ambariMetaInfo.getServerVersion(); this.projectVersion = (this.projectVersion.equals(DEV_VERSION)) ? DEV_VERSION.replace("$", "") : this.projectVersion; this.serverPort = (conf.getApiSSLAuthentication())? conf.getClientSSLApiPort() : conf.getClientApiPort(); } /** * Return {@link BootStrapStatus} for a given responseId. * @param requestId the responseId for which the status needs to be returned. * @return status for a specific response id. A response Id of -1 means the * latest responseId. */ public synchronized BootStrapStatus getStatus(long requestId) { if (! bsStatus.containsKey(Long.valueOf(requestId))) { return null; } return bsStatus.get(Long.valueOf(requestId)); } /** * update status of a request. Mostly called by the status collector thread. * @param requestId the request id. * @param status the status of the update. 
*/ synchronized void updateStatus(long requestId, BootStrapStatus status) { bsStatus.put(Long.valueOf(requestId), status); } public synchronized void init() throws IOException { if (!bootStrapDir.exists()) { boolean mkdirs = bootStrapDir.mkdirs(); if (!mkdirs) throw new IOException("Unable to make directory for " + "bootstrap " + bootStrapDir); } } public synchronized BSResponse runBootStrap(SshHostInfo info) { BSResponse response = new BSResponse(); /* Run some checks for ssh host */ LOG.info("BootStrapping hosts " + info.hostListAsString()); if (bsRunner != null) { response.setLog("BootStrap in Progress: Cannot Run more than one."); response.setStatus(BSRunStat.ERROR); return response; } requestId++; if (info.getHosts() == null || info.getHosts().isEmpty()) { BootStrapStatus status = new BootStrapStatus(); status.setLog("Host list is empty."); status.setHostsStatus(new ArrayList<BSHostStatus>()); status.setStatus(BootStrapStatus.BSStat.ERROR); updateStatus(requestId, status); response.setStatus(BSRunStat.OK); response.setLog("Host list is empty."); response.setRequestId(requestId); return response; } else { bsRunner = new BSRunner(this, info, bootStrapDir.toString(), bootScript, bootSetupAgentScript, bootSetupAgentPassword, requestId, 0L, this.masterHostname, info.isVerbose(), this.clusterOsFamily, this.projectVersion, this.serverPort); bsRunner.start(); response.setStatus(BSRunStat.OK); response.setLog("Running Bootstrap now."); response.setRequestId(requestId); return response; } } /** * @param hosts * @return */ public synchronized List<BSHostStatus> getHostInfo(List<String> hosts) { List<BSHostStatus> statuses = new ArrayList<BSHostStatus>(); if (null == hosts || 0 == hosts.size() || (hosts.size() == 1 && hosts.get(0).equals("*"))) { for (BootStrapStatus status : bsStatus.values()) { if (null != status.getHostsStatus()) statuses.addAll(status.getHostsStatus()); } } else { // TODO make bootstrapping a bit more robust then stop looping for (BootStrapStatus status : bsStatus.values()) { for (BSHostStatus hostStatus : status.getHostsStatus()) { if (-1 != hosts.indexOf(hostStatus.getHostName())) { statuses.add(hostStatus); } } } } return statuses; } /** * */ public synchronized void reset() { bsRunner = null; } }
ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BootStrapImpl.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.bootstrap; import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.util.ArrayList; import java.util.List; import org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.bootstrap.BSResponse.BSRunStat; import org.apache.ambari.server.configuration.Configuration; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.google.inject.Inject; import com.google.inject.Singleton; import org.apache.ambari.server.controller.AmbariServer; @Singleton public class BootStrapImpl { public static final String DEV_VERSION = "${ambariVersion}"; private File bootStrapDir; private String bootScript; private String bootSetupAgentScript; private String bootSetupAgentPassword; private BSRunner bsRunner; private String masterHostname; long timeout; private static Log LOG = LogFactory.getLog(BootStrapImpl.class); /* Monotonically increasing requestid for the bootstrap api to query on */ int requestId = 0; private FifoLinkedHashMap<Long, BootStrapStatus> bsStatus; private final String clusterOsType; private final String clusterOsFamily; private String projectVersion; private int serverPort; @Inject public BootStrapImpl(Configuration conf, AmbariMetaInfo ambariMetaInfo) throws IOException { this.bootStrapDir = conf.getBootStrapDir(); this.bootScript = conf.getBootStrapScript(); this.bootSetupAgentScript = conf.getBootSetupAgentScript(); this.bootSetupAgentPassword = conf.getBootSetupAgentPassword(); this.bsStatus = new FifoLinkedHashMap<Long, BootStrapStatus>(); this.masterHostname = conf.getMasterHostname( InetAddress.getLocalHost().getCanonicalHostName()); this.clusterOsType = conf.getServerOsType(); this.clusterOsFamily = conf.getServerOsFamily(); this.projectVersion = ambariMetaInfo.getServerVersion(); this.projectVersion = (this.projectVersion.equals(DEV_VERSION)) ? DEV_VERSION.replace("$", "") : this.projectVersion; this.serverPort = (conf.getApiSSLAuthentication())? conf.getClientSSLApiPort() : conf.getClientApiPort(); } /** * Return {@link BootStrapStatus} for a given responseId. * @param requestId the responseId for which the status needs to be returned. * @return status for a specific response id. A response Id of -1 means the * latest responseId. */ public synchronized BootStrapStatus getStatus(long requestId) { if (! bsStatus.containsKey(Long.valueOf(requestId))) { return null; } return bsStatus.get(Long.valueOf(requestId)); } /** * update status of a request. Mostly called by the status collector thread. * @param requestId the request id. * @param status the status of the update. 
*/ synchronized void updateStatus(long requestId, BootStrapStatus status) { bsStatus.put(Long.valueOf(requestId), status); } public synchronized void init() throws IOException { if (!bootStrapDir.exists()) { boolean mkdirs = bootStrapDir.mkdirs(); if (!mkdirs) throw new IOException("Unable to make directory for " + "bootstrap " + bootStrapDir); } } public synchronized BSResponse runBootStrap(SshHostInfo info) { BSResponse response = new BSResponse(); /* Run some checks for ssh host */ LOG.info("BootStrapping hosts " + info.hostListAsString()); if (bsRunner != null) { response.setLog("BootStrap in Progress: Cannot Run more than one."); response.setStatus(BSRunStat.ERROR); return response; } requestId++; bsRunner = new BSRunner(this, info, bootStrapDir.toString(), bootScript, bootSetupAgentScript, bootSetupAgentPassword, requestId, 0L, this.masterHostname, info.isVerbose(), this.clusterOsFamily, this.projectVersion, this.serverPort); bsRunner.start(); response.setStatus(BSRunStat.OK); response.setLog("Running Bootstrap now."); response.setRequestId(requestId); return response; } /** * @param hosts * @return */ public synchronized List<BSHostStatus> getHostInfo(List<String> hosts) { List<BSHostStatus> statuses = new ArrayList<BSHostStatus>(); if (null == hosts || 0 == hosts.size() || (hosts.size() == 1 && hosts.get(0).equals("*"))) { for (BootStrapStatus status : bsStatus.values()) { if (null != status.getHostsStatus()) statuses.addAll(status.getHostsStatus()); } } else { // TODO make bootstrapping a bit more robust then stop looping for (BootStrapStatus status : bsStatus.values()) { for (BSHostStatus hostStatus : status.getHostsStatus()) { if (-1 != hosts.indexOf(hostStatus.getHostName())) { statuses.add(hostStatus); } } } } return statuses; } /** * */ public synchronized void reset() { bsRunner = null; } }
AMBARI-9788 Ambari UI shows message about host checks even after failed bootstrap (dsen)
ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BootStrapImpl.java
AMBARI-9788 Ambari UI shows message about host checks even after failed bootstrap (dsen)
<ide><path>ambari-server/src/main/java/org/apache/ambari/server/bootstrap/BootStrapImpl.java <ide> } <ide> requestId++; <ide> <del> bsRunner = new BSRunner(this, info, bootStrapDir.toString(), <del> bootScript, bootSetupAgentScript, bootSetupAgentPassword, requestId, 0L, <del> this.masterHostname, info.isVerbose(), this.clusterOsFamily, this.projectVersion, this.serverPort); <del> bsRunner.start(); <del> response.setStatus(BSRunStat.OK); <del> response.setLog("Running Bootstrap now."); <del> response.setRequestId(requestId); <del> return response; <add> if (info.getHosts() == null || info.getHosts().isEmpty()) { <add> BootStrapStatus status = new BootStrapStatus(); <add> status.setLog("Host list is empty."); <add> status.setHostsStatus(new ArrayList<BSHostStatus>()); <add> status.setStatus(BootStrapStatus.BSStat.ERROR); <add> updateStatus(requestId, status); <add> <add> response.setStatus(BSRunStat.OK); <add> response.setLog("Host list is empty."); <add> response.setRequestId(requestId); <add> return response; <add> } else { <add> bsRunner = new BSRunner(this, info, bootStrapDir.toString(), <add> bootScript, bootSetupAgentScript, bootSetupAgentPassword, requestId, 0L, <add> this.masterHostname, info.isVerbose(), this.clusterOsFamily, this.projectVersion, this.serverPort); <add> bsRunner.start(); <add> response.setStatus(BSRunStat.OK); <add> response.setLog("Running Bootstrap now."); <add> response.setRequestId(requestId); <add> return response; <add> } <ide> } <ide> <ide> /**
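The AMBARI-9788 diff above follows a validate-before-spawn pattern: check the input, and if it is invalid, record an explicit ERROR status that later status queries can see, instead of starting a worker thread. The following is a minimal, self-contained Java sketch of that pattern only; all names in it (InputValidationDemo, Status, runJob) are hypothetical stand-ins, not Ambari's actual API beyond what the diff itself shows.

// Minimal sketch of the validate-before-spawn pattern used in the fix above.
// Record an ERROR status for empty input instead of starting a worker, so
// later status queries reflect the failure. All names here are hypothetical.
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class InputValidationDemo {

    enum Status { RUNNING, ERROR }

    private final Map<Long, Status> statusByRequest = new ConcurrentHashMap<>();
    private long requestId = 0;

    public synchronized long runJob(List<String> hosts) {
        requestId++;
        if (hosts == null || hosts.isEmpty()) {
            // Fail fast and leave a queryable error status behind.
            statusByRequest.put(requestId, Status.ERROR);
        } else {
            statusByRequest.put(requestId, Status.RUNNING);
            new Thread(() -> { /* do the actual work here */ }).start();
        }
        return requestId;
    }

    public Status getStatus(long id) {
        return statusByRequest.get(id);
    }

    public static void main(String[] args) {
        InputValidationDemo demo = new InputValidationDemo();
        long id = demo.runJob(java.util.Collections.<String>emptyList());
        System.out.println(demo.getStatus(id)); // prints ERROR, not RUNNING
    }
}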
Java
epl-1.0
33b64f9615f1ebea9247eef3a529fafa2820ee96
0
theArchonius/mervin,theArchonius/mervin
/******************************************************************************* * Copyright (c) 2017 Florian Zoubek. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Florian Zoubek - initial API and implementation *******************************************************************************/ package at.bitandart.zoubek.mervin.patchset.history.organizers; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import javax.inject.Inject; import javax.inject.Named; import org.eclipse.e4.core.di.annotations.Optional; import org.eclipse.emf.compare.Diff; import org.eclipse.emf.compare.rcp.EMFCompareRCPPlugin; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EPackage; import org.eclipse.emf.edit.provider.ComposedAdapterFactory; import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider; import at.bitandart.zoubek.mervin.IMatchHelper; import at.bitandart.zoubek.mervin.IMervinContextConstants; import at.bitandart.zoubek.mervin.model.modelreview.ModelReview; import at.bitandart.zoubek.mervin.patchset.history.IPatchSetHistoryEntry; import at.bitandart.zoubek.mervin.patchset.history.NamedHistoryEntryContainer; /** * An {@link DiffCategoryOrganizer} that groups the entries by their common * containing match. This organizer assumes that each entry contains only * {@link Diff}s whose matches are considered to have the same old or, if no old * value exists, the same new value. * * @author Florian Zoubek * */ public class MatchingObjectOrganizer extends DiffCategoryOrganizer { private AdapterFactoryLabelProvider adapterFactoryLabelProvider; @Inject private IMatchHelper matchHelper; @Inject @Named(IMervinContextConstants.ACTIVE_MODEL_REVIEW) @Optional private ModelReview modelReview; public MatchingObjectOrganizer() { adapterFactoryLabelProvider = new AdapterFactoryLabelProvider( new ComposedAdapterFactory(EMFCompareRCPPlugin.getDefault().createFilteredAdapterFactoryRegistry())); } @Override public Collection<Object> groupPatchSetHistoryEntries(List<? extends IPatchSetHistoryEntry<?, ?>> modelEntries, List<? 
extends IPatchSetHistoryEntry<?, ?>> diagramEntries) { Collection<Object> rootEntries = super.groupPatchSetHistoryEntries(modelEntries, diagramEntries); for (Object entry : rootEntries) { if (entry instanceof NamedHistoryEntryContainer) { NamedHistoryEntryContainer container = ((NamedHistoryEntryContainer) entry); List<IPatchSetHistoryEntry<?, ?>> entries = new ArrayList<>(container.getSubEntries()); organizeEntries(entries, container); } } return rootEntries; } private void organizeEntries(List<IPatchSetHistoryEntry<?, ?>> entries, IPatchSetHistoryEntry<?, ?> parent) { Map<Object, IPatchSetHistoryEntry<?, ?>> objectDiffMap = new HashMap<>(); Map<EPackage, IPatchSetHistoryEntry<?, ?>> packageEntries = new HashMap<>(); parent.getSubEntries().clear(); for (IPatchSetHistoryEntry<?, ?> entry : entries) { Object key = getCategoryKey(entry); if (key != null) { IPatchSetHistoryEntry<?, ?> parentEntry = objectDiffMap.get(key); if (parentEntry == null) { parentEntry = new NamedHistoryEntryContainer(adapterFactoryLabelProvider.getText(key), new LinkedList<IPatchSetHistoryEntry<?, ?>>()); if (key instanceof EObject) { /* create a subentry for each containing package */ EPackage containingPackage = getContainingPackage((EObject) key); if (containingPackage != null) { IPatchSetHistoryEntry<?, ?> packageEntry = packageEntries.get(containingPackage); if (packageEntry == null) { packageEntry = new NamedHistoryEntryContainer( adapterFactoryLabelProvider.getText(containingPackage), new LinkedList<IPatchSetHistoryEntry<?, ?>>()); parent.getSubEntries().add(packageEntry); packageEntries.put(containingPackage, packageEntry); } packageEntry.getSubEntries().add(parentEntry); } } else { parent.getSubEntries().add(parentEntry); } objectDiffMap.put(key, parentEntry); } parentEntry.getSubEntries().add(entry); } } } /** * @param eObject * the {@link EObject} to find the root {@link EPackage} for. * @return the containing root package of the given {@link EObject}. */ private EPackage getContainingPackage(EObject eObject) { EPackage ePackage = eObject.eClass().getEPackage(); while (ePackage.getESuperPackage() != null) { ePackage = ePackage.getESuperPackage(); } return ePackage; } /** * @param entry * the entry to obtain the category key for. * @return the category key for the given entry or null if no key could be * found. */ private Object getCategoryKey(IPatchSetHistoryEntry<?, ?> entry) { /* the old value is the primary key - as long as it is not null */ Object primaryKey = null; /* the new value is the secondary key - as long as it is not null */ Object secondaryKey = null; Object entryObject = entry.getEntryObject(); if (entryObject instanceof Diff) { Diff diff = (Diff) entryObject; primaryKey = getPrimaryKey(diff); secondaryKey = getSecondaryKey(diff); } if (primaryKey != null) { return primaryKey; } return secondaryKey; } /** * @param diff * the {@link Diff} used to derive the key. * @return the primary key derived from the given diff, or null if no * primary key could be derived. */ private Object getPrimaryKey(Diff diff) { return matchHelper.getOldValue(diff.getMatch()); } /** * @param diff * the {@link Diff} used to derive the key. * @return the secondary key derived from the given diff, or null if no * secondary key could be derived. */ private Object getSecondaryKey(Diff diff) { return matchHelper.getNewValue(diff.getMatch()); } }
plugins/at.bitandart.zoubek.mervin/src/at/bitandart/zoubek/mervin/patchset/history/organizers/MatchingObjectOrganizer.java
/******************************************************************************* * Copyright (c) 2017 Florian Zoubek. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Florian Zoubek - initial API and implementation *******************************************************************************/ package at.bitandart.zoubek.mervin.patchset.history.organizers; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import javax.inject.Inject; import javax.inject.Named; import org.eclipse.e4.core.di.annotations.Optional; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.compare.Diff; import org.eclipse.emf.compare.rcp.EMFCompareRCPPlugin; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EPackage; import org.eclipse.emf.edit.provider.ComposedAdapterFactory; import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider; import at.bitandart.zoubek.mervin.IMatchHelper; import at.bitandart.zoubek.mervin.IMervinContextConstants; import at.bitandart.zoubek.mervin.model.modelreview.ModelReview; import at.bitandart.zoubek.mervin.model.modelreview.PatchSet; import at.bitandart.zoubek.mervin.patchset.history.IPatchSetHistoryEntry; import at.bitandart.zoubek.mervin.patchset.history.ISimilarityHistoryService.DiffWithSimilarity; import at.bitandart.zoubek.mervin.patchset.history.NamedHistoryEntryContainer; /** * An {@link DiffCategoryOrganizer} that groups the entries by their common * containing match. This organizer assumes that each entry contains only * {@link Diff}s whose matches are considered to have the same old or, if no old * value exists, the same new value. * * @author Florian Zoubek * */ public class MatchingObjectOrganizer extends DiffCategoryOrganizer { private AdapterFactoryLabelProvider adapterFactoryLabelProvider; @Inject private IMatchHelper matchHelper; @Inject @Named(IMervinContextConstants.ACTIVE_MODEL_REVIEW) @Optional private ModelReview modelReview; public MatchingObjectOrganizer() { adapterFactoryLabelProvider = new AdapterFactoryLabelProvider( new ComposedAdapterFactory(EMFCompareRCPPlugin.getDefault().createFilteredAdapterFactoryRegistry())); } @Override public Collection<Object> groupPatchSetHistoryEntries(List<? extends IPatchSetHistoryEntry<?, ?>> modelEntries, List<? 
extends IPatchSetHistoryEntry<?, ?>> diagramEntries) { Collection<Object> rootEntries = super.groupPatchSetHistoryEntries(modelEntries, diagramEntries); for (Object entry : rootEntries) { if (entry instanceof NamedHistoryEntryContainer) { NamedHistoryEntryContainer container = ((NamedHistoryEntryContainer) entry); List<IPatchSetHistoryEntry<?, ?>> entries = new ArrayList<>(container.getSubEntries()); organizeEntries(entries, container); } } return rootEntries; } private void organizeEntries(List<IPatchSetHistoryEntry<?, ?>> entries, IPatchSetHistoryEntry<?, ?> parent) { Map<Object, IPatchSetHistoryEntry<?, ?>> objectDiffMap = new HashMap<>(); Map<EPackage, IPatchSetHistoryEntry<?, ?>> packageEntries = new HashMap<>(); parent.getSubEntries().clear(); for (IPatchSetHistoryEntry<?, ?> entry : entries) { Object key = getCategoryKey(entry); if (key != null) { IPatchSetHistoryEntry<?, ?> parentEntry = objectDiffMap.get(key); if (parentEntry == null) { parentEntry = new NamedHistoryEntryContainer(adapterFactoryLabelProvider.getText(key), new LinkedList<IPatchSetHistoryEntry<?, ?>>()); if (key instanceof EObject) { /* create a subentry for each containing package */ EPackage containingPackage = getContainingPackage((EObject) key); if (containingPackage != null) { IPatchSetHistoryEntry<?, ?> packageEntry = packageEntries.get(containingPackage); if (packageEntry == null) { packageEntry = new NamedHistoryEntryContainer( adapterFactoryLabelProvider.getText(containingPackage), new LinkedList<IPatchSetHistoryEntry<?, ?>>()); parent.getSubEntries().add(packageEntry); packageEntries.put(containingPackage, packageEntry); } packageEntry.getSubEntries().add(parentEntry); } } else { parent.getSubEntries().add(parentEntry); } objectDiffMap.put(key, parentEntry); } parentEntry.getSubEntries().add(entry); } } } /** * @param eObject * the {@link EObject} to find the root {@link EPackage} for. * @return the containing root package of the given {@link EObject}. */ private EPackage getContainingPackage(EObject eObject) { EPackage ePackage = eObject.eClass().getEPackage(); while (ePackage.getESuperPackage() != null) { ePackage = ePackage.getESuperPackage(); } return ePackage; } /** * @param entry * the entry to obtain the category key for. * @return the category key for the given entry or null if no key could be * found. 
*/ private Object getCategoryKey(IPatchSetHistoryEntry<?, ?> entry) { /* the old value is the primary key - as long as it is not null */ Object primaryKey = null; /* the new value is the secondary key - as long as it is not null */ Object secondaryKey = null; Object entryObject = entry.getEntryObject(); if (entryObject instanceof Diff) { Diff diff = (Diff) entryObject; primaryKey = getPrimaryKey(diff); secondaryKey = getSecondaryKey(diff); } if (primaryKey != null) { return primaryKey; } /* * try to obtain the primary key and secondary key from the value diff * as fallback for the case that the assumption (the old values match * or, if no old values exist, the new values match) is wrong */ if (modelReview != null) { EList<PatchSet> patchSets = modelReview.getPatchSets(); for (PatchSet patchSet : patchSets) { Object value = entry.getValue(patchSet); Diff valueDiff = null; if (value instanceof Diff) { valueDiff = (Diff) value; } else if (value instanceof DiffWithSimilarity) { valueDiff = ((DiffWithSimilarity) value).getDiff(); } if (valueDiff != null) { primaryKey = getPrimaryKey(valueDiff); if (secondaryKey != null) { secondaryKey = getSecondaryKey(valueDiff); } } if (primaryKey != null) { return primaryKey; } } } return secondaryKey; } /** * @param diff * the {@link Diff} used to derive the key. * @return the primary key derived from the given diff, or null if no * primary key could be derived. */ private Object getPrimaryKey(Diff diff) { return matchHelper.getOldValue(diff.getMatch()); } /** * @param diff * the {@link Diff} used to derive the key. * @return the secondary key derived from the given diff, or null if no * secondary key could be derived. */ private Object getSecondaryKey(Diff diff) { return matchHelper.getNewValue(diff.getMatch()); } }
fixes confusing diff match categorization in patch set history The previous organizer implementation also categorized diffs based on their similar diffs. This resulted in a categorization that did not match the containment tree, which could confuse users. Fixed by considering only the entry diff for categorization.
plugins/at.bitandart.zoubek.mervin/src/at/bitandart/zoubek/mervin/patchset/history/organizers/MatchingObjectOrganizer.java
fixes confusing diff match categorization in patch set history
<ide><path>plugins/at.bitandart.zoubek.mervin/src/at/bitandart/zoubek/mervin/patchset/history/organizers/MatchingObjectOrganizer.java <ide> import javax.inject.Named; <ide> <ide> import org.eclipse.e4.core.di.annotations.Optional; <del>import org.eclipse.emf.common.util.EList; <ide> import org.eclipse.emf.compare.Diff; <ide> import org.eclipse.emf.compare.rcp.EMFCompareRCPPlugin; <ide> import org.eclipse.emf.ecore.EObject; <ide> import at.bitandart.zoubek.mervin.IMatchHelper; <ide> import at.bitandart.zoubek.mervin.IMervinContextConstants; <ide> import at.bitandart.zoubek.mervin.model.modelreview.ModelReview; <del>import at.bitandart.zoubek.mervin.model.modelreview.PatchSet; <ide> import at.bitandart.zoubek.mervin.patchset.history.IPatchSetHistoryEntry; <del>import at.bitandart.zoubek.mervin.patchset.history.ISimilarityHistoryService.DiffWithSimilarity; <ide> import at.bitandart.zoubek.mervin.patchset.history.NamedHistoryEntryContainer; <ide> <ide> /** <ide> if (primaryKey != null) { <ide> return primaryKey; <ide> } <del> <del> /* <del> * try to obtain the primary key and secondary key from the value diff <del> * as fallback for the case that the assumption (the old values match <del> * or, if no old values exist, the new values match) is wrong <del> */ <del> if (modelReview != null) { <del> <del> EList<PatchSet> patchSets = modelReview.getPatchSets(); <del> for (PatchSet patchSet : patchSets) { <del> <del> Object value = entry.getValue(patchSet); <del> Diff valueDiff = null; <del> <del> if (value instanceof Diff) { <del> valueDiff = (Diff) value; <del> } else if (value instanceof DiffWithSimilarity) { <del> valueDiff = ((DiffWithSimilarity) value).getDiff(); <del> } <del> <del> if (valueDiff != null) { <del> primaryKey = getPrimaryKey(valueDiff); <del> if (secondaryKey != null) { <del> secondaryKey = getSecondaryKey(valueDiff); <del> } <del> } <del> <del> if (primaryKey != null) { <del> return primaryKey; <del> } <del> } <del> <del> } <ide> return secondaryKey; <ide> <ide> }
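After this change, the organizer's getCategoryKey(...) derives the grouping key from the entry diff alone: the match's old value is the primary key, and the new value is only the fallback (covering added elements, which have no old value). Below is a minimal, self-contained Java sketch of just that selection rule; the Match class and its fields are hypothetical stand-ins for the EMF Compare types used in the real code.

// Minimal sketch of the primary/secondary key selection described above:
// prefer the old value of a match, fall back to the new value.
public class CategoryKeyDemo {

    // Hypothetical stand-in for an EMF Compare match.
    static class Match {
        final Object oldValue;
        final Object newValue;
        Match(Object oldValue, Object newValue) {
            this.oldValue = oldValue;
            this.newValue = newValue;
        }
    }

    static Object categoryKey(Match match) {
        // The old value is the primary key, the new value the secondary key.
        return (match.oldValue != null) ? match.oldValue : match.newValue;
    }

    public static void main(String[] args) {
        System.out.println(categoryKey(new Match("old", "new"))); // old
        System.out.println(categoryKey(new Match(null, "new")));  // new (added element)
    }
}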
JavaScript
mit
ac7385b8749b96145eb0c215638fcfad9c844b6d
0
Osuriel/drunkDices,Osuriel/drunkDices
/* DrunkDices.com GAME RULES: Each Player can roll the dices as many time as they wish until they decide to hold their point and pass the turn or they get a “one”. If a player gets a “one” when they roll the dice, they will not get any points and their turn will be over. A player must take a shot every time they get a 1. If a player roll the dice 6 or more times in a turn and decides to hold his points, that turn become a fire turn. If a player gets a streak of 3 fire turns in a row he will get 50 bonus points and every other player has to have a drink. When a player gets a “one” 3 turns in a row he must have an extra drink. The first Player to reach 250 points wins the game, and every losing player has to have a drink. Its super easy! The game will remind you when someone has to have a drink! */ // ----------------------------------- SET UP ----------------------------------- var numberOfPlayers, players, winningScore, activePlayer, diceResult, currentScore, playerTurn, gameOn, diceImgs, nameInputArray, successfulRolls, errorNameInput, holdRoll, diceRolling, rollDiceButtonActive, holdPointsButtonActive; players = {}; winningScore = 250; playerTurn = 1; currentScore = 0; gameon = false; diceImgs =[]; successfulRolls = 0; diceRolling = false; rollDiceButtonActive = true; holdPointsButtonActive = true; nameInputArray = []; //DOM elements var lowOpacityScreenDiv= document.getElementById('low-opacity-screen'); var popupBoxDiv = document.getElementById('popup-box'); var howManyPlayersDiv = document.getElementById('how-many-players'); var nameFormDiv = document.getElementById('name-form'); var error = document.querySelector('.error'); var nameList = document.getElementById('name-list'); var currentAttempsDOM = document.getElementById('current_attemps_number'); var currentBigBoxDOM = document.getElementById('current_big_box'); var bigFlamesDOM = document.getElementById('inside-left'); var bigXDOM = document.getElementById('inside-right'); var activePlayerScoreNode = document.getElementById('active_player_score'); var activePlayerNameNode = document.getElementById('active_player_name'); //Dom Elements Game controls var playerBoxContainer = document.getElementById('player-box-container'); var rollDiceButton = document.getElementById('roll_dice_button'); var holdPointsButton = document.getElementById('hold_points_button'); var diceDom = document.getElementById('dice'); var currentScoreDOM = document.getElementById('current_score'); for ( var i = 1; i <= 6; i++){ var diceUrl = 'img/dices/'+i+'dice.png'; diceImgs[i]= diceUrl; } //functions: function makeNode( type, classes , parent){ var element = document.createElement(type); element.className = classes; parent.appendChild(element); return element; } function hide(element){ element.classList.add('hidden'); } function show(element){ element.classList.remove('hidden'); } function stillRolling(){ var tempDice = Math.floor(Math.random() * 6) + 1; diceDom.src = diceImgs[tempDice]; console.log('this is running'); } function initializeGame(){ numberOfPlayers = 0; players = {}; winningScore = 250; playerTurn = 1; currentScore = 0; gameon = false; diceImgs =[]; successfulRolls = 0; diceRolling = false; rollDiceButtonActive = true; holdPointsButtonActive = true; nameInputArray = []; nameList.innerHTML = ""; //Set UI to empty. 
currentScoreDOM.textContent = 0; activePlayerNameNode.textContent = "Player1"; bigFlamesDOM.innerHTML = ""; bigXDOM.innerHTML = ""; playerBoxContainer.innerHTML = ""; currentAttempsDOM.textContent = 0; activePlayerScoreNode.textContent = 0; //Show Pop Up Boxes. show(lowOpacityScreenDiv); show(popupBoxDiv); show(howManyPlayersDiv); } function rollDice(){ if ( rollDiceButtonActive === true ){ //Making dice rol for .150 seconds before selecting the number if ( diceRolling === false ){ interval= setInterval( stillRolling, 60); } // debugger; //Real Dice Roll function realDiceRoll (){ clearInterval(interval) diceResult = Math.floor(Math.random() * 6) + 1; diceDom.src = diceImgs[diceResult]; addToCurrentScore(); console.log(diceResult); diceRolling = false; } setTimeout( realDiceRoll, 400); diceRolling = true; rollDiceButtonActive = false; rollDiceButton.className = 'roll_dice_active roll_dice_inactive'; setTimeout ( function(){ rollDiceButtonActive = true; rollDiceButton.className = ''; } , 700); } } function addToCurrentScore(){ if ( diceResult === 1 ){ currentScore = 0; currentScoreDOM.textContent= currentScore; activePlayer.flames = 0; activePlayer.x++; //Takes 20 points away if you have an X streak of 3. if ( activePlayer.x >= 3 && activePlayer.score > 20 ) { activePlayer.score -= 20; //Show Pop up saying you just lost 20 points and every other player needs a drink. alert('You got 3 X in a row. you just lost 20 points'); } else if (activePlayer.x >= 3 && activePlayer.score <= 20) { activePlayer.score = 0; //Show Pop up saying you just lost 20 points and every other player needs a drink alert('You got 3 X in a row. you just lost 20 points'); } setTimeout( function(){ alert('You got a 1! Take a Shot and your turn is over'); switchTurn(); }, 500); } else { currentScore += diceResult; currentScoreDOM.textContent= currentScore; successfulRolls++; currentAttempsDOM.textContent = successfulRolls; if ( successfulRolls === 6 ){ currentAttempsDOM.className = 'current_attemps_streak';//Lights up the flame indicating a streak. } } if( (currentScore + activePlayer.score ) >= winningScore ){ setTimeout( function(){ alert(activePlayer.name + ' just WON the game! Every other player has to take a shot, the person in last place takes 2'); }, 500); } } function addToPlayerScore(){ rollDiceButtonActive = false; rollDiceButton.className = 'roll_dice_inactive'; if (holdPointsButtonActive){ holdPointsButtonActive = false; currentScoreDOM.className = 'moving_current_score'; holdPointsButton.className = 'hold_points_inactive'; setTimeout( function(){ activePlayer.score += currentScore; activePlayer.x = 0; activePlayerScoreNode.textContent = activePlayer.score; if ( successfulRolls >= 6 ){ if (activePlayer.flames < 3){ activePlayer.flames++; } } else { activePlayer.flames = 0; } if ( activePlayer.flames === 6 ){ activePlayer.score += 50; //Show Pop up saying you just got 50 points and every other player needs a drink. alert('You got 3 X in a row. you just Won 50 points, Every other player needs a drink'); } updateBigStreaks( activePlayer.flames, activePlayer.x ); setTimeout( function(){ currentScoreDOM.className = ''; switchTurn(); rollDiceButtonActive = true; rollDiceButton.className = ''; holdPointsButton.className = ''; } , 1000) } , 1000 ) } } function switchTurn(){ activePlayer.html.score.textContent = activePlayer.score; currentScore = 0; holdPointsButtonActive = true; currentAttempsDOM.className = '';//Lights up the flame indicating a streak. 
currentScoreDOM.textContent = 0; currentAttempsDOM.textContent = 0; successfulRolls = 0; updateStreaks( activePlayer.flames, activePlayer.x ); activePlayer.html.playerBox.classList.remove('active_player_box'); if ( playerTurn === numberOfPlayers ){ playerTurn = 1; } else { playerTurn++; } activePlayer = players['player' + playerTurn]; alert(activePlayer.name + " It is your turn now."); activePlayerNameNode.textContent = activePlayer.name; activePlayerScoreNode.textContent = activePlayer.score; activePlayer.html.playerBox.classList.add('active_player_box'); updateBigStreaks( activePlayer.flames, activePlayer.x ); if ( activePlayer.flames >= 3 ){ currentBigBoxDOM.classList.add('fire_bg'); }else{ currentBigBoxDOM.classList.remove('fire_bg'); } } function updateStreaks( flames, xs ){ var flameImgs = '<img src="img/flame_icon.png" alt="Flame for Hot streak" class="small-streak-img">'; var xImgs = '<img src="img/x_icon.png" alt="X for Hot streak" class="small-streak-img">'; activePlayer.html.flames.innerHTML = ""; activePlayer.html.x.innerHTML = ""; for ( var i = 1 ; i <= flames && i <=3 ; i++ ){ activePlayer.html.flames.innerHTML += flameImgs; } for ( var i = 1 ; i <= xs && i <=3 ; i++ ){ activePlayer.html.x.innerHTML += xImgs; } } function updateBigStreaks ( flames, xs) { var flameImgs = '<img src="img/flame_icon.png" alt="Flame for Hot streak" class="big-streak-img">'; var xImgs = '<img src="img/x_icon.png" alt="X for Hot streak" class="big-streak-img">'; bigFlamesDOM.innerHTML = ""; bigXDOM.innerHTML = ""; for ( var i = 1 ; i <= flames && i <=3 ; i++ ){ bigFlamesDOM.innerHTML += flameImgs; } for ( var i = 1 ; i <= xs && i <=3 ; i++ ){ bigXDOM.innerHTML += xImgs; } } function setNumberOfPlayers(){ numberOfPlayers = parseFloat(document.getElementById('number-of-players').value); hide(howManyPlayersDiv); show(nameFormDiv); document.getElementById('number-selected').textContent = numberOfPlayers; //Create input field for each player name. for ( var i = 1 ; i <= numberOfPlayers ; i++ ){ var inputElement = document.createElement('input'); var liElement = document.createElement('li'); inputElement.setAttribute( 'type' , 'text' ); inputElement.setAttribute ( 'id' , 'name-input-' + i ); inputElement.setAttribute ( 'class' , 'name-input'); inputElement.setAttribute ( 'placeholder' , 'Enter a name for player ' + i ); liElement.appendChild(inputElement); nameInputArray[i] = inputElement; nameList.appendChild(liElement); inputElement.addEventListener( 'keypress' , function(event){ if ( event.which === 13 ) { var i = parseInt(this.id.charAt(this.id.length -1)); if( i < numberOfPlayers){ document.getElementById( 'name-input-' + (i+1)).focus(); } else { document.getElementById('name-form-ok').focus(); } } } ); } } function startGame(){ //Make sure all fields are filled. for ( var i = 1 ; i <= numberOfPlayers ; i++ ){ if ( nameInputArray[i].value == false){ show(error); return; } if ( nameInputArray[i].value.length > 11){ nameInputArray[i].classList.add('name-too-long'); nameInputArray[i].value = "Name was too long. 
Try 11 characters or less"; nameInputArray[i].focus(); nameInputArray[i].setSelectionRange(0, nameInputArray[i].value.length); errorNameInput = nameInputArray[i]; errorNameInput.addEventListener ( 'click', function(){ errorNameInput.setSelectionRange(0, nameInputArray[i].value.length); errorNameInput.classList.remove('name-too-long'); }); errorNameInput.addEventListener ( 'keypress', function(){ errorNameInput.classList.remove('name-too-long'); }); return; } } for ( var i = 1 ; i <= numberOfPlayers ; i++ ){ //create object for each player. var name = nameInputArray[i].value; players["player" + i] = { name: name, score: 0, html: {}, flames: 0, x: 0, } //Create a Player box for each player. var playerBox= document.createElement('div'); playerBox.classList.add('player_box'); playerBoxContainer.appendChild(playerBox); var playerName = document.createElement('div'); playerName.textContent = players["player" + i].name; playerName.classList.add('player_names'); playerBox.appendChild(playerName); var playerScoreHeading = document.createElement('div'); playerScoreHeading.classList.add('player_score_heading'); playerScoreHeading.textContent = "Player Score:"; playerBox.appendChild(playerScoreHeading); var playerScoreDiv = document.createElement('div'); playerScoreDiv.classList.add('player_score'); playerBox.appendChild(playerScoreDiv); var playerScoreNumber = document.createElement('span'); playerScoreNumber.classList.add('player_score_number'); playerScoreNumber.textContent = players["player" + i].score; playerScoreNumber.id = "player_score_number_" + i; playerScoreDiv.appendChild(playerScoreNumber); playerScoreDiv.appendChild(document.createTextNode('pts')); //Set default gray flames: var smallFlames = makeNode('div', 'small_flames', playerBox); smallFlames.innerHTML = '<img src="img/flame_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/flame_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/flame_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img">'; //SET DEFAULT FOR COLORED RED ACTIVE flames var insideSmallFlame = makeNode('div', 'inside-small-flame',smallFlames); // insideSmallFlame.innerHTML = // '<img src="img/flame_icon.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/flame_icon.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/flame_icon.png" alt="Flame for Hot streak" class="small-streak-img">'; //SET DEFAULT FOR gray Xs var smallXs = makeNode( 'div','small_xs', playerBox); smallXs.innerHTML = '<img src="img/x_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/x_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/x_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img">'; var insideSmallXs = makeNode( 'div','inside-small-x', smallXs); // insideSmallXs.innerHTML = // '<img src="img/x_icon.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/x_icon.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/x_icon.png" alt="Flame for Hot streak" class="small-streak-img">'; //inserting DOM elements inside players object for later access players["player" + i].html.playerBox =playerBox; insideSmallXs; players["player" + i].html.score = playerScoreNumber; players["player" + i].html.flames = insideSmallFlame; players["player" + i].html.x = insideSmallXs; // players["player" + i].html.score = document.querySelector('.player_score_number'); // players["player" + i].html.score = 
playerScoreNumber; // players["player" + i].html.score.textContent = 34; } hide(nameFormDiv); hide(error); hide(lowOpacityScreenDiv); hide(popupBoxDiv); gameOn = true; activePlayer = players.player1; activePlayer.html.playerBox.classList.add('active_player_box'); activePlayerNameNode.textContent = activePlayer.name; } // ----------------------------------- CODE STARTS RUNNING HERE ----------------------------------- initializeGame(); document.getElementById('number-of-players-ok').addEventListener( 'click' , setNumberOfPlayers ); document.getElementById('name-form-ok').addEventListener( 'click' , startGame ); function changeScore(){players.player1.html.score.innerHTML = 30;} holdPointsButton.addEventListener( 'click', addToPlayerScore); rollDiceButton.addEventListener( 'mousedown' , rollDice); // rollDiceButton.addEventListener( 'mouseup' , rollingDiceStop); // rollDiceButton.addEventListener( 'mouseleave' , rollingDiceStop);
js/main.js
/* DrunkDices.com GAME RULES: Each Player can roll the dices as many time as they wish until they decide to hold their point and pass the turn or they get a “one”. If a player gets a “one” when they roll the dice, they will not get any points and their turn will be over. A player must take a shot every time they get a 1. If a player roll the dice 6 or more times in a turn and decides to hold his points, that turn become a fire turn. If a player gets a streak of 3 fire turns in a row he will get 50 bonus points and every other player has to have a drink. When a player gets a “one” 3 turns in a row he must have an extra drink. The first Player to reach 250 points wins the game, and every losing player has to have a drink. Its super easy! The game will remind you when someone has to have a drink! */ // ----------------------------------- SET UP ----------------------------------- var numberOfPlayers, players, winningScore, activePlayer, diceResult, currentScore, playerTurn, gameOn, diceImgs, nameInputArray, successfulRolls, errorNameInput, holdRoll, diceRolling, rollDiceButtonActive, holdPointsButtonActive; players = {}; winningScore = 250; playerTurn = 1; currentScore = 0; gameon = false; diceImgs =[]; successfulRolls = 0; diceRolling = false; rollDiceButtonActive = true; holdPointsButtonActive = true; nameInputArray = []; //DOM elements var lowOpacityScreenDiv= document.getElementById('low-opacity-screen'); var popupBoxDiv = document.getElementById('popup-box'); var howManyPlayersDiv = document.getElementById('how-many-players'); var nameFormDiv = document.getElementById('name-form'); var error = document.querySelector('.error'); var nameList = document.getElementById('name-list'); var currentAttempsDOM = document.getElementById('current_attemps_number'); var currentBigBoxDOM = document.getElementById('current_big_box'); var bigFlamesDOM = document.getElementById('inside-left'); var bigXDOM = document.getElementById('inside-right'); var activePlayerScoreNode = document.getElementById('active_player_score'); //Dom Elements Game controls var playerBoxContainer = document.getElementById('player-box-container'); var rollDiceButton = document.getElementById('roll_dice_button'); var holdPointsButton = document.getElementById('hold_points_button'); var diceDom = document.getElementById('dice'); var currentScoreDOM = document.getElementById('current_score'); var activePlayerNameNode = document.getElementById('active_player_name'); for ( var i = 1; i <= 6; i++){ var diceUrl = 'img/dices/'+i+'dice.png'; diceImgs[i]= diceUrl; } //functions: function makeNode( type, classes , parent){ var element = document.createElement(type); element.className = classes; parent.appendChild(element); return element; } function hide(element){ element.classList.add('hidden'); } function show(element){ element.classList.remove('hidden'); } function stillRolling(){ var tempDice = Math.floor(Math.random() * 6) + 1; diceDom.src = diceImgs[tempDice]; console.log('this is running'); } function initializeGame(){ numberOfPlayers = 0; players = {}; winningScore = 250; playerTurn = 1; currentScore = 0; gameon = false; diceImgs =[]; successfulRolls = 0; diceRolling = false; rollDiceButtonActive = true; holdPointsButtonActive = true; nameInputArray = []; nameList.innerHTML = ""; //Set UI to empty. 
currentScoreDOM.textContent = 0; activePlayerNameNode.textContent = "Player1"; bigFlamesDOM.innerHTML = ""; bigXDOM.innerHTML = ""; playerBoxContainer.innerHTML = ""; currentAttempsDOM.textContent = 0; activePlayerScoreNode.textContent = 0; //Show Pop Up Boxes. show(lowOpacityScreenDiv); show(popupBoxDiv); show(howManyPlayersDiv); } function rollDice(){ if ( rollDiceButtonActive === true ){ //Making dice rol for .150 seconds before selecting the number if ( diceRolling === false ){ interval= setInterval( stillRolling, 60); } // debugger; //Real Dice Roll function realDiceRoll (){ clearInterval(interval) diceResult = Math.floor(Math.random() * 6) + 1; diceDom.src = diceImgs[diceResult]; addToCurrentScore(); console.log(diceResult); diceRolling = false; } setTimeout( realDiceRoll, 400); diceRolling = true; rollDiceButtonActive = false; rollDiceButton.className = 'roll_dice_active roll_dice_inactive'; setTimeout ( function(){ rollDiceButtonActive = true; rollDiceButton.className = ''; } , 700); } } function addToCurrentScore(){ if ( diceResult === 1 ){ currentScore = 0; currentScoreDOM.textContent= currentScore; activePlayer.flames = 0; activePlayer.x++; //Takes 20 points away if you have an X streak of 3. if ( activePlayer.x >= 3 && activePlayer.score > 20 ) { activePlayer.score -= 20; //Show Pop up saying you just lost 20 points and every other player needs a drink. alert('You got 3 X in a row. you just lost 20 points'); } else if (activePlayer.x >= 3 && activePlayer.score <= 20) { activePlayer.score = 0; //Show Pop up saying you just lost 20 points and every other player needs a drink alert('You got 3 X in a row. you just lost 20 points'); } setTimeout( function(){ alert('You got a 1! Take a Shot and your turn is over'); switchTurn(); }, 500); } else { currentScore += diceResult; currentScoreDOM.textContent= currentScore; successfulRolls++; currentAttempsDOM.textContent = successfulRolls; if ( successfulRolls === 6 ){ currentAttempsDOM.className = 'current_attemps_streak';//Lights up the flame indicating a streak. } } if( (currentScore + activePlayer.score ) >= winningScore ){ setTimeout( function(){ alert(activePlayer.name + ' just WON the game! Every other player has to take a shot, the person in last place takes 2'); }, 500); } } function addToPlayerScore(){ rollDiceButtonActive = false; rollDiceButton.className = 'roll_dice_inactive'; if (holdPointsButtonActive){ holdPointsButtonActive = false; currentScoreDOM.className = 'moving_current_score'; holdPointsButton.className = 'hold_points_inactive'; setTimeout( function(){ activePlayer.score += currentScore; activePlayer.x = 0; activePlayerScoreNode.textContent = activePlayer.score; if ( successfulRolls >= 6 ){ if (activePlayer.flames < 3){ activePlayer.flames++; } } else { activePlayer.flames = 0; } if ( activePlayer.flames === 6 ){ activePlayer.score += 50; //Show Pop up saying you just got 50 points and every other player needs a drink. alert('You got 3 X in a row. you just Won 50 points, Every other player needs a drink'); } updateBigStreaks( activePlayer.flames, activePlayer.x ); setTimeout( function(){ currentScoreDOM.className = ''; switchTurn(); rollDiceButtonActive = true; rollDiceButton.className = ''; holdPointsButton.className = ''; } , 1000) } , 1000 ) } } function switchTurn(){ activePlayer.html.score.textContent = activePlayer.score; currentScore = 0; holdPointsButtonActive = true; currentAttempsDOM.className = '';//Lights up the flame indicating a streak. 
currentScoreDOM.textContent = 0; currentAttempsDOM.textContent = 0; successfulRolls = 0; updateStreaks( activePlayer.flames, activePlayer.x ); activePlayer.html.playerBox.classList.remove('active_player_box'); if ( playerTurn === numberOfPlayers ){ playerTurn = 1; } else { playerTurn++; } activePlayer = players['player' + playerTurn]; alert(activePlayer.name + " It is your turn now."); activePlayerNameNode.textContent = activePlayer.name; activePlayerScoreNode.textContent = activePlayer.score; activePlayer.html.playerBox.classList.add('active_player_box'); updateBigStreaks( activePlayer.flames, activePlayer.x ); if ( activePlayer.flames >= 3 ){ currentBigBoxDOM.classList.add('fire_bg'); }else{ currentBigBoxDOM.classList.remove('fire_bg'); } } function updateStreaks( flames, xs ){ var flameImgs = '<img src="img/flame_icon.png" alt="Flame for Hot streak" class="small-streak-img">'; var xImgs = '<img src="img/x_icon.png" alt="X for Hot streak" class="small-streak-img">'; activePlayer.html.flames.innerHTML = ""; activePlayer.html.x.innerHTML = ""; for ( var i = 1 ; i <= flames && i <=3 ; i++ ){ activePlayer.html.flames.innerHTML += flameImgs; } for ( var i = 1 ; i <= xs && i <=3 ; i++ ){ activePlayer.html.x.innerHTML += xImgs; } } function updateBigStreaks ( flames, xs) { var flameImgs = '<img src="img/flame_icon.png" alt="Flame for Hot streak" class="big-streak-img">'; var xImgs = '<img src="img/x_icon.png" alt="X for Hot streak" class="big-streak-img">'; bigFlamesDOM.innerHTML = ""; bigXDOM.innerHTML = ""; for ( var i = 1 ; i <= flames && i <=3 ; i++ ){ bigFlamesDOM.innerHTML += flameImgs; } for ( var i = 1 ; i <= xs && i <=3 ; i++ ){ bigXDOM.innerHTML += xImgs; } } function setNumberOfPlayers(){ numberOfPlayers = parseFloat(document.getElementById('number-of-players').value); hide(howManyPlayersDiv); show(nameFormDiv); document.getElementById('number-selected').textContent = numberOfPlayers; //Create input field for each player name. for ( var i = 1 ; i <= numberOfPlayers ; i++ ){ var inputElement = document.createElement('input'); var liElement = document.createElement('li'); inputElement.setAttribute( 'type' , 'text' ); inputElement.setAttribute ( 'id' , 'name-input-' + i ); inputElement.setAttribute ( 'class' , 'name-input'); inputElement.setAttribute ( 'placeholder' , 'Enter a name for player ' + i ); liElement.appendChild(inputElement); nameInputArray[i] = inputElement; nameList.appendChild(liElement); inputElement.addEventListener( 'keypress' , function(event){ if ( event.which === 13 ) { var i = parseInt(this.id.charAt(this.id.length -1)); if( i < numberOfPlayers){ document.getElementById( 'name-input-' + (i+1)).focus(); } else { document.getElementById('name-form-ok').focus(); } } } ); } } function startGame(){ //Make sure all fields are filled. for ( var i = 1 ; i <= numberOfPlayers ; i++ ){ if ( nameInputArray[i].value == false){ show(error); return; } if ( nameInputArray[i].value.length > 11){ nameInputArray[i].classList.add('name-too-long'); nameInputArray[i].value = "Name was too long. 
Try 11 characters or less"; nameInputArray[i].focus(); nameInputArray[i].setSelectionRange(0, nameInputArray[i].value.length); errorNameInput = nameInputArray[i]; errorNameInput.addEventListener ( 'click', function(){ errorNameInput.setSelectionRange(0, nameInputArray[i].value.length); errorNameInput.classList.remove('name-too-long'); }); errorNameInput.addEventListener ( 'keypress', function(){ errorNameInput.classList.remove('name-too-long'); }); return; } } for ( var i = 1 ; i <= numberOfPlayers ; i++ ){ //create object for each player. var name = nameInputArray[i].value; players["player" + i] = { name: name, score: 0, html: {}, flames: 0, x: 0, } //Create a Player box for each player. var playerBox= document.createElement('div'); playerBox.classList.add('player_box'); playerBoxContainer.appendChild(playerBox); var playerName = document.createElement('div'); playerName.textContent = players["player" + i].name; playerName.classList.add('player_names'); playerBox.appendChild(playerName); var playerScoreHeading = document.createElement('div'); playerScoreHeading.classList.add('player_score_heading'); playerScoreHeading.textContent = "Player Score:"; playerBox.appendChild(playerScoreHeading); var playerScoreDiv = document.createElement('div'); playerScoreDiv.classList.add('player_score'); playerBox.appendChild(playerScoreDiv); var playerScoreNumber = document.createElement('span'); playerScoreNumber.classList.add('player_score_number'); playerScoreNumber.textContent = players["player" + i].score; playerScoreNumber.id = "player_score_number_" + i; playerScoreDiv.appendChild(playerScoreNumber); playerScoreDiv.appendChild(document.createTextNode('pts')); //Set default gray flames: var smallFlames = makeNode('div', 'small_flames', playerBox); smallFlames.innerHTML = '<img src="img/flame_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/flame_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/flame_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img">'; //SET DEFAULT FOR COLORED RED ACTIVE flames var insideSmallFlame = makeNode('div', 'inside-small-flame',smallFlames); // insideSmallFlame.innerHTML = // '<img src="img/flame_icon.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/flame_icon.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/flame_icon.png" alt="Flame for Hot streak" class="small-streak-img">'; //SET DEFAULT FOR gray Xs var smallXs = makeNode( 'div','small_xs', playerBox); smallXs.innerHTML = '<img src="img/x_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/x_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/x_icon_gray.png" alt="Flame for Hot streak" class="small-streak-img">'; var insideSmallXs = makeNode( 'div','inside-small-x', smallXs); // insideSmallXs.innerHTML = // '<img src="img/x_icon.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/x_icon.png" alt="Flame for Hot streak" class="small-streak-img"><img src="img/x_icon.png" alt="Flame for Hot streak" class="small-streak-img">'; //inserting DOM elements inside players object for later access players["player" + i].html.playerBox =playerBox; insideSmallXs; players["player" + i].html.score = playerScoreNumber; players["player" + i].html.flames = insideSmallFlame; players["player" + i].html.x = insideSmallXs; // players["player" + i].html.score = document.querySelector('.player_score_number'); // players["player" + i].html.score = 
playerScoreNumber; // players["player" + i].html.score.textContent = 34; } hide(nameFormDiv); hide(error); hide(lowOpacityScreenDiv); hide(popupBoxDiv); gameOn = true; activePlayer = players.player1; } // ----------------------------------- CODE STARTS RUNNING HERE ----------------------------------- document.getElementById('number-of-players-ok').addEventListener( 'click' , setNumberOfPlayers ); document.getElementById('name-form-ok').addEventListener( 'click' , startGame ); function changeScore(){players.player1.html.score.innerHTML = 30;} holdPointsButton.addEventListener( 'click', addToPlayerScore); rollDiceButton.addEventListener( 'mousedown' , rollDice); // rollDiceButton.addEventListener( 'mouseup' , rollingDiceStop); // rollDiceButton.addEventListener( 'mouseleave' , rollingDiceStop);
All that's left is replacing the alerts with on-screen messages and getting better pics for the gray icons, I think.
js/main.js
All that's left is replacing the alerts with on-screen messages and getting better pics for the gray icons, I think.
<ide><path>js/main.js <ide> var bigFlamesDOM = document.getElementById('inside-left'); <ide> var bigXDOM = document.getElementById('inside-right'); <ide> var activePlayerScoreNode = document.getElementById('active_player_score'); <add>var activePlayerNameNode = document.getElementById('active_player_name'); <ide> <ide> //Dom Elements Game controls <ide> var playerBoxContainer = document.getElementById('player-box-container'); <ide> var holdPointsButton = document.getElementById('hold_points_button'); <ide> var diceDom = document.getElementById('dice'); <ide> var currentScoreDOM = document.getElementById('current_score'); <del>var activePlayerNameNode = document.getElementById('active_player_name'); <add> <ide> <ide> <ide> for ( var i = 1; i <= 6; i++){ <ide> hide(popupBoxDiv); <ide> gameOn = true; <ide> activePlayer = players.player1; <add> activePlayer.html.playerBox.classList.add('active_player_box'); <add> activePlayerNameNode.textContent = activePlayer.name; <add> <ide> <ide> } <ide> <ide> <ide> // ----------------------------------- CODE STARTS RUNNING HERE ----------------------------------- <ide> <del> <add>initializeGame(); <ide> <ide> document.getElementById('number-of-players-ok').addEventListener( 'click' , setNumberOfPlayers ); <ide> document.getElementById('name-form-ok').addEventListener( 'click' , startGame );
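The commit message above names the remaining work: replacing the alert() calls with on-screen messages. One possible minimal JavaScript helper for that is sketched below; the 'game-message' element id is hypothetical (the project's HTML would need a matching container), and the fallback to alert() keeps the game playable if the container is missing.

// One possible on-screen replacement for the alert() calls mentioned above.
// The 'game-message' element id is hypothetical, not part of the repo.
function showMessage(text, durationMs) {
  var box = document.getElementById('game-message');
  if (!box) {
    // Fall back to the existing behavior if the container is missing.
    alert(text);
    return;
  }
  box.textContent = text;
  box.classList.remove('hidden');
  setTimeout(function () {
    box.classList.add('hidden');
  }, durationMs || 3000);
}

// Example usage, replacing one of the existing alerts:
// showMessage('You got a 1! Take a Shot and your turn is over');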
Java
apache-2.0
47fdd27b39df4fa0391908ce58aa9a8301168f05
0
baszero/yanel,wyona/yanel,baszero/yanel,wyona/yanel,baszero/yanel,baszero/yanel,wyona/yanel,wyona/yanel,baszero/yanel,baszero/yanel,wyona/yanel,wyona/yanel
/* * Copyright 2006 Wyona * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.wyona.org/licenses/APACHE-LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wyona.yanel.impl.resources; import java.io.ByteArrayOutputStream; import java.io.File; import java.util.Enumeration; import java.util.HashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.wyona.yanel.core.Resource; import org.wyona.yanel.core.ResourceConfiguration; import org.wyona.yanel.core.util.PathUtil; import org.wyona.yanel.core.attributes.viewable.View; import org.wyona.yanel.core.attributes.viewable.ViewDescriptor; import org.wyona.yanel.core.api.attributes.ViewableV2; import org.wyona.yanel.core.api.attributes.CreatableV2; import org.wyona.security.core.api.UserManager; import org.wyona.security.core.api.GroupManager; import org.wyona.security.core.api.User; import org.wyona.security.core.api.Group; import org.wyona.security.core.api.AccessManagementException; import org.wyona.yarep.core.Repository; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.DefaultConfiguration; import org.apache.avalon.framework.configuration.DefaultConfigurationSerializer; import org.apache.log4j.Category; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import javax.servlet.http.HttpServletRequest; /** * This resource allows creating, editing and deleting users. A user is defined * by the following parameters: userId userName (optional) group email password */ public class YanelUserResource extends Resource implements ViewableV2, CreatableV2 { protected static final String POLICY = "policy"; protected static final String ROLE = "role"; protected static final String WORLD = "world"; protected static final String USER = "user"; protected static final String GROUP = "group"; protected static final String PERMISSION_ATTR = "permission"; protected static final String ID_ATTR = "id"; protected static final String XMLNS_ATTR = "xmlns"; protected static final String POLICIES_SUFFIX = ".policy"; protected static final String XMLNS = "http://www.wyona.org/security/1.0"; protected static final String MIME_TYPE = "application/xhtml+xml; charset=UTF-8"; private static Category log = Category.getInstance(YanelUserResource.class); private HashMap properties = new HashMap(); /** * Constructor */ public YanelUserResource() { properties.put("userId", ""); properties.put("userName", ""); properties.put("email", ""); properties.put("password", ""); properties.put("passwordConfirmed", ""); //properties.put("group", ""); } /** * @see org.wyona.yanel.core.api.attributes.ViewableV2#getViewDescriptors() */ public ViewDescriptor[] getViewDescriptors() { return null; } /** * @see org.wyona.yanel.core.api.attributes.ViewableV2#getMimeType(java.lang.String) */ public String getMimeType(String viewId) { return MIME_TYPE; } /** * @see org.wyona.yanel.core.api.attributes.ViewableV2#getView(String) * @return The requested view. 
Possible views are: * defaultView - Displays change password, edit profile and delete forms * submitProfile - Result of updating the user's profile * submitPassword - Result of changing the user's password * submitDelete - Result of deleting the user */ public View getView(String viewId) throws Exception { View defaultView = new View(); File xslFile = org.wyona.commons.io.FileUtil.file(rtd.getConfigFile().getParentFile() .getAbsolutePath(), "xslt" + File.separator + "yanel-user-profile.xsl"); File xmlFile = org.wyona.commons.io.FileUtil.file(rtd.getConfigFile().getParentFile() .getAbsolutePath(), "xml" + File.separator + "yanel-user-profile.xml"); try { Transformer transformer = TransformerFactory.newInstance().newTransformer(new StreamSource(xslFile)); String action = determineAction(request); String userId = getUserId(); if (userId == null) { log.error("No user ID!"); throw new Exception("No user ID!"); } if (action.equals("submitProfile")) { updateUserProfile(request, transformer); } else if (action.equals("submitPassword")) { updatePassword(request, transformer); } else if (action.equals("submitDelete")) { if (deleteUser(userId)) { transformer.setParameter("success", "User deleted successfully"); } else { transformer.setParameter("error", "Unable to delete user successfully"); } transformer.setParameter("userId", userId); transformer.setParameter("deletion", "true"); } else if (action.startsWith("submitDeleteFromGroup")) { deleteFromGroup(action, transformer); } else if (action.equals("submitAddToGroup")) { addToGroup(request,transformer); } if(!action.equals("submitDelete")) { User user = getRealm().getIdentityManager().getUserManager().getUser(userId); if (user == null) { log.error("No such user: " + userId); throw new Exception("No such user: " + userId); } transformer.setParameter("userId", userId); transformer.setParameter("userName", user.getName()); transformer.setParameter("email", user.getEmail()); transformer.setParameter("description", user.getDescription()); if (user.getLanguage() != null) { transformer.setParameter("user-profile-language", user.getLanguage()); } if (user.getExpirationDate() != null) { transformer.setParameter("expiration-date", user.getExpirationDate()); } Group[] userGroups = user.getGroups(); StringBuffer userGroupsString = new StringBuffer(); for (int i = 0; i < userGroups.length; i++) { userGroupsString.append(userGroups[i].getID()).append(";"); } transformer.setParameter("userGroupsString", userGroupsString); Group[] allGroups = getRealm().getIdentityManager().getGroupManager().getGroups(); StringBuffer allGroupsString = new StringBuffer(); for (int i = 0; i < allGroups.length; i++) { boolean isMember = false; for(int j = 0; j < userGroups.length; j++) { if(userGroups[j].getID().equals(allGroups[i].getID())) { isMember = true; } } if(!isMember) { allGroupsString.append(allGroups[i].getID()).append(";"); } } transformer.setParameter("allGroupsString", allGroupsString); } ByteArrayOutputStream baos = new ByteArrayOutputStream(); transformer.transform(new javax.xml.transform.stream.StreamSource(xmlFile), new StreamResult(baos)); defaultView.setMimeType(MIME_TYPE); defaultView.setInputStream(new java.io.ByteArrayInputStream(baos.toByteArray())); } catch (Exception e) { // TODO: Improve exception handling log.error(e.getMessage(), e); } return defaultView; } /** * @see org.wyona.yanel.core.api.attributes.ViewableV2#exists() */ public boolean exists() throws Exception { log.warn("Not implemented yet!"); return true; } /** * @see 
org.wyona.yanel.core.api.attributes.ViewableV2#getSize()
     */
    public long getSize() throws Exception {
        log.warn("Not implemented yet!");
        return -1;
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV2#getPropertyType(String)
     */
    public String getPropertyType(String propertyName) {
        if (propertyName.equals("password") || propertyName.equals("passwordConfirmed")) {
            return CreatableV2.TYPE_PASSWORD;
        } else {
            return null;
        }
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV2#create(HttpServletRequest)
     * Creates the user
     */
    public void create(HttpServletRequest request) {
        try {
            if (!getRealm().getIdentityManager().getUserManager().existsUser(request.getParameter("rp.userId"))) {
                boolean doCreate = validateNewUserData(request);
                if (doCreate) {
                    UserManager um = getRealm().getIdentityManager().getUserManager();
                    String userId = request.getParameter("rp.userId");
                    String password = request.getParameter("rp.password");
                    String email = request.getParameter("rp.email");
                    String userName = request.getParameter("rp.userName");
                    um.createUser(userId, userName, email, password);

                    // TODO: Handle more than one group
                    String groupId = request.getParameter("rp.group");
                    if (groupId != null) {
                        GroupManager gm = getRealm().getIdentityManager().getGroupManager();
                        Group group = gm.getGroup(groupId);
                        group.addMember(um.getUser(userId));
                        group.save();
                    }
                    //saveUserPolicy(userId);
                } else {
                    log.error("Unable to create user: the supplied data was not valid");
                }
            } else {
                log.error("Unable to create user: the supplied id already exists");
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV2#createRTIProperties(HttpServletRequest)
     */
    public HashMap createRTIProperties(HttpServletRequest request) {
        HashMap map = new HashMap();
        map.put("user", request.getParameter("rp.userId"));
        return map;
    }

    /**
     * If the suggested name is an empty string, then return null such that the resource creator will not create a resource configuration
     */
    public String getCreateName(String suggestedName) {
        if (suggestedName != null && suggestedName.equals("")) {
            return null;
        } else {
            return suggestedName;
        }
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV1#getPropertyNames()
     */
    public String[] getPropertyNames() {
        String[] propertyNames = (String[]) properties.keySet().toArray(new String[properties.keySet().size()]);
        return propertyNames;
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV1#setProperty(String, Object)
     */
    public void setProperty(String name, Object value) {
        properties.put(name, value);
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV1#getProperty(String)
     */
    public Object getProperty(String name) {
        Object property = properties.get(name);
        return property;
    }

    /**
     * Create a configuration object with the policies to access the user data.
     * Only the user themselves and the admin group will be able to access the user data.
     *
     * @param userId
     *            The user who must be granted permission to modify data
     * @return Configuration object containing the corresponding policies
     */
    private Configuration createPolicyConfiguration(String userId) {
        DefaultConfiguration config = new DefaultConfiguration(POLICY);
        config.setAttribute(XMLNS_ATTR, XMLNS);
        DefaultConfiguration child = null;

        child = new DefaultConfiguration(ROLE);
        child.setAttribute(ID_ATTR, "view");
        config.addChild(child);
        DefaultConfiguration worldNode = new DefaultConfiguration(WORLD);
        worldNode.setAttribute(PERMISSION_ATTR, "false");
        child.addChild(worldNode);
        DefaultConfiguration userNode = new DefaultConfiguration(GROUP);
        userNode.setAttribute(ID_ATTR, "admin");
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);
        userNode = new DefaultConfiguration(USER);
        userNode.setAttribute(ID_ATTR, userId);
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);

        child = new DefaultConfiguration(ROLE);
        child.setAttribute(ID_ATTR, "open");
        config.addChild(child);
        userNode = new DefaultConfiguration(GROUP);
        userNode.setAttribute(ID_ATTR, "admin");
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);
        userNode = new DefaultConfiguration(USER);
        userNode.setAttribute(ID_ATTR, userId);
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);

        child = new DefaultConfiguration(ROLE);
        child.setAttribute(ID_ATTR, "write");
        config.addChild(child);
        userNode = new DefaultConfiguration(GROUP);
        userNode.setAttribute(ID_ATTR, "admin");
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);
        userNode = new DefaultConfiguration(USER);
        userNode.setAttribute(ID_ATTR, userId);
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);

        return config;
    }

    /**
     * Updates the user profile
     *
     * @param request
     *            The request containing the data to update
     * @param transformer
     */
    private void updateUserProfile(HttpServletRequest request, Transformer transformer) {
        String email = request.getParameter("email");
        if (email == null || ("").equals(email)) {
            transformer.setParameter("error", "emailNotSet");
        } else if (!validateEmail(email)) {
            transformer.setParameter("error", "emailNotValid");
        } else {
            try {
                String userId = getUserId();
                User user = realm.getIdentityManager().getUserManager().getUser(userId);
                user.setEmail(request.getParameter("email"));
                user.setName(request.getParameter("userName"));
                user.save();
                transformer.setParameter("success", "Profile updated successfully");
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
    }

    /**
     * Updates the groups the user belongs to. Currently it only allows for
     * deletion of membership. The action parameter is always
     * submitDeleteFromGroup_X where X is the target group
     *
     * @param action
     *            The action string containing the group involved
     * @param transformer
     */
    private void deleteFromGroup(String action, Transformer transformer) {
        try {
            String userId = getUserId();
            User user = getRealm().getIdentityManager().getUserManager().getUser(userId);
            Group[] userGroups = user.getGroups();
            GroupManager gm = getRealm().getIdentityManager().getGroupManager();
            String targetGroup = action.substring(action.indexOf("_") + 1);
            if (userGroups.length > 0) {
                if (userGroups.length > 1) {
                    Group group = gm.getGroup(targetGroup);
                    if (group.isMember(user)) {
                        group.removeMember(user);
                        group.save();
                    }
                    transformer.setParameter("success", "User successfully deleted from group: " + targetGroup);
                } else {
                    transformer.setParameter("error", "User cannot be removed from group: " + targetGroup
                            + ". Users must belong to at least one group.");
                }
            } else {
                log.error("The user " + userId + " does not belong to any group!");
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * Add a user to a group
     * @param request Request containing the group the user has to be added to
     * @param transformer
     */
    private void addToGroup(HttpServletRequest request, Transformer transformer) {
        String groupId = request.getParameter("Group");
        try {
            String userId = getUserId();
            Group group = getRealm().getIdentityManager().getGroupManager().getGroup(groupId);
            User user = getRealm().getIdentityManager().getUserManager().getUser(userId);
            group.addMember(user);
            group.save();
            transformer.setParameter("success", "User successfully added to group: " + groupId);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * Change user password
     *
     * @param request
     * @param transformer
     */
    private void updatePassword(HttpServletRequest request, Transformer transformer) {
        String oldPassword = request.getParameter("oldPassword");
        try {
            String userId = getUserId();
            if (getRealm().getIdentityManager().getUserManager().getUser(userId).authenticate(oldPassword)) {
                String plainPassword = request.getParameter("newPassword");
                boolean confirmation = plainPassword.equals(request.getParameter("newPasswordConfirmation"));
                if (confirmation && !plainPassword.equals("")) {
                    User user = getRealm().getIdentityManager().getUserManager().getUser(userId);
                    user.setPassword(plainPassword);
                    user.save();
                    transformer.setParameter("success", "Password updated successfully");
                } else {
                    transformer.setParameter("error", "Either no new password was supplied "
                            + "or the password supplied and its confirmation do not coincide");
                }
            } else {
                transformer.setParameter("error", "Authentication failed!");
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * This method checks if the specified email is valid against a regex
     *
     * @param email
     * @return true if email is valid
     */
    private boolean validateEmail(String email) {
        String emailRegEx = "(\\w+)@(\\w+\\.)(\\w+)(\\.\\w+)*";
        Pattern pattern = Pattern.compile(emailRegEx);
        Matcher matcher = pattern.matcher(email);
        return matcher.find();
    }

    /**
     * Determine the requested view: defaultView, submitProfile,
     * submitPassword, submitGroup, submitDelete
     *
     * @param request
     * @return name of the desired view
     */
    private String determineAction(HttpServletRequest request) {
        boolean submit = false;
        String action = "defaultView";
        Enumeration enumeration = request.getParameterNames();
        while (enumeration.hasMoreElements() && !submit) {
            action = enumeration.nextElement().toString();
            if (action.startsWith("submit")) {
                submit = true;
            }
        }
        return action;
    }

    /**
     * Saves the user data access policies to the policies repository
     */
    /*
    private void saveUserPolicy(String userId) {
        Configuration policyConfig = createPolicyConfiguration(userId);
        DefaultConfigurationSerializer serializer = new DefaultConfigurationSerializer();
        String policyPath = getPath() + POLICIES_SUFFIX;
        try {
            Repository policiesRepository = getRealm().getPolicyManager().getPoliciesRepository();
            serializer.serialize(policiesRepository.getNode(policyPath).getOutputStream(), policyConfig);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }
    */

    /**
     * Validate the supplied user data.
     * userName is optional
     */
    private boolean validateNewUserData(HttpServletRequest request) throws AccessManagementException {
        boolean isValid = true;

        if (request.getParameter("rp.password").equals("")) {
            isValid = false;
            log.warn("No password supplied");
        }
        if (request.getParameter("rp.passwordConfirmed").equals("")) {
            isValid = false;
            log.warn("No password confirmation supplied");
        }
        if (!request.getParameter("rp.passwordConfirmed").equals(request.getParameter("rp.password"))) {
            isValid = false;
            log.warn("Password and confirmed password do not match");
        }
        if (request.getParameter("rp.userId").equals("")) {
            isValid = false;
            log.warn("No userId supplied");
        }
        /*
        String group = request.getParameter("rp.group");
        if (group.equals("") || !realm.getIdentityManager().getGroupManager().existsGroup(group)) {
            isValid = false;
            log.error("Either no group was supplied or the group does not exist");
        }
        */
        if (!validateEmail(request.getParameter("rp.email"))) {
            isValid = false;
            log.warn("No email supplied or email not valid");
        }
        return isValid;
    }

    /**
     * Get user id from resource configuration
     */
    private String getUserId() throws Exception {
        String userId = null;
        ResourceConfiguration resConfig = getConfiguration();
        if (resConfig != null) {
            userId = getConfiguration().getProperty("user");
        } else {
            log.warn("DEPRECATED: Do not use RTI but rather a resource configuration");
            userId = getRTI().getProperty("user");
        }
        return userId;
    }

    /**
     * Delete the user from the identities repository and remove the related
     * user interface files
     */
    private boolean deleteUser(String userId) {
        boolean success = false;
        try {
            UserManager um = getRealm().getIdentityManager().getUserManager();
            um.removeUser(userId);

            String userScreenPolicyPath = getPath() + POLICIES_SUFFIX;
            Repository policiesRepo = getRealm().getPolicyManager().getPoliciesRepository();
            policiesRepo.getNode(userScreenPolicyPath).delete();

            Repository rtiRepo = getRealm().getRTIRepository();
            String userScreenRCPath = PathUtil.getRCPath(getPath());
            if (rtiRepo.existsNode(userScreenRCPath)) {
                rtiRepo.getNode(userScreenRCPath).delete();
            } else {
                String userScreenRTIPath = PathUtil.getRTIPath(getPath());
                rtiRepo.getNode(userScreenRTIPath).delete();
            }
            success = true;
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return success;
    }
}
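A note on the validateEmail() method above: it calls Matcher.find(), which succeeds when the pattern matches any substring, so an input with surrounding junk still validates. A minimal, self-contained sketch of the difference between find() and matches(), using the same pattern string (the class name and sample inputs are illustrative, not part of the resource):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class EmailRegexDemo {
    // Same pattern string used by validateEmail() above.
    private static final Pattern EMAIL = Pattern.compile("(\\w+)@(\\w+\\.)(\\w+)(\\.\\w+)*");

    public static void main(String[] args) {
        String clean = "user@example.com";
        String padded = "some junk user@example.com more junk";
        // find() accepts both strings because it scans for a matching substring...
        System.out.println(EMAIL.matcher(clean).find());    // true
        System.out.println(EMAIL.matcher(padded).find());   // true
        // ...while matches() anchors the pattern to the whole input.
        System.out.println(EMAIL.matcher(clean).matches());  // true
        System.out.println(EMAIL.matcher(padded).matches()); // false
    }
}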
src/contributions/resources/yanel-user/src/java/org/wyona/yanel/impl/resources/YanelUserResource.java
/* * Copyright 2006 Wyona * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.wyona.org/licenses/APACHE-LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wyona.yanel.impl.resources; import java.io.ByteArrayOutputStream; import java.io.File; import java.util.Enumeration; import java.util.HashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.wyona.yanel.core.Resource; import org.wyona.yanel.core.ResourceConfiguration; import org.wyona.yanel.core.util.PathUtil; import org.wyona.yanel.core.attributes.viewable.View; import org.wyona.yanel.core.attributes.viewable.ViewDescriptor; import org.wyona.yanel.core.api.attributes.ViewableV2; import org.wyona.yanel.core.api.attributes.CreatableV2; import org.wyona.security.core.api.UserManager; import org.wyona.security.core.api.GroupManager; import org.wyona.security.core.api.User; import org.wyona.security.core.api.Group; import org.wyona.security.core.api.AccessManagementException; import org.wyona.yarep.core.Repository; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.DefaultConfiguration; import org.apache.avalon.framework.configuration.DefaultConfigurationSerializer; import org.apache.log4j.Category; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import javax.servlet.http.HttpServletRequest; /** * This resource allows creating, editing and deleting users. A user is defined * by the following parameters: userId userName (optional) group email password */ public class YanelUserResource extends Resource implements ViewableV2, CreatableV2 { protected static final String POLICY = "policy"; protected static final String ROLE = "role"; protected static final String WORLD = "world"; protected static final String USER = "user"; protected static final String GROUP = "group"; protected static final String PERMISSION_ATTR = "permission"; protected static final String ID_ATTR = "id"; protected static final String XMLNS_ATTR = "xmlns"; protected static final String POLICIES_SUFFIX = ".policy"; protected static final String XMLNS = "http://www.wyona.org/security/1.0"; protected static final String MIME_TYPE = "application/xhtml+xml; charset=UTF-8"; private static Category log = Category.getInstance(YanelUserResource.class); private HashMap properties = new HashMap(); /** * Constructor */ public YanelUserResource() { properties.put("userId", ""); properties.put("userName", ""); properties.put("email", ""); properties.put("password", ""); properties.put("passwordConfirmed", ""); //properties.put("group", ""); } /** * @see org.wyona.yanel.core.api.attributes.ViewableV2#getViewDescriptors() */ public ViewDescriptor[] getViewDescriptors() { return null; } /** * @see org.wyona.yanel.core.api.attributes.ViewableV2#getMimeType(java.lang.String) */ public String getMimeType(String viewId) { return MIME_TYPE; } /** * @see org.wyona.yanel.core.api.attributes.ViewableV2#getView(String) * @return The requested view. 
Possible views are: * defaultView - Displays change password, edit profile and delete forms * submitProfile - Result of updating the user's profile * submitPassword - Result of changing the user's password * submitDelete - Result of deleting the user */ public View getView(String viewId) throws Exception { View defaultView = new View(); File xslFile = org.wyona.commons.io.FileUtil.file(rtd.getConfigFile().getParentFile() .getAbsolutePath(), "xslt" + File.separator + "yanel-user-profile.xsl"); File xmlFile = org.wyona.commons.io.FileUtil.file(rtd.getConfigFile().getParentFile() .getAbsolutePath(), "xml" + File.separator + "yanel-user-profile.xml"); try { Transformer transformer = TransformerFactory.newInstance().newTransformer(new StreamSource(xslFile)); String action = determineAction(request); String userId = getUserId(); if (userId == null) { log.error("No user ID!"); throw new Exception("No user ID!"); } if (action.equals("submitProfile")) { updateUserProfile(request, transformer); } else if (action.equals("submitPassword")) { updatePassword(request, transformer); } else if (action.equals("submitDelete")) { if (deleteUser(userId)) { transformer.setParameter("success", "User deleted successfully"); } else { transformer.setParameter("error", "Unable to delete user successfully"); } transformer.setParameter("userId", userId); transformer.setParameter("deletion", "true"); } else if (action.startsWith("submitDeleteFromGroup")) { deleteFromGroup(action, transformer); } else if (action.equals("submitAddToGroup")) { addToGroup(request,transformer); } if(!action.equals("submitDelete")) { User user = getRealm().getIdentityManager().getUserManager().getUser(userId); if (user == null) { log.error("No such user: " + userId); throw new Exception("No such user: " + userId); } transformer.setParameter("userId", userId); transformer.setParameter("userName", user.getName()); transformer.setParameter("email", user.getEmail()); transformer.setParameter("description", user.getDescription()); if (user.getLanguage() != null) { transformer.setParameter("user-profile-language", user.getLanguage()); } if (user.getExpirationDate() != null) { transformer.setParameter("expiration-date", user.getExpirationDate()); } Group[] userGroups = user.getGroups(); StringBuffer userGroupsString = new StringBuffer(); for (int i = 0; i < userGroups.length; i++) { userGroupsString.append(userGroups[i].getID()).append(";"); } transformer.setParameter("userGroupsString", userGroupsString); Group[] allGroups = getRealm().getIdentityManager().getGroupManager().getGroups(); StringBuffer allGroupsString = new StringBuffer(); for (int i = 0; i < allGroups.length; i++) { boolean isMember = false; for(int j = 0; j < userGroups.length; j++) { if(userGroups[j].getID().equals(allGroups[i].getID())) { isMember = true; } } if(!isMember) { allGroupsString.append(allGroups[i].getID()).append(";"); } } transformer.setParameter("allGroupsString", allGroupsString); } ByteArrayOutputStream baos = new ByteArrayOutputStream(); transformer.transform(new javax.xml.transform.stream.StreamSource(xmlFile), new StreamResult(baos)); defaultView.setMimeType(MIME_TYPE); defaultView.setInputStream(new java.io.ByteArrayInputStream(baos.toByteArray())); } catch (Exception e) { // TODO: Improve exception handling log.error(e.getMessage(), e); } return defaultView; } /** * @see org.wyona.yanel.core.api.attributes.ViewableV2#exists() */ public boolean exists() throws Exception { log.warn("Not implemented yet!"); return true; } /** * @see 
org.wyona.yanel.core.api.attributes.ViewableV2#getSize()
     */
    public long getSize() throws Exception {
        log.warn("Not implemented yet!");
        return -1;
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV2#getPropertyType(String)
     */
    public String getPropertyType(String propertyName) {
        if (propertyName.equals("password") || propertyName.equals("passwordConfirmed")) {
            return CreatableV2.TYPE_PASSWORD;
        } else {
            return null;
        }
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV2#create(HttpServletRequest)
     * Creates the user
     */
    public void create(HttpServletRequest request) {
        try {
            if (!getRealm().getIdentityManager().getUserManager().existsUser(request.getParameter("rp.userId"))) {
                boolean doCreate = validateNewUserData(request);
                if (doCreate) {
                    UserManager um = getRealm().getIdentityManager().getUserManager();
                    String userId = request.getParameter("rp.userId");
                    String password = request.getParameter("rp.password");
                    String email = request.getParameter("rp.email");
                    String userName = request.getParameter("rp.userName");
                    um.createUser(userId, userName, email, password);

                    // TODO: Handle more than one group
                    String groupId = request.getParameter("rp.group");
                    if (groupId != null) {
                        GroupManager gm = getRealm().getIdentityManager().getGroupManager();
                        Group group = gm.getGroup(groupId);
                        group.addMember(um.getUser(userId));
                        group.save();
                    }
                    //saveUserPolicy(userId);
                } else {
                    log.error("Unable to create user: the supplied data was not valid");
                }
            } else {
                log.error("Unable to create user: the supplied id already exists");
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV2#createRTIProperties(HttpServletRequest)
     */
    public HashMap createRTIProperties(HttpServletRequest request) {
        HashMap map = new HashMap();
        map.put("user", request.getParameter("rp.userId"));
        return map;
    }

    public String getCreateName(String suggestedName) {
        return suggestedName;
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV1#getPropertyNames()
     */
    public String[] getPropertyNames() {
        String[] propertyNames = (String[]) properties.keySet().toArray(new String[properties.keySet().size()]);
        return propertyNames;
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV1#setProperty(String, Object)
     */
    public void setProperty(String name, Object value) {
        properties.put(name, value);
    }

    /**
     * @see org.wyona.yanel.core.api.attributes.CreatableV1#getProperty(String)
     */
    public Object getProperty(String name) {
        Object property = properties.get(name);
        return property;
    }

    /**
     * Create a configuration object with the policies to access the user data.
     * Only the user themselves and the admin group will be able to access the user data.
     *
     * @param userId
     *            The user who must be granted permission to modify data
     * @return Configuration object containing the corresponding policies
     */
    private Configuration createPolicyConfiguration(String userId) {
        DefaultConfiguration config = new DefaultConfiguration(POLICY);
        config.setAttribute(XMLNS_ATTR, XMLNS);
        DefaultConfiguration child = null;

        child = new DefaultConfiguration(ROLE);
        child.setAttribute(ID_ATTR, "view");
        config.addChild(child);
        DefaultConfiguration worldNode = new DefaultConfiguration(WORLD);
        worldNode.setAttribute(PERMISSION_ATTR, "false");
        child.addChild(worldNode);
        DefaultConfiguration userNode = new DefaultConfiguration(GROUP);
        userNode.setAttribute(ID_ATTR, "admin");
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);
        userNode = new DefaultConfiguration(USER);
        userNode.setAttribute(ID_ATTR, userId);
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);

        child = new DefaultConfiguration(ROLE);
        child.setAttribute(ID_ATTR, "open");
        config.addChild(child);
        userNode = new DefaultConfiguration(GROUP);
        userNode.setAttribute(ID_ATTR, "admin");
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);
        userNode = new DefaultConfiguration(USER);
        userNode.setAttribute(ID_ATTR, userId);
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);

        child = new DefaultConfiguration(ROLE);
        child.setAttribute(ID_ATTR, "write");
        config.addChild(child);
        userNode = new DefaultConfiguration(GROUP);
        userNode.setAttribute(ID_ATTR, "admin");
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);
        userNode = new DefaultConfiguration(USER);
        userNode.setAttribute(ID_ATTR, userId);
        userNode.setAttribute(PERMISSION_ATTR, "true");
        child.addChild(userNode);

        return config;
    }

    /**
     * Updates the user profile
     *
     * @param request
     *            The request containing the data to update
     * @param transformer
     */
    private void updateUserProfile(HttpServletRequest request, Transformer transformer) {
        String email = request.getParameter("email");
        if (email == null || ("").equals(email)) {
            transformer.setParameter("error", "emailNotSet");
        } else if (!validateEmail(email)) {
            transformer.setParameter("error", "emailNotValid");
        } else {
            try {
                String userId = getUserId();
                User user = realm.getIdentityManager().getUserManager().getUser(userId);
                user.setEmail(request.getParameter("email"));
                user.setName(request.getParameter("userName"));
                user.save();
                transformer.setParameter("success", "Profile updated successfully");
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
    }

    /**
     * Updates the groups the user belongs to. Currently it only allows for
     * deletion of membership. The action parameter is always
     * submitDeleteFromGroup_X where X is the target group
     *
     * @param action
     *            The action string containing the group involved
     * @param transformer
     */
    private void deleteFromGroup(String action, Transformer transformer) {
        try {
            String userId = getUserId();
            User user = getRealm().getIdentityManager().getUserManager().getUser(userId);
            Group[] userGroups = user.getGroups();
            GroupManager gm = getRealm().getIdentityManager().getGroupManager();
            String targetGroup = action.substring(action.indexOf("_") + 1);
            if (userGroups.length > 0) {
                if (userGroups.length > 1) {
                    Group group = gm.getGroup(targetGroup);
                    if (group.isMember(user)) {
                        group.removeMember(user);
                        group.save();
                    }
                    transformer.setParameter("success", "User successfully deleted from group: " + targetGroup);
                } else {
                    transformer.setParameter("error", "User cannot be removed from group: " + targetGroup
                            + ". Users must belong to at least one group.");
                }
            } else {
                log.error("The user " + userId + " does not belong to any group!");
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * Add a user to a group
     * @param request Request containing the group the user has to be added to
     * @param transformer
     */
    private void addToGroup(HttpServletRequest request, Transformer transformer) {
        String groupId = request.getParameter("Group");
        try {
            String userId = getUserId();
            Group group = getRealm().getIdentityManager().getGroupManager().getGroup(groupId);
            User user = getRealm().getIdentityManager().getUserManager().getUser(userId);
            group.addMember(user);
            group.save();
            transformer.setParameter("success", "User successfully added to group: " + groupId);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * Change user password
     *
     * @param request
     * @param transformer
     */
    private void updatePassword(HttpServletRequest request, Transformer transformer) {
        String oldPassword = request.getParameter("oldPassword");
        try {
            String userId = getUserId();
            if (getRealm().getIdentityManager().getUserManager().getUser(userId).authenticate(oldPassword)) {
                String plainPassword = request.getParameter("newPassword");
                boolean confirmation = plainPassword.equals(request.getParameter("newPasswordConfirmation"));
                if (confirmation && !plainPassword.equals("")) {
                    User user = getRealm().getIdentityManager().getUserManager().getUser(userId);
                    user.setPassword(plainPassword);
                    user.save();
                    transformer.setParameter("success", "Password updated successfully");
                } else {
                    transformer.setParameter("error", "Either no new password was supplied "
                            + "or the password supplied and its confirmation do not coincide");
                }
            } else {
                transformer.setParameter("error", "Authentication failed!");
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * This method checks if the specified email is valid against a regex
     *
     * @param email
     * @return true if email is valid
     */
    private boolean validateEmail(String email) {
        String emailRegEx = "(\\w+)@(\\w+\\.)(\\w+)(\\.\\w+)*";
        Pattern pattern = Pattern.compile(emailRegEx);
        Matcher matcher = pattern.matcher(email);
        return matcher.find();
    }

    /**
     * Determine the requested view: defaultView, submitProfile,
     * submitPassword, submitGroup, submitDelete
     *
     * @param request
     * @return name of the desired view
     */
    private String determineAction(HttpServletRequest request) {
        boolean submit = false;
        String action = "defaultView";
        Enumeration enumeration = request.getParameterNames();
        while (enumeration.hasMoreElements() && !submit) {
            action = enumeration.nextElement().toString();
            if (action.startsWith("submit")) {
                submit = true;
            }
        }
        return action;
    }

    /**
     * Saves the user data access policies to the policies repository
     */
    /*
    private void saveUserPolicy(String userId) {
        Configuration policyConfig = createPolicyConfiguration(userId);
        DefaultConfigurationSerializer serializer = new DefaultConfigurationSerializer();
        String policyPath = getPath() + POLICIES_SUFFIX;
        try {
            Repository policiesRepository = getRealm().getPolicyManager().getPoliciesRepository();
            serializer.serialize(policiesRepository.getNode(policyPath).getOutputStream(), policyConfig);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }
    */

    /**
     * Validate the supplied user data.
     * userName is optional
     */
    private boolean validateNewUserData(HttpServletRequest request) throws AccessManagementException {
        boolean isValid = true;

        if (request.getParameter("rp.password").equals("")) {
            isValid = false;
            log.warn("No password supplied");
        }
        if (request.getParameter("rp.passwordConfirmed").equals("")) {
            isValid = false;
            log.warn("No password confirmation supplied");
        }
        if (!request.getParameter("rp.passwordConfirmed").equals(request.getParameter("rp.password"))) {
            isValid = false;
            log.warn("Password and confirmed password do not match");
        }
        if (request.getParameter("rp.userId").equals("")) {
            isValid = false;
            log.warn("No userId supplied");
        }
        /*
        String group = request.getParameter("rp.group");
        if (group.equals("") || !realm.getIdentityManager().getGroupManager().existsGroup(group)) {
            isValid = false;
            log.error("Either no group was supplied or the group does not exist");
        }
        */
        if (!validateEmail(request.getParameter("rp.email"))) {
            isValid = false;
            log.warn("No email supplied or email not valid");
        }
        return isValid;
    }

    /**
     * Get user id from resource configuration
     */
    private String getUserId() throws Exception {
        String userId = null;
        ResourceConfiguration resConfig = getConfiguration();
        if (resConfig != null) {
            userId = getConfiguration().getProperty("user");
        } else {
            log.warn("DEPRECATED: Do not use RTI but rather a resource configuration");
            userId = getRTI().getProperty("user");
        }
        return userId;
    }

    /**
     * Delete the user from the identities repository and remove the related
     * user interface files
     */
    private boolean deleteUser(String userId) {
        boolean success = false;
        try {
            UserManager um = getRealm().getIdentityManager().getUserManager();
            um.removeUser(userId);

            String userScreenPolicyPath = getPath() + POLICIES_SUFFIX;
            Repository policiesRepo = getRealm().getPolicyManager().getPoliciesRepository();
            policiesRepo.getNode(userScreenPolicyPath).delete();

            Repository rtiRepo = getRealm().getRTIRepository();
            String userScreenRCPath = PathUtil.getRCPath(getPath());
            if (rtiRepo.existsNode(userScreenRCPath)) {
                rtiRepo.getNode(userScreenRCPath).delete();
            } else {
                String userScreenRTIPath = PathUtil.getRTIPath(getPath());
                rtiRepo.getNode(userScreenRTIPath).delete();
            }
            success = true;
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return success;
    }
}
Check on the suggested name
src/contributions/resources/yanel-user/src/java/org/wyona/yanel/impl/resources/YanelUserResource.java
Check on the suggested name
<ide><path>rc/contributions/resources/yanel-user/src/java/org/wyona/yanel/impl/resources/YanelUserResource.java <ide> return map; <ide> } <ide> <add> /** <add> * If the suggested name is an empty string, then return null such that the resource creator will not create a resource configuration <add> */ <ide> public String getCreateName(String suggestedName) { <del> return suggestedName; <add> if (suggestedName != null && suggestedName.equals("")) { <add> return null; <add> } else { <add> return suggestedName; <add> } <ide> } <ide> <ide> /**
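For context on the commented-out saveUserPolicy() method in the resource above: DefaultConfigurationSerializer writes an Avalon Configuration tree as XML to an output stream, which is how the policy built by createPolicyConfiguration() would be persisted. A hedged sketch of that serialization step in isolation, writing to an in-memory stream instead of a Yarep repository node (the demo class name and the "alice" user id are hypothetical; the Configuration shape mirrors createPolicyConfiguration()):

import java.io.ByteArrayOutputStream;
import org.apache.avalon.framework.configuration.DefaultConfiguration;
import org.apache.avalon.framework.configuration.DefaultConfigurationSerializer;

public class PolicySerializerDemo {
    public static void main(String[] args) throws Exception {
        // Build a tiny policy tree of the same shape createPolicyConfiguration() produces.
        DefaultConfiguration policy = new DefaultConfiguration("policy");
        policy.setAttribute("xmlns", "http://www.wyona.org/security/1.0");
        DefaultConfiguration role = new DefaultConfiguration("role");
        role.setAttribute("id", "view");
        policy.addChild(role);
        DefaultConfiguration user = new DefaultConfiguration("user");
        user.setAttribute("id", "alice");
        user.setAttribute("permission", "true");
        role.addChild(user);

        // Serialize to memory; the resource writes to a repository node's output stream instead.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        new DefaultConfigurationSerializer().serialize(out, policy);
        System.out.println(out.toString("UTF-8"));
    }
}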
Java
mit
70d8913659c84e89f5a9bb7d4b8c36e1004c199e
0
pennlabs/penn-mobile-android,pennlabs/penn-mobile-android,pennlabs/penn-mobile-android
package com.pennapps.labs.pennmobile; import android.app.Activity; import android.os.AsyncTask; import android.os.Bundle; import android.support.v4.app.ListFragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ListView; import com.pennapps.labs.pennmobile.adapters.DiningAdapter; import com.pennapps.labs.pennmobile.api.DiningAPI; import com.pennapps.labs.pennmobile.classes.DiningHall; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; public class DiningFragment extends ListFragment { private DiningAPI mAPI; private ListView mListView; private ArrayList<DiningHall> mDiningHalls; private DiningAdapter mAdapter; private Activity mActivity; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); mAPI = new DiningAPI(); mActivity = getActivity(); mDiningHalls = new ArrayList<DiningHall>(); new GetOpenTask().execute(); } @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); mListView = getListView(); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View v = inflater.inflate(R.layout.fragment_dining, container, false); return v; } private class GetOpenTask extends AsyncTask<Void, Void, Void> { @Override protected Void doInBackground(Void... params) { try { JSONObject resultObj = mAPI.getVenues(); JSONArray venues = resultObj.getJSONObject("document").getJSONArray("venue"); for (int i = 0; i < venues.length(); i++) { JSONObject venue = venues.getJSONObject(i); int id = venue.getInt("id"); String name = venue.getString("name"); boolean isResidential = venue.getString("venueType").equals("residential"); boolean hasMenu = !venue.getString("dailyMenuURL").isEmpty(); mDiningHalls.add(new DiningHall(id, name, isResidential, hasMenu)); } } catch (JSONException e) { } catch (NullPointerException e) { } return null; } @Override protected void onPostExecute(Void params) { new GetMenusTask().execute(); } } private class GetMenusTask extends AsyncTask<Void, Void, Void> { @Override protected Void doInBackground(Void... 
params) { try { for (DiningHall mDiningHall : mDiningHalls) { if (mDiningHall.isResidential() && mDiningHall.hasMenu()) { JSONObject resultObj = mAPI.getDailyMenu(mDiningHall.getId()); JSONArray meals = resultObj.getJSONObject("Document") .getJSONObject("tblMenu") .getJSONArray("tblDayPart"); for (int i = 0; i < meals.length(); i++) { JSONObject meal = meals.getJSONObject(i); parseMeal(meal, mDiningHall); } } } } catch (JSONException e) { } return null; } private void parseMeal(JSONObject meal, DiningHall diningHall) { try { String mealName = meal.getString("txtDayPartDescription"); JSONArray stations = new JSONArray(); try { stations = meal.getJSONArray("tblStation"); } catch (JSONException e) { JSONObject stationsObject = meal.getJSONObject("tblStation"); stations.put(stationsObject); } HashMap<String, String> currentMenu = new HashMap<String, String>(); for (int j = 0; j < stations.length(); j++) { JSONObject station = stations.getJSONObject(j); parseStation(station, currentMenu); } if (mealName.equals("Lunch")) { diningHall.setLunchMenu(currentMenu); } else if (mealName.equals("Dinner")) { diningHall.setDinnerMenu(currentMenu); } } catch (JSONException e) { } } private void parseStation(JSONObject station, HashMap<String, String> menu) { try { String stationName = station.getString("txtStationDescription"); JSONArray stationItems = new JSONArray(); try { stationItems = station.getJSONArray("tblItem"); } catch (JSONException e) { JSONObject stationItem = station.getJSONObject("tblItem"); stationItems.put(stationItem); } for (int k = 0; k < stationItems.length(); k++) { JSONObject foodItem = stationItems.getJSONObject(k); String foodName = foodItem.getString("txtTitle"); menu.put(stationName, foodName); } } catch (JSONException e) { } } @Override protected void onPostExecute(Void params) { mAdapter = new DiningAdapter(mActivity, mDiningHalls); mListView.setAdapter(mAdapter); } } }
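The nested try/catch blocks in parseMeal() and parseStation() above both handle the same quirk: the menu API returns a field as a single JSON object when there is one entry and as a JSON array when there are several. A small sketch of that normalization pulled into a reusable helper (the helper class is hypothetical; org.json is the library the fragment already uses):

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

public final class JsonUtils {
    private JsonUtils() {}

    // Returns parent[key] as a JSONArray, wrapping a lone JSONObject in a
    // single-element array so callers can always iterate uniformly.
    public static JSONArray asArray(JSONObject parent, String key) throws JSONException {
        Object value = parent.get(key);
        if (value instanceof JSONArray) {
            return (JSONArray) value;
        }
        JSONArray wrapped = new JSONArray();
        wrapped.put(parent.getJSONObject(key));
        return wrapped;
    }
}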
PennMobile/src/main/java/com/pennapps/labs/pennmobile/DiningFragment.java
package com.pennapps.labs.pennmobile; import android.app.Activity; import android.os.AsyncTask; import android.os.Bundle; import android.support.v4.app.ListFragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ListView; import com.pennapps.labs.pennmobile.adapters.DiningAdapter; import com.pennapps.labs.pennmobile.api.DiningAPI; import com.pennapps.labs.pennmobile.classes.DiningHall; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; public class DiningFragment extends ListFragment { private DiningAPI mAPI; private ListView mListView; private ArrayList<DiningHall> mDiningHalls; private DiningAdapter mAdapter; private Activity mActivity; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); mAPI = new DiningAPI(); mActivity = getActivity(); mDiningHalls = new ArrayList<DiningHall>(); new GetOpenTask().execute(); } @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); mListView = getListView(); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View v = inflater.inflate(R.layout.fragment_dining, container, false); return v; } private class GetOpenTask extends AsyncTask<Void, Void, Void> { @Override protected Void doInBackground(Void... params) { try { JSONObject resultObj = mAPI.getVenues(); JSONArray venues = resultObj.getJSONObject("document").getJSONArray("venue"); for (int i = 0; i < venues.length(); i++) { JSONObject venue = venues.getJSONObject(i); int id = venue.getInt("id"); String name = venue.getString("name"); boolean isResidential = venue.getString("venueType").equals("residential"); boolean hasMenu = !venue.getString("dailyMenuURL").isEmpty(); mDiningHalls.add(new DiningHall(id, name, isResidential, hasMenu)); } } catch (JSONException e) { } catch (NullPointerException e) { } return null; } @Override protected void onPostExecute(Void params) { new GetMenusTask().execute(); } } private class GetMenusTask extends AsyncTask<Void, Void, Void> { @Override protected Void doInBackground(Void... 
params) { try { for (DiningHall mDiningHall : mDiningHalls) { if (mDiningHall.isResidential() && mDiningHall.hasMenu()) { JSONObject resultObj = mAPI.getDailyMenu(mDiningHall.getId()); JSONArray meals = resultObj.getJSONObject("Document") .getJSONObject("tblMenu") .getJSONArray("tblDayPart"); for (int i = 0; i < meals.length(); i++) { JSONObject meal = meals.getJSONObject(i); String mealName = meal.getString("txtDayPartDescription"); JSONArray stations = new JSONArray(); try { stations = meal.getJSONArray("tblStation"); } catch (JSONException e) { JSONObject stationsObject = meal.getJSONObject("tblStation"); stations.put(stationsObject); } HashMap<String, String> currentMenu = new HashMap<String, String>(); for (int j = 0; j < stations.length(); j++) { JSONObject station = stations.getJSONObject(j); String stationName = station.getString("txtStationDescription"); JSONArray stationItems = new JSONArray(); try { stationItems = station.getJSONArray("tblItem"); } catch (JSONException e) { JSONObject stationItem = station.getJSONObject("tblItem"); stationItems.put(stationItem); } for (int k = 0; k < stationItems.length(); k++) { JSONObject foodItem = stationItems.getJSONObject(k); String foodName = foodItem.getString("txtTitle"); currentMenu.put(stationName, foodName); } } if (mealName.equals("Lunch")) { mDiningHall.setLunchMenu(currentMenu); } else if (mealName.equals("Dinner")) { mDiningHall.setDinnerMenu(currentMenu); } } } } } catch (JSONException e) { } return null; } @Override protected void onPostExecute(Void params) { mAdapter = new DiningAdapter(mActivity, mDiningHalls); mListView.setAdapter(mAdapter); } } }
Refactor dining fragment code
PennMobile/src/main/java/com/pennapps/labs/pennmobile/DiningFragment.java
Refactor dining fragment code
<ide><path>ennMobile/src/main/java/com/pennapps/labs/pennmobile/DiningFragment.java <ide> <ide> for (int i = 0; i < meals.length(); i++) { <ide> JSONObject meal = meals.getJSONObject(i); <del> <del> String mealName = meal.getString("txtDayPartDescription"); <del> <del> JSONArray stations = new JSONArray(); <del> try { <del> stations = meal.getJSONArray("tblStation"); <del> } catch (JSONException e) { <del> JSONObject stationsObject = meal.getJSONObject("tblStation"); <del> stations.put(stationsObject); <del> } <del> HashMap<String, String> currentMenu = new HashMap<String, String>(); <del> for (int j = 0; j < stations.length(); j++) { <del> JSONObject station = stations.getJSONObject(j); <del> <del> String stationName = station.getString("txtStationDescription"); <del> JSONArray stationItems = new JSONArray(); <del> try { <del> stationItems = station.getJSONArray("tblItem"); <del> } catch (JSONException e) { <del> JSONObject stationItem = station.getJSONObject("tblItem"); <del> stationItems.put(stationItem); <del> } <del> for (int k = 0; k < stationItems.length(); k++) { <del> JSONObject foodItem = stationItems.getJSONObject(k); <del> String foodName = foodItem.getString("txtTitle"); <del> currentMenu.put(stationName, foodName); <del> } <del> } <del> <del> if (mealName.equals("Lunch")) { <del> mDiningHall.setLunchMenu(currentMenu); <del> } else if (mealName.equals("Dinner")) { <del> mDiningHall.setDinnerMenu(currentMenu); <del> } <add> parseMeal(meal, mDiningHall); <ide> } <ide> } <ide> } <ide> return null; <ide> } <ide> <add> private void parseMeal(JSONObject meal, DiningHall diningHall) { <add> try { <add> String mealName = meal.getString("txtDayPartDescription"); <add> <add> JSONArray stations = new JSONArray(); <add> try { <add> stations = meal.getJSONArray("tblStation"); <add> } catch (JSONException e) { <add> JSONObject stationsObject = meal.getJSONObject("tblStation"); <add> stations.put(stationsObject); <add> } <add> HashMap<String, String> currentMenu = new HashMap<String, String>(); <add> for (int j = 0; j < stations.length(); j++) { <add> JSONObject station = stations.getJSONObject(j); <add> parseStation(station, currentMenu); <add> } <add> <add> if (mealName.equals("Lunch")) { <add> diningHall.setLunchMenu(currentMenu); <add> } else if (mealName.equals("Dinner")) { <add> diningHall.setDinnerMenu(currentMenu); <add> } <add> } catch (JSONException e) { <add> <add> } <add> } <add> <add> private void parseStation(JSONObject station, HashMap<String, String> menu) { <add> try { <add> String stationName = station.getString("txtStationDescription"); <add> JSONArray stationItems = new JSONArray(); <add> try { <add> stationItems = station.getJSONArray("tblItem"); <add> } catch (JSONException e) { <add> JSONObject stationItem = station.getJSONObject("tblItem"); <add> stationItems.put(stationItem); <add> } <add> for (int k = 0; k < stationItems.length(); k++) { <add> JSONObject foodItem = stationItems.getJSONObject(k); <add> String foodName = foodItem.getString("txtTitle"); <add> menu.put(stationName, foodName); <add> } <add> } catch (JSONException e) { <add> <add> } <add> } <add> <ide> @Override <ide> protected void onPostExecute(Void params) { <ide> mAdapter = new DiningAdapter(mActivity, mDiningHalls);
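One subtlety left unchanged by the refactoring in the diff above: parseStation() calls menu.put(stationName, foodName) once per item, so a station's map entry ends up holding only the last item parsed. If every item should be kept, a map of lists avoids the overwrite; a hedged sketch of such a structure (class and method names are illustrative, not part of the app):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class StationMenu {
    private final Map<String, List<String>> itemsByStation = new HashMap<String, List<String>>();

    // Appends an item instead of replacing the station's previous entry.
    public void add(String station, String item) {
        List<String> items = itemsByStation.get(station);
        if (items == null) {
            items = new ArrayList<String>();
            itemsByStation.put(station, items);
        }
        items.add(item);
    }

    public List<String> items(String station) {
        List<String> items = itemsByStation.get(station);
        return items == null ? new ArrayList<String>() : items;
    }
}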
JavaScript
bsd-3-clause
becbe18d7e2df798c1132d9f9c6a10a0af48a13c
0
openaq/openaq.org,openaq/openaq.org,openaq/openaq.org
import React, { useState, useEffect } from 'react'; import PropTypes from 'prop-types'; import config from '../../config'; export default function LocationsSource({ activeParameter, map, children }) { const [sourceId, setSourceId] = useState(null); useEffect(() => { if (!map.getSource(`locations-source-${activeParameter}`)) { map.addSource(`locations-source-${activeParameter}`, { type: 'vector', tiles: [ `${config.api}/locations/tiles/{z}/{x}/{y}.pbf?parameter=${activeParameter}`, ], minzoom: 0, maxzoom: 24, bounds: [-180, -90, 180, 90], }); } setSourceId(`locations-source-${activeParameter}`); return () => { setSourceId(null); }; }, [activeParameter]); return ( <> {!!(map && sourceId && map.getSource(sourceId)) && React.Children.map(children, child => React.cloneElement(child, { map: map, sourceId: sourceId, }) )} </> ); } LocationsSource.propTypes = { activeParameter: PropTypes.string.isRequired, map: PropTypes.object.isRequired, children: PropTypes.oneOfType([ PropTypes.element, PropTypes.arrayOf(PropTypes.element), ]), };
app/assets/scripts/components/map/locations-source.js
import React, { useState, useEffect } from 'react'; import PropTypes from 'prop-types'; import config from '../../config'; export default function LocationsSource({ activeParameter, map, children }) { const [sourceId, setSourceId] = useState(null); useEffect(() => { if (!map.getSource(`locations-source-${activeParameter}`)) { map.addSource(`locations-source-${activeParameter}`, { type: 'vector', tiles: [ `${config.api}/locations/tiles/{z}/{x}/{y}.pbf?parameter=${activeParameter}`, ], minzoom: 0, maxzoom: 24, bounds: [-180, -90, 180, 90], }); } setSourceId(`locations-source-${activeParameter}`); return () => { setSourceId(null); if (map.getSource(`locations-source-${activeParameter}`)) { map.removeSource(`locations-source-${activeParameter}`); } }; }, [activeParameter]); return ( <> {!!(map && sourceId && map.getSource(sourceId)) && React.Children.map(children, child => React.cloneElement(child, { map: map, sourceId: sourceId, }) )} </> ); } LocationsSource.propTypes = { activeParameter: PropTypes.string.isRequired, map: PropTypes.object.isRequired, children: PropTypes.oneOfType([ PropTypes.element, PropTypes.arrayOf(PropTypes.element), ]), };
Don't remove source. It's causing more trouble than it helps.
app/assets/scripts/components/map/locations-source.js
Don't remove source
<ide><path>pp/assets/scripts/components/map/locations-source.js <ide> <ide> return () => { <ide> setSourceId(null); <del> if (map.getSource(`locations-source-${activeParameter}`)) { <del> map.removeSource(`locations-source-${activeParameter}`); <del> } <ide> }; <ide> }, [activeParameter]); <ide>
Java
apache-2.0
3eef54097b8ec96d0bdd71499f944f0484787021
0
swps/cassandra,fengshao0907/Cassandra-Research,a-buck/cassandra,WorksApplications/cassandra,ollie314/cassandra,rmarchei/cassandra,helena/cassandra,chaordic/cassandra,juiceblender/cassandra,kgreav/cassandra,nvoron23/cassandra,aweisberg/cassandra,yhnishi/cassandra,spodkowinski/cassandra,thelastpickle/cassandra,tjake/cassandra,dongjiaqiang/cassandra,szhou1234/cassandra,mambocab/cassandra,blambov/cassandra,MasahikoSawada/cassandra,lalithsuresh/cassandra-c3,lalithsuresh/cassandra-c3,newrelic-forks/cassandra,caidongyun/cassandra,sriki77/cassandra,yonglehou/cassandra,tommystendahl/cassandra,Jollyplum/cassandra,rackerlabs/cloudmetrics-cassandra,DavidHerzogTU-Berlin/cassandra,phact/cassandra,ben-manes/cassandra,sriki77/cassandra,ibmsoe/cassandra,snazy/cassandra,tjake/cassandra,jbellis/cassandra,dongjiaqiang/cassandra,DavidHerzogTU-Berlin/cassandra,yangzhe1991/cassandra,aweisberg/cassandra,nutbunnies/cassandra,mshuler/cassandra,bpupadhyaya/cassandra,Jaumo/cassandra,tjake/cassandra,dkua/cassandra,cassandra-dkua/cassandra,miguel0afd/cassandra-cqlMod,LatencyUtils/cassandra-stress2,jrwest/cassandra,Jollyplum/cassandra,mheffner/cassandra-1,pofallon/cassandra,ben-manes/cassandra,sriki77/cassandra,yonglehou/cassandra,tommystendahl/cassandra,RyanMagnusson/cassandra,DICL/cassandra,kgreav/cassandra,nlalevee/cassandra,jeffjirsa/cassandra,mkjellman/cassandra,nutbunnies/cassandra,beobal/cassandra,yanbit/cassandra,jbellis/cassandra,szhou1234/cassandra,christian-esken/cassandra,DavidHerzogTU-Berlin/cassandra,newrelic-forks/cassandra,mike-tr-adamson/cassandra,iamaleksey/cassandra,jasonwee/cassandra,kangkot/stratio-cassandra,qinjin/mdtc-cassandra,helena/cassandra,jasobrown/cassandra,swps/cassandra,rmarchei/cassandra,tommystendahl/cassandra,whitepages/cassandra,blerer/cassandra,helena/cassandra,MasahikoSawada/cassandra,matthewtt/cassandra_read,ollie314/cassandra,stef1927/cassandra,caidongyun/cassandra,Instagram/cassandra,ifesdjeen/cassandra,nitsanw/cassandra,bcoverston/cassandra,mkjellman/cassandra,jeffjirsa/cassandra,caidongyun/cassandra,michaelmior/cassandra,mt0803/cassandra,regispl/cassandra,
nvoron23/cassandra,knifewine/cassandra,Imran-C/cassandra,sluk3r/cassandra,rmarchei/cassandra,tommystendahl/cassandra,clohfink/cassandra,nutbunnies/cassandra,beobal/cassandra,strapdata/cassandra,yanbit/cassandra,rdio/cassandra,strapdata/cassandra,Imran-C/cassandra,mshuler/cassandra,wreda/cassandra,blambov/cassandra,rogerchina/cassandra,pcn/cassandra-1,jrwest/cassandra,taigetco/cassandra_read,fengshao0907/Cassandra-Research,hhorii/cassandra,yanbit/cassandra,Jollyplum/cassandra,emolsson/cassandra,mike-tr-adamson/cassandra,mshuler/cassandra,dkua/cassandra,ptnapoleon/cassandra,michaelsembwever/cassandra,wreda/cassandra,aboudreault/cassandra,iburmistrov/Cassandra,jsanda/cassandra,guard163/cassandra,AtwooTM/cassandra,sharvanath/cassandra,hengxin/cassandra,DikangGu/cassandra,mshuler/cassandra,guanxi55nba/key-value-store,Stratio/stratio-cassandra,newrelic-forks/cassandra,taigetco/cassandra_read,Bj0rnen/cassandra,RyanMagnusson/cassandra,rogerchina/cassandra,clohfink/cassandra,qinjin/mdtc-cassandra,jbellis/cassandra,Bj0rnen/cassandra,gdusbabek/cassandra,nitsanw/cassandra,vaibhi9/cassandra,adelapena/cassandra,sharvanath/cassandra,hengxin/cassandra,DikangGu/cassandra,mshuler/cassandra,guanxi55nba/key-value-store,Stratio/stratio-cassandra,newrelic-forks/cassandra,caidongyun/cassandra,sriki77/cassandra,yonglehou/cassandra,tommystendahl/cassandra,Jollyplum/cassandra,rackerlabs/cloudmetrics-cassandra,DavidHerzogTU-Berlin/cassandra,phact/cassandra,ben-manes/cassandra,sriki77/cassandra,ibmsoe/cassandra,snazy/cassandra,tjake/cassandra,fengshao0907/Cassandra-Research,a-buck/cassandra,pcmanus/cassandra,sayanh/ViewMaintenanceCassandra,
tjake/cassandra,nutbunnies/cassandra,scylladb/scylla-tools-java,adelapena/cassandra,kangkot/stratio-cassandra,michaelsembwever/cassandra,emolsson/cassandra,mashuai/Cassandra-Research,snazy/cassandra,jasonstack/cassandra,pauloricardomg/cassandra,chbatey/cassandra-1,exoscale/cassandra,pkdevbox/cassandra,bdeggleston/cassandra,pofallon/cassandra,bcoverston/cassandra,dkua/cassandra,shawnkumar/cstargraph,josh-mckenzie/cassandra,jasobrown/cassandra,cooldoger/cassandra,aarushi12002/cassandra,aboudreault/cassandra,chbatey/cassandra-1,driftx/cassandra,modempachev4/kassandra,cooldoger/cassandra,krummas/cassandra,instaclustr/cassandra,beobal/cassandra,bmel/cassandra,iamaleksey/cassandra,aureagle/cassandra,nakomis/cassandra,ben-manes/cassandra,michaelsembwever/cassandra,joesiewert/cassandra,thobbs/cassandra,clohfink/cassandra,iburmistrov/Cassandra,bpupadhyaya/cassandra,michaelsembwever/cassandra,ifesdjeen/cassandra,sedulam/CASSANDRA-12201,tongjixianing/projects,jeromatron/cassandra,apache/cassandra,AtwooTM/cassandra,sayanh/ViewMaintenanceSupport,iamaleksey/cassandra,guanxi55nba/db-improvement,chbatey/cassandra-1,fengshao0907/cassandra-1,pthomaid/cassandra,Stratio/stratio-cassandra,macintoshio/cassandra,christian-esken/cassandra,weipinghe/cassandra,driftx/cassandra,clohfink/cassandra,project-zerus/cassandra,DICL/cassandra,vramaswamy456/cassandra,thelastpickle/cassandra,hhorii/cassandra,emolsson/cassandra,Imran-C/cassandra,belliottsmith/cassandra,Instagram/cassandra,stef1927/cassandra,rackerlabs/cloudmetrics-cassandra,macintoshio/cassandra,matthewtt/cassandra_read,pallavi510/cassandra,spodkowinski/cassandra,modempachev4/kassandra,sluk3r/cassandra,GabrielNicolasAvellaneda/cassandra,josh-mckenzie/cassandra,snazy/cassandra,heiko-braun/cassandra,aureagle/cassandra,Jaumo/cassandra,mambocab/cassandra,tongjixianing/projects,xiongzheng/Cassandra-Research,whitepages/cassandra,pthomaid/cassandra,josh-mckenzie/cassandra,christian-esken/cassandra,fengshao0907/cassandra-1,yangzhe1991/cassandra,EnigmaCurry/cassandra,pcn/cassandra-1,qinjin/mdtc-cassandra,LatencyUtils/cassandra-stress2,boneill42/cassandra,bdeggleston/cassandra,Stratio/stratio-cassandra,sedulam/CASSANDRA-12201,jeffjirsa/cassandra,apache/cassandra,nlalevee/cassandra,ibmsoe/cassandra,fengshao0907/Cassandra-Research,szhou1234/cassandra,codefollower/Cassandra-Research,sedulam/CASSANDRA-12201,sayanh/ViewMaintenanceCassandra,guard163/cassandra,jasonstack/cassandra,heiko-braun/cassandra,asias/cassandra,yukim/cassandra,jasonwee/cassandra,darach/cassandra,pkdevbox/cassandra,krummas/cassandra,michaelmior/cassandra,sluk3r/cassandra,nitsanw/cassandra,knifewine/cassandra,krummas/cassandra,scaledata/cassandra,nlalevee/cassandra,josh-mckenzie/cassandra,Stratio/stratio-cassandra,fengshao0907/cassandra-1,guard163/cassandra,pcmanus/cassandra,pkdevbox/cassandra,ifesdjeen/cassandra,guanxi55nba/db-improvement,pcmanus/cassandra,ejankan/cassandra,Stratio/cassandra,aureagle/cassandra,rackerlabs/cloudmetrics-cassandra,bmel/cassandra,aweisberg/cassandra,sivikt/cassandra,yonglehou/cassandra,regispl/cassandra,sivikt/cassandra,tommystendahl/cassandra,mheffner/cassandra-1,asias/cassandra,taigetco/cassandra_read,jkni/cassandra,yhnishi/cassandra,juiceblender/cassandra,mashuai/Cassandra-Research,instaclustr/cassandra,wreda/cassandra,adejanovski/cassandra,ollie314/cassandra,EnigmaCurry/cassandra,adelapena/cassandra,blerer/cassandra,heiko-braun/cassandra,project-zerus/cassandra,ptnapoleon/cassandra,Stratio/cassandra,exoscale/cassandra,dongjiaqiang/cassandra,weideng1/cassandra,
pallavi510/cassandra,bmel/cassandra,pcn/cassandra-1,adejanovski/cassandra,blerer/cassandra
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.io.compress; import java.io.*; import java.util.*; import com.google.common.annotations.VisibleForTesting; import com.google.common.primitives.Longs; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.db.TypeSizes; import org.apache.cassandra.io.FSReadError; import org.apache.cassandra.io.FSWriteError; import org.apache.cassandra.io.IVersionedSerializer; import org.apache.cassandra.io.sstable.Component; import org.apache.cassandra.io.sstable.CorruptSSTableException; import org.apache.cassandra.io.sstable.Descriptor; import org.apache.cassandra.io.util.FileUtils; import org.apache.cassandra.io.util.Memory; import org.apache.cassandra.utils.Pair; /** * Holds metadata about compressed file */ public class CompressionMetadata { public final long dataLength; public final long compressedFileLength; private final Memory chunkOffsets; public final String indexFilePath; public final CompressionParameters parameters; /** * Create metadata about given compressed file including uncompressed data length, chunk size * and list of the chunk offsets of the compressed data. * * This is an expensive operation! Don't create more than one for each * sstable. * * @param dataFilePath Path to the compressed file * * @return metadata about given compressed file. 
*/ public static CompressionMetadata create(String dataFilePath) { Descriptor desc = Descriptor.fromFilename(dataFilePath); return new CompressionMetadata(desc.filenameFor(Component.COMPRESSION_INFO), new File(dataFilePath).length()); } @VisibleForTesting CompressionMetadata(String indexFilePath, long compressedLength) { this.indexFilePath = indexFilePath; DataInputStream stream; try { stream = new DataInputStream(new FileInputStream(indexFilePath)); } catch (FileNotFoundException e) { throw new RuntimeException(e); } try { String compressorName = stream.readUTF(); int optionCount = stream.readInt(); Map<String, String> options = new HashMap<String, String>(); for (int i = 0; i < optionCount; ++i) { String key = stream.readUTF(); String value = stream.readUTF(); options.put(key, value); } int chunkLength = stream.readInt(); try { parameters = new CompressionParameters(compressorName, chunkLength, options); } catch (ConfigurationException e) { throw new RuntimeException("Cannot create CompressionParameters for stored parameters", e); } dataLength = stream.readLong(); compressedFileLength = compressedLength; chunkOffsets = readChunkOffsets(stream); } catch (IOException e) { throw new CorruptSSTableException(e, indexFilePath); } finally { FileUtils.closeQuietly(stream); } } public ICompressor compressor() { return parameters.sstableCompressor; } public int chunkLength() { return parameters.chunkLength(); } /** * Read offsets of the individual chunks from the given input. * * @param input Source of the data. * * @return collection of the chunk offsets. */ private Memory readChunkOffsets(DataInput input) { try { int chunkCount = input.readInt(); Memory offsets = Memory.allocate(chunkCount * 8); for (int i = 0; i < chunkCount; i++) { try { offsets.setLong(i * 8, input.readLong()); } catch (EOFException e) { String msg = String.format("Corrupted Index File %s: read %d but expected %d chunks.", indexFilePath, i, chunkCount); throw new CorruptSSTableException(new IOException(msg, e), indexFilePath); } } return offsets; } catch (IOException e) { throw new FSReadError(e, indexFilePath); } } /** * Get a chunk of compressed data (offset, length) corresponding to given position * * @param position Position in the file. * @return pair of chunk offset and length. */ public Chunk chunkFor(long position) { // position of the chunk int idx = 8 * (int) (position / parameters.chunkLength()); if (idx >= chunkOffsets.size()) throw new CorruptSSTableException(new EOFException(), indexFilePath); long chunkOffset = chunkOffsets.getLong(idx); long nextChunkOffset = (idx + 8 == chunkOffsets.size()) ? compressedFileLength : chunkOffsets.getLong(idx + 8); return new Chunk(chunkOffset, (int) (nextChunkOffset - chunkOffset - 4)); // "4" bytes reserved for checksum } /** * @param sections Collection of sections in uncompressed file * @return Array of chunks which corresponds to given sections of uncompressed file, sorted by chunk offset */ public Chunk[] getChunksForSections(Collection<Pair<Long, Long>> sections) { // use SortedSet to eliminate duplicates and sort by chunk offset SortedSet<Chunk> offsets = new TreeSet<Chunk>(new Comparator<Chunk>() { public int compare(Chunk o1, Chunk o2) { return Longs.compare(o1.offset, o2.offset); } }); for (Pair<Long, Long> section : sections) { int startIndex = (int) (section.left / parameters.chunkLength()); int endIndex = (int) (section.right / parameters.chunkLength()); endIndex = section.right % parameters.chunkLength() == 0 ? 
endIndex - 1 : endIndex; for (int i = startIndex; i <= endIndex; i++) { long offset = i * 8; long chunkOffset = chunkOffsets.getLong(offset); long nextChunkOffset = offset + 8 == chunkOffsets.size() ? compressedFileLength : chunkOffsets.getLong(offset + 8); offsets.add(new Chunk(chunkOffset, (int) (nextChunkOffset - chunkOffset - 4))); // "4" bytes reserved for checksum } } return offsets.toArray(new Chunk[offsets.size()]); } public void close() { chunkOffsets.free(); } public static class Writer extends RandomAccessFile { // place for uncompressed data length in the index file private long dataLengthOffset = -1; // path to the file private final String filePath; private Writer(String path) throws FileNotFoundException { super(path, "rw"); filePath = path; } public static Writer open(String path) { try { return new Writer(path); } catch (FileNotFoundException e) { throw new RuntimeException(e); } } public void writeHeader(CompressionParameters parameters) { try { writeUTF(parameters.sstableCompressor.getClass().getSimpleName()); writeInt(parameters.otherOptions.size()); for (Map.Entry<String, String> entry : parameters.otherOptions.entrySet()) { writeUTF(entry.getKey()); writeUTF(entry.getValue()); } // store the length of the chunk writeInt(parameters.chunkLength()); // store position and reserve a place for uncompressed data length and chunks count dataLengthOffset = getFilePointer(); writeLong(-1); writeInt(-1); } catch (IOException e) { throw new FSWriteError(e, filePath); } } public void finalizeHeader(long dataLength, int chunks) { assert dataLengthOffset != -1 : "writeHeader wasn't called"; long currentPosition; try { currentPosition = getFilePointer(); } catch (IOException e) { throw new FSReadError(e, filePath); } try { // seek back to the data length position seek(dataLengthOffset); // write uncompressed data length and chunks count writeLong(dataLength); writeInt(chunks); // seek forward to the previous position seek(currentPosition); } catch (IOException e) { throw new FSWriteError(e, filePath); } } /** * Get a chunk offset by it's index. * * @param chunkIndex Index of the chunk. * * @return offset of the chunk in the compressed file. */ public long chunkOffsetBy(int chunkIndex) { if (dataLengthOffset == -1) throw new IllegalStateException("writeHeader wasn't called"); try { long position = getFilePointer(); // seek to the position of the given chunk seek(dataLengthOffset + 8 // size reserved for uncompressed data length + 4 // size reserved for chunk count + (chunkIndex * 8L)); try { return readLong(); } finally { // back to the original position seek(position); } } catch (IOException e) { throw new FSReadError(e, filePath); } } /** * Reset the writer so that the next chunk offset written will be the * one of {@code chunkIndex}. 
*/ public void resetAndTruncate(int chunkIndex) { try { seek(dataLengthOffset + 8 // size reserved for uncompressed data length + 4 // size reserved for chunk count + (chunkIndex * 8L)); getChannel().truncate(getFilePointer()); } catch (IOException e) { throw new FSWriteError(e, filePath); } } public void close() throws IOException { if (getChannel().isOpen()) // if RAF.closed were public we could just use that, but it's not getChannel().force(true); super.close(); } } /** * Holds offset and length of the file chunk */ public static class Chunk { public static final IVersionedSerializer<Chunk> serializer = new ChunkSerializer(); public final long offset; public final int length; public Chunk(long offset, int length) { this.offset = offset; this.length = length; } public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Chunk chunk = (Chunk) o; return length == chunk.length && offset == chunk.offset; } public int hashCode() { int result = (int) (offset ^ (offset >>> 32)); result = 31 * result + length; return result; } public String toString() { return String.format("Chunk<offset: %d, length: %d>", offset, length); } } static class ChunkSerializer implements IVersionedSerializer<Chunk> { public void serialize(Chunk chunk, DataOutput out, int version) throws IOException { out.writeLong(chunk.offset); out.writeInt(chunk.length); } public Chunk deserialize(DataInput in, int version) throws IOException { return new Chunk(in.readLong(), in.readInt()); } public long serializedSize(Chunk chunk, int version) { long size = TypeSizes.NATIVE.sizeof(chunk.offset); size += TypeSizes.NATIVE.sizeof(chunk.length); return size; } } }
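The chunk lookup in chunkFor() above is pure offset arithmetic over a flat array of 8-byte longs. Below is a minimal sketch of the same calculation using an on-heap long[] in place of the off-heap Memory region; the class and field names are illustrative, not Cassandra API.

final class ChunkIndex {
    private final long[] offsets;            // offset of each compressed chunk
    private final int chunkLength;           // uncompressed bytes per chunk
    private final long compressedFileLength; // total compressed file length

    ChunkIndex(long[] offsets, int chunkLength, long compressedFileLength) {
        this.offsets = offsets;
        this.chunkLength = chunkLength;
        this.compressedFileLength = compressedFileLength;
    }

    // Returns {chunkOffset, chunkDataLength} for the chunk covering the given
    // uncompressed position; mirrors chunkFor() above, where the source
    // multiplies by 8 only because it indexes bytes rather than elements.
    long[] chunkFor(long uncompressedPosition) {
        int i = (int) (uncompressedPosition / chunkLength);
        if (i >= offsets.length)
            throw new IllegalArgumentException("position past end of file");
        long chunkOffset = offsets[i];
        long nextChunkOffset = (i + 1 == offsets.length)
                ? compressedFileLength
                : offsets[i + 1];
        // the last 4 bytes of each chunk hold its checksum, not data
        return new long[] { chunkOffset, nextChunkOffset - chunkOffset - 4 };
    }
}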
src/java/org/apache/cassandra/io/compress/CompressionMetadata.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.io.compress; import java.io.*; import java.util.*; import com.google.common.annotations.VisibleForTesting; import com.google.common.primitives.Longs; import org.apache.cassandra.exceptions.ConfigurationException; import org.apache.cassandra.db.TypeSizes; import org.apache.cassandra.io.FSReadError; import org.apache.cassandra.io.FSWriteError; import org.apache.cassandra.io.IVersionedSerializer; import org.apache.cassandra.io.sstable.Component; import org.apache.cassandra.io.sstable.CorruptSSTableException; import org.apache.cassandra.io.sstable.Descriptor; import org.apache.cassandra.io.util.FileUtils; import org.apache.cassandra.io.util.Memory; import org.apache.cassandra.utils.Pair; /** * Holds metadata about compressed file */ public class CompressionMetadata { public final long dataLength; public final long compressedFileLength; private final Memory chunkOffsets; public final String indexFilePath; public final CompressionParameters parameters; /** * Create metadata about given compressed file including uncompressed data length, chunk size * and list of the chunk offsets of the compressed data. * * This is an expensive operation! Don't create more than one for each * sstable. * * @param dataFilePath Path to the compressed file * * @return metadata about given compressed file. 
*/ public static CompressionMetadata create(String dataFilePath) { Descriptor desc = Descriptor.fromFilename(dataFilePath); return new CompressionMetadata(desc.filenameFor(Component.COMPRESSION_INFO), new File(dataFilePath).length()); } @VisibleForTesting CompressionMetadata(String indexFilePath, long compressedLength) { this.indexFilePath = indexFilePath; DataInputStream stream; try { stream = new DataInputStream(new FileInputStream(indexFilePath)); } catch (FileNotFoundException e) { throw new RuntimeException(e); } try { String compressorName = stream.readUTF(); int optionCount = stream.readInt(); Map<String, String> options = new HashMap<String, String>(); for (int i = 0; i < optionCount; ++i) { String key = stream.readUTF(); String value = stream.readUTF(); options.put(key, value); } int chunkLength = stream.readInt(); try { parameters = new CompressionParameters(compressorName, chunkLength, options); } catch (ConfigurationException e) { throw new RuntimeException("Cannot create CompressionParameters for stored parameters", e); } dataLength = stream.readLong(); compressedFileLength = compressedLength; chunkOffsets = readChunkOffsets(stream); } catch (IOException e) { throw new CorruptSSTableException(e, indexFilePath); } finally { FileUtils.closeQuietly(stream); } } public ICompressor compressor() { return parameters.sstableCompressor; } public int chunkLength() { return parameters.chunkLength(); } /** * Read offsets of the individual chunks from the given input. * * @param input Source of the data. * * @return collection of the chunk offsets. */ private Memory readChunkOffsets(DataInput input) { try { int chunkCount = input.readInt(); Memory offsets = Memory.allocate(chunkCount * 8); for (int i = 0; i < chunkCount; i++) { try { offsets.setLong(i * 8, input.readLong()); } catch (EOFException e) { String msg = String.format("Corrupted Index File %s: read %d but expected %d chunks.", indexFilePath, i, chunkCount); throw new CorruptSSTableException(new IOException(msg, e), indexFilePath); } } return offsets; } catch (IOException e) { throw new FSReadError(e, indexFilePath); } } /** * Get a chunk of compressed data (offset, length) corresponding to given position * * @param position Position in the file. * @return pair of chunk offset and length. */ public Chunk chunkFor(long position) { // position of the chunk int idx = 8 * (int) (position / parameters.chunkLength()); if (idx >= chunkOffsets.size()) throw new CorruptSSTableException(new EOFException(), indexFilePath); long chunkOffset = chunkOffsets.getLong(idx); long nextChunkOffset = (idx + 8 == chunkOffsets.size()) ? compressedFileLength : chunkOffsets.getLong(idx + 8); return new Chunk(chunkOffset, (int) (nextChunkOffset - chunkOffset - 4)); // "4" bytes reserved for checksum } /** * @param sections Collection of sections in uncompressed file * @return Array of chunks which corresponds to given sections of uncompressed file, sorted by chunk offset */ public Chunk[] getChunksForSections(Collection<Pair<Long, Long>> sections) { // use SortedSet to eliminate duplicates and sort by chunk offset SortedSet<Chunk> offsets = new TreeSet<Chunk>(new Comparator<Chunk>() { public int compare(Chunk o1, Chunk o2) { return Longs.compare(o1.offset, o2.offset); } }); for (Pair<Long, Long> section : sections) { int startIndex = (int) (section.left / parameters.chunkLength()); int endIndex = (int) (section.right / parameters.chunkLength()); endIndex = section.right % parameters.chunkLength() == 0 ? 
endIndex - 1 : endIndex; for (int i = startIndex; i <= endIndex; i++) { long offset = i * 8; long chunkOffset = chunkOffsets.getLong(offset); long nextChunkOffset = offset + 8 == chunkOffsets.size() ? compressedFileLength : chunkOffsets.getLong(offset + 8); offsets.add(new Chunk(chunkOffset, (int) (nextChunkOffset - chunkOffset - 4))); // "4" bytes reserved for checksum } } return offsets.toArray(new Chunk[offsets.size()]); } public void close() { chunkOffsets.free(); } public static class Writer extends RandomAccessFile { // place for uncompressed data length in the index file private long dataLengthOffset = -1; // path to the file private final String filePath; private Writer(String path) throws FileNotFoundException { super(path, "rw"); filePath = path; } public static Writer open(String path) { try { return new Writer(path); } catch (FileNotFoundException e) { throw new RuntimeException(e); } } public void writeHeader(CompressionParameters parameters) { try { writeUTF(parameters.sstableCompressor.getClass().getSimpleName()); writeInt(parameters.otherOptions.size()); for (Map.Entry<String, String> entry : parameters.otherOptions.entrySet()) { writeUTF(entry.getKey()); writeUTF(entry.getValue()); } // store the length of the chunk writeInt(parameters.chunkLength()); // store position and reserve a place for uncompressed data length and chunks count dataLengthOffset = getFilePointer(); writeLong(-1); writeInt(-1); } catch (IOException e) { throw new FSWriteError(e, filePath); } } public void finalizeHeader(long dataLength, int chunks) { assert dataLengthOffset != -1 : "writeHeader wasn't called"; long currentPosition; try { currentPosition = getFilePointer(); } catch (IOException e) { throw new FSReadError(e, filePath); } try { // seek back to the data length position seek(dataLengthOffset); // write uncompressed data length and chunks count writeLong(dataLength); writeInt(chunks); // seek forward to the previous position seek(currentPosition); } catch (IOException e) { throw new FSWriteError(e, filePath); } } /** * Get a chunk offset by it's index. * * @param chunkIndex Index of the chunk. * * @return offset of the chunk in the compressed file. */ public long chunkOffsetBy(int chunkIndex) { if (dataLengthOffset == -1) throw new IllegalStateException("writeHeader wasn't called"); try { long position = getFilePointer(); // seek to the position of the given chunk seek(dataLengthOffset + 8 // size reserved for uncompressed data length + 4 // size reserved for chunk count + (chunkIndex * 8L)); try { return readLong(); } finally { // back to the original position seek(position); } } catch (IOException e) { throw new FSReadError(e, filePath); } } /** * Reset the writer so that the next chunk offset written will be the * one of {@code chunkIndex}. 
*/ public void resetAndTruncate(int chunkIndex) { try { seek(dataLengthOffset + 8 // size reserved for uncompressed data length + 4 // size reserved for chunk count + (chunkIndex * 8L)); getChannel().truncate(getFilePointer()); } catch (IOException e) { throw new FSWriteError(e, filePath); } } public void close() throws IOException { getFD().sync(); super.close(); } } /** * Holds offset and length of the file chunk */ public static class Chunk { public static final IVersionedSerializer<Chunk> serializer = new ChunkSerializer(); public final long offset; public final int length; public Chunk(long offset, int length) { this.offset = offset; this.length = length; } public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Chunk chunk = (Chunk) o; return length == chunk.length && offset == chunk.offset; } public int hashCode() { int result = (int) (offset ^ (offset >>> 32)); result = 31 * result + length; return result; } public String toString() { return String.format("Chunk<offset: %d, length: %d>", offset, length); } } static class ChunkSerializer implements IVersionedSerializer<Chunk> { public void serialize(Chunk chunk, DataOutput out, int version) throws IOException { out.writeLong(chunk.offset); out.writeInt(chunk.length); } public Chunk deserialize(DataInput in, int version) throws IOException { return new Chunk(in.readLong(), in.readInt()); } public long serializedSize(Chunk chunk, int version) { long size = TypeSizes.NATIVE.sizeof(chunk.offset); size += TypeSizes.NATIVE.sizeof(chunk.length); return size; } } }
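The Writer above reserves fixed-size slots in the index header — writeLong(-1) for the uncompressed data length and writeInt(-1) for the chunk count — and chunkOffsetBy()/resetAndTruncate() later seek past them to reach the fixed 8-byte offset slot for each chunk. A sketch of that layout arithmetic, with illustrative names, assuming the same field order as writeHeader():

final class IndexHeaderLayout {
    static final int DATA_LENGTH_BYTES = 8; // reserved by writeLong(-1) in writeHeader()
    static final int CHUNK_COUNT_BYTES = 4; // reserved by writeInt(-1) in writeHeader()

    // File position of the 8-byte offset slot for chunkIndex, relative to the
    // dataLengthOffset captured in writeHeader(); mirrors the seek() performed
    // by chunkOffsetBy() and resetAndTruncate().
    static long offsetSlotPosition(long dataLengthOffset, int chunkIndex) {
        return dataLengthOffset + DATA_LENGTH_BYTES + CHUNK_COUNT_BYTES + chunkIndex * 8L;
    }
}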
make CM.Writer.close idempotent
src/java/org/apache/cassandra/io/compress/CompressionMetadata.java
make CM.Writer.close idempotent
<ide><path>src/java/org/apache/cassandra/io/compress/CompressionMetadata.java
<ide>
<ide>         public void close() throws IOException
<ide>         {
<del>            getFD().sync();
<add>            if (getChannel().isOpen()) // if RAF.closed were public we could just use that, but it's not
<add>                getChannel().force(true);
<ide>            super.close();
<ide>         }
<ide>     }
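The fix in this diff works because RandomAccessFile exposes no public closed flag, but its FileChannel does report isOpen(), and the Closeable contract specifies that closing an already-closed stream has no effect. A standalone sketch of the resulting idempotent-close pattern; the class name is hypothetical and only the close logic is taken from the diff:

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

class SyncOnCloseFile extends RandomAccessFile {
    SyncOnCloseFile(File file) throws IOException {
        super(file, "rw");
    }

    @Override
    public void close() throws IOException {
        // FileChannel.force() throws ClosedChannelException on a closed channel,
        // so gate the flush on isOpen().
        if (getChannel().isOpen())
            getChannel().force(true); // flush file content and metadata to disk
        super.close(); // Closeable contract: a second close() is a no-op
    }
}

Calling close() twice on this class is now harmless: the second call skips the force() and the superclass close is specified to do nothing once the stream is closed.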
Java
apache-2.0
07f998e45746f2da0f0364dfc763a50979505c7f
0
apache/logging-log4j2,codescale/logging-log4j2,GFriedrich/logging-log4j2,neuro-sys/logging-log4j2,lqbweb/logging-log4j2,MagicWiz/log4j2,ChetnaChaudhari/logging-log4j2,MagicWiz/log4j2,xnslong/logging-log4j2,renchunxiao/logging-log4j2,neuro-sys/logging-log4j2,GFriedrich/logging-log4j2,lqbweb/logging-log4j2,pisfly/logging-log4j2,lburgazzoli/logging-log4j2,jsnikhil/nj-logging-log4j2,jsnikhil/nj-logging-log4j2,codescale/logging-log4j2,apache/logging-log4j2,lburgazzoli/apache-logging-log4j2,lburgazzoli/apache-logging-log4j2,xnslong/logging-log4j2,renchunxiao/logging-log4j2,ChetnaChaudhari/logging-log4j2,jinxuan/logging-log4j2,lburgazzoli/logging-log4j2,xnslong/logging-log4j2,pisfly/logging-log4j2,lburgazzoli/apache-logging-log4j2,apache/logging-log4j2,lqbweb/logging-log4j2,lburgazzoli/logging-log4j2,GFriedrich/logging-log4j2,codescale/logging-log4j2,jinxuan/logging-log4j2
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache license, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the license for the specific language governing permissions and * limitations under the license. */ package org.apache.logging.log4j.core.appender.routing; import java.io.File; import java.util.List; import org.apache.logging.log4j.EventLogger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.junit.CleanFiles; import org.apache.logging.log4j.junit.InitialLoggerContext; import org.apache.logging.log4j.message.StructuredDataMessage; import org.apache.logging.log4j.test.appender.ListAppender; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import static org.junit.Assert.*; /** * */ public class RoutingAppenderTest { private static final String CONFIG = "log4j-routing.xml"; private static final String UNKNOWN_LOG_FILE = "target/rolling1/rollingtest-Unknown.log"; private static final String ALERT_LOG_FILE = "target/routing1/routingtest-Alert.log"; private static final String ACTIVITY_LOG_FILE = "target/routing1/routingtest-Activity.log"; private ListAppender app; @Rule public InitialLoggerContext init = new InitialLoggerContext(CONFIG); @Rule public CleanFiles files = new CleanFiles(UNKNOWN_LOG_FILE, ALERT_LOG_FILE, ACTIVITY_LOG_FILE); @Before public void setUp() throws Exception { this.app = this.init.getListAppender("List"); } @After public void tearDown() throws Exception { this.app.clear(); } @Test public void routingTest() { StructuredDataMessage msg = new StructuredDataMessage("Test", "This is a test", "Service"); EventLogger.logEvent(msg); final List<LogEvent> list = app.getEvents(); assertNotNull("No events generated", list); assertTrue("Incorrect number of events. Expected 1, got " + list.size(), list.size() == 1); msg = new StructuredDataMessage("Test", "This is a test", "Alert"); EventLogger.logEvent(msg); File file = new File(ALERT_LOG_FILE); assertTrue("Alert file was not created", file.exists()); msg = new StructuredDataMessage("Test", "This is a test", "Activity"); EventLogger.logEvent(msg); file = new File(ACTIVITY_LOG_FILE); assertTrue("Activity file was not created", file.exists()); } }
log4j-core/src/test/java/org/apache/logging/log4j/core/appender/routing/RoutingAppenderTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache license, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the license for the specific language governing permissions and * limitations under the license. */ package org.apache.logging.log4j.core.appender.routing; import java.io.File; import java.util.List; import org.apache.logging.log4j.EventLogger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.junit.CleanFiles; import org.apache.logging.log4j.junit.InitialLoggerContext; import org.apache.logging.log4j.message.StructuredDataMessage; import org.apache.logging.log4j.test.appender.ListAppender; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import static org.junit.Assert.*; /** * */ public class RoutingAppenderTest { private static final String CONFIG = "log4j-routing.xml"; private static final String UNKNOWN_LOG_FILE = "target/rolling1/rollingtest-Unknown.log"; private static final String ALERT_LOG_FILE = "target/routing1/routingtest-Alert.log"; private static final String ACTIVITY_LOG_FILE = "target/routing1/routingtest-Activity.log"; private ListAppender app; @Rule public InitialLoggerContext init = new InitialLoggerContext(CONFIG); @Rule public CleanFiles files = new CleanFiles(UNKNOWN_LOG_FILE, ALERT_LOG_FILE, ACTIVITY_LOG_FILE); @Before public void setUp() throws Exception { this.app = (ListAppender) this.init.getAppender("List"); } @After public void tearDown() throws Exception { this.app.clear(); } @Test public void routingTest() { StructuredDataMessage msg = new StructuredDataMessage("Test", "This is a test", "Service"); EventLogger.logEvent(msg); final List<LogEvent> list = app.getEvents(); assertNotNull("No events generated", list); assertTrue("Incorrect number of events. Expected 1, got " + list.size(), list.size() == 1); msg = new StructuredDataMessage("Test", "This is a test", "Alert"); EventLogger.logEvent(msg); File file = new File(ALERT_LOG_FILE); assertTrue("Alert file was not created", file.exists()); msg = new StructuredDataMessage("Test", "This is a test", "Activity"); EventLogger.logEvent(msg); file = new File(ACTIVITY_LOG_FILE); assertTrue("Activity file was not created", file.exists()); } }
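The test above relies on JUnit 4 @Rules (InitialLoggerContext, CleanFiles) to set up a logger context and delete the produced log files after each test. A generic sketch of such a cleanup rule built on org.junit.rules.ExternalResource — this is the standard JUnit pattern, not log4j's actual CleanFiles implementation:

import java.io.File;
import org.junit.rules.ExternalResource;

class CleanFilesRule extends ExternalResource {
    private final String[] paths;

    CleanFilesRule(String... paths) {
        this.paths = paths;
    }

    @Override
    protected void after() {
        // runs after each test, whether it passed or failed
        for (String path : paths) {
            File file = new File(path);
            if (file.exists() && !file.delete())
                file.deleteOnExit(); // best effort if the file is still locked
        }
    }
}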
Use getListAppender. git-svn-id: de5ce936019686f47409c93bcc5e202a9739563b@1618801 13f79535-47bb-0310-9956-ffa450edef68
log4j-core/src/test/java/org/apache/logging/log4j/core/appender/routing/RoutingAppenderTest.java
Use getListAppender.
<ide><path>log4j-core/src/test/java/org/apache/logging/log4j/core/appender/routing/RoutingAppenderTest.java
<ide>
<ide>     @Before
<ide>     public void setUp() throws Exception {
<del>        this.app = (ListAppender) this.init.getAppender("List");
<add>        this.app = this.init.getListAppender("List");
<ide>     }
<ide>
<ide>     @After
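The one-line change replaces an unchecked cast with a typed accessor. A sketch of why that helper shape is preferable, using a hypothetical registry class — none of these names are log4j API:

import java.util.Map;

final class AppenderRegistry {
    private final Map<String, Object> appenders;

    AppenderRegistry(Map<String, Object> appenders) {
        this.appenders = appenders;
    }

    // One checked lookup replaces scattered unchecked casts at the call sites.
    <T> T getAppender(String name, Class<T> type) {
        Object appender = appenders.get(name);
        if (appender == null)
            throw new IllegalArgumentException("no appender named " + name);
        return type.cast(appender); // fails fast with a clear ClassCastException
    }
}

A call like registry.getAppender("List", ListAppender.class) then mirrors init.getListAppender("List"): the class check happens in one audited place instead of at every test site.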
Java
apache-2.0
0649c7b9584bb7e0a1f24c25b846e5aa804e2e2f
0
stari4ek/ExoPlayer,google/ExoPlayer,ened/ExoPlayer,ened/ExoPlayer,stari4ek/ExoPlayer,amzn/exoplayer-amazon-port,google/ExoPlayer,amzn/exoplayer-amazon-port,stari4ek/ExoPlayer,amzn/exoplayer-amazon-port,google/ExoPlayer,androidx/media,androidx/media,androidx/media,ened/ExoPlayer
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.mediacodec; import android.annotation.TargetApi; import android.media.MediaCodec; import android.media.MediaCodec.CodecException; import android.media.MediaCodec.CryptoException; import android.media.MediaCrypto; import android.media.MediaCryptoException; import android.media.MediaFormat; import android.os.Bundle; import android.os.SystemClock; import androidx.annotation.CallSuper; import androidx.annotation.CheckResult; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.BaseRenderer; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.decoder.CryptoInfo; import com.google.android.exoplayer2.decoder.DecoderCounters; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.drm.DrmSession; import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException; import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException; import com.google.android.exoplayer2.source.MediaPeriod; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.NalUnitUtil; import com.google.android.exoplayer2.util.TimedValueQueue; import com.google.android.exoplayer2.util.TraceUtil; import com.google.android.exoplayer2.util.Util; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.nio.ByteBuffer; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.List; /** * An abstract renderer that uses {@link MediaCodec} to decode samples for rendering. */ public abstract class MediaCodecRenderer extends BaseRenderer { /** * The modes to operate the {@link MediaCodec}. 
* * <p>Allowed values: * * <ul> * <li>{@link #OPERATION_MODE_SYNCHRONOUS} * <li>{@link #OPERATION_MODE_ASYNCHRONOUS_PLAYBACK_THREAD} * <li>{@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD} * <li>{@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK} * </ul> */ @Documented @Retention(RetentionPolicy.SOURCE) @Target({ElementType.TYPE_PARAMETER, ElementType.TYPE_USE}) @IntDef({ OPERATION_MODE_SYNCHRONOUS, OPERATION_MODE_ASYNCHRONOUS_PLAYBACK_THREAD, OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD, OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK, OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_ASYNCHRONOUS_QUEUEING, OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK_ASYNCHRONOUS_QUEUEING }) public @interface MediaCodecOperationMode {} /** Operates the {@link MediaCodec} in synchronous mode. */ public static final int OPERATION_MODE_SYNCHRONOUS = 0; /** * Operates the {@link MediaCodec} in asynchronous mode and routes {@link MediaCodec.Callback} * callbacks to the playback thread. */ public static final int OPERATION_MODE_ASYNCHRONOUS_PLAYBACK_THREAD = 1; /** * Operates the {@link MediaCodec} in asynchronous mode and routes {@link MediaCodec.Callback} * callbacks to a dedicated thread. */ public static final int OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD = 2; /** * Operates the {@link MediaCodec} in asynchronous mode and routes {@link MediaCodec.Callback} * callbacks to a dedicated thread. Uses granular locking for input and output buffers. */ public static final int OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK = 3; /** * Same as {@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD}, and offloads queueing to another * thread. */ public static final int OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_ASYNCHRONOUS_QUEUEING = 4; /** * Same as {@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK}, and offloads queueing * to another thread. */ public static final int OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK_ASYNCHRONOUS_QUEUEING = 5; /** Thrown when a failure occurs instantiating a decoder. */ public static class DecoderInitializationException extends Exception { private static final int CUSTOM_ERROR_CODE_BASE = -50000; private static final int NO_SUITABLE_DECODER_ERROR = CUSTOM_ERROR_CODE_BASE + 1; private static final int DECODER_QUERY_ERROR = CUSTOM_ERROR_CODE_BASE + 2; /** * The mime type for which a decoder was being initialized. */ public final String mimeType; /** * Whether it was required that the decoder support a secure output path. */ public final boolean secureDecoderRequired; /** * The {@link MediaCodecInfo} of the decoder that failed to initialize. Null if no suitable * decoder was found. */ @Nullable public final MediaCodecInfo codecInfo; /** An optional developer-readable diagnostic information string. May be null. */ @Nullable public final String diagnosticInfo; /** * If the decoder failed to initialize and another decoder being used as a fallback also failed * to initialize, the {@link DecoderInitializationException} for the fallback decoder. Null if * there was no fallback decoder or no suitable decoders were found. 
*/ @Nullable public final DecoderInitializationException fallbackDecoderInitializationException; public DecoderInitializationException( Format format, @Nullable Throwable cause, boolean secureDecoderRequired, int errorCode) { this( "Decoder init failed: [" + errorCode + "], " + format, cause, format.sampleMimeType, secureDecoderRequired, /* mediaCodecInfo= */ null, buildCustomDiagnosticInfo(errorCode), /* fallbackDecoderInitializationException= */ null); } public DecoderInitializationException( Format format, @Nullable Throwable cause, boolean secureDecoderRequired, MediaCodecInfo mediaCodecInfo) { this( "Decoder init failed: " + mediaCodecInfo.name + ", " + format, cause, format.sampleMimeType, secureDecoderRequired, mediaCodecInfo, Util.SDK_INT >= 21 ? getDiagnosticInfoV21(cause) : null, /* fallbackDecoderInitializationException= */ null); } private DecoderInitializationException( String message, @Nullable Throwable cause, String mimeType, boolean secureDecoderRequired, @Nullable MediaCodecInfo mediaCodecInfo, @Nullable String diagnosticInfo, @Nullable DecoderInitializationException fallbackDecoderInitializationException) { super(message, cause); this.mimeType = mimeType; this.secureDecoderRequired = secureDecoderRequired; this.codecInfo = mediaCodecInfo; this.diagnosticInfo = diagnosticInfo; this.fallbackDecoderInitializationException = fallbackDecoderInitializationException; } @CheckResult private DecoderInitializationException copyWithFallbackException( DecoderInitializationException fallbackException) { return new DecoderInitializationException( getMessage(), getCause(), mimeType, secureDecoderRequired, codecInfo, diagnosticInfo, fallbackException); } @RequiresApi(21) @Nullable private static String getDiagnosticInfoV21(@Nullable Throwable cause) { if (cause instanceof CodecException) { return ((CodecException) cause).getDiagnosticInfo(); } return null; } private static String buildCustomDiagnosticInfo(int errorCode) { String sign = errorCode < 0 ? "neg_" : ""; return "com.google.android.exoplayer2.mediacodec.MediaCodecRenderer_" + sign + Math.abs(errorCode); } } /** Thrown when a failure occurs in the decoder. */ public static class DecoderException extends Exception { /** The {@link MediaCodecInfo} of the decoder that failed. Null if unknown. */ @Nullable public final MediaCodecInfo codecInfo; /** An optional developer-readable diagnostic information string. May be null. */ @Nullable public final String diagnosticInfo; public DecoderException(Throwable cause, @Nullable MediaCodecInfo codecInfo) { super("Decoder failed: " + (codecInfo == null ? null : codecInfo.name), cause); this.codecInfo = codecInfo; diagnosticInfo = Util.SDK_INT >= 21 ? getDiagnosticInfoV21(cause) : null; } @RequiresApi(21) private static String getDiagnosticInfoV21(Throwable cause) { if (cause instanceof CodecException) { return ((CodecException) cause).getDiagnosticInfo(); } return null; } } /** Indicates no codec operating rate should be set. */ protected static final float CODEC_OPERATING_RATE_UNSET = -1; private static final String TAG = "MediaCodecRenderer"; /** * If the {@link MediaCodec} is hotswapped (i.e. replaced during playback), this is the period of * time during which {@link #isReady()} will report true regardless of whether the new codec has * output frames that are ready to be rendered. * <p> * This allows codec hotswapping to be performed seamlessly, without interrupting the playback of * other renderers, provided the new codec is able to decode some frames within this time period. 
*/ private static final long MAX_CODEC_HOTSWAP_TIME_MS = 1000; // Generally there is zero or one pending output stream offset. We track more offsets to allow for // pending output streams that have fewer frames than the codec latency. private static final int MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT = 10; /** * The possible return values for {@link #canKeepCodec(MediaCodec, MediaCodecInfo, Format, * Format)}. */ @Documented @Retention(RetentionPolicy.SOURCE) @IntDef({ KEEP_CODEC_RESULT_NO, KEEP_CODEC_RESULT_YES_WITH_FLUSH, KEEP_CODEC_RESULT_YES_WITH_RECONFIGURATION, KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION }) protected @interface KeepCodecResult {} /** The codec cannot be kept. */ protected static final int KEEP_CODEC_RESULT_NO = 0; /** The codec can be kept, but must be flushed. */ protected static final int KEEP_CODEC_RESULT_YES_WITH_FLUSH = 1; /** * The codec can be kept. It does not need to be flushed, but must be reconfigured by prefixing * the next input buffer with the new format's configuration data. */ protected static final int KEEP_CODEC_RESULT_YES_WITH_RECONFIGURATION = 2; /** The codec can be kept. It does not need to be flushed and no reconfiguration is required. */ protected static final int KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION = 3; @Documented @Retention(RetentionPolicy.SOURCE) @IntDef({ RECONFIGURATION_STATE_NONE, RECONFIGURATION_STATE_WRITE_PENDING, RECONFIGURATION_STATE_QUEUE_PENDING }) private @interface ReconfigurationState {} /** * There is no pending adaptive reconfiguration work. */ private static final int RECONFIGURATION_STATE_NONE = 0; /** * Codec configuration data needs to be written into the next buffer. */ private static final int RECONFIGURATION_STATE_WRITE_PENDING = 1; /** * Codec configuration data has been written into the next buffer, but that buffer still needs to * be returned to the codec. */ private static final int RECONFIGURATION_STATE_QUEUE_PENDING = 2; @Documented @Retention(RetentionPolicy.SOURCE) @IntDef({DRAIN_STATE_NONE, DRAIN_STATE_SIGNAL_END_OF_STREAM, DRAIN_STATE_WAIT_END_OF_STREAM}) private @interface DrainState {} /** The codec is not being drained. */ private static final int DRAIN_STATE_NONE = 0; /** The codec needs to be drained, but we haven't signaled an end of stream to it yet. */ private static final int DRAIN_STATE_SIGNAL_END_OF_STREAM = 1; /** The codec needs to be drained, and we're waiting for it to output an end of stream. */ private static final int DRAIN_STATE_WAIT_END_OF_STREAM = 2; @Documented @Retention(RetentionPolicy.SOURCE) @IntDef({ DRAIN_ACTION_NONE, DRAIN_ACTION_FLUSH, DRAIN_ACTION_UPDATE_DRM_SESSION, DRAIN_ACTION_REINITIALIZE }) private @interface DrainAction {} /** No special action should be taken. */ private static final int DRAIN_ACTION_NONE = 0; /** The codec should be flushed. */ private static final int DRAIN_ACTION_FLUSH = 1; /** The codec should be flushed and updated to use the pending DRM session. */ private static final int DRAIN_ACTION_UPDATE_DRM_SESSION = 2; /** The codec should be reinitialized. */ private static final int DRAIN_ACTION_REINITIALIZE = 3; @Documented @Retention(RetentionPolicy.SOURCE) @IntDef({ ADAPTATION_WORKAROUND_MODE_NEVER, ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION, ADAPTATION_WORKAROUND_MODE_ALWAYS }) private @interface AdaptationWorkaroundMode {} /** * The adaptation workaround is never used. */ private static final int ADAPTATION_WORKAROUND_MODE_NEVER = 0; /** * The adaptation workaround is used when adapting between formats of the same resolution only. 
*/ private static final int ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION = 1; /** * The adaptation workaround is always used when adapting between formats. */ private static final int ADAPTATION_WORKAROUND_MODE_ALWAYS = 2; /** * H.264/AVC buffer to queue when using the adaptation workaround (see {@link * #codecAdaptationWorkaroundMode(String)}. Consists of three NAL units with start codes: Baseline * sequence/picture parameter sets and a 32 * 32 pixel IDR slice. This stream can be queued to * force a resolution change when adapting to a new format. */ private static final byte[] ADAPTATION_WORKAROUND_BUFFER = new byte[] { 0, 0, 1, 103, 66, -64, 11, -38, 37, -112, 0, 0, 1, 104, -50, 15, 19, 32, 0, 0, 1, 101, -120, -124, 13, -50, 113, 24, -96, 0, 47, -65, 28, 49, -61, 39, 93, 120 }; private static final int ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT = 32; private final MediaCodecSelector mediaCodecSelector; private final boolean enableDecoderFallback; private final float assumedMinimumCodecOperatingRate; private final DecoderInputBuffer buffer; private final DecoderInputBuffer flagsOnlyBuffer; private final TimedValueQueue<Format> formatQueue; private final ArrayList<Long> decodeOnlyPresentationTimestamps; private final MediaCodec.BufferInfo outputBufferInfo; private final long[] pendingOutputStreamOffsetsUs; private final long[] pendingOutputStreamSwitchTimesUs; @Nullable private Format inputFormat; private Format outputFormat; @Nullable private DrmSession<FrameworkMediaCrypto> codecDrmSession; @Nullable private DrmSession<FrameworkMediaCrypto> sourceDrmSession; @Nullable private MediaCrypto mediaCrypto; private boolean mediaCryptoRequiresSecureDecoder; private long renderTimeLimitMs; private float rendererOperatingRate; @Nullable private MediaCodec codec; @Nullable private MediaCodecAdapter codecAdapter; @Nullable private Format codecFormat; private float codecOperatingRate; @Nullable private ArrayDeque<MediaCodecInfo> availableCodecInfos; @Nullable private DecoderInitializationException preferredDecoderInitializationException; @Nullable private MediaCodecInfo codecInfo; @AdaptationWorkaroundMode private int codecAdaptationWorkaroundMode; private boolean codecNeedsReconfigureWorkaround; private boolean codecNeedsDiscardToSpsWorkaround; private boolean codecNeedsFlushWorkaround; private boolean codecNeedsEosFlushWorkaround; private boolean codecNeedsEosOutputExceptionWorkaround; private boolean codecNeedsMonoChannelCountWorkaround; private boolean codecNeedsAdaptationWorkaroundBuffer; private boolean shouldSkipAdaptationWorkaroundOutputBuffer; private boolean codecNeedsEosPropagation; private ByteBuffer[] inputBuffers; private ByteBuffer[] outputBuffers; private long codecHotswapDeadlineMs; private int inputIndex; private int outputIndex; private ByteBuffer outputBuffer; private boolean isDecodeOnlyOutputBuffer; private boolean isLastOutputBuffer; private boolean codecReconfigured; @ReconfigurationState private int codecReconfigurationState; @DrainState private int codecDrainState; @DrainAction private int codecDrainAction; private boolean codecReceivedBuffers; private boolean codecReceivedEos; private long largestQueuedPresentationTimeUs; private long lastBufferInStreamPresentationTimeUs; private boolean inputStreamEnded; private boolean outputStreamEnded; private boolean waitingForKeys; private boolean waitingForFirstSyncSample; private boolean waitingForFirstSampleInFormat; private boolean pendingOutputEndOfStream; @MediaCodecOperationMode private int mediaCodecOperationMode; protected 
DecoderCounters decoderCounters; private long outputStreamOffsetUs; private int pendingOutputStreamOffsetCount; /** * @param trackType The track type that the renderer handles. One of the {@code C.TRACK_TYPE_*} * constants defined in {@link C}. * @param mediaCodecSelector A decoder selector. * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder * initialization fails. This may result in using a decoder that is less efficient or slower * than the primary decoder. * @param assumedMinimumCodecOperatingRate A codec operating rate that all codecs instantiated by * this renderer are assumed to meet implicitly (i.e. without the operating rate being set * explicitly using {@link MediaFormat#KEY_OPERATING_RATE}). */ public MediaCodecRenderer( int trackType, MediaCodecSelector mediaCodecSelector, boolean enableDecoderFallback, float assumedMinimumCodecOperatingRate) { super(trackType); this.mediaCodecSelector = Assertions.checkNotNull(mediaCodecSelector); this.enableDecoderFallback = enableDecoderFallback; this.assumedMinimumCodecOperatingRate = assumedMinimumCodecOperatingRate; buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED); flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance(); formatQueue = new TimedValueQueue<>(); decodeOnlyPresentationTimestamps = new ArrayList<>(); outputBufferInfo = new MediaCodec.BufferInfo(); rendererOperatingRate = 1f; renderTimeLimitMs = C.TIME_UNSET; mediaCodecOperationMode = OPERATION_MODE_SYNCHRONOUS; pendingOutputStreamOffsetsUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT]; pendingOutputStreamSwitchTimesUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT]; outputStreamOffsetUs = C.TIME_UNSET; resetCodecStateForRelease(); } /** * Set a limit on the time a single {@link #render(long, long)} call can spend draining and * filling the decoder. * * <p>This method is experimental, and will be renamed or removed in a future release. It should * only be called before the renderer is used. * * @param renderTimeLimitMs The render time limit in milliseconds, or {@link C#TIME_UNSET} for no * limit. */ public void experimental_setRenderTimeLimitMs(long renderTimeLimitMs) { this.renderTimeLimitMs = renderTimeLimitMs; } /** * Set the mode of operation of the underlying {@link MediaCodec}. * * <p>This method is experimental, and will be renamed or removed in a future release. It should * only be called before the renderer is used. * * @param mode The mode of the MediaCodec. The supported modes are: * <ul> * <li>{@link #OPERATION_MODE_SYNCHRONOUS}: The {@link MediaCodec} will operate in * synchronous mode. * <li>{@link #OPERATION_MODE_ASYNCHRONOUS_PLAYBACK_THREAD}: The {@link MediaCodec} will * operate in asynchronous mode and {@link MediaCodec.Callback} callbacks will be routed * to the playback thread. This mode requires API level &ge; 21; if the API level is * &le; 20, the operation mode will be set to {@link * MediaCodecRenderer#OPERATION_MODE_SYNCHRONOUS}. * <li>{@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD}: The {@link MediaCodec} will * operate in asynchronous mode and {@link MediaCodec.Callback} callbacks will be routed * to a dedicated thread. This mode requires API level &ge; 23; if the API level is &le; * 22, the operation mode will be set to {@link #OPERATION_MODE_SYNCHRONOUS}. 
* <li>{@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK}: Same as {@link * #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD} and, in addition, input buffers will * submitted to the {@link MediaCodec} in a separate thread. * <li>{@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_ASYNCHRONOUS_QUEUEING}: Same as * {@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD} and, in addition, input buffers * will be submitted to the {@link MediaCodec} in a separate thread. * <li>{@link * #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK_ASYNCHRONOUS_QUEUEING}: Same * as {@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK} and, in addition, * input buffers will be submitted to the {@link MediaCodec} in a separate thread. * </ul> * By default, the operation mode is set to {@link * MediaCodecRenderer#OPERATION_MODE_SYNCHRONOUS}. */ public void experimental_setMediaCodecOperationMode(@MediaCodecOperationMode int mode) { mediaCodecOperationMode = mode; } @Override @AdaptiveSupport public final int supportsMixedMimeTypeAdaptation() { return ADAPTIVE_NOT_SEAMLESS; } @Override @Capabilities public final int supportsFormat(Format format) throws ExoPlaybackException { try { return supportsFormat(mediaCodecSelector, format); } catch (DecoderQueryException e) { throw createRendererException(e, format); } } /** * Returns the {@link Capabilities} for the given {@link Format}. * * @param mediaCodecSelector The decoder selector. * @param format The {@link Format}. * @return The {@link Capabilities} for this {@link Format}. * @throws DecoderQueryException If there was an error querying decoders. */ @Capabilities protected abstract int supportsFormat( MediaCodecSelector mediaCodecSelector, Format format) throws DecoderQueryException; /** * Returns a list of decoders that can decode media in the specified format, in priority order. * * @param mediaCodecSelector The decoder selector. * @param format The {@link Format} for which a decoder is required. * @param requiresSecureDecoder Whether a secure decoder is required. * @return A list of {@link MediaCodecInfo}s corresponding to decoders. May be empty. * @throws DecoderQueryException Thrown if there was an error querying decoders. */ protected abstract List<MediaCodecInfo> getDecoderInfos( MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder) throws DecoderQueryException; /** * Configures a newly created {@link MediaCodec}. * * @param codecInfo Information about the {@link MediaCodec} being configured. * @param codec The {@link MediaCodec} to configure. * @param format The {@link Format} for which the codec is being configured. * @param crypto For drm protected playbacks, a {@link MediaCrypto} to use for decryption. * @param codecOperatingRate The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if * no codec operating rate should be set. */ protected abstract void configureCodec( MediaCodecInfo codecInfo, MediaCodec codec, Format format, @Nullable MediaCrypto crypto, float codecOperatingRate); protected final void maybeInitCodec() throws ExoPlaybackException { if (codec != null || inputFormat == null) { // We have a codec already, or we don't have a format with which to instantiate one. 
return; } setCodecDrmSession(sourceDrmSession); String mimeType = inputFormat.sampleMimeType; if (codecDrmSession != null) { if (mediaCrypto == null) { FrameworkMediaCrypto sessionMediaCrypto = codecDrmSession.getMediaCrypto(); if (sessionMediaCrypto == null) { DrmSessionException drmError = codecDrmSession.getError(); if (drmError != null) { // Continue for now. We may be able to avoid failure if the session recovers, or if a // new input format causes the session to be replaced before it's used. } else { // The drm session isn't open yet. return; } } else { try { mediaCrypto = new MediaCrypto(sessionMediaCrypto.uuid, sessionMediaCrypto.sessionId); } catch (MediaCryptoException e) { throw createRendererException(e, inputFormat); } mediaCryptoRequiresSecureDecoder = !sessionMediaCrypto.forceAllowInsecureDecoderComponents && mediaCrypto.requiresSecureDecoderComponent(mimeType); } } if (FrameworkMediaCrypto.WORKAROUND_DEVICE_NEEDS_KEYS_TO_CONFIGURE_CODEC) { @DrmSession.State int drmSessionState = codecDrmSession.getState(); if (drmSessionState == DrmSession.STATE_ERROR) { throw createRendererException(codecDrmSession.getError(), inputFormat); } else if (drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS) { // Wait for keys. return; } } } try { maybeInitCodecWithFallback(mediaCrypto, mediaCryptoRequiresSecureDecoder); } catch (DecoderInitializationException e) { throw createRendererException(e, inputFormat); } } protected boolean shouldInitCodec(MediaCodecInfo codecInfo) { return true; } /** * Returns whether the codec needs the renderer to propagate the end-of-stream signal directly, * rather than by using an end-of-stream buffer queued to the codec. */ protected boolean getCodecNeedsEosPropagation() { return false; } /** * Polls the pending output format queue for a given buffer timestamp. If a format is present, it * is removed and returned. Otherwise returns {@code null}. Subclasses should only call this * method if they are taking over responsibility for output format propagation (e.g., when using * video tunneling). 
*/ @Nullable protected final Format updateOutputFormatForTime(long presentationTimeUs) { Format format = formatQueue.pollFloor(presentationTimeUs); if (format != null) { outputFormat = format; } return format; } @Nullable protected final Format getCurrentOutputFormat() { return outputFormat; } @Nullable protected final MediaCodec getCodec() { return codec; } @Nullable protected final MediaCodecInfo getCodecInfo() { return codecInfo; } @Override protected void onEnabled(boolean joining, boolean mayRenderStartOfStream) throws ExoPlaybackException { decoderCounters = new DecoderCounters(); } @Override protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException { if (outputStreamOffsetUs == C.TIME_UNSET) { outputStreamOffsetUs = offsetUs; } else { if (pendingOutputStreamOffsetCount == pendingOutputStreamOffsetsUs.length) { Log.w( TAG, "Too many stream changes, so dropping offset: " + pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1]); } else { pendingOutputStreamOffsetCount++; } pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1] = offsetUs; pendingOutputStreamSwitchTimesUs[pendingOutputStreamOffsetCount - 1] = largestQueuedPresentationTimeUs; } } @Override protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { inputStreamEnded = false; outputStreamEnded = false; pendingOutputEndOfStream = false; flushOrReinitializeCodec(); // If there is a format change on the input side still pending propagation to the output, we // need to queue a format next time a buffer is read. This is because we may not read a new // input format after the position reset. if (formatQueue.size() > 0) { waitingForFirstSampleInFormat = true; } formatQueue.clear(); if (pendingOutputStreamOffsetCount != 0) { outputStreamOffsetUs = pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1]; pendingOutputStreamOffsetCount = 0; } } @Override public final void setOperatingRate(float operatingRate) throws ExoPlaybackException { rendererOperatingRate = operatingRate; if (codec != null && codecDrainAction != DRAIN_ACTION_REINITIALIZE && getState() != STATE_DISABLED) { updateCodecOperatingRate(); } } @Override protected void onDisabled() { inputFormat = null; outputStreamOffsetUs = C.TIME_UNSET; pendingOutputStreamOffsetCount = 0; if (sourceDrmSession != null || codecDrmSession != null) { // TODO: Do something better with this case. onReset(); } else { flushOrReleaseCodec(); } } @Override protected void onReset() { try { releaseCodec(); } finally { setSourceDrmSession(null); } } protected void releaseCodec() { try { if (codecAdapter != null) { codecAdapter.shutdown(); } if (codec != null) { decoderCounters.decoderReleaseCount++; codec.release(); } } finally { codec = null; codecAdapter = null; try { if (mediaCrypto != null) { mediaCrypto.release(); } } finally { mediaCrypto = null; setCodecDrmSession(null); resetCodecStateForRelease(); } } } @Override protected void onStarted() { // Do nothing. Overridden to remove throws clause. } @Override protected void onStopped() { // Do nothing. Overridden to remove throws clause. } @Override public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException { if (pendingOutputEndOfStream) { pendingOutputEndOfStream = false; processEndOfStream(); } try { if (outputStreamEnded) { renderToEndOfStream(); return; } if (inputFormat == null && !readToFlagsOnlyBuffer(/* requireFormat= */ true)) { // We still don't have a format and can't make progress without one. 
return; } // We have a format. maybeInitCodec(); if (codec != null) { long renderStartTimeMs = SystemClock.elapsedRealtime(); TraceUtil.beginSection("drainAndFeed"); while (drainOutputBuffer(positionUs, elapsedRealtimeUs) && shouldContinueRendering(renderStartTimeMs)) {} while (feedInputBuffer() && shouldContinueRendering(renderStartTimeMs)) {} TraceUtil.endSection(); } else { decoderCounters.skippedInputBufferCount += skipSource(positionUs); // We need to read any format changes despite not having a codec so that drmSession can be // updated, and so that we have the most recent format should the codec be initialized. We // may also reach the end of the stream. Note that readSource will not read a sample into a // flags-only buffer. readToFlagsOnlyBuffer(/* requireFormat= */ false); } decoderCounters.ensureUpdated(); } catch (IllegalStateException e) { if (isMediaCodecException(e)) { throw createRendererException(e, inputFormat); } throw e; } } /** * Flushes the codec. If flushing is not possible, the codec will be released and re-instantiated. * This method is a no-op if the codec is {@code null}. * * <p>The implementation of this method calls {@link #flushOrReleaseCodec()}, and {@link * #maybeInitCodec()} if the codec needs to be re-instantiated. * * @return Whether the codec was released and reinitialized, rather than being flushed. * @throws ExoPlaybackException If an error occurs re-instantiating the codec. */ protected final boolean flushOrReinitializeCodec() throws ExoPlaybackException { boolean released = flushOrReleaseCodec(); if (released) { maybeInitCodec(); } return released; } /** * Flushes the codec. If flushing is not possible, the codec will be released. This method is a * no-op if the codec is {@code null}. * * @return Whether the codec was released. */ protected boolean flushOrReleaseCodec() { if (codec == null) { return false; } if (codecDrainAction == DRAIN_ACTION_REINITIALIZE || codecNeedsFlushWorkaround || (codecNeedsEosFlushWorkaround && codecReceivedEos)) { releaseCodec(); return true; } try { codecAdapter.flush(); } finally { resetCodecStateForFlush(); } return false; } /** Resets the renderer internal state after a codec flush. */ @CallSuper protected void resetCodecStateForFlush() { resetInputBuffer(); resetOutputBuffer(); codecHotswapDeadlineMs = C.TIME_UNSET; codecReceivedEos = false; codecReceivedBuffers = false; waitingForFirstSyncSample = true; codecNeedsAdaptationWorkaroundBuffer = false; shouldSkipAdaptationWorkaroundOutputBuffer = false; isDecodeOnlyOutputBuffer = false; isLastOutputBuffer = false; waitingForKeys = false; decodeOnlyPresentationTimestamps.clear(); largestQueuedPresentationTimeUs = C.TIME_UNSET; lastBufferInStreamPresentationTimeUs = C.TIME_UNSET; codecDrainState = DRAIN_STATE_NONE; codecDrainAction = DRAIN_ACTION_NONE; // Reconfiguration data sent shortly before the flush may not have been processed by the // decoder. If the codec has been reconfigured we always send reconfiguration data again to // guarantee that it's processed. codecReconfigurationState = codecReconfigured ? RECONFIGURATION_STATE_WRITE_PENDING : RECONFIGURATION_STATE_NONE; } /** * Resets the renderer internal state after a codec release. * * <p>Note that this only needs to reset state variables that are changed in addition to those * already changed in {@link #resetCodecStateForFlush()}. 
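   *
   * <p>This method is annotated with {@link CallSuper}; subclasses that override it must call
   * {@code super.resetCodecStateForRelease()} so that the state below is still cleared.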
*/ @CallSuper protected void resetCodecStateForRelease() { resetCodecStateForFlush(); availableCodecInfos = null; codecInfo = null; codecFormat = null; codecOperatingRate = CODEC_OPERATING_RATE_UNSET; codecAdaptationWorkaroundMode = ADAPTATION_WORKAROUND_MODE_NEVER; codecNeedsReconfigureWorkaround = false; codecNeedsDiscardToSpsWorkaround = false; codecNeedsFlushWorkaround = false; codecNeedsEosFlushWorkaround = false; codecNeedsEosOutputExceptionWorkaround = false; codecNeedsMonoChannelCountWorkaround = false; codecNeedsEosPropagation = false; codecReconfigured = false; codecReconfigurationState = RECONFIGURATION_STATE_NONE; resetCodecBuffers(); mediaCryptoRequiresSecureDecoder = false; } protected DecoderException createDecoderException( Throwable cause, @Nullable MediaCodecInfo codecInfo) { return new DecoderException(cause, codecInfo); } /** Reads into {@link #flagsOnlyBuffer} and returns whether a {@link Format} was read. */ private boolean readToFlagsOnlyBuffer(boolean requireFormat) throws ExoPlaybackException { FormatHolder formatHolder = getFormatHolder(); flagsOnlyBuffer.clear(); int result = readSource(formatHolder, flagsOnlyBuffer, requireFormat); if (result == C.RESULT_FORMAT_READ) { onInputFormatChanged(formatHolder); return true; } else if (result == C.RESULT_BUFFER_READ && flagsOnlyBuffer.isEndOfStream()) { inputStreamEnded = true; processEndOfStream(); } return false; } private void maybeInitCodecWithFallback( MediaCrypto crypto, boolean mediaCryptoRequiresSecureDecoder) throws DecoderInitializationException { if (availableCodecInfos == null) { try { List<MediaCodecInfo> allAvailableCodecInfos = getAvailableCodecInfos(mediaCryptoRequiresSecureDecoder); availableCodecInfos = new ArrayDeque<>(); if (enableDecoderFallback) { availableCodecInfos.addAll(allAvailableCodecInfos); } else if (!allAvailableCodecInfos.isEmpty()) { availableCodecInfos.add(allAvailableCodecInfos.get(0)); } preferredDecoderInitializationException = null; } catch (DecoderQueryException e) { throw new DecoderInitializationException( inputFormat, e, mediaCryptoRequiresSecureDecoder, DecoderInitializationException.DECODER_QUERY_ERROR); } } if (availableCodecInfos.isEmpty()) { throw new DecoderInitializationException( inputFormat, /* cause= */ null, mediaCryptoRequiresSecureDecoder, DecoderInitializationException.NO_SUITABLE_DECODER_ERROR); } while (codec == null) { MediaCodecInfo codecInfo = availableCodecInfos.peekFirst(); if (!shouldInitCodec(codecInfo)) { return; } try { initCodec(codecInfo, crypto); } catch (Exception e) { Log.w(TAG, "Failed to initialize decoder: " + codecInfo, e); // This codec failed to initialize, so fall back to the next codec in the list (if any). We // won't try to use this codec again unless there's a format change or the renderer is // disabled and re-enabled. 
availableCodecInfos.removeFirst(); DecoderInitializationException exception = new DecoderInitializationException( inputFormat, e, mediaCryptoRequiresSecureDecoder, codecInfo); if (preferredDecoderInitializationException == null) { preferredDecoderInitializationException = exception; } else { preferredDecoderInitializationException = preferredDecoderInitializationException.copyWithFallbackException(exception); } if (availableCodecInfos.isEmpty()) { throw preferredDecoderInitializationException; } } } availableCodecInfos = null; } private List<MediaCodecInfo> getAvailableCodecInfos(boolean mediaCryptoRequiresSecureDecoder) throws DecoderQueryException { List<MediaCodecInfo> codecInfos = getDecoderInfos(mediaCodecSelector, inputFormat, mediaCryptoRequiresSecureDecoder); if (codecInfos.isEmpty() && mediaCryptoRequiresSecureDecoder) { // The drm session indicates that a secure decoder is required, but the device does not // have one. Assuming that supportsFormat indicated support for the media being played, we // know that it does not require a secure output path. Most CDM implementations allow // playback to proceed with a non-secure decoder in this case, so we try our luck. codecInfos = getDecoderInfos(mediaCodecSelector, inputFormat, /* requiresSecureDecoder= */ false); if (!codecInfos.isEmpty()) { Log.w( TAG, "Drm session requires secure decoder for " + inputFormat.sampleMimeType + ", but no secure decoder available. Trying to proceed with " + codecInfos + "."); } } return codecInfos; } private void initCodec(MediaCodecInfo codecInfo, MediaCrypto crypto) throws Exception { long codecInitializingTimestamp; long codecInitializedTimestamp; MediaCodec codec = null; String codecName = codecInfo.name; float codecOperatingRate = Util.SDK_INT < 23 ? CODEC_OPERATING_RATE_UNSET : getCodecOperatingRateV23(rendererOperatingRate, inputFormat, getStreamFormats()); if (codecOperatingRate <= assumedMinimumCodecOperatingRate) { codecOperatingRate = CODEC_OPERATING_RATE_UNSET; } MediaCodecAdapter codecAdapter = null; try { codecInitializingTimestamp = SystemClock.elapsedRealtime(); TraceUtil.beginSection("createCodec:" + codecName); codec = MediaCodec.createByCodecName(codecName); if (mediaCodecOperationMode == OPERATION_MODE_ASYNCHRONOUS_PLAYBACK_THREAD && Util.SDK_INT >= 21) { codecAdapter = new AsynchronousMediaCodecAdapter(codec); } else if (mediaCodecOperationMode == OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD && Util.SDK_INT >= 23) { codecAdapter = new DedicatedThreadAsyncMediaCodecAdapter(codec, getTrackType()); } else if (mediaCodecOperationMode == OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK && Util.SDK_INT >= 23) { codecAdapter = new MultiLockAsyncMediaCodecAdapter(codec, getTrackType()); } else if (mediaCodecOperationMode == OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_ASYNCHRONOUS_QUEUEING && Util.SDK_INT >= 23) { codecAdapter = new DedicatedThreadAsyncMediaCodecAdapter( codec, /* enableAsynchronousQueueing= */ true, getTrackType()); } else if (mediaCodecOperationMode == OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK_ASYNCHRONOUS_QUEUEING && Util.SDK_INT >= 23) { codecAdapter = new MultiLockAsyncMediaCodecAdapter( codec, /* enableAsynchronousQueueing= */ true, getTrackType()); } else { codecAdapter = new SynchronousMediaCodecAdapter(codec); } TraceUtil.endSection(); TraceUtil.beginSection("configureCodec"); configureCodec(codecInfo, codec, inputFormat, crypto, codecOperatingRate); TraceUtil.endSection(); TraceUtil.beginSection("startCodec"); codecAdapter.start(); 
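      // start() moves the underlying codec into its executing state, after which input buffers
      // can be dequeued from it.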
TraceUtil.endSection(); codecInitializedTimestamp = SystemClock.elapsedRealtime(); getCodecBuffers(codec); } catch (Exception e) { if (codecAdapter != null) { codecAdapter.shutdown(); } if (codec != null) { resetCodecBuffers(); codec.release(); } throw e; } this.codec = codec; this.codecAdapter = codecAdapter; this.codecInfo = codecInfo; this.codecOperatingRate = codecOperatingRate; codecFormat = inputFormat; codecAdaptationWorkaroundMode = codecAdaptationWorkaroundMode(codecName); codecNeedsReconfigureWorkaround = codecNeedsReconfigureWorkaround(codecName); codecNeedsDiscardToSpsWorkaround = codecNeedsDiscardToSpsWorkaround(codecName, codecFormat); codecNeedsFlushWorkaround = codecNeedsFlushWorkaround(codecName); codecNeedsEosFlushWorkaround = codecNeedsEosFlushWorkaround(codecName); codecNeedsEosOutputExceptionWorkaround = codecNeedsEosOutputExceptionWorkaround(codecName); codecNeedsMonoChannelCountWorkaround = codecNeedsMonoChannelCountWorkaround(codecName, codecFormat); codecNeedsEosPropagation = codecNeedsEosPropagationWorkaround(codecInfo) || getCodecNeedsEosPropagation(); if (getState() == STATE_STARTED) { codecHotswapDeadlineMs = SystemClock.elapsedRealtime() + MAX_CODEC_HOTSWAP_TIME_MS; } decoderCounters.decoderInitCount++; long elapsed = codecInitializedTimestamp - codecInitializingTimestamp; onCodecInitialized(codecName, codecInitializedTimestamp, elapsed); } private boolean shouldContinueRendering(long renderStartTimeMs) { return renderTimeLimitMs == C.TIME_UNSET || SystemClock.elapsedRealtime() - renderStartTimeMs < renderTimeLimitMs; } private void getCodecBuffers(MediaCodec codec) { if (Util.SDK_INT < 21) { inputBuffers = codec.getInputBuffers(); outputBuffers = codec.getOutputBuffers(); } } private void resetCodecBuffers() { if (Util.SDK_INT < 21) { inputBuffers = null; outputBuffers = null; } } private ByteBuffer getInputBuffer(int inputIndex) { if (Util.SDK_INT >= 21) { return codec.getInputBuffer(inputIndex); } else { return inputBuffers[inputIndex]; } } private ByteBuffer getOutputBuffer(int outputIndex) { if (Util.SDK_INT >= 21) { return codec.getOutputBuffer(outputIndex); } else { return outputBuffers[outputIndex]; } } private boolean hasOutputBuffer() { return outputIndex >= 0; } private void resetInputBuffer() { inputIndex = C.INDEX_UNSET; buffer.data = null; } private void resetOutputBuffer() { outputIndex = C.INDEX_UNSET; outputBuffer = null; } private void setSourceDrmSession(@Nullable DrmSession<FrameworkMediaCrypto> session) { DrmSession.replaceSession(sourceDrmSession, session); sourceDrmSession = session; } private void setCodecDrmSession(@Nullable DrmSession<FrameworkMediaCrypto> session) { DrmSession.replaceSession(codecDrmSession, session); codecDrmSession = session; } /** * @return Whether it may be possible to feed more input data. * @throws ExoPlaybackException If an error occurs feeding the input buffer. */ private boolean feedInputBuffer() throws ExoPlaybackException { if (codec == null || codecDrainState == DRAIN_STATE_WAIT_END_OF_STREAM || inputStreamEnded) { return false; } if (inputIndex < 0) { inputIndex = codecAdapter.dequeueInputBufferIndex(); if (inputIndex < 0) { return false; } buffer.data = getInputBuffer(inputIndex); buffer.clear(); } if (codecDrainState == DRAIN_STATE_SIGNAL_END_OF_STREAM) { // We need to re-initialize the codec. Send an end of stream signal to the existing codec so // that it outputs any remaining buffers before we release it. if (codecNeedsEosPropagation) { // Do nothing. 
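        // With EOS propagation enabled the renderer signals end of stream itself (see
        // processEndOfStream), so no end-of-stream buffer is queued to the codec.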
} else { codecReceivedEos = true; codecAdapter.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); resetInputBuffer(); } codecDrainState = DRAIN_STATE_WAIT_END_OF_STREAM; return false; } if (codecNeedsAdaptationWorkaroundBuffer) { codecNeedsAdaptationWorkaroundBuffer = false; buffer.data.put(ADAPTATION_WORKAROUND_BUFFER); codecAdapter.queueInputBuffer(inputIndex, 0, ADAPTATION_WORKAROUND_BUFFER.length, 0, 0); resetInputBuffer(); codecReceivedBuffers = true; return true; } int result; FormatHolder formatHolder = getFormatHolder(); int adaptiveReconfigurationBytes = 0; if (waitingForKeys) { // We've already read an encrypted sample into buffer, and are waiting for keys. result = C.RESULT_BUFFER_READ; } else { // For adaptive reconfiguration OMX decoders expect all reconfiguration data to be supplied // at the start of the buffer that also contains the first frame in the new format. if (codecReconfigurationState == RECONFIGURATION_STATE_WRITE_PENDING) { for (int i = 0; i < codecFormat.initializationData.size(); i++) { byte[] data = codecFormat.initializationData.get(i); buffer.data.put(data); } codecReconfigurationState = RECONFIGURATION_STATE_QUEUE_PENDING; } adaptiveReconfigurationBytes = buffer.data.position(); result = readSource(formatHolder, buffer, false); } if (hasReadStreamToEnd()) { // Notify output queue of the last buffer's timestamp. lastBufferInStreamPresentationTimeUs = largestQueuedPresentationTimeUs; } if (result == C.RESULT_NOTHING_READ) { return false; } if (result == C.RESULT_FORMAT_READ) { if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) { // We received two formats in a row. Clear the current buffer of any reconfiguration data // associated with the first format. buffer.clear(); codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; } onInputFormatChanged(formatHolder); return true; } // We've read a buffer. if (buffer.isEndOfStream()) { if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) { // We received a new format immediately before the end of the stream. We need to clear // the corresponding reconfiguration data from the current buffer, but re-write it into // a subsequent buffer if there are any (e.g. if the user seeks backwards). buffer.clear(); codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; } inputStreamEnded = true; if (!codecReceivedBuffers) { processEndOfStream(); return false; } try { if (codecNeedsEosPropagation) { // Do nothing. } else { codecReceivedEos = true; codecAdapter.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); resetInputBuffer(); } } catch (CryptoException e) { throw createRendererException(e, inputFormat); } return false; } if (waitingForFirstSyncSample && !buffer.isKeyFrame()) { buffer.clear(); if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) { // The buffer we just cleared contained reconfiguration data. We need to re-write this // data into a subsequent buffer (if there is one). 
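        // Reverting to the write-pending state causes the configuration data to be written into
        // the next buffer that's read.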
codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; } return true; } waitingForFirstSyncSample = false; boolean bufferEncrypted = buffer.isEncrypted(); waitingForKeys = shouldWaitForKeys(bufferEncrypted); if (waitingForKeys) { return false; } if (codecNeedsDiscardToSpsWorkaround && !bufferEncrypted) { NalUnitUtil.discardToSps(buffer.data); if (buffer.data.position() == 0) { return true; } codecNeedsDiscardToSpsWorkaround = false; } try { long presentationTimeUs = buffer.timeUs; if (buffer.isDecodeOnly()) { decodeOnlyPresentationTimestamps.add(presentationTimeUs); } if (waitingForFirstSampleInFormat) { formatQueue.add(presentationTimeUs, inputFormat); waitingForFirstSampleInFormat = false; } largestQueuedPresentationTimeUs = Math.max(largestQueuedPresentationTimeUs, presentationTimeUs); buffer.flip(); if (buffer.hasSupplementalData()) { handleInputBufferSupplementalData(buffer); } onQueueInputBuffer(buffer); if (bufferEncrypted) { CryptoInfo cryptoInfo = buffer.cryptoInfo; cryptoInfo.increaseClearDataFirstSubSampleBy(adaptiveReconfigurationBytes); codecAdapter.queueSecureInputBuffer(inputIndex, 0, cryptoInfo, presentationTimeUs, 0); } else { codecAdapter.queueInputBuffer(inputIndex, 0, buffer.data.limit(), presentationTimeUs, 0); } resetInputBuffer(); codecReceivedBuffers = true; codecReconfigurationState = RECONFIGURATION_STATE_NONE; decoderCounters.inputBufferCount++; } catch (CryptoException e) { throw createRendererException(e, inputFormat); } return true; } private boolean shouldWaitForKeys(boolean bufferEncrypted) throws ExoPlaybackException { if (codecDrmSession == null || (!bufferEncrypted && codecDrmSession.playClearSamplesWithoutKeys())) { return false; } @DrmSession.State int drmSessionState = codecDrmSession.getState(); if (drmSessionState == DrmSession.STATE_ERROR) { throw createRendererException(codecDrmSession.getError(), inputFormat); } return drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS; } /** * Called when a {@link MediaCodec} has been created and configured. * <p> * The default implementation is a no-op. * * @param name The name of the codec that was initialized. * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization * finished. * @param initializationDurationMs The time taken to initialize the codec in milliseconds. */ protected void onCodecInitialized(String name, long initializedTimestampMs, long initializationDurationMs) { // Do nothing. } /** * Called when a new {@link Format} is read from the upstream {@link MediaPeriod}. * * @param formatHolder A {@link FormatHolder} that holds the new {@link Format}. * @throws ExoPlaybackException If an error occurs re-initializing the {@link MediaCodec}. */ @SuppressWarnings("unchecked") protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException { waitingForFirstSampleInFormat = true; Format newFormat = Assertions.checkNotNull(formatHolder.format); setSourceDrmSession((DrmSession<FrameworkMediaCrypto>) formatHolder.drmSession); inputFormat = newFormat; if (codec == null) { maybeInitCodec(); return; } // We have an existing codec that we may need to reconfigure or re-initialize. If the existing // codec instance is being kept then its operating rate may need to be updated. 
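    // The checks below resolve to one of three outcomes: drain and re-initialize the codec, drain
    // and update its DRM session, or keep it (flushing or reconfiguring it if required).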
if ((sourceDrmSession == null && codecDrmSession != null) || (sourceDrmSession != null && codecDrmSession == null) || (sourceDrmSession != codecDrmSession && !codecInfo.secure && maybeRequiresSecureDecoder(sourceDrmSession, newFormat)) || (Util.SDK_INT < 23 && sourceDrmSession != codecDrmSession)) { // We might need to switch between the clear and protected output paths, or we're using DRM // prior to API level 23 where the codec needs to be re-initialized to switch to the new DRM // session. drainAndReinitializeCodec(); return; } switch (canKeepCodec(codec, codecInfo, codecFormat, newFormat)) { case KEEP_CODEC_RESULT_NO: drainAndReinitializeCodec(); break; case KEEP_CODEC_RESULT_YES_WITH_FLUSH: codecFormat = newFormat; updateCodecOperatingRate(); if (sourceDrmSession != codecDrmSession) { drainAndUpdateCodecDrmSession(); } else { drainAndFlushCodec(); } break; case KEEP_CODEC_RESULT_YES_WITH_RECONFIGURATION: if (codecNeedsReconfigureWorkaround) { drainAndReinitializeCodec(); } else { codecReconfigured = true; codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; codecNeedsAdaptationWorkaroundBuffer = codecAdaptationWorkaroundMode == ADAPTATION_WORKAROUND_MODE_ALWAYS || (codecAdaptationWorkaroundMode == ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION && newFormat.width == codecFormat.width && newFormat.height == codecFormat.height); codecFormat = newFormat; updateCodecOperatingRate(); if (sourceDrmSession != codecDrmSession) { drainAndUpdateCodecDrmSession(); } } break; case KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION: codecFormat = newFormat; updateCodecOperatingRate(); if (sourceDrmSession != codecDrmSession) { drainAndUpdateCodecDrmSession(); } break; default: throw new IllegalStateException(); // Never happens. } } /** * Called when the output {@link MediaFormat} of the {@link MediaCodec} changes. * * <p>The default implementation is a no-op. * * @param codec The {@link MediaCodec} instance. * @param outputMediaFormat The new output {@link MediaFormat}. * @throws ExoPlaybackException Thrown if an error occurs handling the new output media format. */ protected void onOutputMediaFormatChanged(MediaCodec codec, MediaFormat outputMediaFormat) throws ExoPlaybackException { // Do nothing. } /** * Handles supplemental data associated with an input buffer. * * <p>The default implementation is a no-op. * * @param buffer The input buffer that is about to be queued. * @throws ExoPlaybackException Thrown if an error occurs handling supplemental data. */ protected void handleInputBufferSupplementalData(DecoderInputBuffer buffer) throws ExoPlaybackException { // Do nothing. } /** * Called immediately before an input buffer is queued into the codec. * * <p>The default implementation is a no-op. * * @param buffer The buffer to be queued. */ protected void onQueueInputBuffer(DecoderInputBuffer buffer) { // Do nothing. } /** * Called when an output buffer is successfully processed. * * @param presentationTimeUs The timestamp associated with the output buffer. 
*/ @CallSuper protected void onProcessedOutputBuffer(long presentationTimeUs) { while (pendingOutputStreamOffsetCount != 0 && presentationTimeUs >= pendingOutputStreamSwitchTimesUs[0]) { outputStreamOffsetUs = pendingOutputStreamOffsetsUs[0]; pendingOutputStreamOffsetCount--; System.arraycopy( pendingOutputStreamOffsetsUs, /* srcPos= */ 1, pendingOutputStreamOffsetsUs, /* destPos= */ 0, pendingOutputStreamOffsetCount); System.arraycopy( pendingOutputStreamSwitchTimesUs, /* srcPos= */ 1, pendingOutputStreamSwitchTimesUs, /* destPos= */ 0, pendingOutputStreamOffsetCount); onProcessedStreamChange(); } } /** Called after the last output buffer before a stream change has been processed. */ protected void onProcessedStreamChange() { // Do nothing. } /** * Determines whether the existing {@link MediaCodec} can be kept for a new {@link Format}, and if * it can whether it requires reconfiguration. * * <p>The default implementation returns {@link #KEEP_CODEC_RESULT_NO}. * * @param codec The existing {@link MediaCodec} instance. * @param codecInfo A {@link MediaCodecInfo} describing the decoder. * @param oldFormat The {@link Format} for which the existing instance is configured. * @param newFormat The new {@link Format}. * @return Whether the instance can be kept, and if it can whether it requires reconfiguration. */ protected @KeepCodecResult int canKeepCodec( MediaCodec codec, MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) { return KEEP_CODEC_RESULT_NO; } @Override public boolean isEnded() { return outputStreamEnded; } @Override public boolean isReady() { return inputFormat != null && !waitingForKeys && (isSourceReady() || hasOutputBuffer() || (codecHotswapDeadlineMs != C.TIME_UNSET && SystemClock.elapsedRealtime() < codecHotswapDeadlineMs)); } /** * Returns the {@link MediaFormat#KEY_OPERATING_RATE} value for a given renderer operating rate, * current {@link Format} and set of possible stream formats. * * <p>The default implementation returns {@link #CODEC_OPERATING_RATE_UNSET}. * * @param operatingRate The renderer operating rate. * @param format The {@link Format} for which the codec is being configured. * @param streamFormats The possible stream formats. * @return The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if no codec operating * rate should be set. */ protected float getCodecOperatingRateV23( float operatingRate, Format format, Format[] streamFormats) { return CODEC_OPERATING_RATE_UNSET; } /** * Updates the codec operating rate. * * @throws ExoPlaybackException If an error occurs releasing or initializing a codec. */ private void updateCodecOperatingRate() throws ExoPlaybackException { if (Util.SDK_INT < 23) { return; } float newCodecOperatingRate = getCodecOperatingRateV23(rendererOperatingRate, codecFormat, getStreamFormats()); if (codecOperatingRate == newCodecOperatingRate) { // No change. } else if (newCodecOperatingRate == CODEC_OPERATING_RATE_UNSET) { // The only way to clear the operating rate is to instantiate a new codec instance. See // [Internal ref: b/71987865]. drainAndReinitializeCodec(); } else if (codecOperatingRate != CODEC_OPERATING_RATE_UNSET || newCodecOperatingRate > assumedMinimumCodecOperatingRate) { // We need to set the operating rate, either because we've set it previously or because it's // above the assumed minimum rate. 
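      // The rate can be updated on the fly via setParameters. The early return above guarantees
      // that we're on API 23 or higher, where MediaFormat.KEY_OPERATING_RATE is supported.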
Bundle codecParameters = new Bundle(); codecParameters.putFloat(MediaFormat.KEY_OPERATING_RATE, newCodecOperatingRate); codec.setParameters(codecParameters); codecOperatingRate = newCodecOperatingRate; } } /** Starts draining the codec for flush. */ private void drainAndFlushCodec() { if (codecReceivedBuffers) { codecDrainState = DRAIN_STATE_SIGNAL_END_OF_STREAM; codecDrainAction = DRAIN_ACTION_FLUSH; } } /** * Starts draining the codec to update its DRM session. The update may occur immediately if no * buffers have been queued to the codec. * * @throws ExoPlaybackException If an error occurs updating the codec's DRM session. */ private void drainAndUpdateCodecDrmSession() throws ExoPlaybackException { if (Util.SDK_INT < 23) { // The codec needs to be re-initialized to switch to the source DRM session. drainAndReinitializeCodec(); return; } if (codecReceivedBuffers) { codecDrainState = DRAIN_STATE_SIGNAL_END_OF_STREAM; codecDrainAction = DRAIN_ACTION_UPDATE_DRM_SESSION; } else { // Nothing has been queued to the decoder, so we can do the update immediately. updateDrmSessionOrReinitializeCodecV23(); } } /** * Starts draining the codec for re-initialization. Re-initialization may occur immediately if no * buffers have been queued to the codec. * * @throws ExoPlaybackException If an error occurs re-initializing a codec. */ private void drainAndReinitializeCodec() throws ExoPlaybackException { if (codecReceivedBuffers) { codecDrainState = DRAIN_STATE_SIGNAL_END_OF_STREAM; codecDrainAction = DRAIN_ACTION_REINITIALIZE; } else { // Nothing has been queued to the decoder, so we can re-initialize immediately. reinitializeCodec(); } } /** * @return Whether it may be possible to drain more output data. * @throws ExoPlaybackException If an error occurs draining the output buffer. */ private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException { if (!hasOutputBuffer()) { int outputIndex; if (codecNeedsEosOutputExceptionWorkaround && codecReceivedEos) { try { outputIndex = codecAdapter.dequeueOutputBufferIndex(outputBufferInfo); } catch (IllegalStateException e) { processEndOfStream(); if (outputStreamEnded) { // Release the codec, as it's in an error state. releaseCodec(); } return false; } } else { outputIndex = codecAdapter.dequeueOutputBufferIndex(outputBufferInfo); } if (outputIndex < 0) { if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED /* (-2) */) { processOutputMediaFormat(); return true; } else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED /* (-3) */) { processOutputBuffersChanged(); return true; } /* MediaCodec.INFO_TRY_AGAIN_LATER (-1) or unknown negative return value */ if (codecNeedsEosPropagation && (inputStreamEnded || codecDrainState == DRAIN_STATE_WAIT_END_OF_STREAM)) { processEndOfStream(); } return false; } // We've dequeued a buffer. if (shouldSkipAdaptationWorkaroundOutputBuffer) { shouldSkipAdaptationWorkaroundOutputBuffer = false; codec.releaseOutputBuffer(outputIndex, false); return true; } else if (outputBufferInfo.size == 0 && (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { // The dequeued buffer indicates the end of the stream. Process it immediately. processEndOfStream(); return false; } this.outputIndex = outputIndex; outputBuffer = getOutputBuffer(outputIndex); // The dequeued buffer is a media buffer. Do some initial setup. // It will be processed by calling processOutputBuffer (possibly multiple times). 
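      // Note that getOutputBuffer may return null (for example if the codec was configured with
      // an output surface), in which case there's no byte buffer to position.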
if (outputBuffer != null) { outputBuffer.position(outputBufferInfo.offset); outputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size); } isDecodeOnlyOutputBuffer = isDecodeOnlyBuffer(outputBufferInfo.presentationTimeUs); isLastOutputBuffer = lastBufferInStreamPresentationTimeUs == outputBufferInfo.presentationTimeUs; updateOutputFormatForTime(outputBufferInfo.presentationTimeUs); } boolean processedOutputBuffer; if (codecNeedsEosOutputExceptionWorkaround && codecReceivedEos) { try { processedOutputBuffer = processOutputBuffer( positionUs, elapsedRealtimeUs, codec, outputBuffer, outputIndex, outputBufferInfo.flags, /* sampleCount= */ 1, outputBufferInfo.presentationTimeUs, isDecodeOnlyOutputBuffer, isLastOutputBuffer, outputFormat); } catch (IllegalStateException e) { processEndOfStream(); if (outputStreamEnded) { // Release the codec, as it's in an error state. releaseCodec(); } return false; } } else { processedOutputBuffer = processOutputBuffer( positionUs, elapsedRealtimeUs, codec, outputBuffer, outputIndex, outputBufferInfo.flags, /* sampleCount= */ 1, outputBufferInfo.presentationTimeUs, isDecodeOnlyOutputBuffer, isLastOutputBuffer, outputFormat); } if (processedOutputBuffer) { onProcessedOutputBuffer(outputBufferInfo.presentationTimeUs); boolean isEndOfStream = (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0; resetOutputBuffer(); if (!isEndOfStream) { return true; } processEndOfStream(); } return false; } /** Processes a new output {@link MediaFormat}. */ private void processOutputMediaFormat() throws ExoPlaybackException { MediaFormat mediaFormat = codecAdapter.getOutputFormat(); if (codecAdaptationWorkaroundMode != ADAPTATION_WORKAROUND_MODE_NEVER && mediaFormat.getInteger(MediaFormat.KEY_WIDTH) == ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT && mediaFormat.getInteger(MediaFormat.KEY_HEIGHT) == ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT) { // We assume this format changed event was caused by the adaptation workaround. shouldSkipAdaptationWorkaroundOutputBuffer = true; return; } if (codecNeedsMonoChannelCountWorkaround) { mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1); } onOutputMediaFormatChanged(codec, mediaFormat); } /** * Processes a change in the output buffers. */ private void processOutputBuffersChanged() { if (Util.SDK_INT < 21) { outputBuffers = codec.getOutputBuffers(); } } /** * Processes an output media buffer. * * <p>When a new {@link ByteBuffer} is passed to this method its position and limit delineate the * data to be processed. The return value indicates whether the buffer was processed in full. If * true is returned then the next call to this method will receive a new buffer to be processed. * If false is returned then the same buffer will be passed to the next call. An implementation of * this method is free to modify the buffer and can assume that the buffer will not be externally * modified between successive calls. Hence an implementation can, for example, modify the * buffer's position to keep track of how much of the data it has processed. * * <p>Note that the first call to this method following a call to {@link #onPositionReset(long, * boolean)} will always receive a new {@link ByteBuffer} to be processed. * * @param positionUs The current media time in microseconds, measured at the start of the current * iteration of the rendering loop. * @param elapsedRealtimeUs {@link SystemClock#elapsedRealtime()} in microseconds, measured at the * start of the current iteration of the rendering loop. 
* @param codec The {@link MediaCodec} instance. * @param buffer The output buffer to process. * @param bufferIndex The index of the output buffer. * @param bufferFlags The flags attached to the output buffer. * @param sampleCount The number of samples extracted from the sample queue in the buffer. This * allows handling multiple samples as a batch for efficiency. * @param bufferPresentationTimeUs The presentation time of the output buffer in microseconds. * @param isDecodeOnlyBuffer Whether the buffer was marked with {@link C#BUFFER_FLAG_DECODE_ONLY} * by the source. * @param isLastBuffer Whether the buffer is the last sample of the current stream. * @param format The {@link Format} associated with the buffer. * @return Whether the output buffer was fully processed (e.g. rendered or skipped). * @throws ExoPlaybackException If an error occurs processing the output buffer. */ protected abstract boolean processOutputBuffer( long positionUs, long elapsedRealtimeUs, MediaCodec codec, ByteBuffer buffer, int bufferIndex, int bufferFlags, int sampleCount, long bufferPresentationTimeUs, boolean isDecodeOnlyBuffer, boolean isLastBuffer, Format format) throws ExoPlaybackException; /** * Incrementally renders any remaining output. * <p> * The default implementation is a no-op. * * @throws ExoPlaybackException Thrown if an error occurs rendering remaining output. */ protected void renderToEndOfStream() throws ExoPlaybackException { // Do nothing. } /** * Processes an end of stream signal. * * @throws ExoPlaybackException If an error occurs processing the signal. */ @TargetApi(23) // codecDrainAction == DRAIN_ACTION_UPDATE_DRM_SESSION implies SDK_INT >= 23. private void processEndOfStream() throws ExoPlaybackException { switch (codecDrainAction) { case DRAIN_ACTION_REINITIALIZE: reinitializeCodec(); break; case DRAIN_ACTION_UPDATE_DRM_SESSION: updateDrmSessionOrReinitializeCodecV23(); break; case DRAIN_ACTION_FLUSH: flushOrReinitializeCodec(); break; case DRAIN_ACTION_NONE: default: outputStreamEnded = true; renderToEndOfStream(); break; } } /** * Notifies the renderer that output end of stream is pending and should be handled on the next * render. */ protected final void setPendingOutputEndOfStream() { pendingOutputEndOfStream = true; } /** Returns the largest queued input presentation time, in microseconds. */ protected final long getLargestQueuedPresentationTimeUs() { return largestQueuedPresentationTimeUs; } /** * Returns the offset that should be subtracted from {@code bufferPresentationTimeUs} in {@link * #processOutputBuffer(long, long, MediaCodec, ByteBuffer, int, int, int, long, boolean, boolean, * Format)} to get the playback position with respect to the media. */ protected final long getOutputStreamOffsetUs() { return outputStreamOffsetUs; } private void reinitializeCodec() throws ExoPlaybackException { releaseCodec(); maybeInitCodec(); } private boolean isDecodeOnlyBuffer(long presentationTimeUs) { // We avoid using decodeOnlyPresentationTimestamps.remove(presentationTimeUs) because it would // box presentationTimeUs, creating a Long object that would need to be garbage collected. 
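    // A linear scan is acceptable here: the list only holds timestamps of decode-only samples
    // that are pending output, so it stays small.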
int size = decodeOnlyPresentationTimestamps.size(); for (int i = 0; i < size; i++) { if (decodeOnlyPresentationTimestamps.get(i) == presentationTimeUs) { decodeOnlyPresentationTimestamps.remove(i); return true; } } return false; } @RequiresApi(23) private void updateDrmSessionOrReinitializeCodecV23() throws ExoPlaybackException { @Nullable FrameworkMediaCrypto sessionMediaCrypto = sourceDrmSession.getMediaCrypto(); if (sessionMediaCrypto == null) { // We'd only expect this to happen if the CDM from which the pending session is obtained needs // provisioning. This is unlikely to happen (it probably requires a switch from one DRM scheme // to another, where the new CDM hasn't been used before and needs provisioning). It would be // possible to handle this case more efficiently (i.e. with a new renderer state that waits // for provisioning to finish and then calls mediaCrypto.setMediaDrmSession), but the extra // complexity is not warranted given how unlikely the case is to occur. reinitializeCodec(); return; } if (C.PLAYREADY_UUID.equals(sessionMediaCrypto.uuid)) { // The PlayReady CDM does not implement setMediaDrmSession. // TODO: Add API check once [Internal ref: b/128835874] is fixed. reinitializeCodec(); return; } if (flushOrReinitializeCodec()) { // The codec was reinitialized. The new codec will be using the new DRM session, so there's // nothing more to do. return; } try { mediaCrypto.setMediaDrmSession(sessionMediaCrypto.sessionId); } catch (MediaCryptoException e) { throw createRendererException(e, inputFormat); } setCodecDrmSession(sourceDrmSession); codecDrainState = DRAIN_STATE_NONE; codecDrainAction = DRAIN_ACTION_NONE; } /** * Returns whether a {@link DrmSession} may require a secure decoder for a given {@link Format}. * * @param drmSession The {@link DrmSession}. * @param format The {@link Format}. * @return Whether a secure decoder may be required. */ private static boolean maybeRequiresSecureDecoder( DrmSession<FrameworkMediaCrypto> drmSession, Format format) { @Nullable FrameworkMediaCrypto sessionMediaCrypto = drmSession.getMediaCrypto(); if (sessionMediaCrypto == null) { // We'd only expect this to happen if the CDM from which the pending session is obtained needs // provisioning. This is unlikely to happen (it probably requires a switch from one DRM scheme // to another, where the new CDM hasn't been used before and needs provisioning). Assume that // a secure decoder may be required. return true; } if (sessionMediaCrypto.forceAllowInsecureDecoderComponents) { return false; } MediaCrypto mediaCrypto; try { mediaCrypto = new MediaCrypto(sessionMediaCrypto.uuid, sessionMediaCrypto.sessionId); } catch (MediaCryptoException e) { // This shouldn't happen, but if it does then assume that a secure decoder may be required. return true; } try { return mediaCrypto.requiresSecureDecoderComponent(format.sampleMimeType); } finally { mediaCrypto.release(); } } private static boolean isMediaCodecException(IllegalStateException error) { if (Util.SDK_INT >= 21 && isMediaCodecExceptionV21(error)) { return true; } StackTraceElement[] stackTrace = error.getStackTrace(); return stackTrace.length > 0 && stackTrace[0].getClassName().equals("android.media.MediaCodec"); } @RequiresApi(21) private static boolean isMediaCodecExceptionV21(IllegalStateException error) { return error instanceof MediaCodec.CodecException; } /** * Returns whether the decoder is known to fail when flushed. 
* <p> * If true is returned, the renderer will work around the issue by releasing the decoder and * instantiating a new one rather than flushing the current instance. * <p> * See [Internal: b/8347958, b/8543366]. * * @param name The name of the decoder. * @return True if the decoder is known to fail when flushed. */ private static boolean codecNeedsFlushWorkaround(String name) { return Util.SDK_INT < 18 || (Util.SDK_INT == 18 && ("OMX.SEC.avc.dec".equals(name) || "OMX.SEC.avc.dec.secure".equals(name))) || (Util.SDK_INT == 19 && Util.MODEL.startsWith("SM-G800") && ("OMX.Exynos.avc.dec".equals(name) || "OMX.Exynos.avc.dec.secure".equals(name))); } /** * Returns a mode that specifies when the adaptation workaround should be enabled. * * <p>When enabled, the workaround queues and discards a blank frame with a resolution whose width * and height both equal {@link #ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT}, to reset the decoder's * internal state when a format change occurs. * * <p>See [Internal: b/27807182]. See <a * href="https://github.com/google/ExoPlayer/issues/3257">GitHub issue #3257</a>. * * @param name The name of the decoder. * @return The mode specifying when the adaptation workaround should be enabled. */ private @AdaptationWorkaroundMode int codecAdaptationWorkaroundMode(String name) { if (Util.SDK_INT <= 25 && "OMX.Exynos.avc.dec.secure".equals(name) && (Util.MODEL.startsWith("SM-T585") || Util.MODEL.startsWith("SM-A510") || Util.MODEL.startsWith("SM-A520") || Util.MODEL.startsWith("SM-J700"))) { return ADAPTATION_WORKAROUND_MODE_ALWAYS; } else if (Util.SDK_INT < 24 && ("OMX.Nvidia.h264.decode".equals(name) || "OMX.Nvidia.h264.decode.secure".equals(name)) && ("flounder".equals(Util.DEVICE) || "flounder_lte".equals(Util.DEVICE) || "grouper".equals(Util.DEVICE) || "tilapia".equals(Util.DEVICE))) { return ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION; } else { return ADAPTATION_WORKAROUND_MODE_NEVER; } } /** * Returns whether the decoder is known to fail when an attempt is made to reconfigure it with a * new format's configuration data. * * <p>When enabled, the workaround will always release and recreate the decoder, rather than * attempting to reconfigure the existing instance. * * @param name The name of the decoder. * @return True if the decoder is known to fail when an attempt is made to reconfigure it with a * new format's configuration data. */ private static boolean codecNeedsReconfigureWorkaround(String name) { return Util.MODEL.startsWith("SM-T230") && "OMX.MARVELL.VIDEO.HW.CODA7542DECODER".equals(name); } /** * Returns whether the decoder is an H.264/AVC decoder known to fail if NAL units are queued * before the codec specific data. * * <p>If true is returned, the renderer will work around the issue by discarding data up to the * SPS. * * @param name The name of the decoder. * @param format The {@link Format} used to configure the decoder. * @return True if the decoder is known to fail if NAL units are queued before CSD. */ private static boolean codecNeedsDiscardToSpsWorkaround(String name, Format format) { return Util.SDK_INT < 21 && format.initializationData.isEmpty() && "OMX.MTK.VIDEO.DECODER.AVC".equals(name); } /** * Returns whether the decoder is known to handle the propagation of the {@link * MediaCodec#BUFFER_FLAG_END_OF_STREAM} flag incorrectly on the host device. 
* * <p>If true is returned, the renderer will work around the issue by approximating end of stream * behavior without relying on the flag being propagated through to an output buffer by the * underlying decoder. * * @param codecInfo Information about the {@link MediaCodec}. * @return True if the decoder is known to handle {@link MediaCodec#BUFFER_FLAG_END_OF_STREAM} * propagation incorrectly on the host device. False otherwise. */ private static boolean codecNeedsEosPropagationWorkaround(MediaCodecInfo codecInfo) { String name = codecInfo.name; return (Util.SDK_INT <= 25 && "OMX.rk.video_decoder.avc".equals(name)) || (Util.SDK_INT <= 17 && "OMX.allwinner.video.decoder.avc".equals(name)) || ("Amazon".equals(Util.MANUFACTURER) && "AFTS".equals(Util.MODEL) && codecInfo.secure); } /** * Returns whether the decoder is known to behave incorrectly if flushed after receiving an input * buffer with {@link MediaCodec#BUFFER_FLAG_END_OF_STREAM} set. * <p> * If true is returned, the renderer will work around the issue by instantiating a new decoder * when this case occurs. * <p> * See [Internal: b/8578467, b/23361053]. * * @param name The name of the decoder. * @return True if the decoder is known to behave incorrectly if flushed after receiving an input * buffer with {@link MediaCodec#BUFFER_FLAG_END_OF_STREAM} set. False otherwise. */ private static boolean codecNeedsEosFlushWorkaround(String name) { return (Util.SDK_INT <= 23 && "OMX.google.vorbis.decoder".equals(name)) || (Util.SDK_INT <= 19 && ("hb2000".equals(Util.DEVICE) || "stvm8".equals(Util.DEVICE)) && ("OMX.amlogic.avc.decoder.awesome".equals(name) || "OMX.amlogic.avc.decoder.awesome.secure".equals(name))); } /** * Returns whether the decoder may throw an {@link IllegalStateException} from * {@link MediaCodec#dequeueOutputBuffer(MediaCodec.BufferInfo, long)} or * {@link MediaCodec#releaseOutputBuffer(int, boolean)} after receiving an input * buffer with {@link MediaCodec#BUFFER_FLAG_END_OF_STREAM} set. * <p> * See [Internal: b/17933838]. * * @param name The name of the decoder. * @return True if the decoder may throw an exception after receiving an end-of-stream buffer. */ private static boolean codecNeedsEosOutputExceptionWorkaround(String name) { return Util.SDK_INT == 21 && "OMX.google.aac.decoder".equals(name); } /** * Returns whether the decoder is known to set the number of audio channels in the output {@link * Format} to 2 for the given input {@link Format}, whilst only actually outputting a single * channel. * * <p>If true is returned then we explicitly override the number of channels in the output {@link * Format}, setting it to 1. * * @param name The decoder name. * @param format The input {@link Format}. * @return True if the decoder is known to set the number of audio channels in the output {@link * Format} to 2 for the given input {@link Format}, whilst only actually outputting a single * channel. False otherwise. */ private static boolean codecNeedsMonoChannelCountWorkaround(String name, Format format) { return Util.SDK_INT <= 18 && format.channelCount == 1 && "OMX.MTK.AUDIO.DECODER.MP3".equals(name); } }
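
// A minimal usage sketch (illustrative, not part of the file above). MediaCodecVideoRenderer is
// the library's concrete video subclass; the constructor arguments shown are assumptions for
// illustration.
//
//   MediaCodecRenderer renderer =
//       new MediaCodecVideoRenderer(context, MediaCodecSelector.DEFAULT);
//   // Experimental: cap the time a single render(long, long) call may spend draining and
//   // feeding the codec. Must be called before the renderer is used.
//   renderer.experimental_setRenderTimeLimitMs(10);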
library/core/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecRenderer.java
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.mediacodec; import android.annotation.TargetApi; import android.media.MediaCodec; import android.media.MediaCodec.CodecException; import android.media.MediaCodec.CryptoException; import android.media.MediaCrypto; import android.media.MediaCryptoException; import android.media.MediaFormat; import android.os.Bundle; import android.os.SystemClock; import androidx.annotation.CallSuper; import androidx.annotation.CheckResult; import androidx.annotation.IntDef; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import com.google.android.exoplayer2.BaseRenderer; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.decoder.CryptoInfo; import com.google.android.exoplayer2.decoder.DecoderCounters; import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.drm.DrmSession; import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException; import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException; import com.google.android.exoplayer2.source.MediaPeriod; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.NalUnitUtil; import com.google.android.exoplayer2.util.TimedValueQueue; import com.google.android.exoplayer2.util.TraceUtil; import com.google.android.exoplayer2.util.Util; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.nio.ByteBuffer; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.List; /** * An abstract renderer that uses {@link MediaCodec} to decode samples for rendering. */ public abstract class MediaCodecRenderer extends BaseRenderer { /** * The modes to operate the {@link MediaCodec}. 
* * <p>Allowed values: * * <ul> * <li>{@link #OPERATION_MODE_SYNCHRONOUS} * <li>{@link #OPERATION_MODE_ASYNCHRONOUS_PLAYBACK_THREAD} * <li>{@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD} * <li>{@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK} * </ul> */ @Documented @Retention(RetentionPolicy.SOURCE) @Target({ElementType.TYPE_PARAMETER, ElementType.TYPE_USE}) @IntDef({ OPERATION_MODE_SYNCHRONOUS, OPERATION_MODE_ASYNCHRONOUS_PLAYBACK_THREAD, OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD, OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK, OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_ASYNCHRONOUS_QUEUEING, OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK_ASYNCHRONOUS_QUEUEING }) public @interface MediaCodecOperationMode {} /** Operates the {@link MediaCodec} in synchronous mode. */ public static final int OPERATION_MODE_SYNCHRONOUS = 0; /** * Operates the {@link MediaCodec} in asynchronous mode and routes {@link MediaCodec.Callback} * callbacks to the playback thread. */ public static final int OPERATION_MODE_ASYNCHRONOUS_PLAYBACK_THREAD = 1; /** * Operates the {@link MediaCodec} in asynchronous mode and routes {@link MediaCodec.Callback} * callbacks to a dedicated thread. */ public static final int OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD = 2; /** * Operates the {@link MediaCodec} in asynchronous mode and routes {@link MediaCodec.Callback} * callbacks to a dedicated thread. Uses granular locking for input and output buffers. */ public static final int OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK = 3; /** * Same as {@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD}, and offloads queueing to another * thread. */ public static final int OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_ASYNCHRONOUS_QUEUEING = 4; /** * Same as {@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK}, and offloads queueing * to another thread. */ public static final int OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK_ASYNCHRONOUS_QUEUEING = 5; /** Thrown when a failure occurs instantiating a decoder. */ public static class DecoderInitializationException extends Exception { private static final int CUSTOM_ERROR_CODE_BASE = -50000; private static final int NO_SUITABLE_DECODER_ERROR = CUSTOM_ERROR_CODE_BASE + 1; private static final int DECODER_QUERY_ERROR = CUSTOM_ERROR_CODE_BASE + 2; /** * The mime type for which a decoder was being initialized. */ public final String mimeType; /** * Whether it was required that the decoder support a secure output path. */ public final boolean secureDecoderRequired; /** * The {@link MediaCodecInfo} of the decoder that failed to initialize. Null if no suitable * decoder was found. */ @Nullable public final MediaCodecInfo codecInfo; /** An optional developer-readable diagnostic information string. May be null. */ @Nullable public final String diagnosticInfo; /** * If the decoder failed to initialize and another decoder being used as a fallback also failed * to initialize, the {@link DecoderInitializationException} for the fallback decoder. Null if * there was no fallback decoder or no suitable decoders were found. 
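     *
     * <p>A sketch of walking the full fallback chain, for example when logging (illustrative
     * only; {@code exception} is the outermost exception that was caught):
     *
     * <pre>{@code
     * for (DecoderInitializationException e = exception; e != null;
     *     e = e.fallbackDecoderInitializationException) {
     *   Log.w(TAG, "Decoder init failed: " + e.codecInfo, e);
     * }
     * }</pre>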
*/ @Nullable public final DecoderInitializationException fallbackDecoderInitializationException; public DecoderInitializationException( Format format, @Nullable Throwable cause, boolean secureDecoderRequired, int errorCode) { this( "Decoder init failed: [" + errorCode + "], " + format, cause, format.sampleMimeType, secureDecoderRequired, /* mediaCodecInfo= */ null, buildCustomDiagnosticInfo(errorCode), /* fallbackDecoderInitializationException= */ null); } public DecoderInitializationException( Format format, @Nullable Throwable cause, boolean secureDecoderRequired, MediaCodecInfo mediaCodecInfo) { this( "Decoder init failed: " + mediaCodecInfo.name + ", " + format, cause, format.sampleMimeType, secureDecoderRequired, mediaCodecInfo, Util.SDK_INT >= 21 ? getDiagnosticInfoV21(cause) : null, /* fallbackDecoderInitializationException= */ null); } private DecoderInitializationException( String message, @Nullable Throwable cause, String mimeType, boolean secureDecoderRequired, @Nullable MediaCodecInfo mediaCodecInfo, @Nullable String diagnosticInfo, @Nullable DecoderInitializationException fallbackDecoderInitializationException) { super(message, cause); this.mimeType = mimeType; this.secureDecoderRequired = secureDecoderRequired; this.codecInfo = mediaCodecInfo; this.diagnosticInfo = diagnosticInfo; this.fallbackDecoderInitializationException = fallbackDecoderInitializationException; } @CheckResult private DecoderInitializationException copyWithFallbackException( DecoderInitializationException fallbackException) { return new DecoderInitializationException( getMessage(), getCause(), mimeType, secureDecoderRequired, codecInfo, diagnosticInfo, fallbackException); } @RequiresApi(21) @Nullable private static String getDiagnosticInfoV21(@Nullable Throwable cause) { if (cause instanceof CodecException) { return ((CodecException) cause).getDiagnosticInfo(); } return null; } private static String buildCustomDiagnosticInfo(int errorCode) { String sign = errorCode < 0 ? "neg_" : ""; return "com.google.android.exoplayer2.mediacodec.MediaCodecRenderer_" + sign + Math.abs(errorCode); } } /** Thrown when a failure occurs in the decoder. */ public static class DecoderException extends Exception { /** The {@link MediaCodecInfo} of the decoder that failed. Null if unknown. */ @Nullable public final MediaCodecInfo codecInfo; /** An optional developer-readable diagnostic information string. May be null. */ @Nullable public final String diagnosticInfo; public DecoderException(Throwable cause, @Nullable MediaCodecInfo codecInfo) { super("Decoder failed: " + (codecInfo == null ? null : codecInfo.name), cause); this.codecInfo = codecInfo; diagnosticInfo = Util.SDK_INT >= 21 ? getDiagnosticInfoV21(cause) : null; } @RequiresApi(21) private static String getDiagnosticInfoV21(Throwable cause) { if (cause instanceof CodecException) { return ((CodecException) cause).getDiagnosticInfo(); } return null; } } /** Indicates no codec operating rate should be set. */ protected static final float CODEC_OPERATING_RATE_UNSET = -1; private static final String TAG = "MediaCodecRenderer"; /** * If the {@link MediaCodec} is hotswapped (i.e. replaced during playback), this is the period of * time during which {@link #isReady()} will report true regardless of whether the new codec has * output frames that are ready to be rendered. * <p> * This allows codec hotswapping to be performed seamlessly, without interrupting the playback of * other renderers, provided the new codec is able to decode some frames within this time period. 
*/ private static final long MAX_CODEC_HOTSWAP_TIME_MS = 1000; // Generally there is zero or one pending output stream offset. We track more offsets to allow for // pending output streams that have fewer frames than the codec latency. private static final int MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT = 10; /** * The possible return values for {@link #canKeepCodec(MediaCodec, MediaCodecInfo, Format, * Format)}. */ @Documented @Retention(RetentionPolicy.SOURCE) @IntDef({ KEEP_CODEC_RESULT_NO, KEEP_CODEC_RESULT_YES_WITH_FLUSH, KEEP_CODEC_RESULT_YES_WITH_RECONFIGURATION, KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION }) protected @interface KeepCodecResult {} /** The codec cannot be kept. */ protected static final int KEEP_CODEC_RESULT_NO = 0; /** The codec can be kept, but must be flushed. */ protected static final int KEEP_CODEC_RESULT_YES_WITH_FLUSH = 1; /** * The codec can be kept. It does not need to be flushed, but must be reconfigured by prefixing * the next input buffer with the new format's configuration data. */ protected static final int KEEP_CODEC_RESULT_YES_WITH_RECONFIGURATION = 2; /** The codec can be kept. It does not need to be flushed and no reconfiguration is required. */ protected static final int KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION = 3; @Documented @Retention(RetentionPolicy.SOURCE) @IntDef({ RECONFIGURATION_STATE_NONE, RECONFIGURATION_STATE_WRITE_PENDING, RECONFIGURATION_STATE_QUEUE_PENDING }) private @interface ReconfigurationState {} /** * There is no pending adaptive reconfiguration work. */ private static final int RECONFIGURATION_STATE_NONE = 0; /** * Codec configuration data needs to be written into the next buffer. */ private static final int RECONFIGURATION_STATE_WRITE_PENDING = 1; /** * Codec configuration data has been written into the next buffer, but that buffer still needs to * be returned to the codec. */ private static final int RECONFIGURATION_STATE_QUEUE_PENDING = 2; @Documented @Retention(RetentionPolicy.SOURCE) @IntDef({DRAIN_STATE_NONE, DRAIN_STATE_SIGNAL_END_OF_STREAM, DRAIN_STATE_WAIT_END_OF_STREAM}) private @interface DrainState {} /** The codec is not being drained. */ private static final int DRAIN_STATE_NONE = 0; /** The codec needs to be drained, but we haven't signaled an end of stream to it yet. */ private static final int DRAIN_STATE_SIGNAL_END_OF_STREAM = 1; /** The codec needs to be drained, and we're waiting for it to output an end of stream. */ private static final int DRAIN_STATE_WAIT_END_OF_STREAM = 2; @Documented @Retention(RetentionPolicy.SOURCE) @IntDef({ DRAIN_ACTION_NONE, DRAIN_ACTION_FLUSH, DRAIN_ACTION_UPDATE_DRM_SESSION, DRAIN_ACTION_REINITIALIZE }) private @interface DrainAction {} /** No special action should be taken. */ private static final int DRAIN_ACTION_NONE = 0; /** The codec should be flushed. */ private static final int DRAIN_ACTION_FLUSH = 1; /** The codec should be flushed and updated to use the pending DRM session. */ private static final int DRAIN_ACTION_UPDATE_DRM_SESSION = 2; /** The codec should be reinitialized. */ private static final int DRAIN_ACTION_REINITIALIZE = 3; @Documented @Retention(RetentionPolicy.SOURCE) @IntDef({ ADAPTATION_WORKAROUND_MODE_NEVER, ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION, ADAPTATION_WORKAROUND_MODE_ALWAYS }) private @interface AdaptationWorkaroundMode {} /** * The adaptation workaround is never used. */ private static final int ADAPTATION_WORKAROUND_MODE_NEVER = 0; /** * The adaptation workaround is used when adapting between formats of the same resolution only. 
 */
  private static final int ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION = 1;
  /**
   * The adaptation workaround is always used when adapting between formats.
   */
  private static final int ADAPTATION_WORKAROUND_MODE_ALWAYS = 2;

  /**
   * H.264/AVC buffer to queue when using the adaptation workaround (see {@link
   * #codecAdaptationWorkaroundMode(String)}). Consists of three NAL units with start codes:
   * Baseline sequence/picture parameter sets and a 32 * 32 pixel IDR slice. This stream can be
   * queued to force a resolution change when adapting to a new format.
   */
  private static final byte[] ADAPTATION_WORKAROUND_BUFFER =
      new byte[] {
        0, 0, 1, 103, 66, -64, 11, -38, 37, -112, 0, 0, 1, 104, -50, 15, 19, 32, 0, 0, 1, 101,
        -120, -124, 13, -50, 113, 24, -96, 0, 47, -65, 28, 49, -61, 39, 93, 120
      };

  private static final int ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT = 32;

  private final MediaCodecSelector mediaCodecSelector;
  private final boolean enableDecoderFallback;
  private final float assumedMinimumCodecOperatingRate;
  private final DecoderInputBuffer buffer;
  private final DecoderInputBuffer flagsOnlyBuffer;
  private final TimedValueQueue<Format> formatQueue;
  private final ArrayList<Long> decodeOnlyPresentationTimestamps;
  private final MediaCodec.BufferInfo outputBufferInfo;
  private final long[] pendingOutputStreamOffsetsUs;
  private final long[] pendingOutputStreamSwitchTimesUs;

  @Nullable private Format inputFormat;
  private Format outputFormat;
  @Nullable private DrmSession<FrameworkMediaCrypto> codecDrmSession;
  @Nullable private DrmSession<FrameworkMediaCrypto> sourceDrmSession;
  @Nullable private MediaCrypto mediaCrypto;
  private boolean mediaCryptoRequiresSecureDecoder;
  private long renderTimeLimitMs;
  private float rendererOperatingRate;
  @Nullable private MediaCodec codec;
  @Nullable private MediaCodecAdapter codecAdapter;
  @Nullable private Format codecFormat;
  private float codecOperatingRate;
  @Nullable private ArrayDeque<MediaCodecInfo> availableCodecInfos;
  @Nullable private DecoderInitializationException preferredDecoderInitializationException;
  @Nullable private MediaCodecInfo codecInfo;
  @AdaptationWorkaroundMode private int codecAdaptationWorkaroundMode;
  private boolean codecNeedsReconfigureWorkaround;
  private boolean codecNeedsDiscardToSpsWorkaround;
  private boolean codecNeedsFlushWorkaround;
  private boolean codecNeedsEosFlushWorkaround;
  private boolean codecNeedsEosOutputExceptionWorkaround;
  private boolean codecNeedsMonoChannelCountWorkaround;
  private boolean codecNeedsAdaptationWorkaroundBuffer;
  private boolean shouldSkipAdaptationWorkaroundOutputBuffer;
  private boolean codecNeedsEosPropagation;
  private ByteBuffer[] inputBuffers;
  private ByteBuffer[] outputBuffers;
  private long codecHotswapDeadlineMs;
  private int inputIndex;
  private int outputIndex;
  private ByteBuffer outputBuffer;
  private boolean isDecodeOnlyOutputBuffer;
  private boolean isLastOutputBuffer;
  private boolean codecReconfigured;
  @ReconfigurationState private int codecReconfigurationState;
  @DrainState private int codecDrainState;
  @DrainAction private int codecDrainAction;
  private boolean codecReceivedBuffers;
  private boolean codecReceivedEos;
  private long largestQueuedPresentationTimeUs;
  private long lastBufferInStreamPresentationTimeUs;
  private boolean inputStreamEnded;
  private boolean outputStreamEnded;
  private boolean waitingForKeys;
  private boolean waitingForFirstSyncSample;
  private boolean waitingForFirstSampleInFormat;
  private boolean pendingOutputEndOfStream;
  @MediaCodecOperationMode private int mediaCodecOperationMode;

  protected
DecoderCounters decoderCounters; private long outputStreamOffsetUs; private int pendingOutputStreamOffsetCount; /** * @param trackType The track type that the renderer handles. One of the {@code C.TRACK_TYPE_*} * constants defined in {@link C}. * @param mediaCodecSelector A decoder selector. * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder * initialization fails. This may result in using a decoder that is less efficient or slower * than the primary decoder. * @param assumedMinimumCodecOperatingRate A codec operating rate that all codecs instantiated by * this renderer are assumed to meet implicitly (i.e. without the operating rate being set * explicitly using {@link MediaFormat#KEY_OPERATING_RATE}). */ public MediaCodecRenderer( int trackType, MediaCodecSelector mediaCodecSelector, boolean enableDecoderFallback, float assumedMinimumCodecOperatingRate) { super(trackType); this.mediaCodecSelector = Assertions.checkNotNull(mediaCodecSelector); this.enableDecoderFallback = enableDecoderFallback; this.assumedMinimumCodecOperatingRate = assumedMinimumCodecOperatingRate; buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED); flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance(); formatQueue = new TimedValueQueue<>(); decodeOnlyPresentationTimestamps = new ArrayList<>(); outputBufferInfo = new MediaCodec.BufferInfo(); rendererOperatingRate = 1f; renderTimeLimitMs = C.TIME_UNSET; mediaCodecOperationMode = OPERATION_MODE_SYNCHRONOUS; pendingOutputStreamOffsetsUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT]; pendingOutputStreamSwitchTimesUs = new long[MAX_PENDING_OUTPUT_STREAM_OFFSET_COUNT]; outputStreamOffsetUs = C.TIME_UNSET; resetCodecStateForRelease(); } /** * Set a limit on the time a single {@link #render(long, long)} call can spend draining and * filling the decoder. * * <p>This method is experimental, and will be renamed or removed in a future release. It should * only be called before the renderer is used. * * @param renderTimeLimitMs The render time limit in milliseconds, or {@link C#TIME_UNSET} for no * limit. */ public void experimental_setRenderTimeLimitMs(long renderTimeLimitMs) { this.renderTimeLimitMs = renderTimeLimitMs; } /** * Set the mode of operation of the underlying {@link MediaCodec}. * * <p>This method is experimental, and will be renamed or removed in a future release. It should * only be called before the renderer is used. * * @param mode The mode of the MediaCodec. The supported modes are: * <ul> * <li>{@link #OPERATION_MODE_SYNCHRONOUS}: The {@link MediaCodec} will operate in * synchronous mode. * <li>{@link #OPERATION_MODE_ASYNCHRONOUS_PLAYBACK_THREAD}: The {@link MediaCodec} will * operate in asynchronous mode and {@link MediaCodec.Callback} callbacks will be routed * to the playback thread. This mode requires API level &ge; 21; if the API level is * &le; 20, the operation mode will be set to {@link * MediaCodecRenderer#OPERATION_MODE_SYNCHRONOUS}. * <li>{@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD}: The {@link MediaCodec} will * operate in asynchronous mode and {@link MediaCodec.Callback} callbacks will be routed * to a dedicated thread. This mode requires API level &ge; 23; if the API level is &le; * 22, the operation mode will be set to {@link #OPERATION_MODE_SYNCHRONOUS}. 
   *       <li>{@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK}: Same as {@link
   *           #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD} and, in addition, input buffers will
   *           be submitted to the {@link MediaCodec} in a separate thread.
   *       <li>{@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_ASYNCHRONOUS_QUEUEING}: Same as
   *           {@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD} and, in addition, input buffers
   *           will be submitted to the {@link MediaCodec} in a separate thread.
   *       <li>{@link
   *           #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK_ASYNCHRONOUS_QUEUEING}: Same
   *           as {@link #OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK} and, in addition,
   *           input buffers will be submitted to the {@link MediaCodec} in a separate thread.
   *     </ul>
   *     By default, the operation mode is set to {@link
   *     MediaCodecRenderer#OPERATION_MODE_SYNCHRONOUS}.
   */
  public void experimental_setMediaCodecOperationMode(@MediaCodecOperationMode int mode) {
    mediaCodecOperationMode = mode;
  }

  @Override
  @AdaptiveSupport
  public final int supportsMixedMimeTypeAdaptation() {
    return ADAPTIVE_NOT_SEAMLESS;
  }

  @Override
  @Capabilities
  public final int supportsFormat(Format format) throws ExoPlaybackException {
    try {
      return supportsFormat(mediaCodecSelector, format);
    } catch (DecoderQueryException e) {
      throw createRendererException(e, format);
    }
  }

  /**
   * Returns the {@link Capabilities} for the given {@link Format}.
   *
   * @param mediaCodecSelector The decoder selector.
   * @param format The {@link Format}.
   * @return The {@link Capabilities} for this {@link Format}.
   * @throws DecoderQueryException If there was an error querying decoders.
   */
  @Capabilities
  protected abstract int supportsFormat(
      MediaCodecSelector mediaCodecSelector, Format format) throws DecoderQueryException;

  /**
   * Returns a list of decoders that can decode media in the specified format, in priority order.
   *
   * @param mediaCodecSelector The decoder selector.
   * @param format The {@link Format} for which a decoder is required.
   * @param requiresSecureDecoder Whether a secure decoder is required.
   * @return A list of {@link MediaCodecInfo}s corresponding to decoders. May be empty.
   * @throws DecoderQueryException Thrown if there was an error querying decoders.
   */
  protected abstract List<MediaCodecInfo> getDecoderInfos(
      MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder)
      throws DecoderQueryException;

  /**
   * Configures a newly created {@link MediaCodec}.
   *
   * @param codecInfo Information about the {@link MediaCodec} being configured.
   * @param codec The {@link MediaCodec} to configure.
   * @param format The {@link Format} for which the codec is being configured.
   * @param crypto For drm protected playbacks, a {@link MediaCrypto} to use for decryption.
   * @param codecOperatingRate The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if
   *     no codec operating rate should be set.
   */
  protected abstract void configureCodec(
      MediaCodecInfo codecInfo,
      MediaCodec codec,
      Format format,
      @Nullable MediaCrypto crypto,
      float codecOperatingRate);

  protected final void maybeInitCodec() throws ExoPlaybackException {
    if (codec != null || inputFormat == null) {
      // We have a codec already, or we don't have a format with which to instantiate one.
return; } setCodecDrmSession(sourceDrmSession); String mimeType = inputFormat.sampleMimeType; if (codecDrmSession != null) { if (mediaCrypto == null) { FrameworkMediaCrypto sessionMediaCrypto = codecDrmSession.getMediaCrypto(); if (sessionMediaCrypto == null) { DrmSessionException drmError = codecDrmSession.getError(); if (drmError != null) { // Continue for now. We may be able to avoid failure if the session recovers, or if a // new input format causes the session to be replaced before it's used. } else { // The drm session isn't open yet. return; } } else { try { mediaCrypto = new MediaCrypto(sessionMediaCrypto.uuid, sessionMediaCrypto.sessionId); } catch (MediaCryptoException e) { throw createRendererException(e, inputFormat); } mediaCryptoRequiresSecureDecoder = !sessionMediaCrypto.forceAllowInsecureDecoderComponents && mediaCrypto.requiresSecureDecoderComponent(mimeType); } } if (FrameworkMediaCrypto.WORKAROUND_DEVICE_NEEDS_KEYS_TO_CONFIGURE_CODEC) { @DrmSession.State int drmSessionState = codecDrmSession.getState(); if (drmSessionState == DrmSession.STATE_ERROR) { throw createRendererException(codecDrmSession.getError(), inputFormat); } else if (drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS) { // Wait for keys. return; } } } try { maybeInitCodecWithFallback(mediaCrypto, mediaCryptoRequiresSecureDecoder); } catch (DecoderInitializationException e) { throw createRendererException(e, inputFormat); } } protected boolean shouldInitCodec(MediaCodecInfo codecInfo) { return true; } /** * Returns whether the codec needs the renderer to propagate the end-of-stream signal directly, * rather than by using an end-of-stream buffer queued to the codec. */ protected boolean getCodecNeedsEosPropagation() { return false; } /** * Polls the pending output format queue for a given buffer timestamp. If a format is present, it * is removed and returned. Otherwise returns {@code null}. Subclasses should only call this * method if they are taking over responsibility for output format propagation (e.g., when using * video tunneling). 
*/ @Nullable protected final Format updateOutputFormatForTime(long presentationTimeUs) { Format format = formatQueue.pollFloor(presentationTimeUs); if (format != null) { outputFormat = format; } return format; } @Nullable protected final Format getCurrentOutputFormat() { return outputFormat; } @Nullable protected final MediaCodec getCodec() { return codec; } @Nullable protected final MediaCodecInfo getCodecInfo() { return codecInfo; } @Override protected void onEnabled(boolean joining, boolean mayRenderStartOfStream) throws ExoPlaybackException { decoderCounters = new DecoderCounters(); } @Override protected void onStreamChanged(Format[] formats, long offsetUs) throws ExoPlaybackException { if (outputStreamOffsetUs == C.TIME_UNSET) { outputStreamOffsetUs = offsetUs; } else { if (pendingOutputStreamOffsetCount == pendingOutputStreamOffsetsUs.length) { Log.w( TAG, "Too many stream changes, so dropping offset: " + pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1]); } else { pendingOutputStreamOffsetCount++; } pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1] = offsetUs; pendingOutputStreamSwitchTimesUs[pendingOutputStreamOffsetCount - 1] = largestQueuedPresentationTimeUs; } } @Override protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { inputStreamEnded = false; outputStreamEnded = false; pendingOutputEndOfStream = false; flushOrReinitializeCodec(); // If there is a format change on the input side still pending propagation to the output, we // need to queue a format next time a buffer is read. This is because we may not read a new // input format after the position reset. if (formatQueue.size() > 0) { waitingForFirstSampleInFormat = true; } formatQueue.clear(); if (pendingOutputStreamOffsetCount != 0) { outputStreamOffsetUs = pendingOutputStreamOffsetsUs[pendingOutputStreamOffsetCount - 1]; pendingOutputStreamOffsetCount = 0; } } @Override public final void setOperatingRate(float operatingRate) throws ExoPlaybackException { rendererOperatingRate = operatingRate; if (codec != null && codecDrainAction != DRAIN_ACTION_REINITIALIZE && getState() != STATE_DISABLED) { updateCodecOperatingRate(); } } @Override protected void onDisabled() { inputFormat = null; outputStreamOffsetUs = C.TIME_UNSET; pendingOutputStreamOffsetCount = 0; if (sourceDrmSession != null || codecDrmSession != null) { // TODO: Do something better with this case. onReset(); } else { flushOrReleaseCodec(); } } @Override protected void onReset() { try { releaseCodec(); } finally { setSourceDrmSession(null); } } protected void releaseCodec() { try { if (codecAdapter != null) { codecAdapter.shutdown(); } if (codec != null) { decoderCounters.decoderReleaseCount++; codec.release(); } } finally { codec = null; codecAdapter = null; try { if (mediaCrypto != null) { mediaCrypto.release(); } } finally { mediaCrypto = null; setCodecDrmSession(null); resetCodecStateForRelease(); } } } @Override protected void onStarted() { // Do nothing. Overridden to remove throws clause. } @Override protected void onStopped() { // Do nothing. Overridden to remove throws clause. } @Override public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException { if (pendingOutputEndOfStream) { pendingOutputEndOfStream = false; processEndOfStream(); } try { if (outputStreamEnded) { renderToEndOfStream(); return; } if (inputFormat == null && !readToFlagsOnlyBuffer(/* requireFormat= */ true)) { // We still don't have a format and can't make progress without one. 
return; } // We have a format. maybeInitCodec(); if (codec != null) { long drainStartTimeMs = SystemClock.elapsedRealtime(); TraceUtil.beginSection("drainAndFeed"); while (drainOutputBuffer(positionUs, elapsedRealtimeUs)) {} while (feedInputBuffer() && shouldContinueFeeding(drainStartTimeMs)) {} TraceUtil.endSection(); } else { decoderCounters.skippedInputBufferCount += skipSource(positionUs); // We need to read any format changes despite not having a codec so that drmSession can be // updated, and so that we have the most recent format should the codec be initialized. We // may also reach the end of the stream. Note that readSource will not read a sample into a // flags-only buffer. readToFlagsOnlyBuffer(/* requireFormat= */ false); } decoderCounters.ensureUpdated(); } catch (IllegalStateException e) { if (isMediaCodecException(e)) { throw createRendererException(e, inputFormat); } throw e; } } /** * Flushes the codec. If flushing is not possible, the codec will be released and re-instantiated. * This method is a no-op if the codec is {@code null}. * * <p>The implementation of this method calls {@link #flushOrReleaseCodec()}, and {@link * #maybeInitCodec()} if the codec needs to be re-instantiated. * * @return Whether the codec was released and reinitialized, rather than being flushed. * @throws ExoPlaybackException If an error occurs re-instantiating the codec. */ protected final boolean flushOrReinitializeCodec() throws ExoPlaybackException { boolean released = flushOrReleaseCodec(); if (released) { maybeInitCodec(); } return released; } /** * Flushes the codec. If flushing is not possible, the codec will be released. This method is a * no-op if the codec is {@code null}. * * @return Whether the codec was released. */ protected boolean flushOrReleaseCodec() { if (codec == null) { return false; } if (codecDrainAction == DRAIN_ACTION_REINITIALIZE || codecNeedsFlushWorkaround || (codecNeedsEosFlushWorkaround && codecReceivedEos)) { releaseCodec(); return true; } try { codecAdapter.flush(); } finally { resetCodecStateForFlush(); } return false; } /** Resets the renderer internal state after a codec flush. */ @CallSuper protected void resetCodecStateForFlush() { resetInputBuffer(); resetOutputBuffer(); codecHotswapDeadlineMs = C.TIME_UNSET; codecReceivedEos = false; codecReceivedBuffers = false; waitingForFirstSyncSample = true; codecNeedsAdaptationWorkaroundBuffer = false; shouldSkipAdaptationWorkaroundOutputBuffer = false; isDecodeOnlyOutputBuffer = false; isLastOutputBuffer = false; waitingForKeys = false; decodeOnlyPresentationTimestamps.clear(); largestQueuedPresentationTimeUs = C.TIME_UNSET; lastBufferInStreamPresentationTimeUs = C.TIME_UNSET; codecDrainState = DRAIN_STATE_NONE; codecDrainAction = DRAIN_ACTION_NONE; // Reconfiguration data sent shortly before the flush may not have been processed by the // decoder. If the codec has been reconfigured we always send reconfiguration data again to // guarantee that it's processed. codecReconfigurationState = codecReconfigured ? RECONFIGURATION_STATE_WRITE_PENDING : RECONFIGURATION_STATE_NONE; } /** * Resets the renderer internal state after a codec release. * * <p>Note that this only needs to reset state variables that are changed in addition to those * already changed in {@link #resetCodecStateForFlush()}. 
*/ @CallSuper protected void resetCodecStateForRelease() { resetCodecStateForFlush(); availableCodecInfos = null; codecInfo = null; codecFormat = null; codecOperatingRate = CODEC_OPERATING_RATE_UNSET; codecAdaptationWorkaroundMode = ADAPTATION_WORKAROUND_MODE_NEVER; codecNeedsReconfigureWorkaround = false; codecNeedsDiscardToSpsWorkaround = false; codecNeedsFlushWorkaround = false; codecNeedsEosFlushWorkaround = false; codecNeedsEosOutputExceptionWorkaround = false; codecNeedsMonoChannelCountWorkaround = false; codecNeedsEosPropagation = false; codecReconfigured = false; codecReconfigurationState = RECONFIGURATION_STATE_NONE; resetCodecBuffers(); mediaCryptoRequiresSecureDecoder = false; } protected DecoderException createDecoderException( Throwable cause, @Nullable MediaCodecInfo codecInfo) { return new DecoderException(cause, codecInfo); } /** Reads into {@link #flagsOnlyBuffer} and returns whether a {@link Format} was read. */ private boolean readToFlagsOnlyBuffer(boolean requireFormat) throws ExoPlaybackException { FormatHolder formatHolder = getFormatHolder(); flagsOnlyBuffer.clear(); int result = readSource(formatHolder, flagsOnlyBuffer, requireFormat); if (result == C.RESULT_FORMAT_READ) { onInputFormatChanged(formatHolder); return true; } else if (result == C.RESULT_BUFFER_READ && flagsOnlyBuffer.isEndOfStream()) { inputStreamEnded = true; processEndOfStream(); } return false; } private void maybeInitCodecWithFallback( MediaCrypto crypto, boolean mediaCryptoRequiresSecureDecoder) throws DecoderInitializationException { if (availableCodecInfos == null) { try { List<MediaCodecInfo> allAvailableCodecInfos = getAvailableCodecInfos(mediaCryptoRequiresSecureDecoder); availableCodecInfos = new ArrayDeque<>(); if (enableDecoderFallback) { availableCodecInfos.addAll(allAvailableCodecInfos); } else if (!allAvailableCodecInfos.isEmpty()) { availableCodecInfos.add(allAvailableCodecInfos.get(0)); } preferredDecoderInitializationException = null; } catch (DecoderQueryException e) { throw new DecoderInitializationException( inputFormat, e, mediaCryptoRequiresSecureDecoder, DecoderInitializationException.DECODER_QUERY_ERROR); } } if (availableCodecInfos.isEmpty()) { throw new DecoderInitializationException( inputFormat, /* cause= */ null, mediaCryptoRequiresSecureDecoder, DecoderInitializationException.NO_SUITABLE_DECODER_ERROR); } while (codec == null) { MediaCodecInfo codecInfo = availableCodecInfos.peekFirst(); if (!shouldInitCodec(codecInfo)) { return; } try { initCodec(codecInfo, crypto); } catch (Exception e) { Log.w(TAG, "Failed to initialize decoder: " + codecInfo, e); // This codec failed to initialize, so fall back to the next codec in the list (if any). We // won't try to use this codec again unless there's a format change or the renderer is // disabled and re-enabled. 
availableCodecInfos.removeFirst(); DecoderInitializationException exception = new DecoderInitializationException( inputFormat, e, mediaCryptoRequiresSecureDecoder, codecInfo); if (preferredDecoderInitializationException == null) { preferredDecoderInitializationException = exception; } else { preferredDecoderInitializationException = preferredDecoderInitializationException.copyWithFallbackException(exception); } if (availableCodecInfos.isEmpty()) { throw preferredDecoderInitializationException; } } } availableCodecInfos = null; } private List<MediaCodecInfo> getAvailableCodecInfos(boolean mediaCryptoRequiresSecureDecoder) throws DecoderQueryException { List<MediaCodecInfo> codecInfos = getDecoderInfos(mediaCodecSelector, inputFormat, mediaCryptoRequiresSecureDecoder); if (codecInfos.isEmpty() && mediaCryptoRequiresSecureDecoder) { // The drm session indicates that a secure decoder is required, but the device does not // have one. Assuming that supportsFormat indicated support for the media being played, we // know that it does not require a secure output path. Most CDM implementations allow // playback to proceed with a non-secure decoder in this case, so we try our luck. codecInfos = getDecoderInfos(mediaCodecSelector, inputFormat, /* requiresSecureDecoder= */ false); if (!codecInfos.isEmpty()) { Log.w( TAG, "Drm session requires secure decoder for " + inputFormat.sampleMimeType + ", but no secure decoder available. Trying to proceed with " + codecInfos + "."); } } return codecInfos; } private void initCodec(MediaCodecInfo codecInfo, MediaCrypto crypto) throws Exception { long codecInitializingTimestamp; long codecInitializedTimestamp; MediaCodec codec = null; String codecName = codecInfo.name; float codecOperatingRate = Util.SDK_INT < 23 ? CODEC_OPERATING_RATE_UNSET : getCodecOperatingRateV23(rendererOperatingRate, inputFormat, getStreamFormats()); if (codecOperatingRate <= assumedMinimumCodecOperatingRate) { codecOperatingRate = CODEC_OPERATING_RATE_UNSET; } MediaCodecAdapter codecAdapter = null; try { codecInitializingTimestamp = SystemClock.elapsedRealtime(); TraceUtil.beginSection("createCodec:" + codecName); codec = MediaCodec.createByCodecName(codecName); if (mediaCodecOperationMode == OPERATION_MODE_ASYNCHRONOUS_PLAYBACK_THREAD && Util.SDK_INT >= 21) { codecAdapter = new AsynchronousMediaCodecAdapter(codec); } else if (mediaCodecOperationMode == OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD && Util.SDK_INT >= 23) { codecAdapter = new DedicatedThreadAsyncMediaCodecAdapter(codec, getTrackType()); } else if (mediaCodecOperationMode == OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK && Util.SDK_INT >= 23) { codecAdapter = new MultiLockAsyncMediaCodecAdapter(codec, getTrackType()); } else if (mediaCodecOperationMode == OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_ASYNCHRONOUS_QUEUEING && Util.SDK_INT >= 23) { codecAdapter = new DedicatedThreadAsyncMediaCodecAdapter( codec, /* enableAsynchronousQueueing= */ true, getTrackType()); } else if (mediaCodecOperationMode == OPERATION_MODE_ASYNCHRONOUS_DEDICATED_THREAD_MULTI_LOCK_ASYNCHRONOUS_QUEUEING && Util.SDK_INT >= 23) { codecAdapter = new MultiLockAsyncMediaCodecAdapter( codec, /* enableAsynchronousQueueing= */ true, getTrackType()); } else { codecAdapter = new SynchronousMediaCodecAdapter(codec); } TraceUtil.endSection(); TraceUtil.beginSection("configureCodec"); configureCodec(codecInfo, codec, inputFormat, crypto, codecOperatingRate); TraceUtil.endSection(); TraceUtil.beginSection("startCodec"); codecAdapter.start(); 
TraceUtil.endSection(); codecInitializedTimestamp = SystemClock.elapsedRealtime(); getCodecBuffers(codec); } catch (Exception e) { if (codecAdapter != null) { codecAdapter.shutdown(); } if (codec != null) { resetCodecBuffers(); codec.release(); } throw e; } this.codec = codec; this.codecAdapter = codecAdapter; this.codecInfo = codecInfo; this.codecOperatingRate = codecOperatingRate; codecFormat = inputFormat; codecAdaptationWorkaroundMode = codecAdaptationWorkaroundMode(codecName); codecNeedsReconfigureWorkaround = codecNeedsReconfigureWorkaround(codecName); codecNeedsDiscardToSpsWorkaround = codecNeedsDiscardToSpsWorkaround(codecName, codecFormat); codecNeedsFlushWorkaround = codecNeedsFlushWorkaround(codecName); codecNeedsEosFlushWorkaround = codecNeedsEosFlushWorkaround(codecName); codecNeedsEosOutputExceptionWorkaround = codecNeedsEosOutputExceptionWorkaround(codecName); codecNeedsMonoChannelCountWorkaround = codecNeedsMonoChannelCountWorkaround(codecName, codecFormat); codecNeedsEosPropagation = codecNeedsEosPropagationWorkaround(codecInfo) || getCodecNeedsEosPropagation(); if (getState() == STATE_STARTED) { codecHotswapDeadlineMs = SystemClock.elapsedRealtime() + MAX_CODEC_HOTSWAP_TIME_MS; } decoderCounters.decoderInitCount++; long elapsed = codecInitializedTimestamp - codecInitializingTimestamp; onCodecInitialized(codecName, codecInitializedTimestamp, elapsed); } private boolean shouldContinueFeeding(long drainStartTimeMs) { return renderTimeLimitMs == C.TIME_UNSET || SystemClock.elapsedRealtime() - drainStartTimeMs < renderTimeLimitMs; } private void getCodecBuffers(MediaCodec codec) { if (Util.SDK_INT < 21) { inputBuffers = codec.getInputBuffers(); outputBuffers = codec.getOutputBuffers(); } } private void resetCodecBuffers() { if (Util.SDK_INT < 21) { inputBuffers = null; outputBuffers = null; } } private ByteBuffer getInputBuffer(int inputIndex) { if (Util.SDK_INT >= 21) { return codec.getInputBuffer(inputIndex); } else { return inputBuffers[inputIndex]; } } private ByteBuffer getOutputBuffer(int outputIndex) { if (Util.SDK_INT >= 21) { return codec.getOutputBuffer(outputIndex); } else { return outputBuffers[outputIndex]; } } private boolean hasOutputBuffer() { return outputIndex >= 0; } private void resetInputBuffer() { inputIndex = C.INDEX_UNSET; buffer.data = null; } private void resetOutputBuffer() { outputIndex = C.INDEX_UNSET; outputBuffer = null; } private void setSourceDrmSession(@Nullable DrmSession<FrameworkMediaCrypto> session) { DrmSession.replaceSession(sourceDrmSession, session); sourceDrmSession = session; } private void setCodecDrmSession(@Nullable DrmSession<FrameworkMediaCrypto> session) { DrmSession.replaceSession(codecDrmSession, session); codecDrmSession = session; } /** * @return Whether it may be possible to feed more input data. * @throws ExoPlaybackException If an error occurs feeding the input buffer. */ private boolean feedInputBuffer() throws ExoPlaybackException { if (codec == null || codecDrainState == DRAIN_STATE_WAIT_END_OF_STREAM || inputStreamEnded) { return false; } if (inputIndex < 0) { inputIndex = codecAdapter.dequeueInputBufferIndex(); if (inputIndex < 0) { return false; } buffer.data = getInputBuffer(inputIndex); buffer.clear(); } if (codecDrainState == DRAIN_STATE_SIGNAL_END_OF_STREAM) { // We need to re-initialize the codec. Send an end of stream signal to the existing codec so // that it outputs any remaining buffers before we release it. if (codecNeedsEosPropagation) { // Do nothing. 
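        // For codecs requiring direct EOS propagation we don't queue an end-of-stream buffer;
        // drainOutputBuffer() calls processEndOfStream() for such codecs once input has ended.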
} else { codecReceivedEos = true; codecAdapter.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); resetInputBuffer(); } codecDrainState = DRAIN_STATE_WAIT_END_OF_STREAM; return false; } if (codecNeedsAdaptationWorkaroundBuffer) { codecNeedsAdaptationWorkaroundBuffer = false; buffer.data.put(ADAPTATION_WORKAROUND_BUFFER); codecAdapter.queueInputBuffer(inputIndex, 0, ADAPTATION_WORKAROUND_BUFFER.length, 0, 0); resetInputBuffer(); codecReceivedBuffers = true; return true; } int result; FormatHolder formatHolder = getFormatHolder(); int adaptiveReconfigurationBytes = 0; if (waitingForKeys) { // We've already read an encrypted sample into buffer, and are waiting for keys. result = C.RESULT_BUFFER_READ; } else { // For adaptive reconfiguration OMX decoders expect all reconfiguration data to be supplied // at the start of the buffer that also contains the first frame in the new format. if (codecReconfigurationState == RECONFIGURATION_STATE_WRITE_PENDING) { for (int i = 0; i < codecFormat.initializationData.size(); i++) { byte[] data = codecFormat.initializationData.get(i); buffer.data.put(data); } codecReconfigurationState = RECONFIGURATION_STATE_QUEUE_PENDING; } adaptiveReconfigurationBytes = buffer.data.position(); result = readSource(formatHolder, buffer, false); } if (hasReadStreamToEnd()) { // Notify output queue of the last buffer's timestamp. lastBufferInStreamPresentationTimeUs = largestQueuedPresentationTimeUs; } if (result == C.RESULT_NOTHING_READ) { return false; } if (result == C.RESULT_FORMAT_READ) { if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) { // We received two formats in a row. Clear the current buffer of any reconfiguration data // associated with the first format. buffer.clear(); codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; } onInputFormatChanged(formatHolder); return true; } // We've read a buffer. if (buffer.isEndOfStream()) { if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) { // We received a new format immediately before the end of the stream. We need to clear // the corresponding reconfiguration data from the current buffer, but re-write it into // a subsequent buffer if there are any (e.g. if the user seeks backwards). buffer.clear(); codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; } inputStreamEnded = true; if (!codecReceivedBuffers) { processEndOfStream(); return false; } try { if (codecNeedsEosPropagation) { // Do nothing. } else { codecReceivedEos = true; codecAdapter.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); resetInputBuffer(); } } catch (CryptoException e) { throw createRendererException(e, inputFormat); } return false; } if (waitingForFirstSyncSample && !buffer.isKeyFrame()) { buffer.clear(); if (codecReconfigurationState == RECONFIGURATION_STATE_QUEUE_PENDING) { // The buffer we just cleared contained reconfiguration data. We need to re-write this // data into a subsequent buffer (if there is one). 
codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; } return true; } waitingForFirstSyncSample = false; boolean bufferEncrypted = buffer.isEncrypted(); waitingForKeys = shouldWaitForKeys(bufferEncrypted); if (waitingForKeys) { return false; } if (codecNeedsDiscardToSpsWorkaround && !bufferEncrypted) { NalUnitUtil.discardToSps(buffer.data); if (buffer.data.position() == 0) { return true; } codecNeedsDiscardToSpsWorkaround = false; } try { long presentationTimeUs = buffer.timeUs; if (buffer.isDecodeOnly()) { decodeOnlyPresentationTimestamps.add(presentationTimeUs); } if (waitingForFirstSampleInFormat) { formatQueue.add(presentationTimeUs, inputFormat); waitingForFirstSampleInFormat = false; } largestQueuedPresentationTimeUs = Math.max(largestQueuedPresentationTimeUs, presentationTimeUs); buffer.flip(); if (buffer.hasSupplementalData()) { handleInputBufferSupplementalData(buffer); } onQueueInputBuffer(buffer); if (bufferEncrypted) { CryptoInfo cryptoInfo = buffer.cryptoInfo; cryptoInfo.increaseClearDataFirstSubSampleBy(adaptiveReconfigurationBytes); codecAdapter.queueSecureInputBuffer(inputIndex, 0, cryptoInfo, presentationTimeUs, 0); } else { codecAdapter.queueInputBuffer(inputIndex, 0, buffer.data.limit(), presentationTimeUs, 0); } resetInputBuffer(); codecReceivedBuffers = true; codecReconfigurationState = RECONFIGURATION_STATE_NONE; decoderCounters.inputBufferCount++; } catch (CryptoException e) { throw createRendererException(e, inputFormat); } return true; } private boolean shouldWaitForKeys(boolean bufferEncrypted) throws ExoPlaybackException { if (codecDrmSession == null || (!bufferEncrypted && codecDrmSession.playClearSamplesWithoutKeys())) { return false; } @DrmSession.State int drmSessionState = codecDrmSession.getState(); if (drmSessionState == DrmSession.STATE_ERROR) { throw createRendererException(codecDrmSession.getError(), inputFormat); } return drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS; } /** * Called when a {@link MediaCodec} has been created and configured. * <p> * The default implementation is a no-op. * * @param name The name of the codec that was initialized. * @param initializedTimestampMs {@link SystemClock#elapsedRealtime()} when initialization * finished. * @param initializationDurationMs The time taken to initialize the codec in milliseconds. */ protected void onCodecInitialized(String name, long initializedTimestampMs, long initializationDurationMs) { // Do nothing. } /** * Called when a new {@link Format} is read from the upstream {@link MediaPeriod}. * * @param formatHolder A {@link FormatHolder} that holds the new {@link Format}. * @throws ExoPlaybackException If an error occurs re-initializing the {@link MediaCodec}. */ @SuppressWarnings("unchecked") protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException { waitingForFirstSampleInFormat = true; Format newFormat = Assertions.checkNotNull(formatHolder.format); setSourceDrmSession((DrmSession<FrameworkMediaCrypto>) formatHolder.drmSession); inputFormat = newFormat; if (codec == null) { maybeInitCodec(); return; } // We have an existing codec that we may need to reconfigure or re-initialize. If the existing // codec instance is being kept then its operating rate may need to be updated. 
if ((sourceDrmSession == null && codecDrmSession != null) || (sourceDrmSession != null && codecDrmSession == null) || (sourceDrmSession != codecDrmSession && !codecInfo.secure && maybeRequiresSecureDecoder(sourceDrmSession, newFormat)) || (Util.SDK_INT < 23 && sourceDrmSession != codecDrmSession)) { // We might need to switch between the clear and protected output paths, or we're using DRM // prior to API level 23 where the codec needs to be re-initialized to switch to the new DRM // session. drainAndReinitializeCodec(); return; } switch (canKeepCodec(codec, codecInfo, codecFormat, newFormat)) { case KEEP_CODEC_RESULT_NO: drainAndReinitializeCodec(); break; case KEEP_CODEC_RESULT_YES_WITH_FLUSH: codecFormat = newFormat; updateCodecOperatingRate(); if (sourceDrmSession != codecDrmSession) { drainAndUpdateCodecDrmSession(); } else { drainAndFlushCodec(); } break; case KEEP_CODEC_RESULT_YES_WITH_RECONFIGURATION: if (codecNeedsReconfigureWorkaround) { drainAndReinitializeCodec(); } else { codecReconfigured = true; codecReconfigurationState = RECONFIGURATION_STATE_WRITE_PENDING; codecNeedsAdaptationWorkaroundBuffer = codecAdaptationWorkaroundMode == ADAPTATION_WORKAROUND_MODE_ALWAYS || (codecAdaptationWorkaroundMode == ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION && newFormat.width == codecFormat.width && newFormat.height == codecFormat.height); codecFormat = newFormat; updateCodecOperatingRate(); if (sourceDrmSession != codecDrmSession) { drainAndUpdateCodecDrmSession(); } } break; case KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION: codecFormat = newFormat; updateCodecOperatingRate(); if (sourceDrmSession != codecDrmSession) { drainAndUpdateCodecDrmSession(); } break; default: throw new IllegalStateException(); // Never happens. } } /** * Called when the output {@link MediaFormat} of the {@link MediaCodec} changes. * * <p>The default implementation is a no-op. * * @param codec The {@link MediaCodec} instance. * @param outputMediaFormat The new output {@link MediaFormat}. * @throws ExoPlaybackException Thrown if an error occurs handling the new output media format. */ protected void onOutputMediaFormatChanged(MediaCodec codec, MediaFormat outputMediaFormat) throws ExoPlaybackException { // Do nothing. } /** * Handles supplemental data associated with an input buffer. * * <p>The default implementation is a no-op. * * @param buffer The input buffer that is about to be queued. * @throws ExoPlaybackException Thrown if an error occurs handling supplemental data. */ protected void handleInputBufferSupplementalData(DecoderInputBuffer buffer) throws ExoPlaybackException { // Do nothing. } /** * Called immediately before an input buffer is queued into the codec. * * <p>The default implementation is a no-op. * * @param buffer The buffer to be queued. */ protected void onQueueInputBuffer(DecoderInputBuffer buffer) { // Do nothing. } /** * Called when an output buffer is successfully processed. * * @param presentationTimeUs The timestamp associated with the output buffer. 
*/ @CallSuper protected void onProcessedOutputBuffer(long presentationTimeUs) { while (pendingOutputStreamOffsetCount != 0 && presentationTimeUs >= pendingOutputStreamSwitchTimesUs[0]) { outputStreamOffsetUs = pendingOutputStreamOffsetsUs[0]; pendingOutputStreamOffsetCount--; System.arraycopy( pendingOutputStreamOffsetsUs, /* srcPos= */ 1, pendingOutputStreamOffsetsUs, /* destPos= */ 0, pendingOutputStreamOffsetCount); System.arraycopy( pendingOutputStreamSwitchTimesUs, /* srcPos= */ 1, pendingOutputStreamSwitchTimesUs, /* destPos= */ 0, pendingOutputStreamOffsetCount); onProcessedStreamChange(); } } /** Called after the last output buffer before a stream change has been processed. */ protected void onProcessedStreamChange() { // Do nothing. } /** * Determines whether the existing {@link MediaCodec} can be kept for a new {@link Format}, and if * it can whether it requires reconfiguration. * * <p>The default implementation returns {@link #KEEP_CODEC_RESULT_NO}. * * @param codec The existing {@link MediaCodec} instance. * @param codecInfo A {@link MediaCodecInfo} describing the decoder. * @param oldFormat The {@link Format} for which the existing instance is configured. * @param newFormat The new {@link Format}. * @return Whether the instance can be kept, and if it can whether it requires reconfiguration. */ protected @KeepCodecResult int canKeepCodec( MediaCodec codec, MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) { return KEEP_CODEC_RESULT_NO; } @Override public boolean isEnded() { return outputStreamEnded; } @Override public boolean isReady() { return inputFormat != null && !waitingForKeys && (isSourceReady() || hasOutputBuffer() || (codecHotswapDeadlineMs != C.TIME_UNSET && SystemClock.elapsedRealtime() < codecHotswapDeadlineMs)); } /** * Returns the {@link MediaFormat#KEY_OPERATING_RATE} value for a given renderer operating rate, * current {@link Format} and set of possible stream formats. * * <p>The default implementation returns {@link #CODEC_OPERATING_RATE_UNSET}. * * @param operatingRate The renderer operating rate. * @param format The {@link Format} for which the codec is being configured. * @param streamFormats The possible stream formats. * @return The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if no codec operating * rate should be set. */ protected float getCodecOperatingRateV23( float operatingRate, Format format, Format[] streamFormats) { return CODEC_OPERATING_RATE_UNSET; } /** * Updates the codec operating rate. * * @throws ExoPlaybackException If an error occurs releasing or initializing a codec. */ private void updateCodecOperatingRate() throws ExoPlaybackException { if (Util.SDK_INT < 23) { return; } float newCodecOperatingRate = getCodecOperatingRateV23(rendererOperatingRate, codecFormat, getStreamFormats()); if (codecOperatingRate == newCodecOperatingRate) { // No change. } else if (newCodecOperatingRate == CODEC_OPERATING_RATE_UNSET) { // The only way to clear the operating rate is to instantiate a new codec instance. See // [Internal ref: b/71987865]. drainAndReinitializeCodec(); } else if (codecOperatingRate != CODEC_OPERATING_RATE_UNSET || newCodecOperatingRate > assumedMinimumCodecOperatingRate) { // We need to set the operating rate, either because we've set it previously or because it's // above the assumed minimum rate. 
Bundle codecParameters = new Bundle(); codecParameters.putFloat(MediaFormat.KEY_OPERATING_RATE, newCodecOperatingRate); codec.setParameters(codecParameters); codecOperatingRate = newCodecOperatingRate; } } /** Starts draining the codec for flush. */ private void drainAndFlushCodec() { if (codecReceivedBuffers) { codecDrainState = DRAIN_STATE_SIGNAL_END_OF_STREAM; codecDrainAction = DRAIN_ACTION_FLUSH; } } /** * Starts draining the codec to update its DRM session. The update may occur immediately if no * buffers have been queued to the codec. * * @throws ExoPlaybackException If an error occurs updating the codec's DRM session. */ private void drainAndUpdateCodecDrmSession() throws ExoPlaybackException { if (Util.SDK_INT < 23) { // The codec needs to be re-initialized to switch to the source DRM session. drainAndReinitializeCodec(); return; } if (codecReceivedBuffers) { codecDrainState = DRAIN_STATE_SIGNAL_END_OF_STREAM; codecDrainAction = DRAIN_ACTION_UPDATE_DRM_SESSION; } else { // Nothing has been queued to the decoder, so we can do the update immediately. updateDrmSessionOrReinitializeCodecV23(); } } /** * Starts draining the codec for re-initialization. Re-initialization may occur immediately if no * buffers have been queued to the codec. * * @throws ExoPlaybackException If an error occurs re-initializing a codec. */ private void drainAndReinitializeCodec() throws ExoPlaybackException { if (codecReceivedBuffers) { codecDrainState = DRAIN_STATE_SIGNAL_END_OF_STREAM; codecDrainAction = DRAIN_ACTION_REINITIALIZE; } else { // Nothing has been queued to the decoder, so we can re-initialize immediately. reinitializeCodec(); } } /** * @return Whether it may be possible to drain more output data. * @throws ExoPlaybackException If an error occurs draining the output buffer. */ private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException { if (!hasOutputBuffer()) { int outputIndex; if (codecNeedsEosOutputExceptionWorkaround && codecReceivedEos) { try { outputIndex = codecAdapter.dequeueOutputBufferIndex(outputBufferInfo); } catch (IllegalStateException e) { processEndOfStream(); if (outputStreamEnded) { // Release the codec, as it's in an error state. releaseCodec(); } return false; } } else { outputIndex = codecAdapter.dequeueOutputBufferIndex(outputBufferInfo); } if (outputIndex < 0) { if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED /* (-2) */) { processOutputMediaFormat(); return true; } else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED /* (-3) */) { processOutputBuffersChanged(); return true; } /* MediaCodec.INFO_TRY_AGAIN_LATER (-1) or unknown negative return value */ if (codecNeedsEosPropagation && (inputStreamEnded || codecDrainState == DRAIN_STATE_WAIT_END_OF_STREAM)) { processEndOfStream(); } return false; } // We've dequeued a buffer. if (shouldSkipAdaptationWorkaroundOutputBuffer) { shouldSkipAdaptationWorkaroundOutputBuffer = false; codec.releaseOutputBuffer(outputIndex, false); return true; } else if (outputBufferInfo.size == 0 && (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { // The dequeued buffer indicates the end of the stream. Process it immediately. processEndOfStream(); return false; } this.outputIndex = outputIndex; outputBuffer = getOutputBuffer(outputIndex); // The dequeued buffer is a media buffer. Do some initial setup. // It will be processed by calling processOutputBuffer (possibly multiple times). 
if (outputBuffer != null) { outputBuffer.position(outputBufferInfo.offset); outputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size); } isDecodeOnlyOutputBuffer = isDecodeOnlyBuffer(outputBufferInfo.presentationTimeUs); isLastOutputBuffer = lastBufferInStreamPresentationTimeUs == outputBufferInfo.presentationTimeUs; updateOutputFormatForTime(outputBufferInfo.presentationTimeUs); } boolean processedOutputBuffer; if (codecNeedsEosOutputExceptionWorkaround && codecReceivedEos) { try { processedOutputBuffer = processOutputBuffer( positionUs, elapsedRealtimeUs, codec, outputBuffer, outputIndex, outputBufferInfo.flags, /* sampleCount= */ 1, outputBufferInfo.presentationTimeUs, isDecodeOnlyOutputBuffer, isLastOutputBuffer, outputFormat); } catch (IllegalStateException e) { processEndOfStream(); if (outputStreamEnded) { // Release the codec, as it's in an error state. releaseCodec(); } return false; } } else { processedOutputBuffer = processOutputBuffer( positionUs, elapsedRealtimeUs, codec, outputBuffer, outputIndex, outputBufferInfo.flags, /* sampleCount= */ 1, outputBufferInfo.presentationTimeUs, isDecodeOnlyOutputBuffer, isLastOutputBuffer, outputFormat); } if (processedOutputBuffer) { onProcessedOutputBuffer(outputBufferInfo.presentationTimeUs); boolean isEndOfStream = (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0; resetOutputBuffer(); if (!isEndOfStream) { return true; } processEndOfStream(); } return false; } /** Processes a new output {@link MediaFormat}. */ private void processOutputMediaFormat() throws ExoPlaybackException { MediaFormat mediaFormat = codecAdapter.getOutputFormat(); if (codecAdaptationWorkaroundMode != ADAPTATION_WORKAROUND_MODE_NEVER && mediaFormat.getInteger(MediaFormat.KEY_WIDTH) == ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT && mediaFormat.getInteger(MediaFormat.KEY_HEIGHT) == ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT) { // We assume this format changed event was caused by the adaptation workaround. shouldSkipAdaptationWorkaroundOutputBuffer = true; return; } if (codecNeedsMonoChannelCountWorkaround) { mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1); } onOutputMediaFormatChanged(codec, mediaFormat); } /** * Processes a change in the output buffers. */ private void processOutputBuffersChanged() { if (Util.SDK_INT < 21) { outputBuffers = codec.getOutputBuffers(); } } /** * Processes an output media buffer. * * <p>When a new {@link ByteBuffer} is passed to this method its position and limit delineate the * data to be processed. The return value indicates whether the buffer was processed in full. If * true is returned then the next call to this method will receive a new buffer to be processed. * If false is returned then the same buffer will be passed to the next call. An implementation of * this method is free to modify the buffer and can assume that the buffer will not be externally * modified between successive calls. Hence an implementation can, for example, modify the * buffer's position to keep track of how much of the data it has processed. * * <p>Note that the first call to this method following a call to {@link #onPositionReset(long, * boolean)} will always receive a new {@link ByteBuffer} to be processed. * * @param positionUs The current media time in microseconds, measured at the start of the current * iteration of the rendering loop. * @param elapsedRealtimeUs {@link SystemClock#elapsedRealtime()} in microseconds, measured at the * start of the current iteration of the rendering loop. 
* @param codec The {@link MediaCodec} instance. * @param buffer The output buffer to process. * @param bufferIndex The index of the output buffer. * @param bufferFlags The flags attached to the output buffer. * @param sampleCount The number of samples extracted from the sample queue in the buffer. This * allows handling multiple samples as a batch for efficiency. * @param bufferPresentationTimeUs The presentation time of the output buffer in microseconds. * @param isDecodeOnlyBuffer Whether the buffer was marked with {@link C#BUFFER_FLAG_DECODE_ONLY} * by the source. * @param isLastBuffer Whether the buffer is the last sample of the current stream. * @param format The {@link Format} associated with the buffer. * @return Whether the output buffer was fully processed (e.g. rendered or skipped). * @throws ExoPlaybackException If an error occurs processing the output buffer. */ protected abstract boolean processOutputBuffer( long positionUs, long elapsedRealtimeUs, MediaCodec codec, ByteBuffer buffer, int bufferIndex, int bufferFlags, int sampleCount, long bufferPresentationTimeUs, boolean isDecodeOnlyBuffer, boolean isLastBuffer, Format format) throws ExoPlaybackException; /** * Incrementally renders any remaining output. * <p> * The default implementation is a no-op. * * @throws ExoPlaybackException Thrown if an error occurs rendering remaining output. */ protected void renderToEndOfStream() throws ExoPlaybackException { // Do nothing. } /** * Processes an end of stream signal. * * @throws ExoPlaybackException If an error occurs processing the signal. */ @TargetApi(23) // codecDrainAction == DRAIN_ACTION_UPDATE_DRM_SESSION implies SDK_INT >= 23. private void processEndOfStream() throws ExoPlaybackException { switch (codecDrainAction) { case DRAIN_ACTION_REINITIALIZE: reinitializeCodec(); break; case DRAIN_ACTION_UPDATE_DRM_SESSION: updateDrmSessionOrReinitializeCodecV23(); break; case DRAIN_ACTION_FLUSH: flushOrReinitializeCodec(); break; case DRAIN_ACTION_NONE: default: outputStreamEnded = true; renderToEndOfStream(); break; } } /** * Notifies the renderer that output end of stream is pending and should be handled on the next * render. */ protected final void setPendingOutputEndOfStream() { pendingOutputEndOfStream = true; } /** Returns the largest queued input presentation time, in microseconds. */ protected final long getLargestQueuedPresentationTimeUs() { return largestQueuedPresentationTimeUs; } /** * Returns the offset that should be subtracted from {@code bufferPresentationTimeUs} in {@link * #processOutputBuffer(long, long, MediaCodec, ByteBuffer, int, int, int, long, boolean, boolean, * Format)} to get the playback position with respect to the media. */ protected final long getOutputStreamOffsetUs() { return outputStreamOffsetUs; } private void reinitializeCodec() throws ExoPlaybackException { releaseCodec(); maybeInitCodec(); } private boolean isDecodeOnlyBuffer(long presentationTimeUs) { // We avoid using decodeOnlyPresentationTimestamps.remove(presentationTimeUs) because it would // box presentationTimeUs, creating a Long object that would need to be garbage collected. 
int size = decodeOnlyPresentationTimestamps.size(); for (int i = 0; i < size; i++) { if (decodeOnlyPresentationTimestamps.get(i) == presentationTimeUs) { decodeOnlyPresentationTimestamps.remove(i); return true; } } return false; } @RequiresApi(23) private void updateDrmSessionOrReinitializeCodecV23() throws ExoPlaybackException { @Nullable FrameworkMediaCrypto sessionMediaCrypto = sourceDrmSession.getMediaCrypto(); if (sessionMediaCrypto == null) { // We'd only expect this to happen if the CDM from which the pending session is obtained needs // provisioning. This is unlikely to happen (it probably requires a switch from one DRM scheme // to another, where the new CDM hasn't been used before and needs provisioning). It would be // possible to handle this case more efficiently (i.e. with a new renderer state that waits // for provisioning to finish and then calls mediaCrypto.setMediaDrmSession), but the extra // complexity is not warranted given how unlikely the case is to occur. reinitializeCodec(); return; } if (C.PLAYREADY_UUID.equals(sessionMediaCrypto.uuid)) { // The PlayReady CDM does not implement setMediaDrmSession. // TODO: Add API check once [Internal ref: b/128835874] is fixed. reinitializeCodec(); return; } if (flushOrReinitializeCodec()) { // The codec was reinitialized. The new codec will be using the new DRM session, so there's // nothing more to do. return; } try { mediaCrypto.setMediaDrmSession(sessionMediaCrypto.sessionId); } catch (MediaCryptoException e) { throw createRendererException(e, inputFormat); } setCodecDrmSession(sourceDrmSession); codecDrainState = DRAIN_STATE_NONE; codecDrainAction = DRAIN_ACTION_NONE; } /** * Returns whether a {@link DrmSession} may require a secure decoder for a given {@link Format}. * * @param drmSession The {@link DrmSession}. * @param format The {@link Format}. * @return Whether a secure decoder may be required. */ private static boolean maybeRequiresSecureDecoder( DrmSession<FrameworkMediaCrypto> drmSession, Format format) { @Nullable FrameworkMediaCrypto sessionMediaCrypto = drmSession.getMediaCrypto(); if (sessionMediaCrypto == null) { // We'd only expect this to happen if the CDM from which the pending session is obtained needs // provisioning. This is unlikely to happen (it probably requires a switch from one DRM scheme // to another, where the new CDM hasn't been used before and needs provisioning). Assume that // a secure decoder may be required. return true; } if (sessionMediaCrypto.forceAllowInsecureDecoderComponents) { return false; } MediaCrypto mediaCrypto; try { mediaCrypto = new MediaCrypto(sessionMediaCrypto.uuid, sessionMediaCrypto.sessionId); } catch (MediaCryptoException e) { // This shouldn't happen, but if it does then assume that a secure decoder may be required. return true; } try { return mediaCrypto.requiresSecureDecoderComponent(format.sampleMimeType); } finally { mediaCrypto.release(); } } private static boolean isMediaCodecException(IllegalStateException error) { if (Util.SDK_INT >= 21 && isMediaCodecExceptionV21(error)) { return true; } StackTraceElement[] stackTrace = error.getStackTrace(); return stackTrace.length > 0 && stackTrace[0].getClassName().equals("android.media.MediaCodec"); } @RequiresApi(21) private static boolean isMediaCodecExceptionV21(IllegalStateException error) { return error instanceof MediaCodec.CodecException; } /** * Returns whether the decoder is known to fail when flushed. 
* <p> * If true is returned, the renderer will work around the issue by releasing the decoder and * instantiating a new one rather than flushing the current instance. * <p> * See [Internal: b/8347958, b/8543366]. * * @param name The name of the decoder. * @return True if the decoder is known to fail when flushed. */ private static boolean codecNeedsFlushWorkaround(String name) { return Util.SDK_INT < 18 || (Util.SDK_INT == 18 && ("OMX.SEC.avc.dec".equals(name) || "OMX.SEC.avc.dec.secure".equals(name))) || (Util.SDK_INT == 19 && Util.MODEL.startsWith("SM-G800") && ("OMX.Exynos.avc.dec".equals(name) || "OMX.Exynos.avc.dec.secure".equals(name))); } /** * Returns a mode that specifies when the adaptation workaround should be enabled. * * <p>When enabled, the workaround queues and discards a blank frame with a resolution whose width * and height both equal {@link #ADAPTATION_WORKAROUND_SLICE_WIDTH_HEIGHT}, to reset the decoder's * internal state when a format change occurs. * * <p>See [Internal: b/27807182]. See <a * href="https://github.com/google/ExoPlayer/issues/3257">GitHub issue #3257</a>. * * @param name The name of the decoder. * @return The mode specifying when the adaptation workaround should be enabled. */ private @AdaptationWorkaroundMode int codecAdaptationWorkaroundMode(String name) { if (Util.SDK_INT <= 25 && "OMX.Exynos.avc.dec.secure".equals(name) && (Util.MODEL.startsWith("SM-T585") || Util.MODEL.startsWith("SM-A510") || Util.MODEL.startsWith("SM-A520") || Util.MODEL.startsWith("SM-J700"))) { return ADAPTATION_WORKAROUND_MODE_ALWAYS; } else if (Util.SDK_INT < 24 && ("OMX.Nvidia.h264.decode".equals(name) || "OMX.Nvidia.h264.decode.secure".equals(name)) && ("flounder".equals(Util.DEVICE) || "flounder_lte".equals(Util.DEVICE) || "grouper".equals(Util.DEVICE) || "tilapia".equals(Util.DEVICE))) { return ADAPTATION_WORKAROUND_MODE_SAME_RESOLUTION; } else { return ADAPTATION_WORKAROUND_MODE_NEVER; } } /** * Returns whether the decoder is known to fail when an attempt is made to reconfigure it with a * new format's configuration data. * * <p>When enabled, the workaround will always release and recreate the decoder, rather than * attempting to reconfigure the existing instance. * * @param name The name of the decoder. * @return True if the decoder is known to fail when an attempt is made to reconfigure it with a * new format's configuration data. */ private static boolean codecNeedsReconfigureWorkaround(String name) { return Util.MODEL.startsWith("SM-T230") && "OMX.MARVELL.VIDEO.HW.CODA7542DECODER".equals(name); } /** * Returns whether the decoder is an H.264/AVC decoder known to fail if NAL units are queued * before the codec specific data. * * <p>If true is returned, the renderer will work around the issue by discarding data up to the * SPS. * * @param name The name of the decoder. * @param format The {@link Format} used to configure the decoder. * @return True if the decoder is known to fail if NAL units are queued before CSD. */ private static boolean codecNeedsDiscardToSpsWorkaround(String name, Format format) { return Util.SDK_INT < 21 && format.initializationData.isEmpty() && "OMX.MTK.VIDEO.DECODER.AVC".equals(name); } /** * Returns whether the decoder is known to handle the propagation of the {@link * MediaCodec#BUFFER_FLAG_END_OF_STREAM} flag incorrectly on the host device. 
* * <p>If true is returned, the renderer will work around the issue by approximating end of stream * behavior without relying on the flag being propagated through to an output buffer by the * underlying decoder. * * @param codecInfo Information about the {@link MediaCodec}. * @return True if the decoder is known to handle {@link MediaCodec#BUFFER_FLAG_END_OF_STREAM} * propagation incorrectly on the host device. False otherwise. */ private static boolean codecNeedsEosPropagationWorkaround(MediaCodecInfo codecInfo) { String name = codecInfo.name; return (Util.SDK_INT <= 25 && "OMX.rk.video_decoder.avc".equals(name)) || (Util.SDK_INT <= 17 && "OMX.allwinner.video.decoder.avc".equals(name)) || ("Amazon".equals(Util.MANUFACTURER) && "AFTS".equals(Util.MODEL) && codecInfo.secure); } /** * Returns whether the decoder is known to behave incorrectly if flushed after receiving an input * buffer with {@link MediaCodec#BUFFER_FLAG_END_OF_STREAM} set. * <p> * If true is returned, the renderer will work around the issue by instantiating a new decoder * when this case occurs. * <p> * See [Internal: b/8578467, b/23361053]. * * @param name The name of the decoder. * @return True if the decoder is known to behave incorrectly if flushed after receiving an input * buffer with {@link MediaCodec#BUFFER_FLAG_END_OF_STREAM} set. False otherwise. */ private static boolean codecNeedsEosFlushWorkaround(String name) { return (Util.SDK_INT <= 23 && "OMX.google.vorbis.decoder".equals(name)) || (Util.SDK_INT <= 19 && ("hb2000".equals(Util.DEVICE) || "stvm8".equals(Util.DEVICE)) && ("OMX.amlogic.avc.decoder.awesome".equals(name) || "OMX.amlogic.avc.decoder.awesome.secure".equals(name))); } /** * Returns whether the decoder may throw an {@link IllegalStateException} from * {@link MediaCodec#dequeueOutputBuffer(MediaCodec.BufferInfo, long)} or * {@link MediaCodec#releaseOutputBuffer(int, boolean)} after receiving an input * buffer with {@link MediaCodec#BUFFER_FLAG_END_OF_STREAM} set. * <p> * See [Internal: b/17933838]. * * @param name The name of the decoder. * @return True if the decoder may throw an exception after receiving an end-of-stream buffer. */ private static boolean codecNeedsEosOutputExceptionWorkaround(String name) { return Util.SDK_INT == 21 && "OMX.google.aac.decoder".equals(name); } /** * Returns whether the decoder is known to set the number of audio channels in the output {@link * Format} to 2 for the given input {@link Format}, whilst only actually outputting a single * channel. * * <p>If true is returned then we explicitly override the number of channels in the output {@link * Format}, setting it to 1. * * @param name The decoder name. * @param format The input {@link Format}. * @return True if the decoder is known to set the number of audio channels in the output {@link * Format} to 2 for the given input {@link Format}, whilst only actually outputting a single * channel. False otherwise. */ private static boolean codecNeedsMonoChannelCountWorkaround(String name, Format format) { return Util.SDK_INT <= 18 && format.channelCount == 1 && "OMX.MTK.AUDIO.DECODER.MP3".equals(name); } }
MediaCodecRenderer: apply rendering limit on feed PiperOrigin-RevId: 297873726
library/core/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecRenderer.java
MediaCodecRenderer: apply rendering limit on feed
<ide><path>library/core/src/main/java/com/google/android/exoplayer2/mediacodec/MediaCodecRenderer.java <ide> // We have a format. <ide> maybeInitCodec(); <ide> if (codec != null) { <del> long drainStartTimeMs = SystemClock.elapsedRealtime(); <add> long renderStartTimeMs = SystemClock.elapsedRealtime(); <ide> TraceUtil.beginSection("drainAndFeed"); <del> while (drainOutputBuffer(positionUs, elapsedRealtimeUs)) {} <del> while (feedInputBuffer() && shouldContinueFeeding(drainStartTimeMs)) {} <add> while (drainOutputBuffer(positionUs, elapsedRealtimeUs) <add> && shouldContinueRendering(renderStartTimeMs)) {} <add> while (feedInputBuffer() && shouldContinueRendering(renderStartTimeMs)) {} <ide> TraceUtil.endSection(); <ide> } else { <ide> decoderCounters.skippedInputBufferCount += skipSource(positionUs); <ide> onCodecInitialized(codecName, codecInitializedTimestamp, elapsed); <ide> } <ide> <del> private boolean shouldContinueFeeding(long drainStartTimeMs) { <add> private boolean shouldContinueRendering(long renderStartTimeMs) { <ide> return renderTimeLimitMs == C.TIME_UNSET <del> || SystemClock.elapsedRealtime() - drainStartTimeMs < renderTimeLimitMs; <add> || SystemClock.elapsedRealtime() - renderStartTimeMs < renderTimeLimitMs; <ide> } <ide> <ide> private void getCodecBuffers(MediaCodec codec) {
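The diff above makes the drain and feed loops share one render-time budget instead of limiting only the feed side. Below is a minimal, self-contained Java sketch of that pattern; the class name, the TIME_UNSET sentinel, and the stubbed buffer methods are illustrative stand-ins, not the actual ExoPlayer implementation.

final class RenderLoopSketch {
  // Sentinel meaning "no limit configured", analogous to ExoPlayer's C.TIME_UNSET.
  static final long TIME_UNSET = Long.MIN_VALUE + 1;

  private final long renderTimeLimitMs;
  private int workRemaining = 1000; // fake pending buffers so the demo terminates

  RenderLoopSketch(long renderTimeLimitMs) {
    this.renderTimeLimitMs = renderTimeLimitMs;
  }

  void render() {
    long renderStartTimeMs = System.currentTimeMillis();
    // Both loops check the same deadline, so neither draining output nor
    // feeding input can monopolize the playback thread once the budget is spent.
    while (drainOutputBuffer() && shouldContinueRendering(renderStartTimeMs)) {}
    while (feedInputBuffer() && shouldContinueRendering(renderStartTimeMs)) {}
  }

  private boolean shouldContinueRendering(long renderStartTimeMs) {
    return renderTimeLimitMs == TIME_UNSET
        || System.currentTimeMillis() - renderStartTimeMs < renderTimeLimitMs;
  }

  private boolean drainOutputBuffer() { return workRemaining-- > 0; }

  private boolean feedInputBuffer() { return workRemaining-- > 0; }

  public static void main(String[] args) {
    new RenderLoopSketch(10).render(); // budget of 10 ms per render() call
  }
}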
Java
apache-2.0
e397812b11a1229189333d74283624aa0ac0df99
0
sematext/solr-redis
package com.sematext.solr.redis; import com.sematext.lucene.query.TaggedQuery; import com.sematext.solr.redis.command.Command; import com.sematext.solr.redis.command.Eval; import com.sematext.solr.redis.command.EvalSha; import com.sematext.solr.redis.command.Get; import com.sematext.solr.redis.command.HGet; import com.sematext.solr.redis.command.HKeys; import com.sematext.solr.redis.command.HMGet; import com.sematext.solr.redis.command.HVals; import com.sematext.solr.redis.command.Keys; import com.sematext.solr.redis.command.LIndex; import com.sematext.solr.redis.command.LRange; import com.sematext.solr.redis.command.MGet; import com.sematext.solr.redis.command.SDiff; import com.sematext.solr.redis.command.SInter; import com.sematext.solr.redis.command.SMembers; import com.sematext.solr.redis.command.SRandMember; import com.sematext.solr.redis.command.SUnion; import com.sematext.solr.redis.command.Sort; import com.sematext.solr.redis.command.ValueFilter; import com.sematext.solr.redis.command.ZRange; import com.sematext.solr.redis.command.ZRangeByScore; import com.sematext.solr.redis.command.ZRevRange; import com.sematext.solr.redis.command.ZRevrangeByScore; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.apache.solr.common.params.SolrParams; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.search.QParser; import org.apache.solr.search.QueryParsing; import org.apache.solr.search.SyntaxError; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.ArrayList; import java.util.List; import org.apache.lucene.search.BoostQuery; /** * RedisQParser is responsible for preparing a query based on data fetched from Redis. * * @author prog * @author lstrojny */ final class RedisQParser extends QParser { /** * Logger */ private static final Logger log = LoggerFactory.getLogger(RedisQParser.class); /** * Collection of commands */ private static final Map<String, Command<?>> commands; static { commands = new HashMap<>(); commands.put("SDIFF", new SDiff()); commands.put("SINTER", new SInter()); commands.put("SMEMBERS", new SMembers()); commands.put("SRANDMEMBER", new SRandMember()); commands.put("SUNION", new SUnion()); commands.put("ZRANGE", new ZRange()); commands.put("ZREVRANGE", new ZRevRange()); commands.put("ZRANGEBYSCORE", new ZRangeByScore()); commands.put("ZREVRANGEBYSCORE", new ZRevrangeByScore()); commands.put("HGET", new HGet()); commands.put("HKEYS", new HKeys()); commands.put("HMGET", new HMGet()); commands.put("HVALS", new HVals()); commands.put("LRANGE", new LRange()); commands.put("LINDEX", new LIndex()); commands.put("GET", new Get(new ValueFilter())); commands.put("MGET", new MGet()); commands.put("KEYS", new Keys()); commands.put("SORT", new Sort()); commands.put("EVAL", new Eval()); commands.put("EVALSHA", new EvalSha()); } /** * Jedis command handler */ private final CommandHandler commandHandler; /** * Operator used to build query. */ private BooleanClause.Occur operator = BooleanClause.Occur.SHOULD; /** * Redis command name to use. 
*/ private final String redisCommand; /** * Query tag name - virtual field name. Used for highlighting. */ private final String queryTag; /** * Parameters which determines if this QParser should analyze data from Redis. */ private final boolean useQueryTimeAnalyzer; /** * * @param qstr Query string * @param localParams Local parameters for this query parser * @param params Parameters * @param req Request object * @param commandHandler Redis command handler */ RedisQParser(final String qstr, final SolrParams localParams, final SolrParams params, final SolrQueryRequest req, final CommandHandler commandHandler) { super(qstr, localParams, params, req); this.commandHandler = commandHandler; redisCommand = localParams.get("command") == null ? null : localParams.get("command").toUpperCase(); final String operatorString = localParams.get("operator"); queryTag = localParams.get("tag"); if (redisCommand == null) { log.error("No command argument passed to RedisQParser."); throw new IllegalArgumentException("No command argument passed to RedisQParser."); } else if (!commands.containsKey(redisCommand)) { log.error("Wrong Redis command: {}", redisCommand); throw new IllegalArgumentException(String.format("Wrong Redis command '%s'.", redisCommand)); } operator = "AND".equalsIgnoreCase(operatorString) ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD; useQueryTimeAnalyzer = localParams.getBool("useAnalyzer", false); } @Override public Query parse() throws SyntaxError { final String fieldName = localParams.get(QueryParsing.V); final BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder(); final List<BytesRef> queryTerms = new ArrayList<>(); booleanQueryBuilder.setDisableCoord(true); int booleanClausesTotal = 0; final Map<String, Float> results = commandHandler.executeCommand(commands.get(redisCommand), localParams); if (results != null) { log.debug("Preparing a query for {} redis objects for field: {}", results.size(), fieldName); for (final Map.Entry<String, Float> entry : results.entrySet()) { try { final String termString = entry.getKey(); if (termString == null) { continue; } final Float score = entry.getValue(); if (useQueryTimeAnalyzer) { log.trace("Term string {}", termString); try (final TokenStream tokenStream = req.getSchema().getQueryAnalyzer().tokenStream(fieldName, termString)) { final CharTermAttribute charAttribute = tokenStream.addAttribute(CharTermAttribute.class); tokenStream.reset(); int counter = 0; while (tokenStream.incrementToken()) { log.trace("Taking {} token {} with score {} from query string from {} for field: {}", ++counter, charAttribute, score, termString, fieldName); if (this.operator == BooleanClause.Occur.MUST) { addTermToQuery(booleanQueryBuilder, fieldName, new BytesRef(charAttribute), score); } else { queryTerms.add(new BytesRef(charAttribute)); } ++booleanClausesTotal; } tokenStream.end(); } } else { if (this.operator == BooleanClause.Occur.MUST) { addTermToQuery(booleanQueryBuilder, fieldName, new BytesRef(termString), score); } else { queryTerms.add(new BytesRef(termString)); } ++booleanClausesTotal; } } catch (final IOException ex) { log.error("Error occurred during processing token stream.", ex); } } } log.debug("Prepared a query for field {} with {} boolean clauses. 
(request params: {}}", fieldName, booleanClausesTotal, req.getParamString()); if (queryTag == null || queryTag.isEmpty()) { if (this.operator == BooleanClause.Occur.MUST) { return booleanQueryBuilder.build(); } else { return new TermsQuery(fieldName, queryTerms); } } else { if (this.operator == BooleanClause.Occur.MUST) { return new TaggedQuery(booleanQueryBuilder.build(), queryTag); } else { return new TaggedQuery(new TermsQuery(fieldName, queryTerms), queryTag); } } } /** * Adds clause to query. * * @param queryBuilder Boolean query builder object which should take new clauses. * @param fieldName Field name used in added clause. * @param term Term * @param score Optional score */ private void addTermToQuery(final BooleanQuery.Builder queryBuilder, final String fieldName, final BytesRef term, final Float score) { Query termQuery = new TermQuery(new Term(fieldName, term)); if (!score.isNaN()) { termQuery = new BoostQuery(termQuery, score); } queryBuilder.add(termQuery, this.operator); } }
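The addTermToQuery javadoc above calls the score optional: the parser treats Float.NaN as "no boost" and otherwise wraps the term in a BoostQuery. A short sketch of that rule, assuming the same Lucene 5.x classes the parser already imports:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;

final class BoostRuleSketch {
  static Query termQuery(String field, BytesRef term, float score) {
    Query query = new TermQuery(new Term(field, term));
    // Mirrors addTermToQuery: NaN leaves the term unboosted, any other
    // score wraps it in a BoostQuery.
    return Float.isNaN(score) ? query : new BoostQuery(query, score);
  }

  public static void main(String[] args) {
    System.out.println(termQuery("tags", new BytesRef("alpha"), Float.NaN)); // unboosted
    System.out.println(termQuery("tags", new BytesRef("alpha"), 2.0f));      // boosted by 2.0
  }
}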
src/main/java/com/sematext/solr/redis/RedisQParser.java
package com.sematext.solr.redis; import com.sematext.lucene.query.TaggedQuery; import com.sematext.solr.redis.command.Command; import com.sematext.solr.redis.command.Eval; import com.sematext.solr.redis.command.EvalSha; import com.sematext.solr.redis.command.Get; import com.sematext.solr.redis.command.HGet; import com.sematext.solr.redis.command.HKeys; import com.sematext.solr.redis.command.HMGet; import com.sematext.solr.redis.command.HVals; import com.sematext.solr.redis.command.Keys; import com.sematext.solr.redis.command.LIndex; import com.sematext.solr.redis.command.LRange; import com.sematext.solr.redis.command.MGet; import com.sematext.solr.redis.command.SDiff; import com.sematext.solr.redis.command.SInter; import com.sematext.solr.redis.command.SMembers; import com.sematext.solr.redis.command.SRandMember; import com.sematext.solr.redis.command.SUnion; import com.sematext.solr.redis.command.Sort; import com.sematext.solr.redis.command.ValueFilter; import com.sematext.solr.redis.command.ZRange; import com.sematext.solr.redis.command.ZRangeByScore; import com.sematext.solr.redis.command.ZRevRange; import com.sematext.solr.redis.command.ZRevrangeByScore; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.apache.solr.common.params.SolrParams; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.search.QParser; import org.apache.solr.search.QueryParsing; import org.apache.solr.search.SyntaxError; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.ArrayList; import java.util.List; import org.apache.lucene.search.BoostQuery; /** * RedisQParser is responsible for preparing a query based on data fetched from Redis. * * @author prog * @author lstrojny */ final class RedisQParser extends QParser { /** * Logger */ private static final Logger log = LoggerFactory.getLogger(RedisQParser.class); /** * Collection of commands */ private static final Map<String, Command<?>> commands; static { commands = new HashMap<>(); commands.put("SDIFF", new SDiff()); commands.put("SINTER", new SInter()); commands.put("SMEMBERS", new SMembers()); commands.put("SRANDMEMBER", new SRandMember()); commands.put("SUNION", new SUnion()); commands.put("ZRANGE", new ZRange()); commands.put("ZREVRANGE", new ZRevRange()); commands.put("ZRANGEBYSCORE", new ZRangeByScore()); commands.put("ZREVRANGEBYSCORE", new ZRevrangeByScore()); commands.put("HGET", new HGet()); commands.put("HKEYS", new HKeys()); commands.put("HMGET", new HMGet()); commands.put("HVALS", new HVals()); commands.put("LRANGE", new LRange()); commands.put("LINDEX", new LIndex()); commands.put("GET", new Get(new ValueFilter())); commands.put("MGET", new MGet()); commands.put("KEYS", new Keys()); commands.put("SORT", new Sort()); commands.put("EVAL", new Eval()); commands.put("EVALSHA", new EvalSha()); } /** * Jedis command handler */ private final CommandHandler commandHandler; /** * Operator used to build query. */ private BooleanClause.Occur operator = BooleanClause.Occur.SHOULD; /** * Redis command name to use. 
*/ private final String redisCommand; /** * Query tag name - virtual field name. Used for highlighting. */ private final String queryTag; /** * Parameters which determines if this QParser should analyze data from Redis. */ private final boolean useQueryTimeAnalyzer; /** * * @param qstr Query string * @param localParams Local parameters for this query parser * @param params Parameters * @param req Request object * @param commandHandler Redis command handler */ RedisQParser(final String qstr, final SolrParams localParams, final SolrParams params, final SolrQueryRequest req, final CommandHandler commandHandler) { super(qstr, localParams, params, req); this.commandHandler = commandHandler; redisCommand = localParams.get("command") == null ? null : localParams.get("command").toUpperCase(); final String operatorString = localParams.get("operator"); queryTag = localParams.get("tag"); if (redisCommand == null) { log.error("No command argument passed to RedisQParser."); throw new IllegalArgumentException("No command argument passed to RedisQParser."); } else if (!commands.containsKey(redisCommand)) { log.error("Wrong Redis command: {}", redisCommand); throw new IllegalArgumentException(String.format("Wrong Redis command '%s'.", redisCommand)); } operator = "AND".equalsIgnoreCase(operatorString) ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD; useQueryTimeAnalyzer = localParams.getBool("useAnalyzer", false); } @Override public Query parse() throws SyntaxError { final String fieldName = localParams.get(QueryParsing.V); final BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder(); final List<BytesRef> queryTerms = new ArrayList<>(); booleanQueryBuilder.setDisableCoord(true); int booleanClausesTotal = 0; final Map<String, Float> results = commandHandler.executeCommand(commands.get(redisCommand), localParams); if (results != null) { log.debug("Preparing a query for {} redis objects for field: {}", results.size(), fieldName); for (final Map.Entry<String, Float> entry : results.entrySet()) { try { final String termString = entry.getKey(); if (termString == null) { continue; } final Float score = entry.getValue(); if (useQueryTimeAnalyzer) { log.trace("Term string {}", termString); try (final TokenStream tokenStream = req.getSchema().getQueryAnalyzer().tokenStream(fieldName, termString)) { final CharTermAttribute charAttribute = tokenStream.addAttribute(CharTermAttribute.class); tokenStream.reset(); int counter = 0; while (tokenStream.incrementToken()) { log.trace("Taking {} token {} with score {} from query string from {} for field: {}", ++counter, charAttribute, score, termString, fieldName); if (this.operator == BooleanClause.Occur.MUST) { addTermToQuery(booleanQueryBuilder, fieldName, new BytesRef(charAttribute), score); } else { queryTerms.add(new BytesRef(charAttribute)); } ++booleanClausesTotal; } tokenStream.end(); } } else { if (this.operator == BooleanClause.Occur.MUST) { addTermToQuery(booleanQueryBuilder, fieldName, new BytesRef(termString), score); } else { queryTerms.add(new BytesRef(termString)); } ++booleanClausesTotal; } } catch (final IOException ex) { log.error("Error occurred during processing token stream.", ex); } } } log.debug("Prepared a query for field {} with {} boolean clauses. 
(request params: {}}", fieldName, booleanClausesTotal, req.getParamString()); if (queryTag == null || queryTag.isEmpty()) { if(this.operator == BooleanClause.Occur.MUST){ return booleanQueryBuilder.build(); }else{ return new TermsQuery(fieldName, queryTerms); } } else { if(this.operator == BooleanClause.Occur.MUST){ return new TaggedQuery(booleanQueryBuilder.build(), queryTag); }else{ return new TaggedQuery(new TermsQuery(fieldName, queryTerms), queryTag); } } } /** * Adds clause to query. * * @param queryBuilder Boolean query builder object which should take new clauses. * @param fieldName Field name used in added clause. * @param term Term * @param score Optional score */ private void addTermToQuery(final BooleanQuery.Builder queryBuilder, final String fieldName, final BytesRef term, final Float score) { Query termQuery = new TermQuery(new Term(fieldName, term)); if (!score.isNaN()) { termQuery = new BoostQuery(termQuery, score); } queryBuilder.add(termQuery, this.operator); } }
Fixed styling issues introduced in previous commit
src/main/java/com/sematext/solr/redis/RedisQParser.java
Fixed styling issues introduced in previous commit
<ide><path>src/main/java/com/sematext/solr/redis/RedisQParser.java <ide> booleanClausesTotal, req.getParamString()); <ide> <ide> if (queryTag == null || queryTag.isEmpty()) { <del> if(this.operator == BooleanClause.Occur.MUST){ <add> if (this.operator == BooleanClause.Occur.MUST) { <ide> return booleanQueryBuilder.build(); <del> }else{ <add> } else { <ide> return new TermsQuery(fieldName, queryTerms); <ide> } <del> <ide> } else { <del> if(this.operator == BooleanClause.Occur.MUST){ <add> if (this.operator == BooleanClause.Occur.MUST) { <ide> return new TaggedQuery(booleanQueryBuilder.build(), queryTag); <del> }else{ <add> } else { <ide> return new TaggedQuery(new TermsQuery(fieldName, queryTerms), queryTag); <ide> } <ide> }
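For reference, the branch this diff restyles picks between two query shapes: MUST accumulates required clauses through a BooleanQuery.Builder, while SHOULD collapses the collected terms into one TermsQuery, a cheaper, filter-style disjunction. A sketch under the same Lucene 5.x assumption:

import java.util.Arrays;
import java.util.List;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;

final class OperatorBranchSketch {
  static Query build(BooleanClause.Occur operator, String field, List<BytesRef> terms) {
    if (operator == BooleanClause.Occur.MUST) {
      // AND semantics: every term becomes a required clause.
      BooleanQuery.Builder builder = new BooleanQuery.Builder();
      for (BytesRef term : terms) {
        builder.add(new TermQuery(new Term(field, term)), BooleanClause.Occur.MUST);
      }
      return builder.build();
    }
    // OR semantics: TermsQuery matches documents containing any of the terms.
    return new TermsQuery(field, terms);
  }

  public static void main(String[] args) {
    List<BytesRef> terms = Arrays.asList(new BytesRef("alpha"), new BytesRef("beta"));
    System.out.println(build(BooleanClause.Occur.MUST, "tags", terms));
    System.out.println(build(BooleanClause.Occur.SHOULD, "tags", terms));
  }
}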
Java
apache-2.0
6d7582591cd75b27d6ebf3ef5471b98acc9ae5b5
0
ButterflyNetwork/bazel,akira-baruah/bazel,perezd/bazel,meteorcloudy/bazel,dslomov/bazel,ulfjack/bazel,twitter-forks/bazel,werkt/bazel,cushon/bazel,bazelbuild/bazel,davidzchen/bazel,dslomov/bazel-windows,katre/bazel,safarmer/bazel
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.apple; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.analysis.config.CoreOptionConverters.DefaultLabelConverter; import com.google.devtools.build.lib.analysis.config.CoreOptionConverters.LabelConverter; import com.google.devtools.build.lib.analysis.config.FragmentOptions; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; import com.google.devtools.build.lib.rules.apple.AppleConfiguration.ConfigurationDistinguisher; import com.google.devtools.build.lib.rules.apple.ApplePlatform.PlatformType; import com.google.devtools.build.lib.skyframe.serialization.DeserializationContext; import com.google.devtools.build.lib.skyframe.serialization.SerializationContext; import com.google.devtools.build.lib.skyframe.serialization.SerializationException; import com.google.devtools.build.lib.skylarkbuildapi.apple.AppleBitcodeModeApi; import com.google.devtools.build.lib.skylarkinterface.SkylarkPrinter; import com.google.devtools.common.options.Converters.CommaSeparatedOptionListConverter; import com.google.devtools.common.options.EnumConverter; import com.google.devtools.common.options.Option; import com.google.devtools.common.options.OptionDocumentationCategory; import com.google.devtools.common.options.OptionEffectTag; import com.google.devtools.common.options.OptionMetadataTag; import com.google.protobuf.CodedInputStream; import com.google.protobuf.CodedOutputStream; import java.io.IOException; import java.util.List; /** Command-line options for building for Apple platforms. */ public class AppleCommandLineOptions extends FragmentOptions { @Option( name = "experimental_apple_mandatory_minimum_version", defaultValue = "false", documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.BUILD_FILE_SEMANTICS}, help = "Whether Apple rules must have a mandatory minimum_os_version attribute." ) // TODO(b/37096178): This flag should be default-on and then be removed. public boolean mandatoryMinimumVersion; @Option( name = "experimental_objc_provider_from_linked", defaultValue = "true", documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.BUILD_FILE_SEMANTICS}, help = "Whether Apple rules which control linking should propagate objc provider at the top " + "level" ) // TODO(b/32411441): This flag should be default-off and then be removed. public boolean objcProviderFromLinked; @Option( name = "xcode_version", defaultValue = "null", documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "If specified, uses Xcode of the given version for relevant build actions. 
" + "If unspecified, uses the executor default version of Xcode." ) public String xcodeVersion; @Option( name = "ios_sdk_version", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Specifies the version of the iOS SDK to use to build iOS applications. " + "If unspecified, uses default iOS SDK version from 'xcode_version'.") public DottedVersion.Option iosSdkVersion; @Option( name = "watchos_sdk_version", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Specifies the version of the watchOS SDK to use to build watchOS applications. " + "If unspecified, uses default watchOS SDK version from 'xcode_version'.") public DottedVersion.Option watchOsSdkVersion; @Option( name = "tvos_sdk_version", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Specifies the version of the tvOS SDK to use to build tvOS applications. " + "If unspecified, uses default tvOS SDK version from 'xcode_version'.") public DottedVersion.Option tvOsSdkVersion; @Option( name = "macos_sdk_version", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Specifies the version of the macOS SDK to use to build macOS applications. " + "If unspecified, uses default macOS SDK version from 'xcode_version'.") public DottedVersion.Option macOsSdkVersion; @Option( name = "ios_minimum_os", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Minimum compatible iOS version for target simulators and devices. " + "If unspecified, uses 'ios_sdk_version'.") public DottedVersion.Option iosMinimumOs; @Option( name = "watchos_minimum_os", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Minimum compatible watchOS version for target simulators and devices. " + "If unspecified, uses 'watchos_sdk_version'.") public DottedVersion.Option watchosMinimumOs; @Option( name = "tvos_minimum_os", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Minimum compatible tvOS version for target simulators and devices. " + "If unspecified, uses 'tvos_sdk_version'.") public DottedVersion.Option tvosMinimumOs; @Option( name = "macos_minimum_os", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Minimum compatible macOS version for targets. 
" + "If unspecified, uses 'macos_sdk_version'.") public DottedVersion.Option macosMinimumOs; @VisibleForTesting public static final String DEFAULT_IOS_SDK_VERSION = "8.4"; @VisibleForTesting public static final String DEFAULT_WATCHOS_SDK_VERSION = "2.0"; @VisibleForTesting public static final String DEFAULT_MACOS_SDK_VERSION = "10.10"; @VisibleForTesting public static final String DEFAULT_TVOS_SDK_VERSION = "9.0"; @VisibleForTesting static final String DEFAULT_IOS_CPU = "x86_64"; /** The default watchos CPU value. */ public static final String DEFAULT_WATCHOS_CPU = "i386"; /** The default tvOS CPU value. */ public static final String DEFAULT_TVOS_CPU = "x86_64"; /** The default macOS CPU value. */ public static final String DEFAULT_MACOS_CPU = "x86_64"; @Option( name = "ios_cpu", defaultValue = DEFAULT_IOS_CPU, documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.NO_OP}, metadataTags = {OptionMetadataTag.DEPRECATED}, help = "Specifies to target CPU of iOS compilation." ) public String iosCpu; @Option( name = "apple_compiler", defaultValue = "null", documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = { OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.LOADING_AND_ANALYSIS, OptionEffectTag.LOSES_INCREMENTAL_STATE, }, help = "The Apple target compiler. Useful for selecting variants of a toolchain " + "(e.g. xcode-beta)." ) public String cppCompiler; @Option( name = "apple_grte_top", defaultValue = "null", converter = LabelConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = { OptionEffectTag.CHANGES_INPUTS, OptionEffectTag.LOADING_AND_ANALYSIS, OptionEffectTag.LOSES_INCREMENTAL_STATE, }, help = "The Apple target grte_top." ) public Label appleLibcTop; @Option( name = "apple_crosstool_top", defaultValue = "@bazel_tools//tools/cpp:toolchain", converter = LabelConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.CHANGES_INPUTS}, help = "The label of the crosstool package to be used in Apple and Objc rules and their" + " dependencies." ) public Label appleCrosstoolTop; @Option( name = "apple_platform_type", defaultValue = "MACOS", converter = PlatformTypeConverter.class, documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION}, help = "Don't set this value from the command line - it is derived from other flags and " + "configuration transitions derived from rule attributes") public PlatformType applePlatformType; @Option( name = "apple_split_cpu", defaultValue = "", documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION}, help = "Don't set this value from the command line - it is derived from other flags and " + "configuration transitions derived from rule attributes" ) public String appleSplitCpu; // This option exists because two configurations are not allowed to have the same cache key // (partially derived from options). Since we have multiple transitions that may result in the // same configuration values at runtime we need an artificial way to distinguish between them. // This option must only be set by those transitions for this purpose. // TODO(bazel-team): Remove this once we have dynamic configurations but make sure that different // configurations (e.g. by min os version) always use different output paths. 
@Option( name = "apple configuration distinguisher", defaultValue = "UNKNOWN", converter = ConfigurationDistinguisherConverter.class, documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION}, metadataTags = {OptionMetadataTag.INTERNAL} ) public ConfigurationDistinguisher configurationDistinguisher; @Option( name = "ios_multi_cpus", allowMultiple = true, converter = CommaSeparatedOptionListConverter.class, defaultValue = "unused", documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.LOADING_AND_ANALYSIS}, help = "Comma-separated list of architectures to build an ios_application with. The result " + "is a universal binary containing all specified architectures.") public List<String> iosMultiCpus; @Option( name = "watchos_cpus", allowMultiple = true, converter = CommaSeparatedOptionListConverter.class, defaultValue = "unused", documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.LOADING_AND_ANALYSIS}, help = "Comma-separated list of architectures for which to build Apple watchOS binaries.") public List<String> watchosCpus; @Option( name = "tvos_cpus", allowMultiple = true, converter = CommaSeparatedOptionListConverter.class, defaultValue = "unused", documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.LOADING_AND_ANALYSIS}, help = "Comma-separated list of architectures for which to build Apple tvOS binaries.") public List<String> tvosCpus; @Option( name = "macos_cpus", allowMultiple = true, converter = CommaSeparatedOptionListConverter.class, defaultValue = "unused", documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.LOADING_AND_ANALYSIS}, help = "Comma-separated list of architectures for which to build Apple macOS binaries.") public List<String> macosCpus; @Option( name = "default_ios_provisioning_profile", defaultValue = "", documentationCategory = OptionDocumentationCategory.SIGNING, effectTags = {OptionEffectTag.CHANGES_INPUTS}, converter = DefaultProvisioningProfileConverter.class ) public Label defaultProvisioningProfile; @Option( name = "xcode_version_config", defaultValue = "@local_config_xcode//:host_xcodes", converter = LabelConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.LOADING_AND_ANALYSIS}, help = "The label of the xcode_config rule to be used for selecting the Xcode version " + "in the build configuration." ) public Label xcodeVersionConfig; /** * The default label of the build-wide {@code xcode_config} configuration rule. This can be * changed from the default using the {@code xcode_version_config} build flag. */ // TODO(cparsons): Update all callers to reference the actual xcode_version_config flag value. @VisibleForTesting public static final String DEFAULT_XCODE_VERSION_CONFIG_LABEL = "//tools/objc:host_xcodes"; /** Converter for --default_ios_provisioning_profile. 
*/ public static class DefaultProvisioningProfileConverter extends DefaultLabelConverter { public DefaultProvisioningProfileConverter() { super("//tools/objc:default_provisioning_profile"); } } @Option( name = "apple_bitcode", converter = AppleBitcodeMode.Converter.class, // TODO(blaze-team): Default to embedded_markers when fully implemented. defaultValue = "none", documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Specify the Apple bitcode mode for compile steps. " + "Values: 'none', 'embedded_markers', 'embedded'." ) public AppleBitcodeMode appleBitcodeMode; /** Returns whether the minimum OS version is explicitly set for the current platform. */ public DottedVersion getMinimumOsVersion() { DottedVersion.Option option; switch (applePlatformType) { case IOS: option = iosMinimumOs; break; case MACOS: option = macosMinimumOs; break; case TVOS: option = tvosMinimumOs; break; case WATCHOS: option = watchosMinimumOs; break; default: throw new IllegalStateException(); } return DottedVersion.maybeUnwrap(option); } /** * Represents the Apple Bitcode mode for compilation steps. * * <p>Bitcode is an intermediate representation of a compiled program. For many platforms, Apple * requires app submissions to contain bitcode in order to be uploaded to the app store. * * <p>This is a build-wide value, as bitcode mode needs to be consistent among a target and its * compiled dependencies. */ @Immutable public enum AppleBitcodeMode implements AppleBitcodeModeApi { /** Do not compile bitcode. */ NONE("none", ImmutableList.<String>of()), /** * Compile the minimal set of bitcode markers. This is often the best option for developer/debug * builds. */ EMBEDDED_MARKERS("embedded_markers", ImmutableList.of("bitcode_embedded_markers")), /** Fully embed bitcode in compiled files. This is often the best option for release builds. */ EMBEDDED("embedded", ImmutableList.of("bitcode_embedded")); private final String mode; private final ImmutableList<String> featureNames; private AppleBitcodeMode(String mode, ImmutableList<String> featureNames) { this.mode = mode; this.featureNames = featureNames; } @Override public String toString() { return mode; } @Override public void repr(SkylarkPrinter printer) { printer.append(mode); } /** Returns the names of any crosstool features that correspond to this bitcode mode. */ public ImmutableList<String> getFeatureNames() { return featureNames; } /** Converts to {@link AppleBitcodeMode}. */ public static class Converter extends EnumConverter<AppleBitcodeMode> { public Converter() { super(AppleBitcodeMode.class, "apple bitcode mode"); } } } @Override public FragmentOptions getHost() { AppleCommandLineOptions host = (AppleCommandLineOptions) super.getHost(); // Set options needed in the host configuration. host.xcodeVersionConfig = xcodeVersionConfig; host.xcodeVersion = xcodeVersion; host.iosSdkVersion = iosSdkVersion; host.watchOsSdkVersion = watchOsSdkVersion; host.tvOsSdkVersion = tvOsSdkVersion; host.macOsSdkVersion = macOsSdkVersion; host.appleBitcodeMode = appleBitcodeMode; // The host apple platform type will always be MACOS, as no other apple platform type can // currently execute build actions. If that were the case, a host_apple_platform_type flag might // be needed. 
host.applePlatformType = PlatformType.MACOS; host.configurationDistinguisher = ConfigurationDistinguisher.UNKNOWN; return host; } void serialize(SerializationContext context, CodedOutputStream out) throws IOException, SerializationException { context.serialize(this, out); } static AppleCommandLineOptions deserialize(DeserializationContext context, CodedInputStream in) throws IOException, SerializationException { return context.deserialize(in); } /** Converter for the Apple configuration distinguisher. */ public static final class ConfigurationDistinguisherConverter extends EnumConverter<ConfigurationDistinguisher> { public ConfigurationDistinguisherConverter() { super(ConfigurationDistinguisher.class, "Apple rule configuration distinguisher"); } } /** Flag converter for {@link PlatformType}. */ public static final class PlatformTypeConverter extends EnumConverter<PlatformType> { public PlatformTypeConverter() { super(PlatformType.class, "Apple platform type"); } } }
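The updated help strings in the file above describe a two-step fallback: an unset minimum-OS flag uses the matching SDK-version flag, and an unset SDK-version flag uses the default SDK shipped with the selected 'xcode_version'. The following is a hypothetical sketch of that resolution order; the class and method names are illustrative, not Bazel's actual implementation.

final class VersionFallbackSketch {
  static String resolve(String minimumOsFlag, String sdkVersionFlag, String xcodeDefaultSdk) {
    if (minimumOsFlag != null) {
      return minimumOsFlag;   // an explicit --*_minimum_os wins
    }
    if (sdkVersionFlag != null) {
      return sdkVersionFlag;  // otherwise fall back to --*_sdk_version
    }
    return xcodeDefaultSdk;   // otherwise the selected Xcode's default SDK
  }

  public static void main(String[] args) {
    System.out.println(resolve(null, null, "12.2"));    // 12.2
    System.out.println(resolve(null, "11.0", "12.2"));  // 11.0
    System.out.println(resolve("9.0", "11.0", "12.2")); // 9.0
  }
}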
src/main/java/com/google/devtools/build/lib/rules/apple/AppleCommandLineOptions.java
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.apple; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.analysis.config.CoreOptionConverters.DefaultLabelConverter; import com.google.devtools.build.lib.analysis.config.CoreOptionConverters.LabelConverter; import com.google.devtools.build.lib.analysis.config.FragmentOptions; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; import com.google.devtools.build.lib.rules.apple.AppleConfiguration.ConfigurationDistinguisher; import com.google.devtools.build.lib.rules.apple.ApplePlatform.PlatformType; import com.google.devtools.build.lib.skyframe.serialization.DeserializationContext; import com.google.devtools.build.lib.skyframe.serialization.SerializationContext; import com.google.devtools.build.lib.skyframe.serialization.SerializationException; import com.google.devtools.build.lib.skylarkbuildapi.apple.AppleBitcodeModeApi; import com.google.devtools.build.lib.skylarkinterface.SkylarkPrinter; import com.google.devtools.common.options.Converters.CommaSeparatedOptionListConverter; import com.google.devtools.common.options.EnumConverter; import com.google.devtools.common.options.Option; import com.google.devtools.common.options.OptionDocumentationCategory; import com.google.devtools.common.options.OptionEffectTag; import com.google.devtools.common.options.OptionMetadataTag; import com.google.protobuf.CodedInputStream; import com.google.protobuf.CodedOutputStream; import java.io.IOException; import java.util.List; /** Command-line options for building for Apple platforms. */ public class AppleCommandLineOptions extends FragmentOptions { @Option( name = "experimental_apple_mandatory_minimum_version", defaultValue = "false", documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.BUILD_FILE_SEMANTICS}, help = "Whether Apple rules must have a mandatory minimum_os_version attribute." ) // TODO(b/37096178): This flag should be default-on and then be removed. public boolean mandatoryMinimumVersion; @Option( name = "experimental_objc_provider_from_linked", defaultValue = "true", documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.BUILD_FILE_SEMANTICS}, help = "Whether Apple rules which control linking should propagate objc provider at the top " + "level" ) // TODO(b/32411441): This flag should be default-off and then be removed. public boolean objcProviderFromLinked; @Option( name = "xcode_version", defaultValue = "null", documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "If specified, uses Xcode of the given version for relevant build actions. 
" + "If unspecified, uses the executor default version of Xcode." ) public String xcodeVersion; @Option( name = "ios_sdk_version", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Specifies the version of the iOS SDK to use to build iOS applications." ) public DottedVersion.Option iosSdkVersion; @Option( name = "watchos_sdk_version", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Specifies the version of the watchOS SDK to use to build watchOS applications." ) public DottedVersion.Option watchOsSdkVersion; @Option( name = "tvos_sdk_version", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Specifies the version of the tvOS SDK to use to build tvOS applications." ) public DottedVersion.Option tvOsSdkVersion; @Option( name = "macos_sdk_version", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Specifies the version of the macOS SDK to use to build macOS applications." ) public DottedVersion.Option macOsSdkVersion; @Option( name = "ios_minimum_os", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Minimum compatible iOS version for target simulators and devices." ) public DottedVersion.Option iosMinimumOs; @Option( name = "watchos_minimum_os", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Minimum compatible watchOS version for target simulators and devices." ) public DottedVersion.Option watchosMinimumOs; @Option( name = "tvos_minimum_os", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Minimum compatible tvOS version for target simulators and devices." ) public DottedVersion.Option tvosMinimumOs; @Option( name = "macos_minimum_os", defaultValue = "null", converter = DottedVersionConverter.class, documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, help = "Minimum compatible macOS version for targets." ) public DottedVersion.Option macosMinimumOs; @VisibleForTesting public static final String DEFAULT_IOS_SDK_VERSION = "8.4"; @VisibleForTesting public static final String DEFAULT_WATCHOS_SDK_VERSION = "2.0"; @VisibleForTesting public static final String DEFAULT_MACOS_SDK_VERSION = "10.10"; @VisibleForTesting public static final String DEFAULT_TVOS_SDK_VERSION = "9.0"; @VisibleForTesting static final String DEFAULT_IOS_CPU = "x86_64"; /** The default watchos CPU value. */ public static final String DEFAULT_WATCHOS_CPU = "i386"; /** The default tvOS CPU value. */ public static final String DEFAULT_TVOS_CPU = "x86_64"; /** The default macOS CPU value. 
*/ public static final String DEFAULT_MACOS_CPU = "x86_64"; @Option( name = "ios_cpu", defaultValue = DEFAULT_IOS_CPU, documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.NO_OP}, metadataTags = {OptionMetadataTag.DEPRECATED}, help = "Specifies to target CPU of iOS compilation." ) public String iosCpu; @Option( name = "apple_compiler", defaultValue = "null", documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = { OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.LOADING_AND_ANALYSIS, OptionEffectTag.LOSES_INCREMENTAL_STATE, }, help = "The Apple target compiler. Useful for selecting variants of a toolchain " + "(e.g. xcode-beta)." ) public String cppCompiler; @Option( name = "apple_grte_top", defaultValue = "null", converter = LabelConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = { OptionEffectTag.CHANGES_INPUTS, OptionEffectTag.LOADING_AND_ANALYSIS, OptionEffectTag.LOSES_INCREMENTAL_STATE, }, help = "The Apple target grte_top." ) public Label appleLibcTop; @Option( name = "apple_crosstool_top", defaultValue = "@bazel_tools//tools/cpp:toolchain", converter = LabelConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.CHANGES_INPUTS}, help = "The label of the crosstool package to be used in Apple and Objc rules and their" + " dependencies." ) public Label appleCrosstoolTop; @Option( name = "apple_platform_type", defaultValue = "MACOS", converter = PlatformTypeConverter.class, documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION}, help = "Don't set this value from the command line - it is derived from other flags and " + "configuration transitions derived from rule attributes") public PlatformType applePlatformType; @Option( name = "apple_split_cpu", defaultValue = "", documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION}, help = "Don't set this value from the command line - it is derived from other flags and " + "configuration transitions derived from rule attributes" ) public String appleSplitCpu; // This option exists because two configurations are not allowed to have the same cache key // (partially derived from options). Since we have multiple transitions that may result in the // same configuration values at runtime we need an artificial way to distinguish between them. // This option must only be set by those transitions for this purpose. // TODO(bazel-team): Remove this once we have dynamic configurations but make sure that different // configurations (e.g. by min os version) always use different output paths. 
@Option( name = "apple configuration distinguisher", defaultValue = "UNKNOWN", converter = ConfigurationDistinguisherConverter.class, documentationCategory = OptionDocumentationCategory.UNDOCUMENTED, effectTags = {OptionEffectTag.BAZEL_INTERNAL_CONFIGURATION}, metadataTags = {OptionMetadataTag.INTERNAL} ) public ConfigurationDistinguisher configurationDistinguisher; @Option( name = "ios_multi_cpus", allowMultiple = true, converter = CommaSeparatedOptionListConverter.class, defaultValue = "unused", documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.LOADING_AND_ANALYSIS}, help = "Comma-separated list of architectures to build an ios_application with. The result " + "is a universal binary containing all specified architectures.") public List<String> iosMultiCpus; @Option( name = "watchos_cpus", allowMultiple = true, converter = CommaSeparatedOptionListConverter.class, defaultValue = "unused", documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.LOADING_AND_ANALYSIS}, help = "Comma-separated list of architectures for which to build Apple watchOS binaries.") public List<String> watchosCpus; @Option( name = "tvos_cpus", allowMultiple = true, converter = CommaSeparatedOptionListConverter.class, defaultValue = "unused", documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.LOADING_AND_ANALYSIS}, help = "Comma-separated list of architectures for which to build Apple tvOS binaries.") public List<String> tvosCpus; @Option( name = "macos_cpus", allowMultiple = true, converter = CommaSeparatedOptionListConverter.class, defaultValue = "unused", documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.LOADING_AND_ANALYSIS}, help = "Comma-separated list of architectures for which to build Apple macOS binaries.") public List<String> macosCpus; @Option( name = "default_ios_provisioning_profile", defaultValue = "", documentationCategory = OptionDocumentationCategory.SIGNING, effectTags = {OptionEffectTag.CHANGES_INPUTS}, converter = DefaultProvisioningProfileConverter.class ) public Label defaultProvisioningProfile; @Option( name = "xcode_version_config", defaultValue = "@local_config_xcode//:host_xcodes", converter = LabelConverter.class, documentationCategory = OptionDocumentationCategory.TOOLCHAIN, effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE, OptionEffectTag.LOADING_AND_ANALYSIS}, help = "The label of the xcode_config rule to be used for selecting the Xcode version " + "in the build configuration." ) public Label xcodeVersionConfig; /** * The default label of the build-wide {@code xcode_config} configuration rule. This can be * changed from the default using the {@code xcode_version_config} build flag. */ // TODO(cparsons): Update all callers to reference the actual xcode_version_config flag value. @VisibleForTesting public static final String DEFAULT_XCODE_VERSION_CONFIG_LABEL = "//tools/objc:host_xcodes"; /** Converter for --default_ios_provisioning_profile. 
*/
  public static class DefaultProvisioningProfileConverter extends DefaultLabelConverter {
    public DefaultProvisioningProfileConverter() {
      super("//tools/objc:default_provisioning_profile");
    }
  }

  @Option(
    name = "apple_bitcode",
    converter = AppleBitcodeMode.Converter.class,
    // TODO(blaze-team): Default to embedded_markers when fully implemented.
    defaultValue = "none",
    documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS,
    effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE},
    help =
        "Specify the Apple bitcode mode for compile steps. "
            + "Values: 'none', 'embedded_markers', 'embedded'."
  )
  public AppleBitcodeMode appleBitcodeMode;

  /** Returns the minimum OS version for the current platform. */
  public DottedVersion getMinimumOsVersion() {
    DottedVersion.Option option;
    switch (applePlatformType) {
      case IOS:
        option = iosMinimumOs;
        break;
      case MACOS:
        option = macosMinimumOs;
        break;
      case TVOS:
        option = tvosMinimumOs;
        break;
      case WATCHOS:
        option = watchosMinimumOs;
        break;
      default:
        throw new IllegalStateException();
    }
    return DottedVersion.maybeUnwrap(option);
  }

  /**
   * Represents the Apple Bitcode mode for compilation steps.
   *
   * <p>Bitcode is an intermediate representation of a compiled program. For many platforms, Apple
   * requires app submissions to contain bitcode in order to be uploaded to the app store.
   *
   * <p>This is a build-wide value, as bitcode mode needs to be consistent among a target and its
   * compiled dependencies.
   */
  @Immutable
  public enum AppleBitcodeMode implements AppleBitcodeModeApi {
    /** Do not compile bitcode. */
    NONE("none", ImmutableList.<String>of()),
    /**
     * Compile the minimal set of bitcode markers. This is often the best option for
     * developer/debug builds.
     */
    EMBEDDED_MARKERS("embedded_markers", ImmutableList.of("bitcode_embedded_markers")),
    /** Fully embed bitcode in compiled files. This is often the best option for release builds. */
    EMBEDDED("embedded", ImmutableList.of("bitcode_embedded"));

    private final String mode;
    private final ImmutableList<String> featureNames;

    private AppleBitcodeMode(String mode, ImmutableList<String> featureNames) {
      this.mode = mode;
      this.featureNames = featureNames;
    }

    @Override
    public String toString() {
      return mode;
    }

    @Override
    public void repr(SkylarkPrinter printer) {
      printer.append(mode);
    }

    /** Returns the names of any crosstool features that correspond to this bitcode mode. */
    public ImmutableList<String> getFeatureNames() {
      return featureNames;
    }

    /** Converts to {@link AppleBitcodeMode}. */
    public static class Converter extends EnumConverter<AppleBitcodeMode> {
      public Converter() {
        super(AppleBitcodeMode.class, "apple bitcode mode");
      }
    }
  }

  @Override
  public FragmentOptions getHost() {
    AppleCommandLineOptions host = (AppleCommandLineOptions) super.getHost();

    // Set options needed in the host configuration.
    host.xcodeVersionConfig = xcodeVersionConfig;
    host.xcodeVersion = xcodeVersion;
    host.iosSdkVersion = iosSdkVersion;
    host.watchOsSdkVersion = watchOsSdkVersion;
    host.tvOsSdkVersion = tvOsSdkVersion;
    host.macOsSdkVersion = macOsSdkVersion;
    host.appleBitcodeMode = appleBitcodeMode;

    // The host apple platform type will always be MACOS, as no other apple platform type can
    // currently execute build actions. If that ever changes, a host_apple_platform_type flag
    // might be needed.
    host.applePlatformType = PlatformType.MACOS;
    host.configurationDistinguisher = ConfigurationDistinguisher.UNKNOWN;

    return host;
  }

  void serialize(SerializationContext context, CodedOutputStream out)
      throws IOException, SerializationException {
    context.serialize(this, out);
  }

  static AppleCommandLineOptions deserialize(DeserializationContext context, CodedInputStream in)
      throws IOException, SerializationException {
    return context.deserialize(in);
  }

  /** Converter for the Apple configuration distinguisher. */
  public static final class ConfigurationDistinguisherConverter
      extends EnumConverter<ConfigurationDistinguisher> {
    public ConfigurationDistinguisherConverter() {
      super(ConfigurationDistinguisher.class, "Apple rule configuration distinguisher");
    }
  }

  /** Flag converter for {@link PlatformType}. */
  public static final class PlatformTypeConverter extends EnumConverter<PlatformType> {
    public PlatformTypeConverter() {
      super(PlatformType.class, "Apple platform type");
    }
  }
}
Improve documentation on {mac|watch|i|tv}os_minimum_os and {mac|watch|i|tv}os_sdk_version to explain how defaults are determined. PiperOrigin-RevId: 279984024
src/main/java/com/google/devtools/build/lib/rules/apple/AppleCommandLineOptions.java
Improve documentation on {mac|watch|i|tv}os_minimum_os and {mac|watch|i|tv}os_sdk_version to explain how defaults are determined.
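A compact sketch of the default chain the new help text spells out: each *os_minimum_os flag falls back to the matching *os_sdk_version flag, which in turn falls back to the default SDK version carried by the xcode_version config. The helper below is hypothetical, not Bazel internals:

public class DefaultsChainSketch {
    // Hypothetical helper modeling the fallback the updated help strings document.
    static String resolveMinimumOs(String minimumOsFlag, String sdkVersionFlag, String xcodeDefaultSdk) {
        if (minimumOsFlag != null) {
            return minimumOsFlag;   // an explicit --*os_minimum_os wins
        }
        if (sdkVersionFlag != null) {
            return sdkVersionFlag;  // otherwise fall back to --*os_sdk_version
        }
        return xcodeDefaultSdk;     // otherwise the xcode_version config's default SDK
    }

    public static void main(String[] args) {
        System.out.println(resolveMinimumOs(null, null, "12.1"));   // prints 12.1
        System.out.println(resolveMinimumOs(null, "11.0", "12.1")); // prints 11.0
    }
}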
<ide><path>rc/main/java/com/google/devtools/build/lib/rules/apple/AppleCommandLineOptions.java <ide> public String xcodeVersion; <ide> <ide> @Option( <del> name = "ios_sdk_version", <del> defaultValue = "null", <del> converter = DottedVersionConverter.class, <del> documentationCategory = OptionDocumentationCategory.TOOLCHAIN, <del> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <del> help = "Specifies the version of the iOS SDK to use to build iOS applications." <del> ) <add> name = "ios_sdk_version", <add> defaultValue = "null", <add> converter = DottedVersionConverter.class, <add> documentationCategory = OptionDocumentationCategory.TOOLCHAIN, <add> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <add> help = <add> "Specifies the version of the iOS SDK to use to build iOS applications. " <add> + "If unspecified, uses default iOS SDK version from 'xcode_version'.") <ide> public DottedVersion.Option iosSdkVersion; <ide> <ide> @Option( <del> name = "watchos_sdk_version", <del> defaultValue = "null", <del> converter = DottedVersionConverter.class, <del> documentationCategory = OptionDocumentationCategory.TOOLCHAIN, <del> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <del> help = "Specifies the version of the watchOS SDK to use to build watchOS applications." <del> ) <add> name = "watchos_sdk_version", <add> defaultValue = "null", <add> converter = DottedVersionConverter.class, <add> documentationCategory = OptionDocumentationCategory.TOOLCHAIN, <add> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <add> help = <add> "Specifies the version of the watchOS SDK to use to build watchOS applications. " <add> + "If unspecified, uses default watchOS SDK version from 'xcode_version'.") <ide> public DottedVersion.Option watchOsSdkVersion; <ide> <ide> @Option( <del> name = "tvos_sdk_version", <del> defaultValue = "null", <del> converter = DottedVersionConverter.class, <del> documentationCategory = OptionDocumentationCategory.TOOLCHAIN, <del> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <del> help = "Specifies the version of the tvOS SDK to use to build tvOS applications." <del> ) <add> name = "tvos_sdk_version", <add> defaultValue = "null", <add> converter = DottedVersionConverter.class, <add> documentationCategory = OptionDocumentationCategory.TOOLCHAIN, <add> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <add> help = <add> "Specifies the version of the tvOS SDK to use to build tvOS applications. " <add> + "If unspecified, uses default tvOS SDK version from 'xcode_version'.") <ide> public DottedVersion.Option tvOsSdkVersion; <ide> <ide> @Option( <del> name = "macos_sdk_version", <del> defaultValue = "null", <del> converter = DottedVersionConverter.class, <del> documentationCategory = OptionDocumentationCategory.TOOLCHAIN, <del> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <del> help = "Specifies the version of the macOS SDK to use to build macOS applications." <del> ) <add> name = "macos_sdk_version", <add> defaultValue = "null", <add> converter = DottedVersionConverter.class, <add> documentationCategory = OptionDocumentationCategory.TOOLCHAIN, <add> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <add> help = <add> "Specifies the version of the macOS SDK to use to build macOS applications. 
" <add> + "If unspecified, uses default macOS SDK version from 'xcode_version'.") <ide> public DottedVersion.Option macOsSdkVersion; <ide> <ide> @Option( <del> name = "ios_minimum_os", <del> defaultValue = "null", <del> converter = DottedVersionConverter.class, <del> documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, <del> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <del> help = "Minimum compatible iOS version for target simulators and devices." <del> ) <add> name = "ios_minimum_os", <add> defaultValue = "null", <add> converter = DottedVersionConverter.class, <add> documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, <add> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <add> help = <add> "Minimum compatible iOS version for target simulators and devices. " <add> + "If unspecified, uses 'ios_sdk_version'.") <ide> public DottedVersion.Option iosMinimumOs; <ide> <ide> @Option( <del> name = "watchos_minimum_os", <del> defaultValue = "null", <del> converter = DottedVersionConverter.class, <del> documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, <del> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <del> help = "Minimum compatible watchOS version for target simulators and devices." <del> ) <add> name = "watchos_minimum_os", <add> defaultValue = "null", <add> converter = DottedVersionConverter.class, <add> documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, <add> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <add> help = <add> "Minimum compatible watchOS version for target simulators and devices. " <add> + "If unspecified, uses 'watchos_sdk_version'.") <ide> public DottedVersion.Option watchosMinimumOs; <ide> <ide> @Option( <del> name = "tvos_minimum_os", <del> defaultValue = "null", <del> converter = DottedVersionConverter.class, <del> documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, <del> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <del> help = "Minimum compatible tvOS version for target simulators and devices." <del> ) <add> name = "tvos_minimum_os", <add> defaultValue = "null", <add> converter = DottedVersionConverter.class, <add> documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, <add> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <add> help = <add> "Minimum compatible tvOS version for target simulators and devices. " <add> + "If unspecified, uses 'tvos_sdk_version'.") <ide> public DottedVersion.Option tvosMinimumOs; <ide> <ide> @Option( <del> name = "macos_minimum_os", <del> defaultValue = "null", <del> converter = DottedVersionConverter.class, <del> documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, <del> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <del> help = "Minimum compatible macOS version for targets." <del> ) <add> name = "macos_minimum_os", <add> defaultValue = "null", <add> converter = DottedVersionConverter.class, <add> documentationCategory = OptionDocumentationCategory.OUTPUT_PARAMETERS, <add> effectTags = {OptionEffectTag.LOSES_INCREMENTAL_STATE}, <add> help = <add> "Minimum compatible macOS version for targets. " <add> + "If unspecified, uses 'macos_sdk_version'.") <ide> public DottedVersion.Option macosMinimumOs; <ide> <ide> @VisibleForTesting public static final String DEFAULT_IOS_SDK_VERSION = "8.4";
Java
apache-2.0
ec7824f6de9cc2cacc5ba91b23f28c2cfa0dfe02
0
thirdiron/mendeley-android-sdk,Mendeley/mendeley-android-sdk
package com.mendeley.api.network; import android.os.AsyncTask; import com.mendeley.api.callbacks.RequestHandle; import com.mendeley.api.exceptions.MendeleyException; import com.mendeley.api.params.Page; import com.mendeley.api.util.Utils; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.Map; import javax.net.ssl.HttpsURLConnection; public abstract class NetworkTask extends AsyncTask<String, Integer, MendeleyException> implements RequestHandle { Page next; String location; Date serverDate; InputStream is = null; OutputStream os = null; HttpsURLConnection con = null; protected abstract int getExpectedResponse(); /** * Extracts the headers from the given HttpsURLConnection object. */ protected void getResponseHeaders() throws IOException { Map<String, List<String>> headersMap = con.getHeaderFields(); if (headersMap == null) { // No headers implies an error, which should be handled based on the HTTP status code; // no need to throw another error here. return; } for (String key : headersMap.keySet()) { if (key != null) { switch (key) { case "Date": SimpleDateFormat simpledateformat = new SimpleDateFormat("EEE, dd MMM yyyy kk:mm:ss 'GMT'"); try { serverDate = simpledateformat.parse(headersMap.get(key).get(0)); } catch (ParseException e) { e.printStackTrace(); } break; case "Vary": case "Content-Type": case "X-Mendeley-Trace-Id": case "Connection": case "Content-Length": case "Content-Encoding": case "Mendeley-Count": // Unused break; case "Location": location = headersMap.get(key).get(0); case "Link": List<String> links = headersMap.get(key); String linkString = null; for (String link : links) { try { linkString = link.substring(link.indexOf("<")+1, link.indexOf(">")); } catch (IndexOutOfBoundsException e) {} if (link.indexOf("next") != -1) { next = new Page(linkString); } // "last" and "prev" links are not used } break; } } } } protected void closeConnection() { if (con != null) { con.disconnect(); } Utils.closeQuietly(is); Utils.closeQuietly(os); } @Override protected void onPostExecute(MendeleyException exception) { if (exception == null) { onSuccess(); } else { onFailure(exception); } } protected void onProgressUpdate(Integer[] progress) { super.onProgressUpdate(); } protected abstract void onSuccess(); protected abstract void onFailure(MendeleyException exception); public void cancel() { // If the request is cancelled, we simply cancel the AsyncTask. cancel(true); } }
src/com/mendeley/api/network/NetworkTask.java
package com.mendeley.api.network; import android.os.AsyncTask; import com.mendeley.api.callbacks.RequestHandle; import com.mendeley.api.exceptions.MendeleyException; import com.mendeley.api.params.Page; import com.mendeley.api.util.Utils; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.Map; import javax.net.ssl.HttpsURLConnection; public abstract class NetworkTask extends AsyncTask<String, Integer, MendeleyException> implements RequestHandle { Page next; String location; Date serverDate; InputStream is = null; OutputStream os = null; HttpsURLConnection con = null; protected abstract int getExpectedResponse(); /** * Extracts the headers from the given HttpsURLConnection object. */ protected void getResponseHeaders() throws IOException { Map<String, List<String>> headersMap = con.getHeaderFields(); for (String key : headersMap.keySet()) { if (key != null) { switch (key) { case "Date": SimpleDateFormat simpledateformat = new SimpleDateFormat("EEE, dd MMM yyyy kk:mm:ss 'GMT'"); try { serverDate = simpledateformat.parse(headersMap.get(key).get(0)); } catch (ParseException e) { e.printStackTrace(); } break; case "Vary": case "Content-Type": case "X-Mendeley-Trace-Id": case "Connection": case "Content-Length": case "Content-Encoding": case "Mendeley-Count": // Unused break; case "Location": location = headersMap.get(key).get(0); case "Link": List<String> links = headersMap.get(key); String linkString = null; for (String link : links) { try { linkString = link.substring(link.indexOf("<")+1, link.indexOf(">")); } catch (IndexOutOfBoundsException e) {} if (link.indexOf("next") != -1) { next = new Page(linkString); } // "last" and "prev" links are not used } break; } } } } protected void closeConnection() { if (con != null) { con.disconnect(); } Utils.closeQuietly(is); Utils.closeQuietly(os); } @Override protected void onPostExecute(MendeleyException exception) { if (exception == null) { onSuccess(); } else { onFailure(exception); } } protected void onProgressUpdate(Integer[] progress) { super.onProgressUpdate(); } protected abstract void onSuccess(); protected abstract void onFailure(MendeleyException exception); public void cancel() { // If the request is cancelled, we simply cancel the AsyncTask. cancel(true); } }
Fix Calabash issue #51, NPE crash in getResponseHeaders.
src/com/mendeley/api/network/NetworkTask.java
Fix Calabash issue #51, NPE crash in getResponseHeaders.
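The guard this commit adds (shown in the diff below) works because getHeaderFields() can come back null when the connection failed, which is the NPE the issue reports. A minimal sketch of the same defensive pattern, using the types the file above already imports; the class and method names here are assumptions for illustration:

import java.util.List;
import java.util.Map;
import javax.net.ssl.HttpsURLConnection;

public class HeaderGuardSketch {
    static void printHeaders(HttpsURLConnection connection) {
        Map<String, List<String>> headers = connection.getHeaderFields();
        if (headers == null) {
            // A missing header map implies the request failed; the HTTP status
            // code path reports the error, so there is nothing to parse here.
            return;
        }
        for (Map.Entry<String, List<String>> header : headers.entrySet()) {
            System.out.println(header.getKey() + ": " + header.getValue());
        }
    }
}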
<ide><path>rc/com/mendeley/api/network/NetworkTask.java <ide> */ <ide> protected void getResponseHeaders() throws IOException { <ide> Map<String, List<String>> headersMap = con.getHeaderFields(); <add> if (headersMap == null) { <add> // No headers implies an error, which should be handled based on the HTTP status code; <add> // no need to throw another error here. <add> return; <add> } <ide> for (String key : headersMap.keySet()) { <ide> if (key != null) { <ide> switch (key) {
Java
mit
3d5f04cf91474464d0a20d2c62041bdc362ce355
0
josh314/aurora
package ninja.joshdavis; import java.io.IOException; import java.util.Collection; import java.util.HashSet; import java.util.concurrent.Future; import java.util.concurrent.ExecutionException; import java.util.concurrent.ConcurrentLinkedQueue; import com.ning.http.client.*; import com.ning.http.client.extra.*; public class Crawler { private AsyncHttpClient client; private ConcurrentLinkedQueue<String> queue; private HashSet<String> seen; private HashSet<Future<Response>> processing; private Scraper scraper; //Class containing the callbacks for processing responses and errors class ThrottledHandler extends AsyncCompletionHandler<Response>{ String url; public ThrottledHandler(String _url) { url = _url; } @Override public Response onCompleted(Response response) throws Exception { String html = response.getResponseBody(); Iterable<String> links = scraper.process(html, url); for(String link_url: links) { if(link_url != null) { enqueue_request(link_url); } } System.out.println("Request completed: " + url); return response; } @Override public void onThrowable(Throwable t){ System.out.println("Failed: " + url); System.out.println(t); } } public Crawler(Collection<String> urls, Scraper _scraper) { //Create the client AsyncHttpClientConfig.Builder b = new AsyncHttpClientConfig.Builder().addRequestFilter(new ThrottleRequestFilter(100)); client = new AsyncHttpClient(b.build()); //Set scraper scraper = _scraper; //Init book-keeping data structures queue = new ConcurrentLinkedQueue<String>(); seen = new HashSet<String>(urls.size()); processing = new HashSet<Future<Response>>(urls.size()); //Fill the to-be-scheduled queue with initial inputs for(String url: urls) { enqueue_request(url); } } // private void enqueue_request(String url) { if(!seen.contains(url)) { seen.add(url); queue.add(url); } } private void request(String url) { System.out.println("Requesting: "+url); Future<Response> f = this.client.prepareGet(url).execute(new ThrottledHandler(url)); processing.add(f); } //Done when both queue and processing are empty private boolean notDone() { return !(queue.isEmpty() & processing.isEmpty()); } private void schedule_next_request() { String url = queue.peek(); if(url != null) { request(url); } queue.poll(); } // Removes resolved futures from the in-process list private void cleanup_finished_tasks() { HashSet<Future<Response>> done = new HashSet<Future<Response>>(); for(Future<Response> f: processing) { if(f.isDone()) done.add(f); } for(Future<Response> f: done) { processing.remove(f); } } public void crawl() { while(notDone()) { schedule_next_request(); cleanup_finished_tasks(); try{ Thread.sleep(100); } catch (InterruptedException e) { System.out.println(e); } } client.close(); } /** Print the current crawler status. */ public void print_status() { System.out.println("In queue: " + queue.size()); System.out.println("In process: " + processing.size()); System.out.println("Have seen: " + seen.size()); } }
src/main/java/ninja/joshdavis/Crawler.java
package ninja.joshdavis; import java.io.IOException; import java.util.Collection; import java.util.HashSet; import java.util.concurrent.Future; import java.util.concurrent.ExecutionException; import java.util.concurrent.ConcurrentLinkedQueue; import com.ning.http.client.*; import com.ning.http.client.extra.*; public class Crawler { private AsyncHttpClient client; private ConcurrentLinkedQueue<String> queue; private HashSet<String> seen; private HashSet<Future<Response>> processing; private Scraper scraper; //Class containing the callbacks for processing responses and errors class ThrottledHandler extends AsyncCompletionHandler<Response>{ String url; public ThrottledHandler(String _url) { url = _url; } @Override public Response onCompleted(Response response) throws Exception { String html = response.getResponseBody(); Iterable<String> links = scraper.process(html, url); for(String link_url: links) { if(link_url != null) { enqueue_request(link_url); } } System.out.println("Request completed: " + url); return response; } @Override public void onThrowable(Throwable t){ System.out.println("Failed: " + url); System.out.println(t); } } public Crawler(Collection<String> urls, Scraper _scraper) { //Create the client AsyncHttpClientConfig.Builder b = new AsyncHttpClientConfig.Builder().addRequestFilter(new ThrottleRequestFilter(100)); client = new AsyncHttpClient(b.build()); //Set scraper scraper = _scraper; //Init book-keeping data structures queue = new ConcurrentLinkedQueue<String>(); seen = new HashSet<String>(urls.size()); processing = new HashSet<Future<Response>>(urls.size()); //Fill the to-be-scheduled queue with initial inputs for(String url: urls) { enqueue_request(url); } } // private void enqueue_request(String url) { if(!seen.contains(url)) { seen.add(url); queue.add(url); } } private void request(String url) { System.out.println("Requesting: "+url); Future<Response> f = this.client.prepareGet(url).execute(new ThrottledHandler(url)); processing.add(f); } //Done when both queue and processing are empty private boolean notDone() { return !(queue.isEmpty() & processing.isEmpty()); } private void schedule_next_request() { String url = queue.peek(); if(url != null) { request(url); } queue.poll(); } // Removes resolved futures from the in-process list private void cleanup_finished_tasks() { HashSet<Future<Response>> done = new HashSet<Future<Response>>(); for(Future<Response> f: processing) { if(f.isDone()) done.add(f); } for(Future<Response> f: done) { processing.remove(f); } } public void crawl() { while(notDone()) { schedule_next_request(); cleanup_finished_tasks(); try{ Thread.sleep(100); } catch (InterruptedException e) { System.out.println(e); } } client.close(); } public void print_status() { System.out.println("In queue: " + queue.size()); System.out.println("In process: " + processing.size()); System.out.println("Have seen: " + seen.size()); } }
comments
src/main/java/ninja/joshdavis/Crawler.java
comments
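For context, a hypothetical usage sketch of the Crawler API shown above; MyScraper stands in for an assumed Scraper implementation and is not part of this commit:

// MyScraper is an assumed Scraper implementation supplied by the caller.
List<String> seeds = Arrays.asList("http://example.com/");
Crawler crawler = new Crawler(seeds, new MyScraper());
crawler.crawl();        // blocks until the queue and in-flight requests drain
crawler.print_status(); // prints the "In queue" / "In process" / "Have seen" counts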
<ide><path>rc/main/java/ninja/joshdavis/Crawler.java <ide> } <ide> client.close(); <ide> } <del> <add> /** Print the current crawler status. */ <ide> public void print_status() <ide> { <ide> System.out.println("In queue: " + queue.size());
Java
mit
error: pathspec 'cracking-the-coding-interview/18-hard/P1801.java' did not match any file(s) known to git
157fc542b1774f650765cee0104c891936a364cb
1
zw267/coding-practice,zw267/coding-practice,zw267/coding-practice,zw267/coding-practice
/* P1801
 *
 * Write a function that adds two numbers. You should not use + or any
 * arithmetic operators.
 */

public class P1801 {
    public static int add(int a, int b) {
        int c;
        while (b != 0) {
            c = a ^ b;          // sum of bits without carries
            b = (a & b) << 1;   // carry bits, shifted into position
            a = c;
        }

        return a;
    }

    public static void main(String args[]) {
        int a = Integer.parseInt(args[0]);
        int b = Integer.parseInt(args[1]);

        int r = add(a, b);
        System.out.println(r);
    }
}
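As a worked check of the carry loop, here is add(5, 3) traced in binary; each pass moves the carries one bit to the left until none remain:

// a=0101, b=0011 -> c=0110, b=(0101&0011)<<1=0010, a=0110
// a=0110, b=0010 -> c=0100, b=(0110&0010)<<1=0100, a=0100
// a=0100, b=0100 -> c=0000, b=(0100&0100)<<1=1000, a=0000
// a=0000, b=1000 -> c=1000, b=0000 -> loop exits, returns 0b1000 == 8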
cracking-the-coding-interview/18-hard/P1801.java
P1801
cracking-the-coding-interview/18-hard/P1801.java
P1801
<ide><path>racking-the-coding-interview/18-hard/P1801.java
<add>/* P1801
<add> *
<add> * Write a function that adds two numbers. You should not use + or any
<add> * arithmetic operators.
<add> */
<add>
<add>public class P1801 {
<add>    public static int add(int a, int b) {
<add>        int c;
<add>        while (b != 0) {
<add>            c = a ^ b;          // sum of bits without carries
<add>            b = (a & b) << 1;   // carry bits, shifted into position
<add>            a = c;
<add>        }
<add>
<add>        return a;
<add>    }
<add>
<add>    public static void main(String args[]) {
<add>        int a = Integer.parseInt(args[0]);
<add>        int b = Integer.parseInt(args[1]);
<add>
<add>        int r = add(a, b);
<add>        System.out.println(r);
<add>    }
<add>}
Java
mit
03db5209f63881361141b7720ba26f865e11aa93
0
Springrbua/typescript.java,angelozerr/typescript.java,angelozerr/typescript.java,angelozerr/typescript.java,Springrbua/typescript.java,angelozerr/typescript.java,Springrbua/typescript.java,angelozerr/typescript.java,Springrbua/typescript.java,Springrbua/typescript.java
/** * Copyright (c) 2013-2016 Angelo ZERR. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Angelo Zerr <[email protected]> - initial API and implementation */ package ts.utils; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.zip.GZIPInputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import ts.internal.io.tar.TarEntry; import ts.internal.io.tar.TarException; import ts.internal.io.tar.TarInputStream; /** * Zip, tar.gz Utilities. * */ public class ZipUtils { public static final String ZIP_EXTENSION = ".zip"; public static final String TAR_GZ_EXTENSION = ".tar.gz"; private static final String BIN_FOLDER = "/bin"; private ZipUtils() { } /** * Returns true if the given file is a zip file and false otherwise. * * @param file * @return true if the given file is a zip file and false otherwise. */ public static boolean isZipFile(File file) { return file.isFile() && file.getName().toLowerCase().endsWith(ZIP_EXTENSION); } /** * Returns true if the given file is a zip file and false otherwise. * * @param file * @return true if the given file is a zip file and false otherwise. */ public static boolean isTarFile(File file) { return file.isFile() && file.getName().toLowerCase().endsWith(TAR_GZ_EXTENSION); } /** * Extract zip file to destination folder. * * @param file * zip file to extract * @param destination * destination folder */ public static void extractZip(File file, File destination) throws IOException { ZipInputStream in = null; OutputStream out = null; try { // Open the ZIP file in = new ZipInputStream(new FileInputStream(file)); // Get the first entry ZipEntry entry = null; while ((entry = in.getNextEntry()) != null) { String outFilename = entry.getName(); // Open the output file File extracted = new File(destination, outFilename); if (entry.isDirectory()) { extracted.mkdirs(); } else { // Be sure that parent file exists File baseDir = extracted.getParentFile(); if (!baseDir.exists()) { baseDir.mkdirs(); } out = new FileOutputStream(extracted); // Transfer bytes from the ZIP file to the output file byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } // Close the stream out.close(); if (extracted.getParent().contains(BIN_FOLDER)) { extracted.setExecutable(true); } } } } finally { // Close the stream if (in != null) { in.close(); } if (out != null) { out.close(); } } } /** * Extract tar.gz file to destination folder. 
* * @param file * zip file to extract * @param destination * destination folder */ public static void extractTar(File file, File destination) throws IOException { TarInputStream in = null; OutputStream out = null; try { // Open the ZIP file in = new TarInputStream(new GZIPInputStream(new FileInputStream(file))); // Get the first entry TarEntry entry = null; while ((entry = in.getNextEntry()) != null) { String outFilename = entry.getName(); switch (entry.getFileType()) { case TarEntry.DIRECTORY: File extractedDir = new File(destination, outFilename); if (extractedDir.isDirectory()) { extractedDir.mkdirs(); } break; case TarEntry.FILE: File extractedFile = new File(destination, outFilename); // Be sure that parent file exists File baseDir = extractedFile.getParentFile(); if (!baseDir.exists()) { baseDir.mkdirs(); } out = new FileOutputStream(extractedFile); // Transfer bytes from the ZIP file to the output file byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } // Close the stream out.close(); if (extractedFile.getParent().contains(BIN_FOLDER)) { extractedFile.setExecutable(true); } break; case TarEntry.LINK: File linkFile = new File(destination, outFilename); // Be sure that parent file exists File linkBaseDir = linkFile.getParentFile(); if (!linkBaseDir.exists()) { linkBaseDir.mkdirs(); } Path target = Paths.get(entry.getLinkName()); Files.createLink(linkFile.toPath(), target); break; case TarEntry.SYM_LINK: File symLinkFile = new File(destination, outFilename); // Be sure that parent file exists File symLinkBaseDir = symLinkFile.getParentFile(); if (!symLinkBaseDir.exists()) { symLinkBaseDir.mkdirs(); } Path symTarget = Paths.get(entry.getLinkName()); Files.createSymbolicLink(symLinkFile.toPath(), symTarget); break; } } } catch (TarException e) { throw new IOException(e); } finally { // Close the stream if (in != null) { in.close(); } if (out != null) { out.close(); } } } }
core/ts.core/src/ts/utils/ZipUtils.java
/** * Copyright (c) 2013-2016 Angelo ZERR. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Angelo Zerr <[email protected]> - initial API and implementation */ package ts.utils; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.zip.GZIPInputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import ts.internal.io.tar.TarEntry; import ts.internal.io.tar.TarException; import ts.internal.io.tar.TarInputStream; /** * Zip, tar.gz Utilities. * */ public class ZipUtils { public static final String ZIP_EXTENSION = ".zip"; public static final String TAR_GZ_EXTENSION = ".tar.gz"; private static final String BIN_FOLDER = "/bin"; private ZipUtils() { } /** * Returns true if the given file is a zip file and false otherwise. * * @param file * @return true if the given file is a zip file and false otherwise. */ public static boolean isZipFile(File file) { return file.isFile() && file.getName().toLowerCase().endsWith(ZIP_EXTENSION); } /** * Returns true if the given file is a zip file and false otherwise. * * @param file * @return true if the given file is a zip file and false otherwise. */ public static boolean isTarFile(File file) { return file.isFile() && file.getName().toLowerCase().endsWith(TAR_GZ_EXTENSION); } /** * Extract zip file to destination folder. * * @param file * zip file to extract * @param destination * destination folder */ public static void extractZip(File file, File destination) throws IOException { ZipInputStream in = null; OutputStream out = null; try { // Open the ZIP file in = new ZipInputStream(new FileInputStream(file)); // Get the first entry ZipEntry entry = null; while ((entry = in.getNextEntry()) != null) { String outFilename = entry.getName(); // Open the output file File extracted = new File(destination, outFilename); if (entry.isDirectory()) { extracted.mkdirs(); } else { // Be sure that parent file exists File baseDir = extracted.getParentFile(); if (!baseDir.exists()) { baseDir.mkdirs(); } out = new FileOutputStream(extracted); // Transfer bytes from the ZIP file to the output file byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } // Close the stream out.close(); if (extracted.getParent().contains(BIN_FOLDER)) { extracted.setExecutable(true); } } } } finally { // Close the stream if (in != null) { in.close(); } if (out != null) { out.close(); } } } /** * Extract tar.gz file to destination folder. 
* * @param file * zip file to extract * @param destination * destination folder */ public static void extractTar(File file, File destination) throws IOException { TarInputStream in = null; OutputStream out = null; try { // Open the ZIP file in = new TarInputStream(new GZIPInputStream(new FileInputStream(file))); // Get the first entry TarEntry entry = null; while ((entry = in.getNextEntry()) != null) { String outFilename = entry.getName(); switch (entry.getFileType()) { case TarEntry.DIRECTORY: File extractedDir = new File(destination, outFilename); if (extractedDir.isDirectory()) { extractedDir.mkdirs(); } break; case TarEntry.FILE: File extractedFile = new File(destination, outFilename); // Be sure that parent file exists File baseDir = extractedFile.getParentFile(); if (!baseDir.exists()) { baseDir.mkdirs(); } out = new FileOutputStream(extractedFile); // Transfer bytes from the ZIP file to the output file byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } // Close the stream out.close(); if (extractedFile.getParent().contains(BIN_FOLDER)) { extractedFile.setExecutable(true); } break; case TarEntry.LINK: File linkFile = new File(destination, outFilename); // Be sure that parent file exists File linkBaseDir = linkFile.getParentFile(); if (!linkBaseDir.exists()) { linkBaseDir.mkdirs(); } Path target = new File(linkFile.getParentFile(), entry.getLinkName()).toPath(); Files.createLink(linkFile.toPath(), target); break; case TarEntry.SYM_LINK: File symLinkFile = new File(destination, outFilename); // Be sure that parent file exists File symLinkBaseDir = symLinkFile.getParentFile(); if (!symLinkBaseDir.exists()) { symLinkBaseDir.mkdirs(); } Path symTarget = Paths.get(entry.getLinkName()); Files.createSymbolicLink(symLinkFile.toPath(), symTarget); break; } } } catch (TarException e) { throw new IOException(e); } finally { // Close the stream if (in != null) { in.close(); } if (out != null) { out.close(); } } } }
Link target path from link name. As commented by @angelozerr (but not tested, as it seems that if you include a "hard link" in a .tar.gz archive, no link information is kept).
core/ts.core/src/ts/utils/ZipUtils.java
Link target path from link name
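The fix resolves the hard-link target directly from the tar entry's link name via Paths.get, rather than against the link file's parent directory. A self-contained sketch of the java.nio call involved; the file paths here are hypothetical:

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class HardLinkDemo {
    public static void main(String[] args) throws Exception {
        Path target = Paths.get("data/original.txt"); // must already exist
        Path link = Paths.get("data/alias.txt");      // created by this call
        // Files.createLink(link, existing) makes a hard link at `link`; relative
        // paths resolve against the working directory, not against each other.
        Files.createLink(link, target);
        System.out.println(Files.isSameFile(link, target)); // prints true
    }
}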
<ide><path>ore/ts.core/src/ts/utils/ZipUtils.java <ide> if (!linkBaseDir.exists()) { <ide> linkBaseDir.mkdirs(); <ide> } <del> Path target = new File(linkFile.getParentFile(), entry.getLinkName()).toPath(); <add> Path target = Paths.get(entry.getLinkName()); <ide> Files.createLink(linkFile.toPath(), target); <ide> break; <ide> case TarEntry.SYM_LINK:
Java
apache-2.0
99812aa5e36d973884e4c2d5c6355f207cf96f28
0
akirakw/asakusafw,cocoatomo/asakusafw,cocoatomo/asakusafw,ashigeru/asakusafw,akirakw/asakusafw,asakusafw/asakusafw,asakusafw/asakusafw,ashigeru/asakusafw
/** * Copyright 2011-2013 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.compiler.directio; import java.io.IOException; import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import org.apache.hadoop.mapreduce.InputFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.asakusafw.compiler.directio.OutputPattern.CompiledOrder; import com.asakusafw.compiler.directio.OutputPattern.CompiledResourcePattern; import com.asakusafw.compiler.directio.emitter.NamingClassEmitter; import com.asakusafw.compiler.directio.emitter.OrderingClassEmitter; import com.asakusafw.compiler.directio.emitter.Slot; import com.asakusafw.compiler.directio.emitter.StageEmitter; import com.asakusafw.compiler.flow.DataClass; import com.asakusafw.compiler.flow.ExternalIoDescriptionProcessor; import com.asakusafw.compiler.flow.Location; import com.asakusafw.compiler.flow.jobflow.CompiledStage; import com.asakusafw.compiler.flow.jobflow.ExternalIoStage; import com.asakusafw.compiler.flow.mapreduce.copy.CopierClientEmitter; import com.asakusafw.compiler.flow.mapreduce.copy.CopyDescription; import com.asakusafw.runtime.directio.DataFormat; import com.asakusafw.runtime.directio.DirectDataSourceConstants; import com.asakusafw.runtime.directio.FilePattern; import com.asakusafw.runtime.stage.input.BridgeInputFormat; import com.asakusafw.runtime.stage.input.TemporaryInputFormat; import com.asakusafw.runtime.stage.output.TemporaryOutputFormat; import com.asakusafw.utils.collections.Lists; import com.asakusafw.utils.collections.Maps; import com.asakusafw.utils.java.model.syntax.ModelFactory; import com.asakusafw.utils.java.model.syntax.Name; import com.asakusafw.utils.java.model.util.Models; import com.asakusafw.vocabulary.directio.DirectFileInputDescription; import com.asakusafw.vocabulary.directio.DirectFileOutputDescription; import com.asakusafw.vocabulary.external.ExporterDescription; import com.asakusafw.vocabulary.external.ImporterDescription; import com.asakusafw.vocabulary.flow.graph.InputDescription; import com.asakusafw.vocabulary.flow.graph.OutputDescription; /** * Processes {@link DirectFileInputDescription} and {@link DirectFileOutputDescription}. * @since 0.2.5 * @version 0.4.0 */ public class DirectFileIoProcessor extends ExternalIoDescriptionProcessor { static final Logger LOG = LoggerFactory.getLogger(DirectFileIoProcessor.class); private static final String METHOD_RESOURCE_PATTERN = "getResourcePattern"; private static final String METHOD_ORDER = "getOrder"; private static final String MODULE_NAME = "directio"; private static final Class<? extends InputFormat<?, ?>> INPUT_FORMAT = BridgeInputFormat.class; @Override public String getId() { return MODULE_NAME; } @Override public Class<? extends ImporterDescription> getImporterDescriptionType() { return DirectFileInputDescription.class; } @Override public Class<? 
extends ExporterDescription> getExporterDescriptionType() { return DirectFileOutputDescription.class; } @Override public boolean validate(List<InputDescription> inputs, List<OutputDescription> outputs) { LOG.debug("Checking Direct I/O vocabularies: batch={}, flow={}", getEnvironment().getBatchId(), getEnvironment().getFlowId()); boolean valid = true; for (InputDescription input : inputs) { LOG.debug("Checking Direct I/O input: {}", input.getName()); valid &= validateInput(input); } for (OutputDescription output : outputs) { LOG.debug("Checking Direct I/O output: {}", output.getName()); valid &= validateOutput(output); } LOG.debug("Checking Direct I/O paths"); valid &= validatePaths(inputs, outputs); return valid; } private boolean validateInput(InputDescription input) { boolean valid = true; DirectFileInputDescription desc = extract(input); String pattern = desc.getResourcePattern(); try { FilePattern.compile(pattern); } catch (IllegalArgumentException e) { getEnvironment().error( "入力リソース名のパターンが不正です ({1}): {0}", e.getMessage(), desc.getClass().getName()); valid = false; } valid &= validateFormat(desc.getClass(), desc.getModelType(), desc.getFormat()); return valid; } private boolean validateOutput(OutputDescription output) { boolean valid = true; DirectFileOutputDescription desc = extract(output); DataClass dataType = getEnvironment().getDataClasses().load(desc.getModelType()); String pattern = desc.getResourcePattern(); List<CompiledResourcePattern> compiledPattern; try { compiledPattern = OutputPattern.compileResourcePattern(pattern, dataType); } catch (IllegalArgumentException e) { getEnvironment().error( "出力リソース名のパターンが不正です ({1}) [{0}]", e.getMessage(), desc.getClass().getName()); valid = false; compiledPattern = Collections.emptyList(); } for (String patternString : desc.getDeletePatterns()) { try { FilePattern.compile(patternString); } catch (IllegalArgumentException e) { getEnvironment().error( "削除するリソース名のパターン(\"{2}\")が不正です ({1}) [{0}]", e.getMessage(), desc.getClass().getName(), patternString); valid = false; } } List<String> orders = desc.getOrder(); try { OutputPattern.compileOrder(orders, dataType); } catch (IllegalArgumentException e) { getEnvironment().error( "出力順序の指定が不正です ({1}) [{0}]", e.getMessage(), desc.getClass().getName()); valid = false; } Set<OutputPattern.SourceKind> kinds = pickSourceKinds(compiledPattern); if (kinds.contains(OutputPattern.SourceKind.ENVIRONMENT)) { if (kinds.contains(OutputPattern.SourceKind.PROPERTY)) { getEnvironment().error( "出力リソース名にワイルドカードを含む場合、プロパティ ('{'name'}') は指定できません" + " ({1}.{2}()): {0}", pattern, desc.getClass().getName(), METHOD_RESOURCE_PATTERN); valid = false; } if (kinds.contains(OutputPattern.SourceKind.RANDOM)) { getEnvironment().error( "出力リソース名にワイルドカードを含む場合、ランダム ([m..n]) は指定できません" + " ({1}.{2}()): {0}", pattern, desc.getClass().getName(), METHOD_RESOURCE_PATTERN); valid = false; } if (orders.isEmpty() == false) { getEnvironment().error( "出力リソース名にワイルドカードを含む場合、出力順序は指定できません" + " ({1}.{2}()): {0}", pattern, desc.getClass().getName(), METHOD_ORDER); valid = false; } } valid &= validateFormat(desc.getClass(), desc.getModelType(), desc.getFormat()); return valid; } private boolean validatePaths(List<InputDescription> inputs, List<OutputDescription> outputs) { assert inputs != null; assert outputs != null; boolean valid = true; TreeMap<String, InputDescription> inputPaths = new TreeMap<String, InputDescription>(); for (InputDescription input : inputs) { DirectFileInputDescription desc = extract(input); String path = 
normalizePath(desc.getBasePath()); inputPaths.put(path, input); } TreeMap<String, OutputDescription> outputPaths = new TreeMap<String, OutputDescription>(); for (OutputDescription output : outputs) { DirectFileOutputDescription desc = extract(output); String path = normalizePath(desc.getBasePath()); for (Map.Entry<String, InputDescription> entry : inputPaths.tailMap(path, true).entrySet()) { if (entry.getKey().startsWith(path) == false) { break; } DirectFileInputDescription other = extract(entry.getValue()); getEnvironment().error( "入出力のベースパスが衝突しています: {0}[{1}] -> {2}[{3}]", desc.getClass().getName(), desc.getBasePath(), other.getClass().getName(), other.getBasePath()); valid = false; } if (outputPaths.containsKey(path)) { DirectFileOutputDescription other = extract(outputPaths.get(path)); getEnvironment().error( "2つの出力のベースパスが重複しています: {0}[{1}] <-> {2}[{3}]", desc.getClass().getName(), desc.getBasePath(), other.getClass().getName(), other.getBasePath()); valid = false; } else { outputPaths.put(path, output); } } for (Map.Entry<String, OutputDescription> base : outputPaths.entrySet()) { String path = base.getKey(); DirectFileOutputDescription desc = extract(base.getValue()); for (Map.Entry<String, OutputDescription> entry : outputPaths.tailMap(path, false).entrySet()) { if (entry.getKey().startsWith(path) == false) { break; } DirectFileOutputDescription other = extract(entry.getValue()); getEnvironment().error( "2つの出力のベースパスが衝突しています: {0}[{1}] -> {2}[{3}]", desc.getClass().getName(), desc.getBasePath(), other.getClass().getName(), other.getBasePath()); valid = false; } } return valid; } private String normalizePath(String path) { assert path != null; boolean sawSeparator = false; StringBuilder buf = new StringBuilder(); for (int i = 0, n = path.length(); i < n; i++) { char c = path.charAt(i); if (c == '/') { sawSeparator = true; } else { if (sawSeparator && buf.length() > 0) { buf.append('/'); } sawSeparator = false; buf.append(c); } } if (sawSeparator == false) { buf.append('/'); } return buf.toString(); } private boolean validateFormat(Class<?> desc, Class<?> model, Class<? 
extends DataFormat<?>> format) { assert desc != null; if (format == null) { getEnvironment().error( "データフォーマットが指定されていません: {0}", desc.getName()); return false; } DataFormat<?> formatObject; try { formatObject = format.getConstructor().newInstance(); } catch (Exception e) { getEnvironment().error( "データフォーマット\"{1}\"の生成に失敗しました: {0}", desc.getName(), format.getName()); return false; } if (formatObject.getSupportedType().isAssignableFrom(model) == false) { getEnvironment().error( "データフォーマット\"{2}\"はデータモデル\"{1}\"をサポートしていません: {0}", desc.getName(), model.getName(), format.getName()); return false; } return true; } @Override public SourceInfo getInputInfo(InputDescription description) { DirectFileInputDescription desc = extract(description); if (isCacheTarget(desc)) { String outputName = getProcessedInputName(description); Location location = getEnvironment().getPrologueLocation(MODULE_NAME).append(outputName).asPrefix(); return new SourceInfo(Collections.singleton(location), TemporaryInputFormat.class); } else { return getOriginalInputInfo(description); } } private SourceInfo getOriginalInputInfo(InputDescription description) { DirectFileInputDescription desc = extract(description); Set<Location> locations = Collections.singleton( Location.fromPath("__DIRECTIO__", '/') .append(description.getName()) .append(Location.fromPath(desc.getBasePath(), '/'))); return new SourceInfo(locations, INPUT_FORMAT, getAttributes(desc)); } private Map<String, String> getAttributes(DirectFileInputDescription desc) { Map<String, String> attributes = Maps.create(); attributes.put(DirectDataSourceConstants.KEY_DATA_CLASS, desc.getModelType().getName()); attributes.put(DirectDataSourceConstants.KEY_FORMAT_CLASS, desc.getFormat().getName()); attributes.put(DirectDataSourceConstants.KEY_BASE_PATH, desc.getBasePath()); attributes.put(DirectDataSourceConstants.KEY_RESOURCE_PATH, desc.getResourcePattern()); return attributes; } private String getProcessedInputName(InputDescription description) { assert description != null; StringBuilder buf = new StringBuilder(); for (char c : description.getName().toCharArray()) { // 0 as escape character if ('1' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z') { buf.append(c); } else if (c <= 0xff) { buf.append('0'); buf.append(String.format("%02x", (int) c)); } else { buf.append("0u"); buf.append(String.format("%04x", (int) c)); } } return buf.toString(); } @Override public List<ExternalIoStage> emitPrologue(IoContext context) throws IOException { IoContextBuilder builder = new IoContextBuilder(); List<CopyDescription> targets = Lists.create(); for (Input input : context.getInputs()) { InputDescription description = input.getDescription(); DirectFileInputDescription desc = extract(description); if (isCacheTarget(desc)) { LOG.debug("Input will be copied in prologue: {}", description.getName()); targets.add(new CopyDescription( getProcessedInputName(description), getEnvironment().getDataClasses().load(description.getDataType()), getOriginalInputInfo(description), TemporaryOutputFormat.class)); builder.addInput(input); } } if (targets.isEmpty()) { return Collections.emptyList(); } CopierClientEmitter emitter = new CopierClientEmitter(getEnvironment()); CompiledStage stage = emitter.emitPrologue( MODULE_NAME, targets, getEnvironment().getPrologueLocation(MODULE_NAME)); return Collections.singletonList(new ExternalIoStage(getId(), stage, builder.build())); } @Override public List<ExternalIoStage> emitEpilogue(IoContext context) throws IOException { ModelFactory f = 
getEnvironment().getModelFactory(); NamingClassEmitter namingEmitter = new NamingClassEmitter(getEnvironment(), MODULE_NAME); OrderingClassEmitter orderingEmitter = new OrderingClassEmitter(getEnvironment(), MODULE_NAME); List<Slot> slots = Lists.create(); for (Output output : context.getOutputs()) { DirectFileOutputDescription desc = extract(output.getDescription()); DataClass dataType = getEnvironment().getDataClasses().load(desc.getModelType()); List<CompiledResourcePattern> namingInfo = OutputPattern.compileResourcePattern(desc.getResourcePattern(), dataType); Set<OutputPattern.SourceKind> kinds = pickSourceKinds(namingInfo); if (kinds.contains(OutputPattern.SourceKind.ENVIRONMENT)) { assert kinds.contains(OutputPattern.SourceKind.PROPERTY) == false; assert kinds.contains(OutputPattern.SourceKind.RANDOM) == false; assert desc.getOrder().isEmpty(); String outputName = output.getDescription().getName(); Slot slot = new Slot( outputName, output.getSources(), Models.toName(f, desc.getModelType().getName()), desc.getBasePath(), desc.getResourcePattern(), Models.toName(f, desc.getFormat().getName()), null, null, desc.getDeletePatterns()); slots.add(slot); } else { List<CompiledOrder> orderingInfo = OutputPattern.compileOrder(desc.getOrder(), dataType); String outputName = output.getDescription().getName(); Name naming = namingEmitter.emit(outputName, slots.size() + 1, dataType, namingInfo); Name ordering = orderingEmitter.emit(outputName, slots.size() + 1, dataType, orderingInfo); Slot slot = new Slot( outputName, output.getSources(), Models.toName(f, desc.getModelType().getName()), desc.getBasePath(), desc.getResourcePattern(), Models.toName(f, desc.getFormat().getName()), naming, ordering, desc.getDeletePatterns()); slots.add(slot); } } if (slots.isEmpty()) { return Collections.emptyList(); } StageEmitter stageEmitter = new StageEmitter(getEnvironment(), MODULE_NAME); CompiledStage result = stageEmitter.emit(slots, getEnvironment().getEpilogueLocation(MODULE_NAME)); return Collections.singletonList(new ExternalIoStage(getId(), result, context.getOutputContext())); } private boolean isCacheTarget(ImporterDescription desc) { assert desc != null; switch (desc.getDataSize()) { case TINY: return getEnvironment().getOptions().isHashJoinForTiny(); case SMALL: return getEnvironment().getOptions().isHashJoinForSmall(); default: return false; } } private Set<OutputPattern.SourceKind> pickSourceKinds(List<CompiledResourcePattern> fragments) { assert fragments != null; Set<OutputPattern.SourceKind> results = EnumSet.noneOf(OutputPattern.SourceKind.class); for (CompiledResourcePattern fragment : fragments) { results.add(fragment.getKind()); } return results; } private DirectFileInputDescription extract(InputDescription description) { assert description != null; ImporterDescription importer = description.getImporterDescription(); assert importer != null; assert importer instanceof DirectFileInputDescription; return (DirectFileInputDescription) importer; } private DirectFileOutputDescription extract(OutputDescription description) { assert description != null; ExporterDescription exporter = description.getExporterDescription(); assert exporter != null; assert exporter instanceof DirectFileOutputDescription; return (DirectFileOutputDescription) exporter; } }
directio-project/asakusa-directio-plugin/src/main/java/com/asakusafw/compiler/directio/DirectFileIoProcessor.java
/** * Copyright 2011-2013 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.compiler.directio; import java.io.IOException; import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import org.apache.hadoop.mapreduce.InputFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.asakusafw.compiler.directio.OutputPattern.CompiledOrder; import com.asakusafw.compiler.directio.OutputPattern.CompiledResourcePattern; import com.asakusafw.compiler.directio.emitter.NamingClassEmitter; import com.asakusafw.compiler.directio.emitter.OrderingClassEmitter; import com.asakusafw.compiler.directio.emitter.Slot; import com.asakusafw.compiler.directio.emitter.StageEmitter; import com.asakusafw.compiler.flow.DataClass; import com.asakusafw.compiler.flow.ExternalIoDescriptionProcessor; import com.asakusafw.compiler.flow.Location; import com.asakusafw.compiler.flow.jobflow.CompiledStage; import com.asakusafw.compiler.flow.jobflow.ExternalIoStage; import com.asakusafw.compiler.flow.mapreduce.copy.CopierClientEmitter; import com.asakusafw.compiler.flow.mapreduce.copy.CopyDescription; import com.asakusafw.runtime.directio.DataFormat; import com.asakusafw.runtime.directio.DirectDataSourceConstants; import com.asakusafw.runtime.directio.FilePattern; import com.asakusafw.runtime.stage.input.BridgeInputFormat; import com.asakusafw.runtime.stage.input.TemporaryInputFormat; import com.asakusafw.runtime.stage.output.TemporaryOutputFormat; import com.asakusafw.utils.collections.Lists; import com.asakusafw.utils.collections.Maps; import com.asakusafw.utils.java.model.syntax.ModelFactory; import com.asakusafw.utils.java.model.syntax.Name; import com.asakusafw.utils.java.model.util.Models; import com.asakusafw.vocabulary.directio.DirectFileInputDescription; import com.asakusafw.vocabulary.directio.DirectFileOutputDescription; import com.asakusafw.vocabulary.external.ExporterDescription; import com.asakusafw.vocabulary.external.ImporterDescription; import com.asakusafw.vocabulary.flow.graph.InputDescription; import com.asakusafw.vocabulary.flow.graph.OutputDescription; /** * Processes {@link DirectFileInputDescription} and {@link DirectFileOutputDescription}. * @since 0.2.5 * @version 0.4.0 */ public class DirectFileIoProcessor extends ExternalIoDescriptionProcessor { static final Logger LOG = LoggerFactory.getLogger(DirectFileIoProcessor.class); private static final String METHOD_RESOURCE_PATTERN = "getResourcePattern"; private static final String METHOD_ORDER = "getOrder"; private static final String MODULE_NAME = "directio"; private static final Class<? extends InputFormat<?, ?>> INPUT_FORMAT = BridgeInputFormat.class; @Override public String getId() { return MODULE_NAME; } @Override public Class<? extends ImporterDescription> getImporterDescriptionType() { return DirectFileInputDescription.class; } @Override public Class<? 
extends ExporterDescription> getExporterDescriptionType() { return DirectFileOutputDescription.class; } @Override public boolean validate(List<InputDescription> inputs, List<OutputDescription> outputs) { LOG.debug("Checking Direct I/O vocabularies: batch={}, flow={}", getEnvironment().getBatchId(), getEnvironment().getFlowId()); boolean valid = true; for (InputDescription input : inputs) { LOG.debug("Checking Direct I/O input: {}", input.getName()); valid &= validateInput(input); } for (OutputDescription output : outputs) { LOG.debug("Checking Direct I/O output: {}", output.getName()); valid &= validateOutput(output); } LOG.debug("Checking Direct I/O paths"); valid &= validatePaths(inputs, outputs); return valid; } private boolean validateInput(InputDescription input) { boolean valid = true; DirectFileInputDescription desc = extract(input); String pattern = desc.getResourcePattern(); try { FilePattern.compile(pattern); } catch (IllegalArgumentException e) { getEnvironment().error( "入力リソース名のパターンが不正です ({1}): {0}", e.getMessage(), desc.getClass().getName()); valid = false; } valid &= validateFormat(desc.getClass(), desc.getModelType(), desc.getFormat()); return valid; } private boolean validateOutput(OutputDescription output) { boolean valid = true; DirectFileOutputDescription desc = extract(output); DataClass dataType = getEnvironment().getDataClasses().load(desc.getModelType()); String pattern = desc.getResourcePattern(); List<CompiledResourcePattern> compiledPattern; try { compiledPattern = OutputPattern.compileResourcePattern(pattern, dataType); } catch (IllegalArgumentException e) { getEnvironment().error( "出力リソース名のパターンが不正です ({1}) [{0}]", e.getMessage(), desc.getClass().getName()); valid = false; compiledPattern = Collections.emptyList(); } for (String patternString : desc.getDeletePatterns()) { try { FilePattern.compile(patternString); } catch (IllegalArgumentException e) { getEnvironment().error( "削除するリソース名のパターン(\"{2}\")が不正です ({1}) [{0}]", e.getMessage(), desc.getClass().getName(), patternString); valid = false; } } List<String> orders = desc.getOrder(); try { OutputPattern.compileOrder(orders, dataType); } catch (IllegalArgumentException e) { getEnvironment().error( "出力順序の指定が不正です ({1}) [{0}]", e.getMessage(), desc.getClass().getName()); valid = false; } Set<OutputPattern.SourceKind> kinds = pickSourceKinds(compiledPattern); if (kinds.contains(OutputPattern.SourceKind.ENVIRONMENT)) { if (kinds.contains(OutputPattern.SourceKind.PROPERTY)) { getEnvironment().error( "出力リソース名にワイルドカードを含む場合、プロパティ ('{'name'}') は指定できません" + " ({1}.{2}()): {0}", pattern, desc.getClass().getName(), METHOD_RESOURCE_PATTERN); valid = false; } if (kinds.contains(OutputPattern.SourceKind.RANDOM)) { getEnvironment().error( "出力リソース名にワイルドカードを含む場合、ランダム ([m..n]) は指定できません" + " ({1}.{2}()): {0}", pattern, desc.getClass().getName(), METHOD_RESOURCE_PATTERN); valid = false; } if (orders.isEmpty() == false) { getEnvironment().error( "出力リソース名にワイルドカードを含む場合、出力順序は指定できません" + " ({1}.{2}()): {0}", pattern, desc.getClass().getName(), METHOD_ORDER); valid = false; } } valid &= validateFormat(desc.getClass(), desc.getModelType(), desc.getFormat()); return valid; } private boolean validatePaths(List<InputDescription> inputs, List<OutputDescription> outputs) { assert inputs != null; assert outputs != null; boolean valid = true; TreeMap<String, InputDescription> inputPaths = new TreeMap<String, InputDescription>(); for (InputDescription input : inputs) { DirectFileInputDescription desc = extract(input); String path = 
normalizePath(desc.getBasePath()); inputPaths.put(path, input); } TreeMap<String, OutputDescription> outputPaths = new TreeMap<String, OutputDescription>(); for (OutputDescription output : outputs) { DirectFileOutputDescription desc = extract(output); String path = normalizePath(desc.getBasePath()); for (Map.Entry<String, InputDescription> entry : inputPaths.tailMap(path, true).entrySet()) { if (entry.getKey().startsWith(path) == false) { break; } DirectFileInputDescription other = extract(entry.getValue()); getEnvironment().error( "出力が別の入力を上書きします ({0}->{1})", desc.getClass().getName(), other.getClass().getName()); valid = false; } if (outputPaths.containsKey(path)) { DirectFileOutputDescription other = extract(outputPaths.get(path)); getEnvironment().error( "出力が別の出力を上書きします ({0}->{1})", desc.getClass().getName(), other.getClass().getName()); valid = false; } else { outputPaths.put(path, output); } } for (Map.Entry<String, OutputDescription> base : outputPaths.entrySet()) { String path = base.getKey(); DirectFileOutputDescription desc = extract(base.getValue()); for (Map.Entry<String, OutputDescription> entry : outputPaths.tailMap(path, false).entrySet()) { if (entry.getKey().startsWith(path) == false) { break; } DirectFileOutputDescription other = extract(entry.getValue()); getEnvironment().error( "出力が別の出力を上書きします ({0}->{1})", desc.getClass().getName(), other.getClass().getName()); valid = false; } } return valid; } private String normalizePath(String path) { assert path != null; boolean sawSeparator = false; StringBuilder buf = new StringBuilder(); for (int i = 0, n = path.length(); i < n; i++) { char c = path.charAt(i); if (c == '/') { sawSeparator = true; } else { if (sawSeparator && buf.length() > 0) { buf.append('/'); } sawSeparator = false; buf.append(c); } } if (sawSeparator == false) { buf.append('/'); } return buf.toString(); } private boolean validateFormat(Class<?> desc, Class<?> model, Class<? 
extends DataFormat<?>> format) { assert desc != null; if (format == null) { getEnvironment().error( "データフォーマットが指定されていません: {0}", desc.getName()); return false; } DataFormat<?> formatObject; try { formatObject = format.getConstructor().newInstance(); } catch (Exception e) { getEnvironment().error( "データフォーマット\"{1}\"の生成に失敗しました: {0}", desc.getName(), format.getName()); return false; } if (formatObject.getSupportedType().isAssignableFrom(model) == false) { getEnvironment().error( "データフォーマット\"{2}\"はデータモデル\"{1}\"をサポートしていません: {0}", desc.getName(), model.getName(), format.getName()); return false; } return true; } @Override public SourceInfo getInputInfo(InputDescription description) { DirectFileInputDescription desc = extract(description); if (isCacheTarget(desc)) { String outputName = getProcessedInputName(description); Location location = getEnvironment().getPrologueLocation(MODULE_NAME).append(outputName).asPrefix(); return new SourceInfo(Collections.singleton(location), TemporaryInputFormat.class); } else { return getOriginalInputInfo(description); } } private SourceInfo getOriginalInputInfo(InputDescription description) { DirectFileInputDescription desc = extract(description); Set<Location> locations = Collections.singleton( Location.fromPath("__DIRECTIO__", '/') .append(description.getName()) .append(Location.fromPath(desc.getBasePath(), '/'))); return new SourceInfo(locations, INPUT_FORMAT, getAttributes(desc)); } private Map<String, String> getAttributes(DirectFileInputDescription desc) { Map<String, String> attributes = Maps.create(); attributes.put(DirectDataSourceConstants.KEY_DATA_CLASS, desc.getModelType().getName()); attributes.put(DirectDataSourceConstants.KEY_FORMAT_CLASS, desc.getFormat().getName()); attributes.put(DirectDataSourceConstants.KEY_BASE_PATH, desc.getBasePath()); attributes.put(DirectDataSourceConstants.KEY_RESOURCE_PATH, desc.getResourcePattern()); return attributes; } private String getProcessedInputName(InputDescription description) { assert description != null; StringBuilder buf = new StringBuilder(); for (char c : description.getName().toCharArray()) { // 0 as escape character if ('1' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z') { buf.append(c); } else if (c <= 0xff) { buf.append('0'); buf.append(String.format("%02x", (int) c)); } else { buf.append("0u"); buf.append(String.format("%04x", (int) c)); } } return buf.toString(); } @Override public List<ExternalIoStage> emitPrologue(IoContext context) throws IOException { IoContextBuilder builder = new IoContextBuilder(); List<CopyDescription> targets = Lists.create(); for (Input input : context.getInputs()) { InputDescription description = input.getDescription(); DirectFileInputDescription desc = extract(description); if (isCacheTarget(desc)) { LOG.debug("Input will be copied in prologue: {}", description.getName()); targets.add(new CopyDescription( getProcessedInputName(description), getEnvironment().getDataClasses().load(description.getDataType()), getOriginalInputInfo(description), TemporaryOutputFormat.class)); builder.addInput(input); } } if (targets.isEmpty()) { return Collections.emptyList(); } CopierClientEmitter emitter = new CopierClientEmitter(getEnvironment()); CompiledStage stage = emitter.emitPrologue( MODULE_NAME, targets, getEnvironment().getPrologueLocation(MODULE_NAME)); return Collections.singletonList(new ExternalIoStage(getId(), stage, builder.build())); } @Override public List<ExternalIoStage> emitEpilogue(IoContext context) throws IOException { ModelFactory f = 
getEnvironment().getModelFactory(); NamingClassEmitter namingEmitter = new NamingClassEmitter(getEnvironment(), MODULE_NAME); OrderingClassEmitter orderingEmitter = new OrderingClassEmitter(getEnvironment(), MODULE_NAME); List<Slot> slots = Lists.create(); for (Output output : context.getOutputs()) { DirectFileOutputDescription desc = extract(output.getDescription()); DataClass dataType = getEnvironment().getDataClasses().load(desc.getModelType()); List<CompiledResourcePattern> namingInfo = OutputPattern.compileResourcePattern(desc.getResourcePattern(), dataType); Set<OutputPattern.SourceKind> kinds = pickSourceKinds(namingInfo); if (kinds.contains(OutputPattern.SourceKind.ENVIRONMENT)) { assert kinds.contains(OutputPattern.SourceKind.PROPERTY) == false; assert kinds.contains(OutputPattern.SourceKind.RANDOM) == false; assert desc.getOrder().isEmpty(); String outputName = output.getDescription().getName(); Slot slot = new Slot( outputName, output.getSources(), Models.toName(f, desc.getModelType().getName()), desc.getBasePath(), desc.getResourcePattern(), Models.toName(f, desc.getFormat().getName()), null, null, desc.getDeletePatterns()); slots.add(slot); } else { List<CompiledOrder> orderingInfo = OutputPattern.compileOrder(desc.getOrder(), dataType); String outputName = output.getDescription().getName(); Name naming = namingEmitter.emit(outputName, slots.size() + 1, dataType, namingInfo); Name ordering = orderingEmitter.emit(outputName, slots.size() + 1, dataType, orderingInfo); Slot slot = new Slot( outputName, output.getSources(), Models.toName(f, desc.getModelType().getName()), desc.getBasePath(), desc.getResourcePattern(), Models.toName(f, desc.getFormat().getName()), naming, ordering, desc.getDeletePatterns()); slots.add(slot); } } if (slots.isEmpty()) { return Collections.emptyList(); } StageEmitter stageEmitter = new StageEmitter(getEnvironment(), MODULE_NAME); CompiledStage result = stageEmitter.emit(slots, getEnvironment().getEpilogueLocation(MODULE_NAME)); return Collections.singletonList(new ExternalIoStage(getId(), result, context.getOutputContext())); } private boolean isCacheTarget(ImporterDescription desc) { assert desc != null; switch (desc.getDataSize()) { case TINY: return getEnvironment().getOptions().isHashJoinForTiny(); case SMALL: return getEnvironment().getOptions().isHashJoinForSmall(); default: return false; } } private Set<OutputPattern.SourceKind> pickSourceKinds(List<CompiledResourcePattern> fragments) { assert fragments != null; Set<OutputPattern.SourceKind> results = EnumSet.noneOf(OutputPattern.SourceKind.class); for (CompiledResourcePattern fragment : fragments) { results.add(fragment.getKind()); } return results; } private DirectFileInputDescription extract(InputDescription description) { assert description != null; ImporterDescription importer = description.getImporterDescription(); assert importer != null; assert importer instanceof DirectFileInputDescription; return (DirectFileInputDescription) importer; } private DirectFileOutputDescription extract(OutputDescription description) { assert description != null; ExporterDescription exporter = description.getExporterDescription(); assert exporter != null; assert exporter instanceof DirectFileOutputDescription; return (DirectFileOutputDescription) exporter; } }
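As an aside on the escaping scheme used by getProcessedInputName() above: '0' acts as the escape character, which is why the pass-through range starts at '1' rather than '0'. The following standalone sketch reproduces the scheme for illustration only; the class name and sample input are hypothetical, not part of the commit.

public class EscapeDemo {
    // Mirrors getProcessedInputName(): [1-9A-Za-z] pass through unchanged,
    // other 8-bit characters become '0' plus two hex digits, and wider
    // characters become "0u" plus four hex digits.
    static String escape(String name) {
        StringBuilder buf = new StringBuilder();
        for (char c : name.toCharArray()) {
            if ('1' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z') {
                buf.append(c);
            } else if (c <= 0xff) {
                buf.append('0').append(String.format("%02x", (int) c));
            } else {
                buf.append("0u").append(String.format("%04x", (int) c));
            }
        }
        return buf.toString();
    }

    public static void main(String[] args) {
        // '-' is 0x2d, '_' is 0x5f, and '0' itself is escaped as 030.
        System.out.println(escape("in-put_0")); // prints: in02dput05f030
    }
}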
Fixed #330. Unreadable error message when a Direct I/O output may overwrite another input or output.
directio-project/asakusa-directio-plugin/src/main/java/com/asakusafw/compiler/directio/DirectFileIoProcessor.java
Fixed #330.
<ide><path>directio-project/asakusa-directio-plugin/src/main/java/com/asakusafw/compiler/directio/DirectFileIoProcessor.java
<ide> }
<ide> DirectFileInputDescription other = extract(entry.getValue());
<ide> getEnvironment().error(
<del> "出力が別の入力を上書きします ({0}->{1})",
<del> desc.getClass().getName(),
<del> other.getClass().getName());
<add> "入出力のベースパスが衝突しています: {0}[{1}] -> {2}[{3}]",
<add> desc.getClass().getName(),
<add> desc.getBasePath(),
<add> other.getClass().getName(),
<add> other.getBasePath());
<ide> valid = false;
<ide> }
<ide> if (outputPaths.containsKey(path)) {
<ide> DirectFileOutputDescription other = extract(outputPaths.get(path));
<ide> getEnvironment().error(
<del> "出力が別の出力を上書きします ({0}->{1})",
<del> desc.getClass().getName(),
<del> other.getClass().getName());
<add> "2つの出力のベースパスが重複しています: {0}[{1}] <-> {2}[{3}]",
<add> desc.getClass().getName(),
<add> desc.getBasePath(),
<add> other.getClass().getName(),
<add> other.getBasePath());
<ide> valid = false;
<ide> } else {
<ide> outputPaths.put(path, output);
<ide> }
<ide> DirectFileOutputDescription other = extract(entry.getValue());
<ide> getEnvironment().error(
<del> "出力が別の出力を上書きします ({0}->{1})",
<del> desc.getClass().getName(),
<del> other.getClass().getName());
<add> "2つの出力のベースパスが衝突しています: {0}[{1}] -> {2}[{3}]",
<add> desc.getClass().getName(),
<add> desc.getBasePath(),
<add> other.getClass().getName(),
<add> other.getBasePath());
<ide> valid = false;
<ide> }
<ide> }
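For reference, a runnable sketch contrasting the old and new message patterns from this diff; it assumes the getEnvironment().error(...) helper formats with java.text.MessageFormat-style placeholders, and the description class names and base paths shown are hypothetical.

import java.text.MessageFormat;

public class ErrorMessageDemo {
    public static void main(String[] args) {
        // Old pattern: only the two description class names, with no hint of
        // which base paths actually collide.
        System.out.println(MessageFormat.format(
                "出力が別の入力を上書きします ({0}->{1})",
                "com.example.OrderOutput", "com.example.OrderInput"));

        // New pattern from this fix: each class name is paired with its base
        // path, so the colliding paths are visible in the message.
        System.out.println(MessageFormat.format(
                "入出力のベースパスが衝突しています: {0}[{1}] -> {2}[{3}]",
                "com.example.OrderOutput", "sales/orders",
                "com.example.OrderInput", "sales/orders/2012"));
    }
}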
Java
apache-2.0
fcfcc13cc843495d28e379ccfb2a5515b3182618
0
ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma
/*
 * The Gemma project.
 *
 * Copyright (c) 2006 University of British Columbia
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package ubic.gemma.model.genome.sequenceAnalysis;

import java.util.Collection;
import java.util.List;

import org.hibernate.Criteria;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;

import ubic.gemma.model.genome.biosequence.BioSequence;
import ubic.gemma.util.BusinessKey;

/**
 * @see ubic.gemma.model.genome.sequenceAnalysis.BlatResult
 */
@Repository
public class BlatResultDaoImpl extends ubic.gemma.model.genome.sequenceAnalysis.BlatResultDaoBase {

    @Autowired
    public BlatResultDaoImpl( SessionFactory sessionFactory ) {
        super.setSessionFactory( sessionFactory );
    }

    /*
     * (non-Javadoc)
     * @see
     * ubic.gemma.model.genome.sequenceAnalysis.BlatResultDaoBase#find(ubic.gemma.model.genome.biosequence.BioSequence)
     */
    @SuppressWarnings("unchecked")
    @Override
    public Collection<BlatResult> findByBioSequence( BioSequence bioSequence ) {
        BusinessKey.checkValidKey( bioSequence );
        Criteria queryObject = super.getSession().createCriteria( BlatResult.class );
        BusinessKey.attachCriteria( queryObject, bioSequence, "querySequence" );
        List results = queryObject.list();
        if ( results != null ) {
            for ( Object object : results ) {
                BlatResult br = ( BlatResult ) object;
                if ( br.getTargetChromosome() != null ) {
                    br.getTargetChromosome().getName(); // to initialize the proxies.
                }
                br.getQuerySequence();
            }
        }
        return results;
    }

    /*
     * (non-Javadoc)
     * @see
     * ubic.gemma.model.genome.sequenceAnalysis.BlatResultDaoBase#findOrCreate(ubic.gemma.model.genome.sequenceAnalysis
     * .BlatResult)
     */
    @Override
    public ubic.gemma.model.genome.sequenceAnalysis.BlatResult findOrCreate(
            ubic.gemma.model.genome.sequenceAnalysis.BlatResult blatResult ) {
        if ( blatResult.getQuerySequence() == null )
            throw new IllegalArgumentException( "BlatResult must have a querySequence associated with it." );

        BlatResult result = this.find( blatResult );
        if ( result != null ) return result;
        logger.debug( "Creating new BlatResult: " + blatResult.toString() );
        result = create( blatResult );
        return result;
    }

    /*
     * (non-Javadoc)
     * @see ubic.gemma.model.genome.sequenceAnalysis.BlatResultDaoBase#handleLoad(java.util.Collection)
     */
    @Override
    protected Collection handleLoad( Collection ids ) throws Exception {
        final String queryString =
                "select distinct blatResult from BlatResultImpl blatResult where blatResult.id in (:ids)";
        return this.getHibernateTemplate().findByNamedParam( queryString, "ids", ids );
    }
}
gemma-mda/src/main/java/ubic/gemma/model/genome/sequenceAnalysis/BlatResultDaoImpl.java
/*
 * The Gemma project.
 *
 * Copyright (c) 2006 University of British Columbia
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package ubic.gemma.model.genome.sequenceAnalysis;

import java.util.Collection;
import java.util.List;

import org.hibernate.Criteria;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;

import ubic.gemma.model.genome.biosequence.BioSequence;
import ubic.gemma.util.BusinessKey;

/**
 * @see ubic.gemma.model.genome.sequenceAnalysis.BlatResult
 */
@Repository
public class BlatResultDaoImpl extends ubic.gemma.model.genome.sequenceAnalysis.BlatResultDaoBase {

    @Autowired
    public BlatResultDaoImpl( SessionFactory sessionFactory ) {
        super.setSessionFactory( sessionFactory );
    }

    /*
     * (non-Javadoc)
     * @see
     * ubic.gemma.model.genome.sequenceAnalysis.BlatResultDaoBase#find(ubic.gemma.model.genome.biosequence.BioSequence)
     */
    @SuppressWarnings("unchecked")
    @Override
    public Collection<BlatResult> findByBioSequence( BioSequence bioSequence ) {
        BusinessKey.checkValidKey( bioSequence );
        Criteria queryObject = super.getSession().createCriteria( BlatResult.class );
        BusinessKey.attachCriteria( queryObject, bioSequence, "querySequence" );
        List results = queryObject.list();
        if ( results != null ) {
            for ( Object object : results ) {
                BlatResult br = ( BlatResult ) object;
                if ( br.getTargetChromosome() != null ) {
                    br.getTargetChromosome().getName(); // to initialize the proxies.
                }
                br.getQuerySequence();
            }
        }
        return results;
    }

    /*
     * (non-Javadoc)
     * @see
     * ubic.gemma.model.genome.sequenceAnalysis.BlatResultDaoBase#findOrCreate(ubic.gemma.model.genome.sequenceAnalysis
     * .BlatResult)
     */
    @Override
    public ubic.gemma.model.genome.sequenceAnalysis.BlatResult findOrCreate(
            ubic.gemma.model.genome.sequenceAnalysis.BlatResult blatResult ) {
        if ( blatResult.getQuerySequence() == null )
            throw new IllegalArgumentException( "BlatResult must have a querrySequence associated with it." );

        BlatResult result = this.find( blatResult );
        if ( result != null ) return result;
        logger.debug( "Creating new BlatResult: " + blatResult.toString() );
        result = create( blatResult );
        return result;
    }

    /*
     * (non-Javadoc)
     * @see ubic.gemma.model.genome.sequenceAnalysis.BlatResultDaoBase#handleLoad(java.util.Collection)
     */
    @Override
    protected Collection handleLoad( Collection ids ) throws Exception {
        final String queryString =
                "select distinct blatResult from BlatResultImpl blatResult where blatResult.id in (:ids)";
        return this.getHibernateTemplate().findByNamedParam( queryString, "ids", ids );
    }
}
speling
gemma-mda/src/main/java/ubic/gemma/model/genome/sequenceAnalysis/BlatResultDaoImpl.java
speling
<ide><path>gemma-mda/src/main/java/ubic/gemma/model/genome/sequenceAnalysis/BlatResultDaoImpl.java
<ide> public ubic.gemma.model.genome.sequenceAnalysis.BlatResult findOrCreate(
<ide> ubic.gemma.model.genome.sequenceAnalysis.BlatResult blatResult ) {
<ide> if ( blatResult.getQuerySequence() == null )
<del> throw new IllegalArgumentException( "BlatResult must have a querrySequence associated with it." );
<add> throw new IllegalArgumentException( "BlatResult must have a querySequence associated with it." );
<ide>
<ide> BlatResult result = this.find( blatResult );
<ide> if ( result != null ) return result;
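A hedged sketch of a regression test for the corrected guard-clause message; the fixture wiring (the sessionFactory field and the BlatResult factory call) is assumed and not part of the commit.

// Minimal JUnit-style sketch, assuming a configured SessionFactory fixture and
// that a BlatResult can be instantiated with a null querySequence.
public void testFindOrCreateRejectsMissingQuerySequence() {
    BlatResultDaoImpl dao = new BlatResultDaoImpl( sessionFactory ); // hypothetical fixture
    BlatResult blatResult = BlatResult.Factory.newInstance(); // assumed factory method
    try {
        dao.findOrCreate( blatResult ); // querySequence is null
        fail( "Expected IllegalArgumentException" );
    } catch ( IllegalArgumentException e ) {
        assertEquals( "BlatResult must have a querySequence associated with it.", e.getMessage() );
    }
}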
Java
epl-1.0
d637f1e6ac41b871b666a92b1c53cef68419eab9
0
theanuradha/debrief,theanuradha/debrief,debrief/debrief,theanuradha/debrief,theanuradha/debrief,theanuradha/debrief,debrief/debrief,debrief/debrief,debrief/debrief,theanuradha/debrief,debrief/debrief,theanuradha/debrief,debrief/debrief
package org.mwc.debrief.track_shift.ambiguity; import java.awt.Color; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.ConsoleHandler; import java.util.logging.Level; import java.util.logging.LogRecord; import java.util.logging.Logger; import org.apache.commons.math3.fitting.WeightedObservedPoint; import org.jfree.data.time.FixedMillisecond; import org.jfree.data.time.TimeSeries; import org.jfree.data.time.TimeSeriesDataItem; import org.mwc.debrief.track_shift.ambiguity.LegOfCuts.WhichBearing; import org.mwc.debrief.track_shift.ambiguity.LegOfCuts.WhichPeriod; import org.mwc.debrief.track_shift.controls.ZoneChart.ColorProvider; import org.mwc.debrief.track_shift.controls.ZoneChart.Zone; import org.mwc.debrief.track_shift.views.StackedDotHelper; import Debrief.ReaderWriter.Replay.ImportReplay; import Debrief.Wrappers.FixWrapper; import Debrief.Wrappers.SensorContactWrapper; import Debrief.Wrappers.SensorWrapper; import Debrief.Wrappers.TrackWrapper; import MWC.GUI.BaseLayer; import MWC.GUI.Editable; import MWC.GUI.Layers; import MWC.GUI.JFreeChart.ColouredDataItem; import MWC.GenericData.HiResDate; import MWC.GenericData.TimePeriod; public class AmbiguityResolver { public static class LegsAndZigs { private final List<LegOfCuts> legs; private final LegOfCuts zigCuts; public LegsAndZigs(final List<LegOfCuts> legs, final LegOfCuts zigCuts) { this.legs = legs; this.zigCuts = zigCuts; } public List<LegOfCuts> getLegs() { return legs; } public LegOfCuts getZigs() { return zigCuts; } } private static class Perm implements Comparable<Perm> { private final double score; private final boolean firstOne; private final boolean secondOne; public Perm(final double score, final boolean firstOne, final boolean secondOne) { this.score = score; this.firstOne = firstOne; this.secondOne = secondOne; } @Override public int compareTo(final Perm o) { final Double dScore = score; return dScore.compareTo(o.score); } } public static class ResolvedLeg { final boolean keepFirst; final LegOfCuts leg; public ResolvedLeg(final LegOfCuts leg, final boolean keepFirst) { this.leg = leg; this.keepFirst = keepFirst; } } // //////////////////////////////////////////////////////////////////////////////////////////////// // testing for this class // //////////////////////////////////////////////////////////////////////////////////////////////// static public final class TestResolveAmbig extends junit.framework.TestCase { private TrackWrapper getData(final String name) throws FileNotFoundException { // get our sample data-file final ImportReplay importer = new ImportReplay(); final Layers theLayers = new Layers(); final String fName = "../org.mwc.cmap.combined.feature/root_installs/sample_data/S2R/" + name; final File inFile = new File(fName); assertTrue("input file exists", inFile.exists()); final FileInputStream is = new FileInputStream(fName); importer.importThis(fName, is, theLayers); // sort out the sensors importer.storePendingSensors(); assertEquals("has some layers", 3, theLayers.size()); // get the sensor track final TrackWrapper track = (TrackWrapper) theLayers.findLayer("SENSOR"); return track; } private TimeSeries getOSCourse(final TrackWrapper track) { final TimeSeries ts = new TimeSeries("OS Course"); final Enumeration<Editable> pts = track.getPositionIterator(); while (pts.hasMoreElements()) { 
final FixWrapper fw = (FixWrapper) pts.nextElement(); final double course = fw.getCourseDegs(); final FixedMillisecond thisMilli = new FixedMillisecond(fw.getDateTimeGroup().getDate().getTime()); final ColouredDataItem crseBearing = new ColouredDataItem(thisMilli, course, fw.getColor(), true, null, true, true); ts.add(crseBearing); } return ts; } public void testDitchUsingAmbiguity() throws FileNotFoundException { final TrackWrapper track = getData("Ambig_tracks2.rep"); assertNotNull("found track", track); // has sensors assertEquals("has sensor", 1, track.getSensors().size()); // make the sensor visible final SensorWrapper sensor = (SensorWrapper) track.getSensors().elements().nextElement(); sensor.setVisible(true); // ok, get resolving final AmbiguityResolver solver = new AmbiguityResolver(); // try to get zones using ambiguity delta final LegsAndZigs res = solver.sliceIntoLegsUsingAmbiguity(track, 0.2, 0.2, null, null); final List<LegOfCuts> legs = res.legs; final LegOfCuts zigs = res.zigCuts; assertNotNull("found zones", legs); assertEquals("found correct number of zones", 12, legs.size()); assertNotNull("found zigs", zigs); assertEquals("found correct number of zig cuts", 23, zigs.size()); // ok, ditch those cuts final int fullSensorLen = sensor.size(); Map<SensorWrapper, LegOfCuts> deleted = solver.deleteTheseCuts(zigs); assertEquals("fewer cuts", 98, sensor.size()); // ok, and undo them solver.restoreCuts(deleted); assertEquals("fewer cuts", fullSensorLen, sensor.size()); // and do it again, so we've got fewer cuts deleted = solver.deleteTheseCuts(zigs); final List<ResolvedLeg> resolvedLegs = solver.resolve(legs); assertNotNull(resolvedLegs); assertEquals("right num legs", 12, legs.size()); assertEquals("correct leg", 251d, resolvedLegs.get(0).leg.get(0) .getBearing(), 1d); assertEquals("correct leg", 253d, resolvedLegs.get(1).leg.get(0) .getBearing(), 1d); assertEquals("correct leg", 251d, resolvedLegs.get(2).leg.get(0) .getBearing(), 1d); assertEquals("correct leg", 254d, resolvedLegs.get(3).leg.get(0) .getBearing(), 1d); assertEquals("correct leg", 258d, resolvedLegs.get(4).leg.get(0) .getBearing(), 1d); assertEquals("correct leg", 269d, resolvedLegs.get(5).leg.get(0) .getBearing(), 1d); // ok, and cancel the leg resolving solver.undoResolveBearings(resolvedLegs); // and re-check they're ambiguous assertEquals("is unresloved", true, resolvedLegs.get(0).leg.get(0) .getHasAmbiguousBearing()); } public void testGetCurve() throws FileNotFoundException { final LegOfCuts leg4 = new LegOfCuts(); final SensorWrapper sensor = new SensorWrapper("name"); leg4.add(wrapMe(sensor, 80, 92d, 260d)); leg4.add(wrapMe(sensor, 90, 82d, 280d)); leg4.add(wrapMe(sensor, 100, 72d, 300d)); leg4.add(wrapMe(sensor, 110, 62d, 320d)); leg4.add(wrapMe(sensor, 120, 52d, 340d)); leg4.add(wrapMe(sensor, 130, 42d, 360d)); leg4.add(wrapMe(sensor, 140, 32d, 380d)); final double[] curve = leg4.getCurve(WhichPeriod.ALL, WhichBearing.AMBIGUOUS); assertNotNull("produced curve", curve); assertEquals("curve correct length", 3, curve.length); assertEquals("correct offset", 100d, curve[0], 0.001); leg4.set(5, wrapMe(sensor, 130, 42d, 0d)); leg4.set(6, wrapMe(sensor, 140, 32d, 20d)); final double[] curve2Ambig = leg4.getCurve(WhichPeriod.ALL, WhichBearing.AMBIGUOUS); final double[] curve2 = leg4.getCurve(WhichPeriod.ALL, WhichBearing.CORE); assertNotNull("produced curve", curve2Ambig); assertEquals("curve correct length", 3, curve2Ambig.length); assertEquals("correct offset", 100d, curve2Ambig[0], 0.001); double beforeValue 
= valueAt(60, curve2Ambig); assertEquals("correct next value", beforeValue, 220d, 0.001); double afterValue = valueAt(150, curve2Ambig); assertEquals("correct next value", afterValue, 400d, 0.001); beforeValue = valueAt(60, curve2); assertEquals("correct next value", beforeValue, 112d, 0.001); afterValue = valueAt(150, curve2); assertEquals("correct next value", afterValue, 22d, 0.001); } public void testGetCurveSector() throws FileNotFoundException { final LegOfCuts leg4 = new LegOfCuts(); final SensorWrapper sensor = new SensorWrapper("name"); leg4.add(wrapMe(sensor, 80, 92d, 260d)); leg4.add(wrapMe(sensor, 90, 82d, 280d)); leg4.add(wrapMe(sensor, 100, 72d, 300d)); leg4.add(wrapMe(sensor, 110, 62d, 320d)); leg4.add(wrapMe(sensor, 120, 52d, 340d)); // start off with it too short List<SensorContactWrapper> leg = leg4.extractPortion(WhichPeriod.EARLY); assertNotNull("leg retrieved", leg); assertEquals("correct length", 5, leg.size()); // ok, add more data, so that we need to trim the data leg4.add(wrapMe(sensor, 130, 42d, 0d)); leg4.add(wrapMe(sensor, 140, 32d, 20d)); leg4.add(wrapMe(sensor, 150, 22d, 40d)); leg4.add(wrapMe(sensor, 160, 12d, 60d)); leg4.add(wrapMe(sensor, 170, 2d, 80d)); leg4.add(wrapMe(sensor, 180, 12d, 60d)); leg4.add(wrapMe(sensor, 190, 22d, 40d)); leg4.add(wrapMe(sensor, 200, 32d, 20d)); leg4.add(wrapMe(sensor, 210, 42d, 0d)); leg4.add(wrapMe(sensor, 220, 52d, 340d)); leg4.add(wrapMe(sensor, 230, 62d, 320d)); leg4.add(wrapMe(sensor, 240, 72d, 300d)); // check we retrieve the expected data leg = leg4.extractPortion(WhichPeriod.ALL); assertNotNull("leg retrieved", leg); assertEquals("correct length", 17, leg.size()); assertEquals("correct start", 80, leg.get(0).getDTG().getDate().getTime()); assertEquals("correct end", 240, leg.get(leg.size() - 1).getDTG() .getDate().getTime()); leg = leg4.extractPortion(WhichPeriod.EARLY); assertNotNull("leg retrieved", leg); assertEquals("correct length", 8, LegOfCuts.LEG_LENGTH); assertEquals("correct start", 80, leg.get(0).getDTG().getDate().getTime()); assertEquals("correct end", 150, leg.get(leg.size() - 1).getDTG() .getDate().getTime()); leg = leg4.extractPortion(WhichPeriod.LATE); assertNotNull("leg retrieved", leg); assertEquals("correct length", 8, LegOfCuts.LEG_LENGTH); assertEquals("correct start", 170, leg.get(0).getDTG().getDate() .getTime()); assertEquals("correct end", 240, leg.get(leg.size() - 1).getDTG() .getDate().getTime()); // try the calculated values double[] curve = leg4.getCurve(WhichPeriod.ALL, WhichBearing.CORE); assertEquals("correct value (since we can't fit good curve)", 119.20, valueAt(80, curve), 0.01); assertEquals("correct value (since we can't fit good curve)", 78.79, valueAt(240, curve), 0.01); curve = leg4.getCurve(WhichPeriod.EARLY, WhichBearing.CORE); assertEquals("correct value", 92, valueAt(80, curve), 0.01); curve = leg4.getCurve(WhichPeriod.EARLY, WhichBearing.AMBIGUOUS); assertEquals("correct value", 260, valueAt(80, curve), 0.01); curve = leg4.getCurve(WhichPeriod.LATE, WhichBearing.CORE); assertEquals("correct value", 72, valueAt(240, curve), 0.01); curve = leg4.getCurve(WhichPeriod.LATE, WhichBearing.AMBIGUOUS); assertEquals("correct value", -60, valueAt(240, curve), 0.01); } public void testGettingLegs() throws FileNotFoundException { final TrackWrapper track = getData("Ambig_tracks.rep"); assertNotNull("found track", track); // has sensors assertEquals("has sensor", 1, track.getSensors().size()); final SensorWrapper sensor = (SensorWrapper) track.getSensors().elements().nextElement(); 
sensor.setVisible(true); final ColorProvider provider = new ColorProvider() { @Override public Color getZoneColor() { return Color.blue; } }; final TimeSeries osCourse = getOSCourse(track); // try to slice the O/S zones final ArrayList<Zone> zonesList = StackedDotHelper.sliceOwnship(osCourse, provider); final Zone[] zones = zonesList.toArray(new Zone[] {}); // ok, get resolving final AmbiguityResolver res = new AmbiguityResolver(); // drop cuts in turn res.findCutsNotInLeg(track, zones, null); // now get the legs final List<LegOfCuts> legs = res.sliceIntoLegs(track, zones); assertEquals("right num", zones.length, legs.size()); assertEquals("right num (after working it out by hand)", 13, legs.size()); // now resolve ambiguity res.resolve(legs); } public void testOnlyDitchVisible() throws FileNotFoundException { final TrackWrapper track = getData("Ambig_tracks2.rep"); assertNotNull("found track", track); // has sensors assertEquals("has sensor", 1, track.getSensors().size()); // make the sensor visible final SensorWrapper sensor = (SensorWrapper) track.getSensors().elements().nextElement(); sensor.setVisible(true); // set some cuts to hidden int ctr = 0; final Enumeration<Editable> numer = sensor.elements(); while (numer.hasMoreElements()) { final SensorContactWrapper scw = (SensorContactWrapper) numer.nextElement(); if (ctr > 20 && ctr < 50) { scw.setVisible(false); } ctr++; } // ok, get resolving final AmbiguityResolver solver = new AmbiguityResolver(); // try to get zones using ambiguity delta final LegsAndZigs res = solver.sliceIntoLegsUsingAmbiguity(track, 0.2, 0.2, null, null); final List<LegOfCuts> legs = res.legs; final LegOfCuts zigs = res.zigCuts; assertNotNull("found zones", legs); assertEquals("found correct number of zones", 8, legs.size()); assertNotNull("found zigs", zigs); assertEquals("found correct number of zig cuts", 17, zigs.size()); // ok, ditch those cuts solver.deleteTheseCuts(zigs); assertEquals("fewer cuts", 104, sensor.size()); final List<ResolvedLeg> resolvedLegs = solver.resolve(legs); assertNotNull(resolvedLegs); assertEquals("right num legs", 8, legs.size()); } public void testProcessCuts() throws FileNotFoundException { List<WeightedObservedPoint> obs = new ArrayList<WeightedObservedPoint>(); obs.add(new WeightedObservedPoint(1, 80d, 260d)); obs.add(new WeightedObservedPoint(1, 90, 280d)); obs.add(new WeightedObservedPoint(1, 100, 300d)); obs.add(new WeightedObservedPoint(1, 110, 320d)); obs.add(new WeightedObservedPoint(1, 120, 340d)); obs.add(new WeightedObservedPoint(1, 130, 0d)); obs.add(new WeightedObservedPoint(1, 140, 20d)); List<WeightedObservedPoint> res = AmbiguityResolver.putObsInCorrectDomain(obs); assertEquals("correct last score", 380d, res.get(res.size() - 1).getY(), 0.001); obs = new ArrayList<WeightedObservedPoint>(); obs.add(new WeightedObservedPoint(1, 80, 160d)); obs.add(new WeightedObservedPoint(1, 90, 140d)); obs.add(new WeightedObservedPoint(1, 100, 120d)); obs.add(new WeightedObservedPoint(1, 110, 80d)); obs.add(new WeightedObservedPoint(1, 120, 30d)); obs.add(new WeightedObservedPoint(1, 130, 340d)); obs.add(new WeightedObservedPoint(1, 140, 320d)); res = AmbiguityResolver.putObsInCorrectDomain(obs); assertEquals("correct last score", -40d, res.get(res.size() - 1).getY(), 0.001); obs = new ArrayList<WeightedObservedPoint>(); obs.add(new WeightedObservedPoint(1, 80, -160d)); obs.add(new WeightedObservedPoint(1, 90, -140d)); obs.add(new WeightedObservedPoint(1, 100, -120d)); obs.add(new WeightedObservedPoint(1, 110, -80d)); obs.add(new 
WeightedObservedPoint(1, 120, -30d)); obs.add(new WeightedObservedPoint(1, 130, 20d)); obs.add(new WeightedObservedPoint(1, 140, 40d)); res = AmbiguityResolver.putObsInCorrectRange(obs); assertEquals("correct last score", 200d, res.get(0).getY(), 0.001); assertEquals("correct last score", 40d, res.get(res.size() - 1).getY(), 0.001); } public void testResolve() throws FileNotFoundException { final List<LegOfCuts> legs = new ArrayList<LegOfCuts>(); final SensorWrapper sensor = new SensorWrapper("name"); final LegOfCuts leg1 = new LegOfCuts(); leg1.add(wrapMe(sensor, 100, 180d, 270d)); leg1.add(wrapMe(sensor, 110, 170d, 280d)); leg1.add(wrapMe(sensor, 120, 160d, 290d)); leg1.add(wrapMe(sensor, 130, 150d, 300d)); leg1.add(wrapMe(sensor, 140, 140d, 310d)); legs.add(leg1); final LegOfCuts leg2 = new LegOfCuts(); leg2.add(wrapMe(sensor, 160, 182d, 220d)); leg2.add(wrapMe(sensor, 170, 183d, 221d)); leg2.add(wrapMe(sensor, 180, 184d, 222d)); leg2.add(wrapMe(sensor, 190, 185d, 223d)); leg2.add(wrapMe(sensor, 200, 186d, 224d)); legs.add(leg2); final LegOfCuts leg3 = new LegOfCuts(); leg3.add(wrapMe(sensor, 220, 92d, 200d)); leg3.add(wrapMe(sensor, 230, 83d, 210d)); leg3.add(wrapMe(sensor, 240, 74d, 220d)); leg3.add(wrapMe(sensor, 250, 65d, 230d)); leg3.add(wrapMe(sensor, 260, 56d, 240d)); legs.add(leg3); final LegOfCuts leg4 = new LegOfCuts(); leg4.add(wrapMe(sensor, 280, 92d, 260d)); leg4.add(wrapMe(sensor, 290, 73d, 280d)); leg4.add(wrapMe(sensor, 300, 54d, 300d)); leg4.add(wrapMe(sensor, 310, 35d, 320d)); leg4.add(wrapMe(sensor, 320, 16d, 340d)); leg4.add(wrapMe(sensor, 330, 9d, 0d)); leg4.add(wrapMe(sensor, 340, 355d, 20d)); legs.add(leg4); // put the good cut in the wrong domain final LegOfCuts leg5 = new LegOfCuts(); leg5.add(wrapMe(sensor, 360, 41d, 260d)); leg5.add(wrapMe(sensor, 370, 43d, 240d)); leg5.add(wrapMe(sensor, 380, 45d, 220d)); leg5.add(wrapMe(sensor, 390, 47d, 200d)); leg5.add(wrapMe(sensor, 400, 49d, 180d)); leg5.add(wrapMe(sensor, 410, 51d, 160d)); leg5.add(wrapMe(sensor, 420, 53d, 140d)); legs.add(leg5); // make the first cuts very wonky final LegOfCuts leg6 = new LegOfCuts(); leg6.add(wrapMe(sensor, 440, 141d, 350d)); leg6.add(wrapMe(sensor, 450, 143d, 20d)); leg6.add(wrapMe(sensor, 460, 145d, 70d)); leg6.add(wrapMe(sensor, 470, 147d, 80d)); leg6.add(wrapMe(sensor, 480, 149d, 90d)); leg6.add(wrapMe(sensor, 490, 151d, 100d)); leg6.add(wrapMe(sensor, 500, 153d, 110d)); legs.add(leg6); final AmbiguityResolver solver = new AmbiguityResolver(); final List<ResolvedLeg> resolvedLegs = solver.resolve(legs); assertNotNull("have list of resolved", resolvedLegs); assertEquals("correct num legs", 6, resolvedLegs.size()); // ok, check the legs assertFalse("not ambig", leg1.get(0).getHasAmbiguousBearing()); assertFalse("not ambig", leg2.get(0).getHasAmbiguousBearing()); assertFalse("not ambig", leg3.get(0).getHasAmbiguousBearing()); assertFalse("not ambig", leg4.get(0).getHasAmbiguousBearing()); assertFalse("not ambig", leg5.get(0).getHasAmbiguousBearing()); assertFalse("not ambig", leg6.get(0).getHasAmbiguousBearing()); assertEquals("correct bearing", 180d, leg1.get(0).getBearing()); assertEquals("correct bearing", 182d, leg2.get(0).getBearing()); assertEquals("correct bearing", 200d, leg3.get(0).getBearing()); assertEquals("correct bearing", 260d, leg4.get(0).getBearing()); assertEquals("correct bearing", 41d, leg5.get(0).getBearing()); assertEquals("correct bearing", 350d, leg6.get(0).getBearing()); } public void testSplittingAllTime() throws FileNotFoundException { final TrackWrapper 
track = getData("Ambig_tracks.rep"); assertNotNull("found track", track); // has sensors assertEquals("has sensor", 1, track.getSensors().size()); final SensorWrapper sensor = (SensorWrapper) track.getSensors().elements().nextElement(); final ColorProvider provider = new ColorProvider() { @Override public Color getZoneColor() { return Color.blue; } }; final TimeSeries osCourse = getOSCourse(track); // try to slice the O/S zones final ArrayList<Zone> zonesList = StackedDotHelper.sliceOwnship(osCourse, provider); final Zone[] zones = zonesList.toArray(new Zone[] {}); // ok, get resolving final AmbiguityResolver res = new AmbiguityResolver(); // drop cuts in turn final int numCuts = sensor.size(); assertEquals("right cuts at start", 721, numCuts); final List<SensorContactWrapper> toDel = res.findCutsNotInLeg(track, zones, null); assertEquals("have cuts to delete", 133, toDel.size()); @SuppressWarnings("unused") final List<LegOfCuts> legs = res.sliceIntoLegs(track, zones); // ok, check the data } /** * check that we allow a couple of apparently steady cuts during a turn * * @throws FileNotFoundException */ public void testSteadyInTurn() throws FileNotFoundException { final SensorWrapper sensor = new SensorWrapper("name"); sensor.add(wrapMe(sensor, 100000, 180d, 270d)); sensor.add(wrapMe(sensor, 110000, 170d, 280d)); sensor.add(wrapMe(sensor, 120000, 160d, 290d)); sensor.add(wrapMe(sensor, 130000, 150d, 300d)); sensor.add(wrapMe(sensor, 140000, 140d, 310d)); sensor.add(wrapMe(sensor, 150000, 130d, 310d)); sensor.add(wrapMe(sensor, 160000, 122d, 220d)); sensor.add(wrapMe(sensor, 170000, 113d, 221d)); sensor.add(wrapMe(sensor, 180000, 104d, 222d)); sensor.add(wrapMe(sensor, 190000, 095d, 223d)); sensor.add(wrapMe(sensor, 200000, 086d, 224d)); sensor.add(wrapMe(sensor, 210000, 076d, 224d)); sensor.add(wrapMe(sensor, 220000, 62d, 200d)); sensor.add(wrapMe(sensor, 230000, 53d, 210d)); sensor.add(wrapMe(sensor, 240000, 44d, 220d)); sensor.add(wrapMe(sensor, 250000, 35d, 230d)); sensor.add(wrapMe(sensor, 260000, 26d, 240d)); sensor.add(wrapMe(sensor, 270000, 36d, 240d)); sensor.add(wrapMe(sensor, 280000, 42d, 260d)); sensor.add(wrapMe(sensor, 290000, 53d, 280d)); sensor.add(wrapMe(sensor, 300000, 64d, 300d)); sensor.add(wrapMe(sensor, 310000, 75d, 320d)); sensor.add(wrapMe(sensor, 320000, 66d, 340d)); sensor.add(wrapMe(sensor, 330000, 56d, 0d)); sensor.add(wrapMe(sensor, 340000, 45d, 20d)); sensor.add(wrapMe(sensor, 350000, 35d, 30d)); sensor.add(wrapMe(sensor, 360000, 35d, 30d)); sensor.add(wrapMe(sensor, 370000, 15d, 70d)); sensor.add(wrapMe(sensor, 380000, 355d, 20d)); sensor.add(wrapMe(sensor, 390000, 355d, 20d)); sensor.add(wrapMe(sensor, 400000, 345d, 20d)); // sensor.add(wrapMe(sensor, 410000, 345d, 20d)); sensor.setVisible(true); final TrackWrapper host = new TrackWrapper(); host.setName("Host"); host.add(sensor); final AmbiguityResolver solver = new AmbiguityResolver(); final Logger logger = Logger.getLogger("Test output"); logger.setUseParentHandlers(false); logger.addHandler(new ConsoleHandler() { @Override public void publish(final LogRecord record) { System.out.println(record.getMessage()); } }); final LegsAndZigs sliced = solver.sliceIntoLegsUsingAmbiguity(host, 2.2, 0.2, logger, null); // for(LegOfCuts leg: sliced.legs) // { // System.out.println(leg.get(0).getDTG().getDate().getTime() + " - " + // leg.get(leg.size()-1).getDTG().getDate().getTime()); // } // // System.out.println("==="); // for(SensorContactWrapper cut: sliced.zigCuts) // { // 
System.out.println(cut.getDTG().getDate().getTime()); // } assertNotNull("produced slices", sliced); assertEquals("correct legs", 3, sliced.legs.size()); assertEquals("correct turning cuts", 8, sliced.zigCuts.size()); } public void testWeighting() { final SensorWrapper sensor = new SensorWrapper("name"); final List<LegOfCuts> legs = new ArrayList<LegOfCuts>(); final LegOfCuts leg1 = new LegOfCuts(); leg1.add(wrapMe(sensor, 280, 92d, 260d)); leg1.add(wrapMe(sensor, 290, 73d, 280d)); leg1.add(wrapMe(sensor, 300, 54d, 300d)); leg1.add(wrapMe(sensor, 310, 35d, 320d)); leg1.add(wrapMe(sensor, 320, 16d, 340d)); leg1.add(wrapMe(sensor, 330, 9d, 0d)); leg1.add(wrapMe(sensor, 340, 355d, 20d)); legs.add(leg1); // put the good cut in the wrong domain final LegOfCuts leg2 = new LegOfCuts(); leg2.add(wrapMe(sensor, 360, 42d, 260d)); leg2.add(wrapMe(sensor, 370, 43d, 240d)); leg2.add(wrapMe(sensor, 380, 45d, 220d)); leg2.add(wrapMe(sensor, 390, 47d, 200d)); leg2.add(wrapMe(sensor, 400, 49d, 180d)); leg2.add(wrapMe(sensor, 410, 51d, 160d)); leg2.add(wrapMe(sensor, 420, 53d, 140d)); legs.add(leg2); // make the first cuts very wonky final LegOfCuts leg3 = new LegOfCuts(); leg3.add(wrapMe(sensor, 440, 141d, 350d)); leg3.add(wrapMe(sensor, 450, 143d, 20d)); leg3.add(wrapMe(sensor, 460, 145d, 70d)); leg3.add(wrapMe(sensor, 470, 147d, 80d)); leg3.add(wrapMe(sensor, 480, 149d, 90d)); leg3.add(wrapMe(sensor, 490, 151d, 100d)); leg3.add(wrapMe(sensor, 500, 153d, 110d)); legs.add(leg3); final AmbiguityResolver resolver = new AmbiguityResolver(); final List<ResolvedLeg> resolvedLegs = resolver.resolve(legs); assertNotNull("have legs", resolvedLegs); assertEquals("correct bearing", 260d, leg1.get(0).getBearing()); assertEquals("correct bearing", 42d, leg2.get(0).getBearing()); assertEquals("correct bearing", 350d, leg3.get(0).getBearing()); } private SensorContactWrapper wrapMe(final SensorWrapper sensor, final long dtg, final double bearing1, final double bearing2) { return new SensorContactWrapper("track", new HiResDate(dtg), null, bearing1, bearing2, null, null, Color.RED, "label", 0, sensor .getName()); } } private static void doLog(final Logger logger, final String msg) { if (logger != null) { logger.log(Level.INFO, msg); } } public static List<WeightedObservedPoint> putObsInCorrectDomain( final List<WeightedObservedPoint> obs) { final List<WeightedObservedPoint> res = new ArrayList<WeightedObservedPoint>(); double lastVal = Double.MIN_VALUE; for (final WeightedObservedPoint ob : obs) { double thisVal = ob.getY(); if (lastVal != Double.MIN_VALUE) { double valToUse; // ok, have we jumped up? 
if (thisVal - lastVal > 200) { // ok, reduce it valToUse = thisVal - 360d; } else if (thisVal - lastVal < -200) { // ok, increase it valToUse = thisVal + 360d; } else { valToUse = thisVal; } res.add(new WeightedObservedPoint(ob.getWeight(), ob.getX(), valToUse)); thisVal = valToUse; } else { res.add(ob); } lastVal = thisVal; } return res; } public static List<WeightedObservedPoint> putObsInCorrectRange( final List<WeightedObservedPoint> obs) { final List<WeightedObservedPoint> res = new ArrayList<WeightedObservedPoint>(); for (final WeightedObservedPoint ob : obs) { double thisVal = ob.getY(); while (thisVal < 0) { thisVal += 360d; } while (thisVal >= 360) { thisVal -= 360d; } res.add(new WeightedObservedPoint(ob.getWeight(), ob.getX(), thisVal)); } return res; } private static double trim(final double val) { double res = val; while (res < -360d) { res += 360d; } while (res >= 360d) { res -= 360d; } return res; } private static double valueAt(final long time, final double[] slope) { return slope[0] + slope[1] * time + slope[2] * Math.pow(time, 2); } private double calcDelta(final double one, final double two) { double res = Math.abs(one - two); while (res > 360d) { res -= 360d; } while (res <= -360d) { res += 360d; } return res; } public Map<SensorWrapper, LegOfCuts> deleteTheseCuts( final List<SensorContactWrapper> cutsToDelete) { final Map<SensorWrapper, LegOfCuts> deletedCuts = new HashMap<SensorWrapper, LegOfCuts>(); for (final SensorContactWrapper t : cutsToDelete) { // store the details of this sensor, so we can undo it LegOfCuts list = deletedCuts.get(t.getSensor()); if (list == null) { list = new LegOfCuts(); deletedCuts.put(t.getSensor(), list); } list.add(t); t.getSensor().removeElement(t); } return deletedCuts; } public void ditchBearings(final List<ResolvedLeg> legs) { for (final ResolvedLeg leg : legs) { ditchBearingsForThisLeg(leg.leg, leg.keepFirst); } } private void ditchBearingsForThisLeg(final LegOfCuts leg, final boolean keepFirst) { for (final SensorContactWrapper cut : leg) { // cool, we have a course - we can go for it. 
remember the bearings final double bearing1 = cut.getBearing(); final double bearing2 = cut.getAmbiguousBearing(); if (keepFirst) { cut.setBearing(bearing1); cut.setAmbiguousBearing(bearing2); } else { cut.setBearing(bearing2); cut.setAmbiguousBearing(bearing1); } // remember we're morally ambiguous cut.setHasAmbiguousBearing(false); } } public LegOfCuts findCutsNotInLeg(final TrackWrapper track, final Zone[] zones, final TimePeriod period) { final LegOfCuts toDelete = new LegOfCuts(); if (zones != null && zones.length > 0) { // ok, go for it final BaseLayer sensors = track.getSensors(); final Enumeration<Editable> numer = sensors.elements(); while (numer.hasMoreElements()) { final SensorWrapper sensor = (SensorWrapper) numer.nextElement(); final Enumeration<Editable> cNumer = sensor.elements(); while (cNumer.hasMoreElements()) { final SensorContactWrapper scw = (SensorContactWrapper) cNumer.nextElement(); final HiResDate dtg = scw.getDTG(); if (outOfZones(zones, dtg)) { toDelete.add(scw); } } } } return toDelete; } private long midTimeFor(final LegOfCuts lastLeg, final LegOfCuts leg) { final long startTime = lastLeg.get(lastLeg.size() - 1).getDTG().getDate().getTime(); final long endTime = leg.get(0).getDTG().getDate().getTime(); // and the mid-way value return startTime + (endTime - startTime) / 2; } private boolean outOfZones(final Zone[] zones, final HiResDate dtg) { final long thisLong = dtg.getDate().getTime(); boolean found = false; for (final Zone zone : zones) { if (zone.getStart() <= thisLong && zone.getEnd() >= thisLong) { // ok, valid. found = true; break; } } return !found; } @SuppressWarnings("unused") private void outputCurve(final String title, final long midTime, final LegOfCuts leg, final double[] slopeOne, final double[] slopeTwo) { System.out.println(title); final long firstTime = leg.get(0).getDTG().getDate().getTime(); final boolean firstLeg = firstTime < midTime; final boolean twoLegs = slopeTwo != null; if (!firstLeg) { // ok, output the mid-point final double legTwo = twoLegs ? valueAt(midTime, slopeTwo) : Double.NaN; System.out.println(midTime + ", " + trim(valueAt(midTime, slopeOne)) + ", " + trim(legTwo)); } // now loop through for (final SensorContactWrapper cut : leg) { final long thisTime = cut.getDTG().getDate().getTime(); double legTwo = twoLegs ? valueAt(thisTime, slopeTwo) : Double.NaN; if (legTwo > 360d) { legTwo -= 360d; } System.out.println(thisTime + ", " + trim(valueAt(thisTime, slopeOne)) + ", " + trim(legTwo)); } if (firstLeg) { // ok, output the mid-point final double legTwo = twoLegs ? 
valueAt(midTime, slopeTwo) : Double.NaN; System.out.println(midTime + ", " + trim(valueAt(midTime, slopeOne)) + ", " + trim(legTwo)); } } @SuppressWarnings("unused") private void outputLeg(final String title, final LegOfCuts lastLeg) { System.out.println(title); for (final SensorContactWrapper cut : lastLeg) { System.out.println(cut.getDTG().getDate().getTime() + ", " + cut.getBearing() + ", " + cut.getAmbiguousBearing()); } } public List<ResolvedLeg> resolve(final List<LegOfCuts> legs) { final List<ResolvedLeg> res = new ArrayList<ResolvedLeg>(); // ok, loop through the legs LegOfCuts lastLeg = null; for (final LegOfCuts leg : legs) { if (lastLeg != null) { // find the time 1/2 way between the legs final long midTime = midTimeFor(lastLeg, leg); // ok, retrieve slopes final double[] lastSlopeOne = lastLeg.getCurve(WhichPeriod.LATE, WhichBearing.CORE); final double[] lastSlopeTwo = lastLeg.getCurve(WhichPeriod.LATE, WhichBearing.AMBIGUOUS); // and generate the slope for this leg final double[] thisSlopeOne = leg.getCurve(WhichPeriod.EARLY, WhichBearing.CORE); final double[] thisSlopeTwo = leg.getCurve(WhichPeriod.EARLY, WhichBearing.AMBIGUOUS); // hmm, see if this has already been resolved if (thisSlopeTwo == null) { continue; } // get the slope scores we know we need final double lastSlopeValOne = trim(valueAt(midTime, lastSlopeOne)); final double nextSlopeValOne = trim(valueAt(midTime, thisSlopeOne)); final double nextSlopeValTwo = trim(valueAt(midTime, thisSlopeTwo)); // ok, is the first track resolved? if (lastSlopeTwo == null) { // ok, the previous leg has been sorted. just sort this leg final double oneone = calcDelta(lastSlopeValOne, nextSlopeValOne); final double onetwo = calcDelta(lastSlopeValOne, nextSlopeValTwo); final List<Perm> items = new ArrayList<>(); items.add(new Perm(oneone, true, true)); items.add(new Perm(onetwo, true, false)); Collections.sort(items); // check that the two solutions aren't too similar. If they are, // then it would be better to move onto the next leg. final Perm closest = items.get(0); final Perm nextClosest = items.get(1); final double firstTwoDiff = Math.abs(nextClosest.score - closest.score); final double cutOff = 10d; if (firstTwoDiff > cutOff) { ditchBearingsForThisLeg(leg, closest.secondOne); res.add(new ResolvedLeg(leg, closest.secondOne)); } } else { // ok, we've got to compare both of them final double lastSlopeValTwo = trim(valueAt(midTime, lastSlopeTwo)); // find the difference in the legs final double oneone = calcDelta(lastSlopeValOne, nextSlopeValOne); final double onetwo = calcDelta(lastSlopeValOne, nextSlopeValTwo); final double twoone = calcDelta(lastSlopeValTwo, nextSlopeValOne); final double twotwo = calcDelta(lastSlopeValTwo, nextSlopeValTwo); // store the permutations final List<Perm> items = new ArrayList<>(); items.add(new Perm(oneone, true, true)); items.add(new Perm(onetwo, true, false)); items.add(new Perm(twoone, false, true)); items.add(new Perm(twotwo, false, false)); // sort the permutations, so we can easily get the best Collections.sort(items); final Perm closest = items.get(0); // ditch the unnecessary bearing ditchBearingsForThisLeg(lastLeg, closest.firstOne); ditchBearingsForThisLeg(leg, closest.secondOne); // remember what we've done. 
res.add(new ResolvedLeg(lastLeg, closest.firstOne)); res.add(new ResolvedLeg(leg, closest.secondOne)); } } lastLeg = leg; } return res; } public List<ResolvedLeg> resolve(final TrackWrapper primaryTrack, final Zone[] zones) { final List<LegOfCuts> legs = sliceIntoLegs(primaryTrack, zones); return resolve(legs); } public void restoreCuts(final Map<SensorWrapper, LegOfCuts> deletedCuts) { for (final SensorWrapper sensor : deletedCuts.keySet()) { final ArrayList<SensorContactWrapper> cuts = deletedCuts.get(sensor); for (final SensorContactWrapper cut : cuts) { sensor.add(cut); } } } private List<LegOfCuts> sliceIntoLegs(final TrackWrapper track, final Zone[] zones) { final List<LegOfCuts> res = new ArrayList<LegOfCuts>(); if (zones != null && zones.length > 0) { // ok, go for it final BaseLayer sensors = track.getSensors(); final Enumeration<Editable> numer = sensors.elements(); while (numer.hasMoreElements()) { final SensorWrapper sensor = (SensorWrapper) numer.nextElement(); if (sensor.getVisible()) { for (final Zone zone : zones) { LegOfCuts thisC = null; final Enumeration<Editable> cNumer = sensor.elements(); while (cNumer.hasMoreElements()) { final SensorContactWrapper scw = (SensorContactWrapper) cNumer.nextElement(); final long dtg = scw.getDTG().getDate().getTime(); if (zone.getStart() <= dtg && zone.getEnd() >= dtg) { // ok, this cut is in this zone if (thisC == null) { thisC = new LegOfCuts(); } thisC.add(scw); } else if (zone.getEnd() < dtg) { // ok, we've passed the end of this zone continue; } } if (thisC != null) { res.add(thisC); } } } } } return res; } public LegsAndZigs sliceIntoLegsUsingAmbiguity(final SensorWrapper sensor, final double minZig, double maxSteady, final Logger logger, final TimeSeries scores) { final List<LegOfCuts> legs = new ArrayList<LegOfCuts>(); final LegOfCuts zigs = new LegOfCuts(); if(scores != null) { scores.clear(); } final Enumeration<Editable> enumer = sensor.elements(); Double lastDelta = null; HiResDate lastTime = null; LegOfCuts thisLeg = null; LegOfCuts thisZig = null; SensorContactWrapper firstCut = null; final LegOfCuts possLeg = new LegOfCuts(); final int possLegAllowance = 2; while (enumer.hasMoreElements()) { final SensorContactWrapper cut = (SensorContactWrapper) enumer.nextElement(); if (cut.getVisible() && cut.getHasAmbiguousBearing()) { // ok, TA data final double delta = cut.getAmbiguousBearing() - cut.getBearing(); final HiResDate time = cut.getDTG(); // is this the first cut? if (lastDelta == null) { // store it. 
we'll add it to whatever type of data we build firstCut = cut; } else { double valueDelta = delta - lastDelta; // if we're not already in a turn, then any // monster delta will prob be related to domain if (thisLeg != null) { if (valueDelta < -180) { valueDelta += 360d; } else if (valueDelta > 180) { valueDelta -= 180d; } } // ok, work out the change rate final long timeDeltaMillis = time.getDate().getTime() - lastTime.getDate().getTime(); final long timeDeltaSecs = timeDeltaMillis / 1000L; final double rate = Math.abs(valueDelta / timeDeltaSecs); if(scores != null) { FixedMillisecond sec = new FixedMillisecond(time.getDate().getTime()); TimeSeriesDataItem item = new TimeSeriesDataItem(sec, rate); scores.add(item); } final String timeStr = time.getDate().toString(); final String stats = timeStr + " brg:" + (int) cut.getBearing() + " ambig:" + (int) cut.getAmbiguousBearing() + " step (secs)" + (int) timeDeltaSecs + " rate:" + rate; doLog(logger, stats); // if(time.getDate().getTime() == 260000) // { // System.out.println("here"); // } if (rate > minZig) { // ok, we were on a straight leg if (thisLeg != null) { // close the leg thisLeg = null; doLog(logger, timeStr + " End leg."); } // ok, we're in a leg if (thisZig == null) { thisZig = new LegOfCuts(); doLog(logger, timeStr + " New zig."); } // do we have any pending cuts if (!possLeg.isEmpty()) { doLog(logger, timeStr + " Did have poss straight cuts. Drop them, we're in a turn"); // ok, we have a couple of cuts that look like they're straight. // well, they're not. they're actually in a turn thisZig.addAll(possLeg); // and clear the list possLeg.clear(); } // if we have a pending first cut, // we should store it if (firstCut != null) { thisZig.add(firstCut); firstCut = null; } thisZig.add(cut); } else { boolean straightCutHandled = false; if (thisZig != null) { // hmm, we were in a turn, and now things are straight. // but, we want to allow a number of low-rate-change // entries, just in cases there's a coincidental // couple of steady cuts during the turn. if (possLeg.size() < possLegAllowance) { doLog(logger, timeStr + " Poss straight leg. Cache it."); // ok, we'll add this to the list possLeg.add(cut); straightCutHandled = true; } else { // ok, we were in a turn. End it zigs.addAll(thisZig); doLog(logger, timeStr + " Zig ended."); // close the leg thisZig = null; } } if (!straightCutHandled) { // ok, we're in a leg if (thisLeg == null) { doLog(logger, timeStr + " New Leg."); thisLeg = new LegOfCuts(); // right. We've allowed a couple of potential cuts // but, we've ended up on a straight leg. Add the stored // cuts to the leg if (!possLeg.isEmpty()) { doLog(logger, timeStr + " Have poss straight leg cuts."); thisLeg.addAll(possLeg); possLeg.clear(); } legs.add(thisLeg); } // if we have a pending first cut, // we should store it if (firstCut != null) { thisLeg.add(firstCut); firstCut = null; } thisLeg.add(cut); } } } lastDelta = delta; lastTime = time; } } // ok, do some last minute tidying // are we still in a zig? 
if (thisZig != null) { doLog(logger, "Finishing zig."); // store the zig cuts zigs.addAll(thisZig); thisZig = null; } // do we have any possible straight leg cuts if (!possLeg.isEmpty()) { doLog(logger, "Append trailing straight cuts."); thisLeg = new LegOfCuts(); thisLeg.addAll(possLeg); possLeg.clear(); legs.add(thisLeg); } return new LegsAndZigs(legs, zigs); } public LegsAndZigs sliceIntoLegsUsingAmbiguity(final TrackWrapper track, final double minZig, double maxSteady, final Logger logger, final TimeSeries scores) { final List<LegOfCuts> legs = new ArrayList<LegOfCuts>(); final LegOfCuts zigCuts = new LegOfCuts(); final LegsAndZigs res = new LegsAndZigs(legs, zigCuts); // ok, go for it final BaseLayer sensors = track.getSensors(); final Enumeration<Editable> numer = sensors.elements(); while (numer.hasMoreElements()) { final SensorWrapper sensor = (SensorWrapper) numer.nextElement(); if (sensor.getVisible()) { final LegsAndZigs thisL = sliceIntoLegsUsingAmbiguity(sensor, minZig, maxSteady, logger, scores); if (thisL.legs.size() > 0) { res.legs.addAll(thisL.legs); } if (thisL.zigCuts.size() > 0) { res.zigCuts.addAll(thisL.zigCuts); } } } return res; } public void undoResolve(final List<LegOfCuts> legs) { // ok, clear all their ambiguity for (final LegOfCuts leg : legs) { for (final SensorContactWrapper cut : leg) { cut.setHasAmbiguousBearing(true); } } } public void undoResolveBearings(final List<ResolvedLeg> legs) { for (final ResolvedLeg leg : legs) { for (final SensorContactWrapper cut : leg.leg) { // cool, we have a course - we can go for it. remember the bearings final double bearing1 = cut.getBearing(); final double bearing2 = cut.getAmbiguousBearing(); if (leg.keepFirst) { cut.setBearing(bearing2); cut.setAmbiguousBearing(bearing1); } else { cut.setBearing(bearing1); cut.setAmbiguousBearing(bearing2); } // remember we're morally ambiguous cut.setHasAmbiguousBearing(true); } } } }
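To tie the pieces together, here is a minimal usage sketch of the workflow the tests above exercise; host stands for any TrackWrapper whose visible sensor cuts carry ambiguous bearings, and the 0.2 thresholds simply mirror the test values.

AmbiguityResolver solver = new AmbiguityResolver();

// 1. Slice the cuts into steady legs and turn ("zig") cuts, based on the rate
//    of change of the delta between the two ambiguous bearings.
LegsAndZigs sliced = solver.sliceIntoLegsUsingAmbiguity(host, 0.2, 0.2, null, null);

// 2. Remove the cuts that fall inside turns, keeping an undo map.
Map<SensorWrapper, LegOfCuts> deleted = solver.deleteTheseCuts(sliced.getZigs());

// 3. For each leg, keep whichever bearing family joins most smoothly with the
//    neighbouring legs' fitted curves.
List<ResolvedLeg> resolved = solver.resolve(sliced.getLegs());

// Both operations are reversible:
solver.undoResolveBearings(resolved);
solver.restoreCuts(deleted);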
org.mwc.debrief.track_shift/src/org/mwc/debrief/track_shift/ambiguity/AmbiguityResolver.java
package org.mwc.debrief.track_shift.ambiguity; import java.awt.Color; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.ConsoleHandler; import java.util.logging.Level; import java.util.logging.LogRecord; import java.util.logging.Logger; import org.apache.commons.math3.fitting.WeightedObservedPoint; import org.jfree.data.time.FixedMillisecond; import org.jfree.data.time.TimeSeries; import org.jfree.data.time.TimeSeriesDataItem; import org.mwc.debrief.track_shift.ambiguity.LegOfCuts.WhichBearing; import org.mwc.debrief.track_shift.ambiguity.LegOfCuts.WhichPeriod; import org.mwc.debrief.track_shift.controls.ZoneChart.ColorProvider; import org.mwc.debrief.track_shift.controls.ZoneChart.Zone; import org.mwc.debrief.track_shift.views.StackedDotHelper; import Debrief.ReaderWriter.Replay.ImportReplay; import Debrief.Wrappers.FixWrapper; import Debrief.Wrappers.SensorContactWrapper; import Debrief.Wrappers.SensorWrapper; import Debrief.Wrappers.TrackWrapper; import MWC.GUI.BaseLayer; import MWC.GUI.Editable; import MWC.GUI.Layers; import MWC.GUI.JFreeChart.ColouredDataItem; import MWC.GenericData.HiResDate; import MWC.GenericData.TimePeriod; public class AmbiguityResolver { public static class LegsAndZigs { private final List<LegOfCuts> legs; private final LegOfCuts zigCuts; public LegsAndZigs(final List<LegOfCuts> legs, final LegOfCuts zigCuts) { this.legs = legs; this.zigCuts = zigCuts; } public List<LegOfCuts> getLegs() { return legs; } public LegOfCuts getZigs() { return zigCuts; } } private static class Perm implements Comparable<Perm> { private final double score; private final boolean firstOne; private final boolean secondOne; public Perm(final double score, final boolean firstOne, final boolean secondOne) { this.score = score; this.firstOne = firstOne; this.secondOne = secondOne; } @Override public int compareTo(final Perm o) { final Double dScore = score; return dScore.compareTo(o.score); } } public static class ResolvedLeg { final boolean keepFirst; final LegOfCuts leg; public ResolvedLeg(final LegOfCuts leg, final boolean keepFirst) { this.leg = leg; this.keepFirst = keepFirst; } } // //////////////////////////////////////////////////////////////////////////////////////////////// // testing for this class // //////////////////////////////////////////////////////////////////////////////////////////////// static public final class TestResolveAmbig extends junit.framework.TestCase { private TrackWrapper getData(final String name) throws FileNotFoundException { // get our sample data-file final ImportReplay importer = new ImportReplay(); final Layers theLayers = new Layers(); final String fName = "../org.mwc.cmap.combined.feature/root_installs/sample_data/S2R/" + name; final File inFile = new File(fName); assertTrue("input file exists", inFile.exists()); final FileInputStream is = new FileInputStream(fName); importer.importThis(fName, is, theLayers); // sort out the sensors importer.storePendingSensors(); assertEquals("has some layers", 3, theLayers.size()); // get the sensor track final TrackWrapper track = (TrackWrapper) theLayers.findLayer("SENSOR"); return track; } private TimeSeries getOSCourse(final TrackWrapper track) { final TimeSeries ts = new TimeSeries("OS Course"); final Enumeration<Editable> pts = track.getPositionIterator(); while (pts.hasMoreElements()) { 
final FixWrapper fw = (FixWrapper) pts.nextElement(); final double course = fw.getCourseDegs(); final FixedMillisecond thisMilli = new FixedMillisecond(fw.getDateTimeGroup().getDate().getTime()); final ColouredDataItem crseBearing = new ColouredDataItem(thisMilli, course, fw.getColor(), true, null, true, true); ts.add(crseBearing); } return ts; } public void testDitchUsingAmbiguity() throws FileNotFoundException { final TrackWrapper track = getData("Ambig_tracks2.rep"); assertNotNull("found track", track); // has sensors assertEquals("has sensor", 1, track.getSensors().size()); // make the sensor visible final SensorWrapper sensor = (SensorWrapper) track.getSensors().elements().nextElement(); sensor.setVisible(true); // ok, get resolving final AmbiguityResolver solver = new AmbiguityResolver(); // try to get zones using ambiguity delta final LegsAndZigs res = solver.sliceIntoLegsUsingAmbiguity(track, 0.7, 0.2, null, null); final List<LegOfCuts> legs = res.legs; final LegOfCuts zigs = res.zigCuts; assertNotNull("found zones", legs); assertEquals("found correct number of zones", 12, legs.size()); assertNotNull("found zigs", zigs); assertEquals("found correct number of zig cuts", 23, zigs.size()); // ok, ditch those cuts final int fullSensorLen = sensor.size(); Map<SensorWrapper, LegOfCuts> deleted = solver.deleteTheseCuts(zigs); assertEquals("fewer cuts", 98, sensor.size()); // ok, and undo them solver.restoreCuts(deleted); assertEquals("fewer cuts", fullSensorLen, sensor.size()); // and do it again, so we've got fewer cuts deleted = solver.deleteTheseCuts(zigs); final List<ResolvedLeg> resolvedLegs = solver.resolve(legs); assertNotNull(resolvedLegs); assertEquals("right num legs", 12, legs.size()); assertEquals("correct leg", 251d, resolvedLegs.get(0).leg.get(0) .getBearing(), 1d); assertEquals("correct leg", 253d, resolvedLegs.get(1).leg.get(0) .getBearing(), 1d); assertEquals("correct leg", 251d, resolvedLegs.get(2).leg.get(0) .getBearing(), 1d); assertEquals("correct leg", 254d, resolvedLegs.get(3).leg.get(0) .getBearing(), 1d); assertEquals("correct leg", 258d, resolvedLegs.get(4).leg.get(0) .getBearing(), 1d); assertEquals("correct leg", 269d, resolvedLegs.get(5).leg.get(0) .getBearing(), 1d); // ok, and cancel the leg resolving solver.undoResolveBearings(resolvedLegs); // and re-check they're ambiguous assertEquals("is unresloved", true, resolvedLegs.get(0).leg.get(0) .getHasAmbiguousBearing()); } public void testGetCurve() throws FileNotFoundException { final LegOfCuts leg4 = new LegOfCuts(); final SensorWrapper sensor = new SensorWrapper("name"); leg4.add(wrapMe(sensor, 80, 92d, 260d)); leg4.add(wrapMe(sensor, 90, 82d, 280d)); leg4.add(wrapMe(sensor, 100, 72d, 300d)); leg4.add(wrapMe(sensor, 110, 62d, 320d)); leg4.add(wrapMe(sensor, 120, 52d, 340d)); leg4.add(wrapMe(sensor, 130, 42d, 360d)); leg4.add(wrapMe(sensor, 140, 32d, 380d)); final double[] curve = leg4.getCurve(WhichPeriod.ALL, WhichBearing.AMBIGUOUS); assertNotNull("produced curve", curve); assertEquals("curve correct length", 3, curve.length); assertEquals("correct offset", 100d, curve[0], 0.001); leg4.set(5, wrapMe(sensor, 130, 42d, 0d)); leg4.set(6, wrapMe(sensor, 140, 32d, 20d)); final double[] curve2Ambig = leg4.getCurve(WhichPeriod.ALL, WhichBearing.AMBIGUOUS); final double[] curve2 = leg4.getCurve(WhichPeriod.ALL, WhichBearing.CORE); assertNotNull("produced curve", curve2Ambig); assertEquals("curve correct length", 3, curve2Ambig.length); assertEquals("correct offset", 100d, curve2Ambig[0], 0.001); double beforeValue 
= valueAt(60, curve2Ambig); assertEquals("correct next value", beforeValue, 220d, 0.001); double afterValue = valueAt(150, curve2Ambig); assertEquals("correct next value", afterValue, 400d, 0.001); beforeValue = valueAt(60, curve2); assertEquals("correct next value", beforeValue, 112d, 0.001); afterValue = valueAt(150, curve2); assertEquals("correct next value", afterValue, 22d, 0.001); } public void testGetCurveSector() throws FileNotFoundException { final LegOfCuts leg4 = new LegOfCuts(); final SensorWrapper sensor = new SensorWrapper("name"); leg4.add(wrapMe(sensor, 80, 92d, 260d)); leg4.add(wrapMe(sensor, 90, 82d, 280d)); leg4.add(wrapMe(sensor, 100, 72d, 300d)); leg4.add(wrapMe(sensor, 110, 62d, 320d)); leg4.add(wrapMe(sensor, 120, 52d, 340d)); // start off with it too short List<SensorContactWrapper> leg = leg4.extractPortion(WhichPeriod.EARLY); assertNotNull("leg retrieved", leg); assertEquals("correct length", 5, leg.size()); // ok, add more data, so that we need to trim the data leg4.add(wrapMe(sensor, 130, 42d, 0d)); leg4.add(wrapMe(sensor, 140, 32d, 20d)); leg4.add(wrapMe(sensor, 150, 22d, 40d)); leg4.add(wrapMe(sensor, 160, 12d, 60d)); leg4.add(wrapMe(sensor, 170, 2d, 80d)); leg4.add(wrapMe(sensor, 180, 12d, 60d)); leg4.add(wrapMe(sensor, 190, 22d, 40d)); leg4.add(wrapMe(sensor, 200, 32d, 20d)); leg4.add(wrapMe(sensor, 210, 42d, 0d)); leg4.add(wrapMe(sensor, 220, 52d, 340d)); leg4.add(wrapMe(sensor, 230, 62d, 320d)); leg4.add(wrapMe(sensor, 240, 72d, 300d)); // check we retrieve the expected data leg = leg4.extractPortion(WhichPeriod.ALL); assertNotNull("leg retrieved", leg); assertEquals("correct length", 17, leg.size()); assertEquals("correct start", 80, leg.get(0).getDTG().getDate().getTime()); assertEquals("correct end", 240, leg.get(leg.size() - 1).getDTG() .getDate().getTime()); leg = leg4.extractPortion(WhichPeriod.EARLY); assertNotNull("leg retrieved", leg); assertEquals("correct length", 8, LegOfCuts.LEG_LENGTH); assertEquals("correct start", 80, leg.get(0).getDTG().getDate().getTime()); assertEquals("correct end", 150, leg.get(leg.size() - 1).getDTG() .getDate().getTime()); leg = leg4.extractPortion(WhichPeriod.LATE); assertNotNull("leg retrieved", leg); assertEquals("correct length", 8, LegOfCuts.LEG_LENGTH); assertEquals("correct start", 170, leg.get(0).getDTG().getDate() .getTime()); assertEquals("correct end", 240, leg.get(leg.size() - 1).getDTG() .getDate().getTime()); // try the calculated values double[] curve = leg4.getCurve(WhichPeriod.ALL, WhichBearing.CORE); assertEquals("correct value (since we can't fit good curve)", 119.20, valueAt(80, curve), 0.01); assertEquals("correct value (since we can't fit good curve)", 78.79, valueAt(240, curve), 0.01); curve = leg4.getCurve(WhichPeriod.EARLY, WhichBearing.CORE); assertEquals("correct value", 92, valueAt(80, curve), 0.01); curve = leg4.getCurve(WhichPeriod.EARLY, WhichBearing.AMBIGUOUS); assertEquals("correct value", 260, valueAt(80, curve), 0.01); curve = leg4.getCurve(WhichPeriod.LATE, WhichBearing.CORE); assertEquals("correct value", 72, valueAt(240, curve), 0.01); curve = leg4.getCurve(WhichPeriod.LATE, WhichBearing.AMBIGUOUS); assertEquals("correct value", -60, valueAt(240, curve), 0.01); } public void testGettingLegs() throws FileNotFoundException { final TrackWrapper track = getData("Ambig_tracks.rep"); assertNotNull("found track", track); // has sensors assertEquals("has sensor", 1, track.getSensors().size()); final SensorWrapper sensor = (SensorWrapper) track.getSensors().elements().nextElement(); 
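// note: the slicing/resolving code only walks visible sensors,
// so the sensor must be made visible before use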
sensor.setVisible(true); final ColorProvider provider = new ColorProvider() { @Override public Color getZoneColor() { return Color.blue; } }; final TimeSeries osCourse = getOSCourse(track); // try to slice the O/S zones final ArrayList<Zone> zonesList = StackedDotHelper.sliceOwnship(osCourse, provider); final Zone[] zones = zonesList.toArray(new Zone[] {}); // ok, get resolving final AmbiguityResolver res = new AmbiguityResolver(); // drop cuts in turn res.findCutsNotInLeg(track, zones, null); // now get the legs final List<LegOfCuts> legs = res.sliceIntoLegs(track, zones); assertEquals("right num", zones.length, legs.size()); assertEquals("right num (after working it out by hand)", 13, legs.size()); // now resolve ambiguity res.resolve(legs); } public void testOnlyDitchVisible() throws FileNotFoundException { final TrackWrapper track = getData("Ambig_tracks2.rep"); assertNotNull("found track", track); // has sensors assertEquals("has sensor", 1, track.getSensors().size()); // make the sensor visible final SensorWrapper sensor = (SensorWrapper) track.getSensors().elements().nextElement(); sensor.setVisible(true); // set some cuts to hidden int ctr = 0; final Enumeration<Editable> numer = sensor.elements(); while (numer.hasMoreElements()) { final SensorContactWrapper scw = (SensorContactWrapper) numer.nextElement(); if (ctr > 20 && ctr < 50) { scw.setVisible(false); } ctr++; } // ok, get resolving final AmbiguityResolver solver = new AmbiguityResolver(); // try to get zones using ambiguity delta final LegsAndZigs res = solver.sliceIntoLegsUsingAmbiguity(track, 0.7, 0.2, null, null); final List<LegOfCuts> legs = res.legs; final LegOfCuts zigs = res.zigCuts; assertNotNull("found zones", legs); assertEquals("found correct number of zones", 8, legs.size()); assertNotNull("found zigs", zigs); assertEquals("found correct number of zig cuts", 17, zigs.size()); // ok, ditch those cuts solver.deleteTheseCuts(zigs); assertEquals("fewer cuts", 104, sensor.size()); final List<ResolvedLeg> resolvedLegs = solver.resolve(legs); assertNotNull(resolvedLegs); assertEquals("right num legs", 8, legs.size()); } public void testProcessCuts() throws FileNotFoundException { List<WeightedObservedPoint> obs = new ArrayList<WeightedObservedPoint>(); obs.add(new WeightedObservedPoint(1, 80d, 260d)); obs.add(new WeightedObservedPoint(1, 90, 280d)); obs.add(new WeightedObservedPoint(1, 100, 300d)); obs.add(new WeightedObservedPoint(1, 110, 320d)); obs.add(new WeightedObservedPoint(1, 120, 340d)); obs.add(new WeightedObservedPoint(1, 130, 0d)); obs.add(new WeightedObservedPoint(1, 140, 20d)); List<WeightedObservedPoint> res = AmbiguityResolver.putObsInCorrectDomain(obs); assertEquals("correct last score", 380d, res.get(res.size() - 1).getY(), 0.001); obs = new ArrayList<WeightedObservedPoint>(); obs.add(new WeightedObservedPoint(1, 80, 160d)); obs.add(new WeightedObservedPoint(1, 90, 140d)); obs.add(new WeightedObservedPoint(1, 100, 120d)); obs.add(new WeightedObservedPoint(1, 110, 80d)); obs.add(new WeightedObservedPoint(1, 120, 30d)); obs.add(new WeightedObservedPoint(1, 130, 340d)); obs.add(new WeightedObservedPoint(1, 140, 320d)); res = AmbiguityResolver.putObsInCorrectDomain(obs); assertEquals("correct last score", -40d, res.get(res.size() - 1).getY(), 0.001); obs = new ArrayList<WeightedObservedPoint>(); obs.add(new WeightedObservedPoint(1, 80, -160d)); obs.add(new WeightedObservedPoint(1, 90, -140d)); obs.add(new WeightedObservedPoint(1, 100, -120d)); obs.add(new WeightedObservedPoint(1, 110, -80d)); obs.add(new 
WeightedObservedPoint(1, 120, -30d)); obs.add(new WeightedObservedPoint(1, 130, 20d)); obs.add(new WeightedObservedPoint(1, 140, 40d)); res = AmbiguityResolver.putObsInCorrectRange(obs); assertEquals("correct last score", 200d, res.get(0).getY(), 0.001); assertEquals("correct last score", 40d, res.get(res.size() - 1).getY(), 0.001); } public void testResolve() throws FileNotFoundException { final List<LegOfCuts> legs = new ArrayList<LegOfCuts>(); final SensorWrapper sensor = new SensorWrapper("name"); final LegOfCuts leg1 = new LegOfCuts(); leg1.add(wrapMe(sensor, 100, 180d, 270d)); leg1.add(wrapMe(sensor, 110, 170d, 280d)); leg1.add(wrapMe(sensor, 120, 160d, 290d)); leg1.add(wrapMe(sensor, 130, 150d, 300d)); leg1.add(wrapMe(sensor, 140, 140d, 310d)); legs.add(leg1); final LegOfCuts leg2 = new LegOfCuts(); leg2.add(wrapMe(sensor, 160, 182d, 220d)); leg2.add(wrapMe(sensor, 170, 183d, 221d)); leg2.add(wrapMe(sensor, 180, 184d, 222d)); leg2.add(wrapMe(sensor, 190, 185d, 223d)); leg2.add(wrapMe(sensor, 200, 186d, 224d)); legs.add(leg2); final LegOfCuts leg3 = new LegOfCuts(); leg3.add(wrapMe(sensor, 220, 92d, 200d)); leg3.add(wrapMe(sensor, 230, 83d, 210d)); leg3.add(wrapMe(sensor, 240, 74d, 220d)); leg3.add(wrapMe(sensor, 250, 65d, 230d)); leg3.add(wrapMe(sensor, 260, 56d, 240d)); legs.add(leg3); final LegOfCuts leg4 = new LegOfCuts(); leg4.add(wrapMe(sensor, 280, 92d, 260d)); leg4.add(wrapMe(sensor, 290, 73d, 280d)); leg4.add(wrapMe(sensor, 300, 54d, 300d)); leg4.add(wrapMe(sensor, 310, 35d, 320d)); leg4.add(wrapMe(sensor, 320, 16d, 340d)); leg4.add(wrapMe(sensor, 330, 9d, 0d)); leg4.add(wrapMe(sensor, 340, 355d, 20d)); legs.add(leg4); // put the good cut in the wrong domain final LegOfCuts leg5 = new LegOfCuts(); leg5.add(wrapMe(sensor, 360, 41d, 260d)); leg5.add(wrapMe(sensor, 370, 43d, 240d)); leg5.add(wrapMe(sensor, 380, 45d, 220d)); leg5.add(wrapMe(sensor, 390, 47d, 200d)); leg5.add(wrapMe(sensor, 400, 49d, 180d)); leg5.add(wrapMe(sensor, 410, 51d, 160d)); leg5.add(wrapMe(sensor, 420, 53d, 140d)); legs.add(leg5); // make the first cuts very wonky final LegOfCuts leg6 = new LegOfCuts(); leg6.add(wrapMe(sensor, 440, 141d, 350d)); leg6.add(wrapMe(sensor, 450, 143d, 20d)); leg6.add(wrapMe(sensor, 460, 145d, 70d)); leg6.add(wrapMe(sensor, 470, 147d, 80d)); leg6.add(wrapMe(sensor, 480, 149d, 90d)); leg6.add(wrapMe(sensor, 490, 151d, 100d)); leg6.add(wrapMe(sensor, 500, 153d, 110d)); legs.add(leg6); final AmbiguityResolver solver = new AmbiguityResolver(); final List<ResolvedLeg> resolvedLegs = solver.resolve(legs); assertNotNull("have list of resolved", resolvedLegs); assertEquals("correct num legs", 6, resolvedLegs.size()); // ok, check the legs assertFalse("not ambig", leg1.get(0).getHasAmbiguousBearing()); assertFalse("not ambig", leg2.get(0).getHasAmbiguousBearing()); assertFalse("not ambig", leg3.get(0).getHasAmbiguousBearing()); assertFalse("not ambig", leg4.get(0).getHasAmbiguousBearing()); assertFalse("not ambig", leg5.get(0).getHasAmbiguousBearing()); assertFalse("not ambig", leg6.get(0).getHasAmbiguousBearing()); assertEquals("correct bearing", 180d, leg1.get(0).getBearing()); assertEquals("correct bearing", 182d, leg2.get(0).getBearing()); assertEquals("correct bearing", 200d, leg3.get(0).getBearing()); assertEquals("correct bearing", 260d, leg4.get(0).getBearing()); assertEquals("correct bearing", 41d, leg5.get(0).getBearing()); assertEquals("correct bearing", 350d, leg6.get(0).getBearing()); } public void testSplittingAllTime() throws FileNotFoundException { final TrackWrapper 
track = getData("Ambig_tracks.rep"); assertNotNull("found track", track); // has sensors assertEquals("has sensor", 1, track.getSensors().size()); final SensorWrapper sensor = (SensorWrapper) track.getSensors().elements().nextElement(); final ColorProvider provider = new ColorProvider() { @Override public Color getZoneColor() { return Color.blue; } }; final TimeSeries osCourse = getOSCourse(track); // try to slice the O/S zones final ArrayList<Zone> zonesList = StackedDotHelper.sliceOwnship(osCourse, provider); final Zone[] zones = zonesList.toArray(new Zone[] {}); // ok, get resolving final AmbiguityResolver res = new AmbiguityResolver(); // drop cuts in turn final int numCuts = sensor.size(); assertEquals("right cuts at start", 721, numCuts); final List<SensorContactWrapper> toDel = res.findCutsNotInLeg(track, zones, null); assertEquals("have cuts to delete", 133, toDel.size()); @SuppressWarnings("unused") final List<LegOfCuts> legs = res.sliceIntoLegs(track, zones); // ok, check the data } /** * check that we allow a couple of apparently steady cuts during a turn * * @throws FileNotFoundException */ public void testSteadyInTurn() throws FileNotFoundException { final SensorWrapper sensor = new SensorWrapper("name"); sensor.add(wrapMe(sensor, 100000, 180d, 270d)); sensor.add(wrapMe(sensor, 110000, 170d, 280d)); sensor.add(wrapMe(sensor, 120000, 160d, 290d)); sensor.add(wrapMe(sensor, 130000, 150d, 300d)); sensor.add(wrapMe(sensor, 140000, 140d, 310d)); sensor.add(wrapMe(sensor, 150000, 130d, 310d)); sensor.add(wrapMe(sensor, 160000, 122d, 220d)); sensor.add(wrapMe(sensor, 170000, 113d, 221d)); sensor.add(wrapMe(sensor, 180000, 104d, 222d)); sensor.add(wrapMe(sensor, 190000, 095d, 223d)); sensor.add(wrapMe(sensor, 200000, 086d, 224d)); sensor.add(wrapMe(sensor, 210000, 076d, 224d)); sensor.add(wrapMe(sensor, 220000, 62d, 200d)); sensor.add(wrapMe(sensor, 230000, 53d, 210d)); sensor.add(wrapMe(sensor, 240000, 44d, 220d)); sensor.add(wrapMe(sensor, 250000, 35d, 230d)); sensor.add(wrapMe(sensor, 260000, 26d, 240d)); sensor.add(wrapMe(sensor, 270000, 36d, 240d)); sensor.add(wrapMe(sensor, 280000, 42d, 260d)); sensor.add(wrapMe(sensor, 290000, 53d, 280d)); sensor.add(wrapMe(sensor, 300000, 64d, 300d)); sensor.add(wrapMe(sensor, 310000, 75d, 320d)); sensor.add(wrapMe(sensor, 320000, 66d, 340d)); sensor.add(wrapMe(sensor, 330000, 56d, 0d)); sensor.add(wrapMe(sensor, 340000, 45d, 20d)); sensor.add(wrapMe(sensor, 350000, 35d, 30d)); sensor.add(wrapMe(sensor, 360000, 35d, 30d)); sensor.add(wrapMe(sensor, 370000, 15d, 70d)); sensor.add(wrapMe(sensor, 380000, 355d, 20d)); sensor.add(wrapMe(sensor, 390000, 355d, 20d)); sensor.add(wrapMe(sensor, 400000, 345d, 20d)); // sensor.add(wrapMe(sensor, 410000, 345d, 20d)); sensor.setVisible(true); final TrackWrapper host = new TrackWrapper(); host.setName("Host"); host.add(sensor); final AmbiguityResolver solver = new AmbiguityResolver(); final Logger logger = Logger.getLogger("Test output"); logger.setUseParentHandlers(false); logger.addHandler(new ConsoleHandler() { @Override public void publish(final LogRecord record) { System.out.println(record.getMessage()); } }); final LegsAndZigs sliced = solver.sliceIntoLegsUsingAmbiguity(host, 2.2, 0.2, logger, null); // for(LegOfCuts leg: sliced.legs) // { // System.out.println(leg.get(0).getDTG().getDate().getTime() + " - " + // leg.get(leg.size()-1).getDTG().getDate().getTime()); // } // // System.out.println("==="); // for(SensorContactWrapper cut: sliced.zigCuts) // { // 
// System.out.println(cut.getDTG().getDate().getTime());
// }

assertNotNull("produced slices", sliced);
assertEquals("correct legs", 3, sliced.legs.size());
assertEquals("correct turning cuts", 8, sliced.zigCuts.size());
}

public void testWeighting()
{
  final SensorWrapper sensor = new SensorWrapper("name");
  final List<LegOfCuts> legs = new ArrayList<LegOfCuts>();

  final LegOfCuts leg1 = new LegOfCuts();
  leg1.add(wrapMe(sensor, 280, 92d, 260d));
  leg1.add(wrapMe(sensor, 290, 73d, 280d));
  leg1.add(wrapMe(sensor, 300, 54d, 300d));
  leg1.add(wrapMe(sensor, 310, 35d, 320d));
  leg1.add(wrapMe(sensor, 320, 16d, 340d));
  leg1.add(wrapMe(sensor, 330, 9d, 0d));
  leg1.add(wrapMe(sensor, 340, 355d, 20d));
  legs.add(leg1);

  // put the good cut in the wrong domain
  final LegOfCuts leg2 = new LegOfCuts();
  leg2.add(wrapMe(sensor, 360, 42d, 260d));
  leg2.add(wrapMe(sensor, 370, 43d, 240d));
  leg2.add(wrapMe(sensor, 380, 45d, 220d));
  leg2.add(wrapMe(sensor, 390, 47d, 200d));
  leg2.add(wrapMe(sensor, 400, 49d, 180d));
  leg2.add(wrapMe(sensor, 410, 51d, 160d));
  leg2.add(wrapMe(sensor, 420, 53d, 140d));
  legs.add(leg2);

  // make the first cuts very wonky
  final LegOfCuts leg3 = new LegOfCuts();
  leg3.add(wrapMe(sensor, 440, 141d, 350d));
  leg3.add(wrapMe(sensor, 450, 143d, 20d));
  leg3.add(wrapMe(sensor, 460, 145d, 70d));
  leg3.add(wrapMe(sensor, 470, 147d, 80d));
  leg3.add(wrapMe(sensor, 480, 149d, 90d));
  leg3.add(wrapMe(sensor, 490, 151d, 100d));
  leg3.add(wrapMe(sensor, 500, 153d, 110d));
  legs.add(leg3);

  final AmbiguityResolver resolver = new AmbiguityResolver();
  final List<ResolvedLeg> resolvedLegs = resolver.resolve(legs);

  assertNotNull("have legs", resolvedLegs);
  assertEquals("correct bearing", 260d, leg1.get(0).getBearing());
  assertEquals("correct bearing", 42d, leg2.get(0).getBearing());
  assertEquals("correct bearing", 350d, leg3.get(0).getBearing());
}

private SensorContactWrapper wrapMe(final SensorWrapper sensor,
    final long dtg, final double bearing1, final double bearing2)
{
  return new SensorContactWrapper("track", new HiResDate(dtg), null,
      bearing1, bearing2, null, null, Color.RED, "label", 0, sensor
          .getName());
}
}

private static void doLog(final Logger logger, final String msg)
{
  if (logger != null)
  {
    logger.log(Level.INFO, msg);
  }
}

public static List<WeightedObservedPoint> putObsInCorrectDomain(
    final List<WeightedObservedPoint> obs)
{
  final List<WeightedObservedPoint> res =
      new ArrayList<WeightedObservedPoint>();
  double lastVal = Double.MIN_VALUE;
  for (final WeightedObservedPoint ob : obs)
  {
    double thisVal = ob.getY();
    if (lastVal != Double.MIN_VALUE)
    {
      double valToUse;

      // ok, have we jumped up?
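      // Illustrative trace of the unwrapping below (values borrowed from
      // testProcessCuts above): Y = 260, 280, 300, 320, 340, 0, 20 becomes
      // Y = 260, 280, 300, 320, 340, 360, 380 -- the 340 -> 0 step is a
      // -340 jump, so 360 is added and later values stay in that domain.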
if (thisVal - lastVal > 200) { // ok, reduce it valToUse = thisVal - 360d; } else if (thisVal - lastVal < -200) { // ok, increase it valToUse = thisVal + 360d; } else { valToUse = thisVal; } res.add(new WeightedObservedPoint(ob.getWeight(), ob.getX(), valToUse)); thisVal = valToUse; } else { res.add(ob); } lastVal = thisVal; } return res; } public static List<WeightedObservedPoint> putObsInCorrectRange( final List<WeightedObservedPoint> obs) { final List<WeightedObservedPoint> res = new ArrayList<WeightedObservedPoint>(); for (final WeightedObservedPoint ob : obs) { double thisVal = ob.getY(); while (thisVal < 0) { thisVal += 360d; } while (thisVal >= 360) { thisVal -= 360d; } res.add(new WeightedObservedPoint(ob.getWeight(), ob.getX(), thisVal)); } return res; } private static double trim(final double val) { double res = val; while (res < -360d) { res += 360d; } while (res >= 360d) { res -= 360d; } return res; } private static double valueAt(final long time, final double[] slope) { return slope[0] + slope[1] * time + slope[2] * Math.pow(time, 2); } private double calcDelta(final double one, final double two) { double res = Math.abs(one - two); while (res > 360d) { res -= 360d; } while (res <= -360d) { res += 360d; } return res; } public Map<SensorWrapper, LegOfCuts> deleteTheseCuts( final List<SensorContactWrapper> cutsToDelete) { final Map<SensorWrapper, LegOfCuts> deletedCuts = new HashMap<SensorWrapper, LegOfCuts>(); for (final SensorContactWrapper t : cutsToDelete) { // store the details of this sensor, so we can undo it LegOfCuts list = deletedCuts.get(t.getSensor()); if (list == null) { list = new LegOfCuts(); deletedCuts.put(t.getSensor(), list); } list.add(t); t.getSensor().removeElement(t); } return deletedCuts; } public void ditchBearings(final List<ResolvedLeg> legs) { for (final ResolvedLeg leg : legs) { ditchBearingsForThisLeg(leg.leg, leg.keepFirst); } } private void ditchBearingsForThisLeg(final LegOfCuts leg, final boolean keepFirst) { for (final SensorContactWrapper cut : leg) { // cool, we have a course - we can go for it. 
// remember the bearings
final double bearing1 = cut.getBearing();
final double bearing2 = cut.getAmbiguousBearing();

if (keepFirst)
{
  cut.setBearing(bearing1);
  cut.setAmbiguousBearing(bearing2);
}
else
{
  cut.setBearing(bearing2);
  cut.setAmbiguousBearing(bearing1);
}

// remember we're morally ambiguous
cut.setHasAmbiguousBearing(false);
}
}

public LegOfCuts findCutsNotInLeg(final TrackWrapper track,
    final Zone[] zones, final TimePeriod period)
{
  final LegOfCuts toDelete = new LegOfCuts();
  if (zones != null && zones.length > 0)
  {
    // ok, go for it
    final BaseLayer sensors = track.getSensors();
    final Enumeration<Editable> numer = sensors.elements();
    while (numer.hasMoreElements())
    {
      final SensorWrapper sensor = (SensorWrapper) numer.nextElement();
      final Enumeration<Editable> cNumer = sensor.elements();
      while (cNumer.hasMoreElements())
      {
        final SensorContactWrapper scw =
            (SensorContactWrapper) cNumer.nextElement();
        final HiResDate dtg = scw.getDTG();
        if (outOfZones(zones, dtg))
        {
          toDelete.add(scw);
        }
      }
    }
  }
  return toDelete;
}

private long midTimeFor(final LegOfCuts lastLeg, final LegOfCuts leg)
{
  final long startTime =
      lastLeg.get(lastLeg.size() - 1).getDTG().getDate().getTime();
  final long endTime = leg.get(0).getDTG().getDate().getTime();

  // and the mid-way value
  return startTime + (endTime - startTime) / 2;
}

private boolean outOfZones(final Zone[] zones, final HiResDate dtg)
{
  final long thisLong = dtg.getDate().getTime();
  boolean found = false;
  for (final Zone zone : zones)
  {
    if (zone.getStart() <= thisLong && zone.getEnd() >= thisLong)
    {
      // ok, valid.
      found = true;
      break;
    }
  }
  return !found;
}

@SuppressWarnings("unused")
private void outputCurve(final String title, final long midTime,
    final LegOfCuts leg, final double[] slopeOne, final double[] slopeTwo)
{
  System.out.println(title);
  final long firstTime = leg.get(0).getDTG().getDate().getTime();
  final boolean firstLeg = firstTime < midTime;
  final boolean twoLegs = slopeTwo != null;
  if (!firstLeg)
  {
    // ok, output the mid-point
    final double legTwo = twoLegs ? valueAt(midTime, slopeTwo) : Double.NaN;
    System.out.println(midTime + ", " + trim(valueAt(midTime, slopeOne))
        + ", " + trim(legTwo));
  }

  // now loop through
  for (final SensorContactWrapper cut : leg)
  {
    final long thisTime = cut.getDTG().getDate().getTime();
    double legTwo = twoLegs ? valueAt(thisTime, slopeTwo) : Double.NaN;
    if (legTwo > 360d)
    {
      legTwo -= 360d;
    }
    System.out.println(thisTime + ", " + trim(valueAt(thisTime, slopeOne))
        + ", " + trim(legTwo));
  }

  if (firstLeg)
  {
    // ok, output the mid-point
    final double legTwo = twoLegs ?
valueAt(midTime, slopeTwo) : Double.NaN; System.out.println(midTime + ", " + trim(valueAt(midTime, slopeOne)) + ", " + trim(legTwo)); } } @SuppressWarnings("unused") private void outputLeg(final String title, final LegOfCuts lastLeg) { System.out.println(title); for (final SensorContactWrapper cut : lastLeg) { System.out.println(cut.getDTG().getDate().getTime() + ", " + cut.getBearing() + ", " + cut.getAmbiguousBearing()); } } public List<ResolvedLeg> resolve(final List<LegOfCuts> legs) { final List<ResolvedLeg> res = new ArrayList<ResolvedLeg>(); // ok, loop through the legs LegOfCuts lastLeg = null; for (final LegOfCuts leg : legs) { if (lastLeg != null) { // find the time 1/2 way between the legs final long midTime = midTimeFor(lastLeg, leg); // ok, retrieve slopes final double[] lastSlopeOne = lastLeg.getCurve(WhichPeriod.LATE, WhichBearing.CORE); final double[] lastSlopeTwo = lastLeg.getCurve(WhichPeriod.LATE, WhichBearing.AMBIGUOUS); // and generate the slope for this leg final double[] thisSlopeOne = leg.getCurve(WhichPeriod.EARLY, WhichBearing.CORE); final double[] thisSlopeTwo = leg.getCurve(WhichPeriod.EARLY, WhichBearing.AMBIGUOUS); // hmm, see if this has already been resolved if (thisSlopeTwo == null) { continue; } // get the slope scores we know we need final double lastSlopeValOne = trim(valueAt(midTime, lastSlopeOne)); final double nextSlopeValOne = trim(valueAt(midTime, thisSlopeOne)); final double nextSlopeValTwo = trim(valueAt(midTime, thisSlopeTwo)); // ok, is the first track resolved? if (lastSlopeTwo == null) { // ok, the previous leg has been sorted. just sort this leg final double oneone = calcDelta(lastSlopeValOne, nextSlopeValOne); final double onetwo = calcDelta(lastSlopeValOne, nextSlopeValTwo); final List<Perm> items = new ArrayList<>(); items.add(new Perm(oneone, true, true)); items.add(new Perm(onetwo, true, false)); Collections.sort(items); // check that the two solutions aren't too similar. If they are, // then it would be better to move onto the next leg. final Perm closest = items.get(0); final Perm nextClosest = items.get(1); final double firstTwoDiff = Math.abs(nextClosest.score - closest.score); final double cutOff = 10d; if (firstTwoDiff > cutOff) { ditchBearingsForThisLeg(leg, closest.secondOne); res.add(new ResolvedLeg(leg, closest.secondOne)); } } else { // ok, we've got to compare both of them final double lastSlopeValTwo = trim(valueAt(midTime, lastSlopeTwo)); // find the difference in the legs final double oneone = calcDelta(lastSlopeValOne, nextSlopeValOne); final double onetwo = calcDelta(lastSlopeValOne, nextSlopeValTwo); final double twoone = calcDelta(lastSlopeValTwo, nextSlopeValOne); final double twotwo = calcDelta(lastSlopeValTwo, nextSlopeValTwo); // store the permutations final List<Perm> items = new ArrayList<>(); items.add(new Perm(oneone, true, true)); items.add(new Perm(onetwo, true, false)); items.add(new Perm(twoone, false, true)); items.add(new Perm(twotwo, false, false)); // sort the permutations, so we can easily get the best Collections.sort(items); final Perm closest = items.get(0); // ditch the unnecessary bearing ditchBearingsForThisLeg(lastLeg, closest.firstOne); ditchBearingsForThisLeg(leg, closest.secondOne); // remember what we've done. 
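// (items is sorted ascending by score, so closest is the pairing whose
// fitted curves meet with the smallest bearing discontinuity at midTime)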
res.add(new ResolvedLeg(lastLeg, closest.firstOne)); res.add(new ResolvedLeg(leg, closest.secondOne)); } } lastLeg = leg; } return res; } public List<ResolvedLeg> resolve(final TrackWrapper primaryTrack, final Zone[] zones) { final List<LegOfCuts> legs = sliceIntoLegs(primaryTrack, zones); return resolve(legs); } public void restoreCuts(final Map<SensorWrapper, LegOfCuts> deletedCuts) { for (final SensorWrapper sensor : deletedCuts.keySet()) { final ArrayList<SensorContactWrapper> cuts = deletedCuts.get(sensor); for (final SensorContactWrapper cut : cuts) { sensor.add(cut); } } } private List<LegOfCuts> sliceIntoLegs(final TrackWrapper track, final Zone[] zones) { final List<LegOfCuts> res = new ArrayList<LegOfCuts>(); if (zones != null && zones.length > 0) { // ok, go for it final BaseLayer sensors = track.getSensors(); final Enumeration<Editable> numer = sensors.elements(); while (numer.hasMoreElements()) { final SensorWrapper sensor = (SensorWrapper) numer.nextElement(); if (sensor.getVisible()) { for (final Zone zone : zones) { LegOfCuts thisC = null; final Enumeration<Editable> cNumer = sensor.elements(); while (cNumer.hasMoreElements()) { final SensorContactWrapper scw = (SensorContactWrapper) cNumer.nextElement(); final long dtg = scw.getDTG().getDate().getTime(); if (zone.getStart() <= dtg && zone.getEnd() >= dtg) { // ok, this cut is in this zone if (thisC == null) { thisC = new LegOfCuts(); } thisC.add(scw); } else if (zone.getEnd() < dtg) { // ok, we've passed the end of this zone continue; } } if (thisC != null) { res.add(thisC); } } } } } return res; } public LegsAndZigs sliceIntoLegsUsingAmbiguity(final SensorWrapper sensor, final double minZig, double maxSteady, final Logger logger, final TimeSeries scores) { final List<LegOfCuts> legs = new ArrayList<LegOfCuts>(); final LegOfCuts zigs = new LegOfCuts(); if(scores != null) { scores.clear(); } final Enumeration<Editable> enumer = sensor.elements(); Double lastDelta = null; HiResDate lastTime = null; LegOfCuts thisLeg = null; LegOfCuts thisZig = null; SensorContactWrapper firstCut = null; final LegOfCuts possLeg = new LegOfCuts(); final int possLegAllowance = 2; while (enumer.hasMoreElements()) { final SensorContactWrapper cut = (SensorContactWrapper) enumer.nextElement(); if (cut.getVisible() && cut.getHasAmbiguousBearing()) { // ok, TA data final double delta = cut.getAmbiguousBearing() - cut.getBearing(); final HiResDate time = cut.getDTG(); // is this the first cut? if (lastDelta == null) { // store it. 
// we'll add it to whatever type of data we build
firstCut = cut;
}
else
{
  double valueDelta = delta - lastDelta;

  // if we're not already in a turn, then any
  // monster delta will prob be related to domain
  if (thisLeg != null)
  {
    if (valueDelta < -180)
    {
      valueDelta += 360d;
    }
    else if (valueDelta > 180)
    {
      valueDelta -= 180d;
    }
  }

  // ok, work out the change rate
  final long timeDeltaMillis =
      time.getDate().getTime() - lastTime.getDate().getTime();
  final long timeDeltaSecs = timeDeltaMillis / 1000L;
  final double rate = Math.abs(valueDelta / timeDeltaSecs);

  if (scores != null)
  {
    FixedMillisecond sec = new FixedMillisecond(time.getDate().getTime());
    TimeSeriesDataItem item = new TimeSeriesDataItem(sec, rate);
    scores.add(item);
  }

  final String timeStr = time.getDate().toString();
  final String stats =
      timeStr + " brg:" + (int) cut.getBearing() + " ambig:"
          + (int) cut.getAmbiguousBearing() + " step (secs)"
          + (int) timeDeltaSecs + " rate:" + rate;
  doLog(logger, stats);

  // if(time.getDate().getTime() == 260000)
  // {
  // System.out.println("here");
  // }

  if (rate > minZig)
  {
    // ok, we were on a straight leg
    if (thisLeg != null)
    {
      // close the leg
      thisLeg = null;
      doLog(logger, timeStr + " End leg.");
    }

    // ok, we're in a zig
    if (thisZig == null)
    {
      thisZig = new LegOfCuts();
      doLog(logger, timeStr + " New zig.");
    }

    // do we have any pending cuts
    if (!possLeg.isEmpty())
    {
      doLog(logger, timeStr
          + " Did have poss straight cuts. Drop them, we're in a turn");
      // ok, we have a couple of cuts that look like they're straight.
      // well, they're not. they're actually in a turn
      thisZig.addAll(possLeg);

      // and clear the list
      possLeg.clear();
    }

    // if we have a pending first cut,
    // we should store it
    if (firstCut != null)
    {
      thisZig.add(firstCut);
      firstCut = null;
    }

    thisZig.add(cut);
  }
  else
  {
    boolean straightCutHandled = false;

    if (thisZig != null)
    {
      // hmm, we were in a turn, and now things are straight.
      // but, we want to allow a number of low-rate-change
      // entries, just in case there's a coincidental
      // couple of steady cuts during the turn.
      if (possLeg.size() < possLegAllowance)
      {
        doLog(logger, timeStr + " Poss straight leg. Cache it.");
        // ok, we'll add this to the list
        possLeg.add(cut);
        straightCutHandled = true;
      }
      else
      {
        // ok, we were in a turn. End it
        zigs.addAll(thisZig);
        doLog(logger, timeStr + " Zig ended.");

        // close the leg
        thisZig = null;
      }
    }

    if (!straightCutHandled)
    {
      // ok, we're in a leg
      if (thisLeg == null)
      {
        doLog(logger, timeStr + " New Leg.");
        thisLeg = new LegOfCuts();

        // right. We've allowed a couple of potential cuts
        // but, we've ended up on a straight leg. Add the stored
        // cuts to the leg
        if (!possLeg.isEmpty())
        {
          doLog(logger, timeStr + " Have poss straight leg cuts.");
          thisLeg.addAll(possLeg);
          possLeg.clear();
        }

        legs.add(thisLeg);
      }

      // if we have a pending first cut,
      // we should store it
      if (firstCut != null)
      {
        thisLeg.add(firstCut);
        firstCut = null;
      }

      thisLeg.add(cut);
    }
  }
}
lastDelta = delta;
lastTime = time;
}
}

// ok, do some last minute tidying

// are we still in a zig?
if (thisZig != null) { doLog(logger, "Finishing zig."); // store the zig cuts zigs.addAll(thisZig); thisZig = null; } // do we have any possible straight leg cuts if (!possLeg.isEmpty()) { doLog(logger, "Append trailing straight cuts."); thisLeg = new LegOfCuts(); thisLeg.addAll(possLeg); possLeg.clear(); legs.add(thisLeg); } return new LegsAndZigs(legs, zigs); } public LegsAndZigs sliceIntoLegsUsingAmbiguity(final TrackWrapper track, final double minZig, double maxSteady, final Logger logger, final TimeSeries scores) { final List<LegOfCuts> legs = new ArrayList<LegOfCuts>(); final LegOfCuts zigCuts = new LegOfCuts(); final LegsAndZigs res = new LegsAndZigs(legs, zigCuts); // ok, go for it final BaseLayer sensors = track.getSensors(); final Enumeration<Editable> numer = sensors.elements(); while (numer.hasMoreElements()) { final SensorWrapper sensor = (SensorWrapper) numer.nextElement(); if (sensor.getVisible()) { final LegsAndZigs thisL = sliceIntoLegsUsingAmbiguity(sensor, minZig, maxSteady, logger, scores); if (thisL.legs.size() > 0) { res.legs.addAll(thisL.legs); } if (thisL.zigCuts.size() > 0) { res.zigCuts.addAll(thisL.zigCuts); } } } return res; } public void undoResolve(final List<LegOfCuts> legs) { // ok, clear all their ambiguity for (final LegOfCuts leg : legs) { for (final SensorContactWrapper cut : leg) { cut.setHasAmbiguousBearing(true); } } } public void undoResolveBearings(final List<ResolvedLeg> legs) { for (final ResolvedLeg leg : legs) { for (final SensorContactWrapper cut : leg.leg) { // cool, we have a course - we can go for it. remember the bearings final double bearing1 = cut.getBearing(); final double bearing2 = cut.getAmbiguousBearing(); if (leg.keepFirst) { cut.setBearing(bearing2); cut.setAmbiguousBearing(bearing1); } else { cut.setBearing(bearing1); cut.setAmbiguousBearing(bearing2); } // remember we're morally ambiguous cut.setHasAmbiguousBearing(true); } } } }
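// A minimal usage sketch, mirroring the tests above (the "host" track and
// the 0.2 thresholds are illustrative values, not prescribed defaults):
//
//   final AmbiguityResolver solver = new AmbiguityResolver();
//   final LegsAndZigs sliced =
//       solver.sliceIntoLegsUsingAmbiguity(host, 0.2, 0.2, null, null);
//   final Map<SensorWrapper, LegOfCuts> deleted =
//       solver.deleteTheseCuts(sliced.getZigs());  // drop cuts made in turns
//   final List<ResolvedLeg> done = solver.resolve(sliced.getLegs());
//   // both operations are reversible:
//   solver.undoResolveBearings(done);
//   solver.restoreCuts(deleted);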
Fix values used in testing
org.mwc.debrief.track_shift/src/org/mwc/debrief/track_shift/ambiguity/AmbiguityResolver.java
Fix values used in testing
<ide><path>org.mwc.debrief.track_shift/src/org/mwc/debrief/track_shift/ambiguity/AmbiguityResolver.java
<ide>
<ide> // try to get zones using ambiguity delta
<ide> final LegsAndZigs res =
<del> solver.sliceIntoLegsUsingAmbiguity(track, 0.7, 0.2, null, null);
<add> solver.sliceIntoLegsUsingAmbiguity(track, 0.2, 0.2, null, null);
<ide> final List<LegOfCuts> legs = res.legs;
<ide> final LegOfCuts zigs = res.zigCuts;
<ide>
<ide>
<ide> // try to get zones using ambiguity delta
<ide> final LegsAndZigs res =
<del> solver.sliceIntoLegsUsingAmbiguity(track, 0.7, 0.2, null, null);
<add> solver.sliceIntoLegsUsingAmbiguity(track, 0.2, 0.2, null, null);
<ide> final List<LegOfCuts> legs = res.legs;
<ide> final LegOfCuts zigs = res.zigCuts;
<ide>
JavaScript
mit
ffb0971f1c7b06fccdf76806f6e159e130dc0e43
0
svn2github/SVGKit,svn2github/SVGKit,svn2github/SVGKit,svn2github/SVGKit
/*** SVGKit.js 0.1 See <http://svgkit.sourceforge.net/> for documentation, downloads, license, etc. (c) 2006-2007 Jason Gallicchio. All rights Reserved. Some notes: http://www.sitepoint.com/article/oriented-programming-2 http://www.sitepoint.com/article/javascript-objects At some point I'd like to auto-detect if user has SVG and if it's Adobe or W3C: http://blog.codedread.com/archives/2005/06/21/detecting-svg-viewer-capabilities/ http://blog.codedread.com/archives/2006/01/13/inlaying-svg-with-html/ http://www.adobe.com/svg/workflow/autoinstall.html Also, transmogrify <object> tags into <embed> tags automatically, perhaps using <![if IE]> and before the content loads. This should work if included in an SVG to for inline scripting. Do I want to do anything with events or just let the DOM and MochiKit handle them? Maybe some built-in zoom and scroll that you can turn on. toXML needs namespaces. Assign aliases at top and use (have some common ones defined.) svgDocument.getElementById(id) does not work for inline. Is this because svgDocument is document? This is used in createUniqueID and leads to failure of SVGCanvas test 21: lineargradient. Probably for the same reason svgDocument.getElementsByTagName("defs") doesnt' work. * After the script runs, these work in the console. * After an error (or something) it seems to kind of work since test 22 works after test 21 fails, but strangely the DOM tree and the printed source code are wrong. Indeed, switching the order always makes the second of the two work graphically, but fail DOM/XML wise. IE doesn't seem to be able to pull anything out once it's put in: >>> document.svgkit.svgElement.getElementsByTagName('path') [undefined, undefined, undefined, undefined] It knows that I added four paths, but I can't get them out. Same for svgElement.childNodes Problem of divs loading and unloading, especially with multiple writeln() in the interpreter. Perhaps on unload, save xml and then restore on a load. The problem is that each time the object or embed is shown (first time or after being hidden) there is a delay before the SVG content is accessible. Can't draw anything until it's loaded. Really annoying in the interpreter. inline doesn't have this problem. Maybe everything is going in that direction anyway. Bugs: * translate(1) and then call translate doesn't detect that this means x=1. Code seems to be there, but regexp doesnt' match. * Dragging is sketchy when the mouse leavs the object. * Reading XML should read the namespaces into the SVGKit._namespaces dictionary. Integration with MochiKit: * See if it's any slower using iterators * See if MochiKit.Style and MochiKit.Visual effects work. yes: hideElement(circle) showElement(circle) setOpacity(circle, 0.2) no: elementDimensions(circle) Using SVG in the Browser: * Should always provide fallback content -- png, pdf, (shudder) swf * Interactivity requires SVG, but initial static content should have static fallback (for fast load) * Best effort to have it work on Firefox, Opera, Safari, IE+ASV, Batik, Rhino, GNOME, KDE * Text sucks -- different settings/browsers render it in vastly differens sizes. * Automatically generate links to an image translation server. Fatures: * Automatic resizing with browser window (like Google Maps) * Mouse tracking -- ala KevLinDev? Do you need the clear 100% rectangle? 
* enablePan(element), enableZoom(element), enableFollow(), enableDrag() enablePanZoomImmunity()
* Create PNGs: http://www.kevlindev.com/gui/utilities/js_png/index.htm

Emulate Support For:
* getURL and setURL to non-ASP: http://jibbering.com/2002/5/dynamic-update-svg.html
* SMIL animation: http://www.vectoreal.com/smilscript/

SVG (and most client-side web stuff) is depressing. Things looked so bright
back in 1999 and here we are SEVEN years later and even I just learned about
the standard. I want to show what can be done. I didn't have anything
invested in SVG when I started, but it's the only non-proprietary
interactive vector graphics format.

Make a MochiMin version as an option for inclusion instead of full MochiKit.

Conform SVG coding and output style to: http://jwatt.org/svg/authoring/
specifically look into using name-space aware:
getAttribute, removeAttribute, setAttribute

Embed images where possible -- read binary data, convert to base64, then
include directly. href to images don't work very well -- they translate
into absolute URIs.

TODO: s.scale(10) should do the right thing. Right now you NEED scale(10,10)
also the scale(1,1)scale(1,1) returns scale(2,2) because right now it's
always additive
***/

////////////////////////////
//  Setup
////////////////////////////

if (typeof(dojo) != 'undefined') {
    dojo.provide("SVGKit");
    dojo.require("MochiKit.DOM");
}
if (typeof(JSAN) != 'undefined') {
    JSAN.use("MochiKit.Iter", []);
}

try {
    if (typeof(MochiKit.DOM) == 'undefined') {
        throw "";
    }
} catch (e) {
    throw "SVGKit depends on MochiKit.DOM!";
}

if (typeof(SVGKit) == 'undefined' || SVGCanvas == null) {
    // Constructor
    SVGKit = function(p1, p2, p3, p4, p5) {
        if (MochiKit.Base.isUndefinedOrNull(this.__init__)){
            log("You called SVG() as a function without new. Shame on you, but I'll give you a new object anyway");
            return new SVGKit(p1, p2, p3, p4, p5);
        }
        this.__init__(p1, p2, p3, p4, p5);
        return null;
    };
}

SVGKit.NAME = "SVGKit";
SVGKit.VERSION = "0.1";
SVGKit.__repr__ = function () {
    return "[" + SVGKit.NAME + " " + SVGKit.VERSION + "]";
};
SVGKit.prototype.__repr__ = SVGKit.__repr__;

SVGKit.toString = function () {
    return this.__repr__();
};
SVGKit.prototype.toString = SVGKit.toString;

SVGKit.EXPORT = [ ];

SVGKit.EXPORT_OK = [ ];

////////////////////////////
//  Defaults
////////////////////////////

//SVGKit._defaultType = 'embed';
//SVGKit._defaultType = 'object';
SVGKit._defaultType = 'inline';
SVGKit._namespaces = {
    'svg': 'http://www.w3.org/2000/svg',
    'xlink': 'http://www.w3.org/1999/xlink',
    'ev': 'http://www.w3.org/2001/xml-events',
    'xmlns': 'http://www.w3.org/2000/xmlns/'
}
SVGKit._svgMIME = 'image/svg+xml';
SVGKit._svgEmptyName = 'empty.svg';
SVGKit._SVGiKitBaseURI = '';
SVGKit._errorText = "You can't display SVG. Download the latest Firefox!";
SVGKit._convert_url = 'http://svgkit.sourceforge.net/cgi-bin/convert_svg.py'  // Should be customized to your own server

////////////////////////////
//  Constructor
////////////////////////////

SVGKit.prototype.__init__ = function (p1, p2, p3, p4, p5) {
    // TODO: Make these work right.
    // __init__()        For JavaScript included in an SVG.
    // __init__(node)    Already have an HTML element -- autodetect the type
    // __init__(id)      Have the id for an HTML element (if your id ends in .svg, pass in the node instead because strings ending in .svg will be treated as filenames.)
// __init__(filename, id, type, width, height) Create a new HTML element that references filename (must end in .svg) // __init__(width, height, id, type) Create a new SVG from scratch with width, height, and id // The following are described at http://www.w3.org/TR/SVG/struct.html this.htmlElement = null; // the <object> or <embed> html element the SVG lives in, otherwise null this.svgDocument = null; // When an 'svg' element is embedded inline this will be document this.svgElement = null; // corresponds to the 'svg' element //this._redrawId = null; // The reference that SVG's suspendRedraw returns. Needed to cancel suspension. //SVGKit._defaultType = // Determine a good default dynamically ('inline' , 'object', or 'embed') //log("SVGKit.__init__(", p1, p2, p3, p4, p5, ")"); this.setBaseURI(); if (MochiKit.Base.isUndefinedOrNull(p1)) { // This JS was included inside of an SVG file, and this was included in the // root element's onload event, which you need to to do get a target. /* var evt = p1; if ( window.svgDocument == null ) this.svgDocument = evt.target.ownerDocument; */ this.svgDocument = document; this.svgElement = this.svgDocument.rootElement; // or svgDocument.documentElement; this.htmlElement = this.svgElement; } else if (typeof(p1) == 'string') { if (p1.length>5 && p1.substr(p1.length-4,4).toLowerCase()=='.svg') // IE doesn't do substr(-4) this.loadSVG(p1, p2, p3, p4, p5); else this.whenReady( bind(this.grabSVG, this, p1) ); } else if (typeof(p1) == 'object') { // Not <object> but a JS object this.grabSVG(p1); } else { this.createSVG(p1, p2, p3, p4) } // Note that this.svgDocument and this.svgElement may not be set at this point. Must wait for onload callback. //log("Done creating/grabing svg."); this._addDOMFunctions(); //log("Done with _addDOMFunctions"); window.svgkit = this; // For debugging, especially in IE } //////////////////////////// // General Utilities //////////////////////////// SVGKit.firstNonNull = function() { for (var i=0; i<arguments.length; i++) if ( !MochiKit.Base.isUndefinedOrNull(arguments[i]) ) return arguments[i] return null; } //////////////////////////// // Browser Related //////////////////////////// SVGKit.prototype.setBaseURI = function() { /*** To create an empty SVG using <object> or <embed> you need to give the tag a valid SVG file, so an empty one lives in the same directory as the JavaScript. This function finds that directory and sets the _SVGiKitBaseURI variable for future use. ***/ var scripts = document.getElementsByTagName("script"); for (var i = 0; i < scripts.length; i++) { var src = scripts[i].getAttribute("src"); if (!src) { continue; } if (src.match(/SVGKit\.js$/)) { SVGKit._SVGiKitBaseURI = src.substring(0, src.lastIndexOf('SVGKit.js')); } } } SVGKit.prototype.isIE = function() { // Borrowed from PlotKit: var ie = navigator.appVersion.match(/MSIE (\d\.\d)/); var opera = (navigator.userAgent.toLowerCase().indexOf("opera") != -1); return ie && (ie[1] >= 6) && (!opera); } SVGKit.prototype.whenReady = function (func, every_time /* =false */) { /*** Calls func when the SVG is ready. If you create or try to use an SVG inside of <embed> or <object>, the SVG file must be loaded. The browser does this asynchronously, and you can't do anything to the SVG until it's been loaded. If the file already loaded or you're working with an inline SVG, func will get called instantly. If it hasn't loaded yet, func will get added to the elemen's onload event callstack. TODO: Should this happen every time the div surrounding the SVG is hidden and shown? 
        If you just add it to onload, it does.
        TODO: Fix the loading of SVG from XML file thing -- something more
        sophisticated than calling 0.5 seconds later.
    ***/
    if (this.svgElement != null && this.svgDocument != null && !MochiKit.Base.isUndefinedOrNull(func) ) {
        //log("func=",func);
        func.call(this);
        //func.apply(this);
        //func();
        if (every_time) addToCallStack(this.htmlElement, 'onload', func);  // Incompatible with MochiKit.Signal
    }
    else if (this.htmlElement != null) {
        //log("adding to onload event for htmlElement=", this.htmlElement, " the func=", func);
        //if (every_time) addToCallStack(this.htmlElement, 'onload', func);  // Incompatible with MochiKit.Signal
        //else
        //  addToCallStack(this.htmlElement, 'onload', function() {func(); );
    }
    else {
        // Try again half a second later. This is only for loading an SVG from an XML file to an inline element.
        //log("doing callLater for func=", func);
        callLater(0.5, func);
    }
}

SVGKit.prototype.resize = function(width, height) {
    /***
        Sets the size of the htmlElement and svgElement.
        No defaults given.
    ***/
    this.setSize(this.svgElement, width, height);
    this.setSize(this.htmlElement, width, height);
}

SVGKit.prototype.resizeSVGElement = function(width, height) {
    /***
        Sets the size of the svgElement
        If no size is given, it's assumed you want to set the size based on
        the size of the htmlElement to get rid of scroll bars or something.
    ***/
    // I don't use first non-null because it would have to do two slow DOM lookups
    // to pass them as arguments.
    if (MochiKit.Base.isUndefinedOrNull(width))
        width = getNodeAttribute(this.htmlElement, 'width')
    if (MochiKit.Base.isUndefinedOrNull(height))
        height = getNodeAttribute(this.htmlElement, 'height')
    this.setSize(this.svgElement, width, height);
}

SVGKit.prototype.resizeHTMLElement = function(width, height) {
    /***
        Sets the size of the htmlElement
        If no size is given, it's assumed you want to set it based on
        the size of the SVG it contains
    ***/
    if (MochiKit.Base.isUndefinedOrNull(width))
        width = getNodeAttribute(this.svgElement, 'width')
    if (MochiKit.Base.isUndefinedOrNull(height))
        height = getNodeAttribute(this.svgElement, 'height')
    this.setSize(this.htmlElement, width, height);
}

SVGKit.prototype.setSize = function(element, width, height) {
    setNodeAttribute(element, 'width', width);
    setNodeAttribute(element, 'height', height);
}

SVGKit.prototype.conversionHTML = function(divElement) {
    var cgi = 'http://frank.harvard.edu/~jason/cgi-bin/svgconvert'
    var types = ['svg','pdf','png','jpg','ps','xfig'];
    for (var i=0; i<types.length; i++) {
        appendChildNodes(divElement, MochiKit.DOM.createDOM('a',{href:cgi+types[i]}, types[i]), ' ');
    }
}

////////////////////////////
//  Getting Hold of an SVG
////////////////////////////

SVGKit.prototype.createSVG = function (width, height, id /* optional */, type /* =default */) {
    /***
        Loads a blank SVG and sets its size and the size of
        any HTML element it lives in to the given width and height.
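        Example (the sizes and id are illustrative):
            var svg = new SVGKit(300, 200);     // numeric args route here via __init__
            svg.createSVG(300, 200, 'mySVG');   // or call directly on an existing instance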
***/ //log("createSVG(", width, height, id , type,")"); type = SVGKit.firstNonNull(type, SVGKit._defaultType); //log("type=", type); if (type=='inline') { this.createInlineSVG(width, height, id); } else { this.loadSVG(SVGKit._svgEmptyName, id, type, width, height) } } SVGKit.prototype.createInlineSVG = function(width, height, id) { /*** Make sure html tag has SVG namespace support: <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" xmlns:svg="http://www.w3.org/2000/svg"> ***/ var attrs = { // Make sure this matches what's in empty.svg 'xmlns': SVGKit._namespaces['svg'], // for <circle> type tags with implicit namespace 'xmlns:svg': SVGKit._namespaces['svg'], // for <svg:circle ...> type tags with explicit namespace 'xmlns:xlink': 'http://www.w3.org/1999/xlink', 'xmlns:ev': 'http://www.w3.org/2001/xml-events', 'version': '1.1', 'baseProfile': 'full', 'width': width, 'height': height }; if (!MochiKit.Base.isUndefinedOrNull(id)) { attrs['id'] = id; } // Borrowed from PlotKit: if (!this.isIE()) { this.svgDocument = document; this.svgElement = this.createSVGDOM('svg', attrs); // Create an element in the SVG namespace this.htmlElement = this.svgElement; // html can work with the <svg> tag directly //this.svgDocument = this.svgElement.getSVGDocument() this.svgDocument = this.svgElement.ownerDocument; //log("in create: this.svgDocument=",this.svgDocument); } else { // IE log('createInlineSVG with IE. width:', width, 'height:', height) var width = attrs["width"] ? attrs["width"] : "100"; var height = attrs["height"] ? attrs["height"] : "100"; var eid = attrs["id"] ? attrs["id"] : "notunique"; var html = '<svg:svg width="' + width + '" height="' + height + '" ' + 'id="' + eid + '" version="1.1" baseProfile="full">'; log('html:', html) log('document:', document) this.htmlElement = document.createElement(html); log('htmlElement:', this.htmlElement) // create embedded SVG inside SVG. this.svgDocument = this.htmlElement.getSVGDocument(); log('svgDocument:', this.svgDocument) this.svgElement = this.svgDocument.createElementNS(SVGKit._namespaces['svg'], 'svg'); log('svgElement:', this.svgElement) this.svgElement.setAttribute("width", width); this.svgElement.setAttribute("height", height); this.svgElement.setAttribute('xmlns:xlink', attrs['xmlns:xlink']); log("in create: this.svgElement=",this.svgElement); this.svgDocument.appendChild(this.svgElement); } } SVGKit.prototype.loadSVG = function (filename, id /* optional */, type /* =default */, width /* = from file */, height /* = from file */) { /*** Create a new HTML DOM element of specified type ('object', 'embed', or 'svg') and set the attributes appropriately. You'd never call this for JavaScript code within the SVG. If you're type is inline and you're loading from a file other than empty.svg, you have to wait for the XML to load for the htmlElement to be set. In code that appends this htmlElement to the document, you have to call waitReady() Conversely, if you're type is embed or object, you CAN'T call whenReady to append the htmlElement to the document because it will ever be ready until it's displayed! There must be a better way to handle this. For object and embed, createSVG just loads empty.svg, but for inline, create is more complicated and doesn't involve empty.svg. It's loading that's hard. This code should be reworked. @param type: The tag that we will create @param width: default from file or 100 @param height: default from file or 100 @param id: Optionally assign the HTML element an id. 
@rtype: DOMElement ***/ // TODO If it is new, default width and height are 100. If it's from a file, defaults come from the file. // You can still set the width and height if you want the thing to scroll. var attrs = {}; if (!MochiKit.Base.isUndefinedOrNull(id)) { attrs['id'] = id; } type = SVGKit.firstNonNull(type, SVGKit._defaultType); //log("loadSVG(", filename, id, type, width, height,")"); if (type=='inline') { if (this.isIE()) { this.createSVG(width, height, id, type); //log("after create: this.svgElement=",this.svgElement); } //this.htmlElement = null; // This is required to tell whenReady that we won't be ready until the assynch request returns. var copyXMLtoSVG = function(event) { if (!this.isIE()) { var xmlDoc = event.currentTarget; this.htmlElement = xmlDoc.documentElement.cloneNode(true); this.svgDocument = document; this.svgElement = this.htmlElement; } else { var newElement = event.documentElement.cloneNode(true); this.svgDocument.replaceChild(newElement, this.svgDocument.rootElement); this.svgElement = newElement; /* for (var i=0; i<newElement.childNodes.length; i++) { var clone = newElement.childNodes[i].cloneNode(true); //log("in copyXMLtoSVG for loop this.svgElement=",this.svgElement); this.svgElement.appendChild(clone); // This doesn't work: this.svgElement is [disposed object] } */ } } SVGKit.importXML(filename, bind(copyXMLtoSVG, this)); } else if (type=='object') { // IE: Cannot support attrs['data'] = SVGKit._SVGiKitBaseURI + filename; attrs['type'] = SVGKit._svgMIME; //log('loadSVG, data =', attrs['data'], ' type =', attrs['type']) this.htmlElement = MochiKit.DOM.createDOM('object', attrs, SVGKit._errorText); //var svg = this; // Define svg in context of function below. function finishObject(width, height, event) { // IE doesn't have contentDocument // IE would have to use some sort of SVG pool of objects // that add themselves to a list uppon load. this.svgDocument = this.htmlElement.contentDocument; this.svgElement = this.svgDocument.rootElement; // svgDocument.documentElement works too. this.resize(width, height); //log('this.svgDocument', this.svgDocument, 'this.svgElement', this.svgElement) } this.whenReady( bind(finishObject, this, width, height) ); } else if (type=='embed') { // IE: Cannot support attrs['src'] = SVGKit._SVGiKitBaseURI + filename; attrs['type'] = SVGKit._svgMIME; attrs['pluginspage'] = 'http://www.adobe.com/svg/viewer/install/'; log("Going to createDOM('embed')"); this.htmlElement = MochiKit.DOM.createDOM('embed', attrs ); function finishEmbed(width, height, event) { // IE doesn't load the embed when you include it in the DOM tree. // if no real fix, you could create an SVG "pool" of empty width=1, height=1 // and move them around. This seems to work in IE. // width=0, height=0 works in Firefox, but not IE. //log("new embed: this.htmlElement = " + this.htmlElement) ; //log("new embed: Going to this.htmlElement.getSVGDocumen() )") ; this.svgDocument = this.htmlElement.getSVGDocument(); this.svgElement = this.svgDocument.rootElement; // svgDocument.documentElement works too. this.resize(width, height); } this.whenReady( bind(finishEmbed, this, width, height) ); } } SVGKit.importXML = function (file, onloadCallback) { /*** Pass it a URL to load, it loads it asyncronously (the only way) and then calls callback when it's done. I use this to load SVG documents into an already existing SVG document. 
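        Example (a sketch; 'other.svg' is a placeholder -- note the callback
        receives a load event in Mozilla but the parsed document in IE, as
        copyXMLtoSVG inside loadSVG above illustrates):
            SVGKit.importXML('other.svg', function(result) {
                // clone the loaded documentElement into the target SVG here
            });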
    ***/
    // http://www.sitepoint.com/article/xml-javascript-mozilla/2
    // http://www-128.ibm.com/developerworks/web/library/wa-ie2mozgd/
    // http://www.quirksmode.org/dom/importxml.html
    var xmlDoc;

    var moz = (typeof document.implementation != 'undefined') &&
              (typeof document.implementation.createDocument != 'undefined');
    var ie = (typeof window.ActiveXObject != 'undefined');

    if (moz) {
        //var parser = new DOMParser();
        //xmlDoc = parser.parseFromString(xmlString, "text/xml");
        xmlDoc = document.implementation.createDocument("", "", null);
        xmlDoc.onload = onloadCallback;
    }
    else if (ie) {
        log("importXML for ie");
        xmlDoc = new ActiveXObject("Microsoft.XMLDOM");
        xmlDoc.async = false;
        log("set xmlDoc.async = false");
        //document.xmlDoc = xmlDoc;
        //xmlDoc.loadXML(xmlString)
        //while(xmlDoc.readyState != 4) {};
        if (onloadCallback) {
            xmlDoc.onreadystatechange = function () {
                if (xmlDoc.readyState == 4) onloadCallback(xmlDoc)
            };
        }
    }
    xmlDoc.load(file);  // Same for both, surprisingly.
    return xmlDoc;
}

SVGKit.prototype.grabSVG = function (htmlElement) {
    /***
        Given an HTML element (or its id) that refers to an SVG, get the SVGDocument object.
        If htmlElement is an 'object' use contentDocument.
        If htmlElement is an 'embed' use getSVGDocument().
        If htmlElement is an 'svg' or 'svg:svg', we're inline.
          If you're W3C compatible like Firefox, svgElement is htmlElement
          If you're IE it's just like Embed.
        If it's an object or embed and it's not showing or the SVG file
        hasn't loaded, this won't work.

        @param htmlElement: either an id string or a dom element ('object', 'embed', 'svg')
    ***/
    log("grabSVG htmlElement (node or id) = ", htmlElement);
    this.htmlElement = MochiKit.DOM.getElement(htmlElement);
    log("htmlElement (node) = ", this.htmlElement);
    var tagName = this.htmlElement.tagName.toLowerCase();
    log("tagName = ", tagName, " htmlElement.contentDocument=", this.htmlElement.contentDocument, "(this will be blank for inline)");
    var isInline = tagName == 'svg' || tagName == 'svg:svg';  // svg:svg is IE style
    if (isInline && !this.isIE()) {
        this.svgDocument = document;
        this.svgElement = this.htmlElement;
    }
    else if (tagName == 'embed' || isInline && this.isIE()) {
        // IE Bug: htmlElement.getSVGDocument is undefined, but htmlElement.getSVGDocument() works, so you can't test for it.
        this.svgDocument = this.htmlElement.getSVGDocument();
        this.svgElement = this.svgDocument.rootElement;  // svgDocument.documentElement works too.
    }
    else if (tagName == 'object' && this.htmlElement.contentDocument) {
        // IE Bug: <object> SVGs display, but have no property to access their contents.
        this.svgDocument = this.htmlElement.contentDocument;
        this.svgElement = this.svgDocument.rootElement;  // svgDocument.documentElement works too.
    }
    log("grabSVG: type=",tagName, " this.svgDocument = ", this.svgDocument, " this.svgElement = ", this.svgElement);
}

////////////////////////////
//  Content Manipulation
////////////////////////////

SVGKit.prototype.updateNodeAttributesSVG = function (node, attrs) {
    /***
        Basically copied directly from MochiKit with some namespace stuff.
    ***/
    var elem = node;
    var self = MochiKit.DOM;
    if (typeof(node) == 'string') {
        elem = self.getElement(node);
    }
    if (attrs) {
        var updatetree = MochiKit.Base.updatetree;
        if (self.attributeArray.compliant) {
            // not IE, good.
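            // The loop below mirrors MochiKit.DOM.updateNodeAttributes, except
            // that a prefixed key such as 'xlink:href' is split on ':' and set
            // via setAttributeNS, with the URI looked up in SVGKit._namespaces.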
for (var k in attrs) { var v = attrs[k]; if (typeof(v) == 'object' && typeof(elem[k]) == 'object') { if (k == "style" && MochiKit.Style) { MochiKit.Style.setStyle(elem, v); } else { updatetree(elem[k], v); } } /* SVGKit Additions START */ else if (k == 'xmlns') { // No prefix elem.setAttributeNS(SVGKit._namespaces['xmlns'], k, v); } else if (k.search(':') != -1) { var tmp = k.split(':') var prefix = tmp[0] var localName = tmp[1] //elem.setAttributeNS(SVGKit._namespaces[prefix], localName, v); var uri = SVGKit._namespaces[prefix] if (uri != null) elem.setAttributeNS(uri, k, v); // Second parameter is "qualified name" } /* SVGKit Additions END */ else if (k.substring(0, 2) == "on") { if (typeof(v) == "string") { v = new Function(v); } elem[k] = v; } else { elem.setAttributeNS(null, k, v); } } } else { // IE is insane in the membrane var renames = self.attributeArray.renames; for (k in attrs) { v = attrs[k]; var renamed = renames[k]; if (k == "style" && typeof(v) == "string") { elem.style.cssText = v; } else if (typeof(renamed) == "string") { elem[renamed] = v; } else if (typeof(elem[k]) == 'object' && typeof(v) == 'object') { if (k == "style" && MochiKit.Style) { MochiKit.Style.setStyle(elem, v); } else { updatetree(elem[k], v); } } else if (k.substring(0, 2) == "on") { if (typeof(v) == "string") { v = new Function(v); } elem[k] = v; } else { elem.setAttribute(k, v); } } } } return elem; }, SVGKit.prototype.createSVGDOM = function (name, attrs/*, nodes... */) { /*** Like MochiKit.createDOM, but with the SVG namespace. ***/ var elem; var dom = MochiKit.DOM; if (typeof(name) == 'string') { try { // W3C Complient elem = this.svgDocument.createElementNS(SVGKit._namespaces['svg'], name); } catch (e) { // IE log("Creating element with name=", name, " in SVG namespace for IE"); elem = this.svgDocument.createElement(name); elem.setAttribute("xmlns", SVGKit._namespaces['svg']); //elem = this.svgDocument.createElement('svg:'+name); } } else { elem = name; // Parameter "name" was really an object } if (attrs) { this.updateNodeAttributesSVG(elem, attrs); } if (arguments.length <= 2) { return elem; } else { var args = MochiKit.Base.extend([elem], arguments, 2); return dom.appendChildNodes.apply(this, args); } }; SVGKit.prototype.createSVGDOMFunc = function (/* tag, attrs, *nodes */) { /*** Convenience function to create a partially applied createSVGDOM @param tag: The name of the tag @param attrs: Optionally specify the attributes to apply @param *nodes: Optionally specify any children nodes it should have @rtype: function ***/ var m = MochiKit.Base; return m.partial.apply( this, m.extend([this.createSVGDOM], arguments) ); }; SVGKit.prototype.append = function (node) { /*** Convenience method for appending to the root element of the SVG. Anything you draw by calling this will show up on top of everything else. ***/ this.svgElement.appendChild(node); } SVGKit.prototype.circle = function() { /*** Stupid function for quick testing. ***/ var c = this.CIRCLE( {'cx':50, 'cy':50, 'r':20, 'fill':'purple', 'fill-opacity':.3} ); this.append(c); } SVGKit.prototype.uniqueIdCount = 0; SVGKit.prototype.createUniqueID = function(base) { /*** For gradients and things, often you want them to have a unique id of the form 'gradient123' where the number is sequentially increasing. You would pass this function 'gradient' and it would look for the lowest number which returns no elements when you do a getElementByID. 
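        Example (a sketch): svg.createUniqueID('gradient') might return
        'gradient0', and 'gradient1' on the next call, assuming neither id
        is already taken in the document.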
        Right now it does a linear search because you typically don't create
        all that many of these, but maybe a hash table could be kept of the
        last result for quick access.  This would have to be done on a per-SVG
        basis and is still no guarantee that the next number will be free if a
        node of that name/number gets created outside of this function.
    ***/
    //var uniqueIdCount=0;
    var id;
    var element;
    do {
        id = base + this.uniqueIdCount;
        this.uniqueIdCount++;
        element = this.svgDocument.getElementById(id);  // Works in IE and Firefox
        //element = this.svgElement.getElementById(id);  // Works in IE, not Firefox
        //log("createUniqueID: Going to try id=",id," element=", element);
    } while ( !MochiKit.Base.isUndefinedOrNull(element) );
    //log("Got unique id=",id);
    return id;
}

SVGKit.prototype.getDefs = function(createIfNeeded /* = true */) {
    /***
        Return the <defs> tag inside of the SVG document where definitions
        like gradients and markers are stored.

        @param createIfNeeded -- unless this is explicitly false, a <defs>
            element will be created if none already exists.
        @returns the defs element.  If createIfNeeded is false, this may return null
    ***/
    var defs = this.svgElement.getElementsByTagName("defs");
    if (defs.length>0) {
        //log("getDefs... found defs: defs.length=",defs.length, " defs[0]=",defs[0])
        return defs[0];
    }
    if (!MochiKit.Base.isUndefinedOrNull(createIfNeeded) && !createIfNeeded) {
        //log("getDefs... returning null because createIfNeeded=",createIfNeeded)
        return null;
    }
    defs = this.DEFS(null);
    //log("Created defs", defs, "... going to insert first")
    this.svgElement.insertBefore(defs, this.svgElement.firstChild);
    //this.append(defs);
    //log("insert first worked")
    // Check to see if it actually got appended:
    //var defs2 = this.svgDocument.getElementsByTagName("defs");
    var defs2 = this.svgElement.getElementsByTagName("defs");
    //log("ending getDefs...defs2.length=",defs2.length, " defs2[0]=",defs2[0])
    return defs;
}

/*
// These are pretty redundant.  Use :
//   suspend_handle_id = this.svgElement.suspendRedraw(max_wait_milliseconds)
//   this.svgElement.unsuspendRedraw(suspend_handle_id)
//   this.svgElement.unsuspendRedrawAll()

SVGKit.prototype.suspendRedraw = function (milliseconds) {
    milliseconds = SVGKit.firstNonNull(milliseconds, 1000);
    var tempRedrawId = this.svgElement.suspendRedraw(milliseconds);
    this.unsuspendRedraw()
    this._redrawId = tempRedrawId
}

SVGKit.prototype.unsuspendRedraw = function () {
    if (this._redrawId != null) {
        this.svgElement.unsuspendRedraw(this._redrawId);
        this._redrawId = null;
    }
}
*/

SVGKit.prototype.deleteContent = function() {
    /***
        Deletes all graphics content, but leaves definitions
    ***/
    var defs = this.getDefs()
    MochiKit.DOM.replaceChildNodes(this.svgElement, defs)
}

////////////////////////////
//  Transformations
////////////////////////////

/*
   The following take an element and transform it.  If the last item in the
   transform string is the same as the type of transformation that you're
   trying to do (e.g. rotate), replace it for efficiency.  If it's not the
   same, append to the end.
   Note that translate(2,0) gets turned into translate(2) by the browser, and
   this should be handled.
   If the elem passed is not an id for an element, it is treated as a string
   transformation which gets updated and returned.
   Regular Expressions are hard coded so they can be compiled once on load.
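   For example (a sketch of the accumulation semantics): calling
   rotate(elem, 15) when elem's transform is 'rotate(30)' leaves 'rotate(45)',
   and scale(elem, 1, 1) when it is 'scale(2,2)' leaves 'scale(3,3)' -- the
   parameters are added to the trailing entry, not multiplied.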
   TODO: Make sure the arguments are valid numbers to avoid illegal transforms
*/

SVGKit.rotateRE = /(.*)rotate\(\s*([0-9eE\+\-\.]*)\s*\)\s*$/
SVGKit.prototype.rotate = function(elem, degrees) {
    /***
        Test:
            SVGKit.prototype.rotate('translate( 1 ,2 ) rotate( 70)', -10)
            SVGKit.prototype.rotate('rotate(1) translate(2,2) ', -10)
    ***/
    var element = MochiKit.DOM.getElement(elem);
    if (MochiKit.Base.isUndefinedOrNull(element)) {
        return this._oneParameter(elem, degrees, SVGKit.rotateRE, 'rotate')
    }
    var old_transform = element.getAttribute('transform')
    var new_transform = this._oneParameter(old_transform, degrees, SVGKit.rotateRE, 'rotate')
    element.setAttribute('transform', new_transform);
    return new_transform;
}

SVGKit.translateRE = /(.*)translate\(\s*([0-9eE\+\-\.]*)\s*,?\s*([0-9eE\+\-\.]*)?\s*\)\s*$/
SVGKit.prototype.translate = function(elem, tx, ty) {
    /***
        Tests:
            translate(' translate( 1 ,2 ) ', -10,-20)
            translate(' translate(1) ', -10,-20)
            translate(' translate(10,20) ', 0, -20)
            translate('translate(10,10) rotate(20)', 10, 10) == 'translate(10,10) rotate(20)translate(10,10)'
            translate('translate(10,10)', -10, -10) == ''
            translate('translate(10)', -10) == ''
    ***/
    var element = MochiKit.DOM.getElement(elem);
    if (MochiKit.Base.isUndefinedOrNull(element)) {
        return this._twoParameter(elem, tx, ty, SVGKit.translateRE, 'translate')
    }
    var old_transform = element.getAttribute('transform')
    var new_transform = this._twoParameter(old_transform, tx, ty, SVGKit.translateRE,'translate');
    element.setAttribute('transform', new_transform);
    return new_transform;
}

SVGKit.scaleRE = /(.*)scale\(\s*([0-9eE\+\-\.]*)\s*,?\s*([0-9eE\+\-\.]*)?\s*\)\s*$/
SVGKit.prototype.scale = function(elem, sx, sy) {
    var element = MochiKit.DOM.getElement(elem);
    if (MochiKit.Base.isUndefinedOrNull(element)) {
        return this._twoParameter(elem, sx, sy, SVGKit.scaleRE, 'scale');
    }
    var old_transform = element.getAttribute('transform')
    var new_transform = this._twoParameter(old_transform, sx, sy, SVGKit.scaleRE, 'scale');
    element.setAttribute('transform', new_transform);
    return new_transform;
}

SVGKit.matrixRE = null
SVGKit.prototype.matrix = function(elem, a, b, c, d, e, f) {
    var element = MochiKit.DOM.getElement(elem);
    if (MochiKit.Base.isUndefinedOrNull(element)) {
        return this._sixParameter(elem, a, b, c, d, e, f, SVGKit.matrixRE, 'matrix');
    }
    var old_transform = element.getAttribute('transform')
    var new_transform = this._sixParameter(old_transform, a, b, c, d, e, f, SVGKit.matrixRE, 'matrix');
    element.setAttribute('transform', new_transform);
    return new_transform;
}

SVGKit.prototype._oneParameter = function(old_transform, degrees, regexp, name) {
    /***
        rotate('translate(1,2)rotate(12)', -12) -> 'translate(1,2)'
        rotate('translate(1,2)rotate(12)', -11) -> 'translate(1,2)rotate(1)'
        rotate('rotate( 4 ) rotate( 12 )', -12) -> 'rotate( 4 ) '
    ***/
    if (MochiKit.Base.isUndefinedOrNull(degrees) || degrees == 0)
        return old_transform;
    regexp.lastIndex = 0;
    //var transform = elem.getAttribute('transform')
    //var transform = elem;
    var new_transform, array;
    if (old_transform==null || old_transform=='')
        new_transform = name+'('+degrees+')'
    else if ( (array = regexp.exec(old_transform)) != null ) {
        var old_angle = parseFloat(array[2]);
        var new_angle = old_angle+degrees;
        new_transform = array[1];
        if (new_angle!=0)
            new_transform += name+'('+new_angle+')';  // use name, not a hard-coded 'rotate', so this helper stays generic
    }
    else
        new_transform = old_transform + name+'('+degrees+')';
    return new_transform;
}

SVGKit.prototype._twoParameter = function(old_transform, x, y, regexp, name) {
    // Test:
SVGKit.prototype._twoParameter('translate( 1 ,2 ) scale( 3 , 4 )', 1, 1, SVGKit.scaleRE, 'scale')
    // Test: SVGKit.prototype._twoParameter('translate(3)', 1, 1, SVGKit.translateRE, 'translate')
    // Test: SVGKit.prototype._twoParameter('translate(10,20)', 0, -20, SVGKit.translateRE, 'translate')
    if (MochiKit.Base.isUndefinedOrNull(x) || MochiKit.Base.isUndefinedOrNull(name))
        return old_transform;
    // y = SVGKit.firstNonNull(y, 0);
    if (x==0 && y==0)
        return old_transform;
    regexp.lastIndex = 0;
    //var transform = elem
    var new_transform, array;
    if (MochiKit.Base.isUndefinedOrNull(old_transform) || old_transform=='')
        new_transform = name+'('+x+','+y+')';
    else if ( (array = regexp.exec(old_transform)) != null ) {
        var old_x = parseFloat(array[2]);
        var new_x = old_x+x;
        var old_y;
        if (array[3]!=null)
            old_y = parseFloat(array[3]);
        else
            old_y = 0;
        var new_y = old_y+y;
        new_transform = array[1];
        if (new_x!=0 || new_y!=0)
            new_transform += name+'('+new_x+','+new_y+')';
    }
    else
        new_transform = old_transform + name+'('+x+','+y+')';
    return new_transform
}

SVGKit.prototype._sixParameter = function(old_transform, a, b, c, d, e, f, regexp, name) {
    if (MochiKit.Base.isUndefinedOrNull(d) || MochiKit.Base.isUndefinedOrNull(name))
        return old_transform;
    if (MochiKit.Base.isUndefinedOrNull(e))
        e = 0;
    if (MochiKit.Base.isUndefinedOrNull(f))
        f = 0;
    var new_transform = name+'('+a+','+b+','+c+','+d+','+e+','+f+')';
    return new_transform
}

////////////////////////////
//  Output
////////////////////////////

SVGKit.prototype.toXML = function (dom /* = this.svgElement */, decorate /* = false */) {
    /***
        Serializes via emitXML below (MochiKit's toHTML can't be used because
        it converts everything to lower case).

        @param dom: Element to convert.
        @param decorate: boolean: Include <?xml version="1.0" encoding="UTF-8" standalone="no"?> ?

        returns a string of XML.
    ***/
    dom = SVGKit.firstNonNull(dom, this.svgElement);
    var decoration = MochiKit.Base.isUndefinedOrNull(decorate) || !decorate ? '' :
                        '<?xml version="1.0" encoding="UTF-8" standalone="no"?>'
    var source = this.emitXML(dom).join("");
    return decoration + source.replace(/>/g, ">\n");  // Add newlines after all tags.
}

SVGKit.prototype.emitXML = function(dom, /* optional */lst) {
    /***
        A case-preserving version of MochiKit.DOM's emitHTML.
        My changes are marked with "SVGKit" comments.
        TODO: Make namespace-aware.
    ***/
    if (typeof(lst) == 'undefined' || lst === null) {
        lst = [];
    }
    // queue is the call stack, we're doing this non-recursively
    var queue = [dom];
    var self = MochiKit.DOM;
    var escapeHTML = self.escapeHTML;
    var attributeArray = self.attributeArray;
    while (queue.length) {
        dom = queue.pop();
        if (typeof(dom) == 'string') {
            lst.push(dom);
        } else if (dom.nodeType == 1) {
            // we're not using higher order stuff here
            // because safari has heisenbugs.. argh.
            //
            // I think it might have something to do with
            // garbage collection and function calls.
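            // Element node: emit '<' plus the tag name with its original case
            // preserved -- SVG is case-sensitive (e.g. 'linearGradient'),
            // which is exactly why MochiKit's emitHTML can't be used here.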
            lst.push('<' + dom.nodeName);  // SVGKit: got rid of toLowerCase()
            var attributes = [];
            var domAttr = attributeArray(dom);
            for (var i = 0; i < domAttr.length; i++) {
                var a = domAttr[i];
                attributes.push([
                    " ",
                    a.name,
                    '="',
                    escapeHTML(a.value),
                    '"'
                ]);
            }
            attributes.sort();
            for (i = 0; i < attributes.length; i++) {
                var attrs = attributes[i];
                for (var j = 0; j < attrs.length; j++) {
                    lst.push(attrs[j]);
                }
            }
            if (dom.hasChildNodes()) {
                lst.push(">");
                // queue is the FILO call stack, so we put the close tag
                // on first
                queue.push("</" + dom.nodeName + ">");  // SVGKit: got rid of toLowerCase()
                var cnodes = dom.childNodes;
                for (i = cnodes.length - 1; i >= 0; i--) {
                    queue.push(cnodes[i]);
                }
            } else {
                lst.push('/>');
            }
        } else if (dom.nodeType == 3) {
            lst.push(escapeHTML(dom.nodeValue));
        }
    }
    return lst;
}

////////////////////////////
//  Utilities for HTML
////////////////////////////

SVGKit.prototype.convertForm = function(options) {
    /***
        Returns an HTML <form> element with a textarea that gets filled with
        the SVG source, plus buttons.  The result of the form gets sent to a
        server for conversion to pdf, png, etc.
    ***/
    var defaults = {
        converter_url : SVGKit._convert_url,
        new_window : true,
        update_button : true,
        hide_textarea : false,
        rows : 14,
        cols : 55,
        types : ['svg', 'pdf', 'ps', 'png', 'jpg']
    }
    var opts = {}
    if (!MochiKit.Base.isUndefinedOrNull(options))
        update(opts, options)
    setdefault(opts, defaults)
    var target = null
    if (opts.new_window)
        target = "_blank"  // Form will open result in new window.
    // target="_blank" is a deprecated feature, but very useful since you can't right click
    // on the submit button to choose if you want to open it in a new window, and going back is SLOW
    var textArea = TEXTAREA({rows:opts.rows, cols:opts.cols, wrap:"off", name:'source'}, "SVG Source")
    var form = FORM({name:'convert', method:'post', action:opts.converter_url, target:target},
                    textArea)
    var svg = this
    var setSrc = function() {  // Uses the newly created textarea
        replaceChildNodes(textArea, svg.toXML())
    }
    svg.whenReady(setSrc)
    if (opts.hide_textarea)
        hideElement(textArea)
    else
        appendChildNodes(form, BR(null))  // Buttons get added below.
    if (opts.update_button) {
        var updateButton = INPUT({type:"button", value:"Update"})
        appendChildNodes(form, updateButton, " ")
        updateButton['onclick'] = setSrc
    }
    var make_convert_button = function(type) {
        var button=INPUT({type:"submit", name:"type", value:type})
        //if (!opts.update_button)
        //    button['onclick'] = setSrc  // Happens before conversion?
        return SPAN(null, button, " ")  // Put a space after each button
    }
    appendChildNodes(form, map(make_convert_button, opts.types))
    return form
}

SVGKit.codeContainer = function(initial_code, doit, rows /*14*/, cols /*50*/) {
    /***
        Returns HTML <div> that contains code that can be executed when the
        "Do It" button is pressed.
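        Note (an assumption read off doit_button_hit below): initial_code will
        usually be a string that evals to a function, e.g.
        'function (svg) { svg.circle() }', because the eval'd value is what
        gets handed to doit.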
The doit function is expected to take the parsed javascript The doit function is responsible for putting the svg where it belongs in the html page s = getElement('SVGKit_svg').childNodes[0] svg = new SVGKit(s) svg.append(svg.RECT({x:30, y:30, width:500, height:50}) ) ***/ rows = SVGKit.firstNonNull(rows, 14) cols = SVGKit.firstNonNull(cols, 50) var div, codeArea, buttonDoIt div = DIV(null, codeArea=TEXTAREA({rows:rows, cols:cols, wrap:"off"}, initial_code), BR(null), buttonDoIt=INPUT({type:"button", value:"Do It"}) ) var doit_button_hit = function() { var func = eval(codeArea.value) doit(func) } buttonDoIt['onclick'] = doit_button_hit return div } //////////////////////////// // Class Utilities //////////////////////////// SVGKit.__new__ = function () { var m = MochiKit.Base; this.EXPORT_TAGS = { ":common": this.EXPORT, ":all": m.concat(this.EXPORT, this.EXPORT_OK) }; m.nameFunctions(this); } SVGKit.__new__(this); SVGKit.prototype._addDOMFunctions = function() { // The following has been converted by Zeba Wunderlich's Perl Script // from http://www.w3.org/TR/SVG/eltindex.html this.$ = function(id) { return this.svgDocument.getElementById(id) } this.A = this.createSVGDOMFunc("a") this.ALTGLYPH = this.createSVGDOMFunc("altGlyph") this.ALTGLYPHDEF = this.createSVGDOMFunc("altGlyphDef") this.ALTGLYPHITEM = this.createSVGDOMFunc("altGlyphItem") this.ANIMATE = this.createSVGDOMFunc("animate") this.ANIMATECOLOR = this.createSVGDOMFunc("animateColor") this.ANIMATEMOTION = this.createSVGDOMFunc("animateMotion") this.ANIMATETRANSFORM = this.createSVGDOMFunc("animateTransform") this.CIRCLE = this.createSVGDOMFunc("circle") this.CLIPPATH = this.createSVGDOMFunc("clipPath") this.COLOR_PROFILE = this.createSVGDOMFunc("color-profile") this.CURSOR = this.createSVGDOMFunc("cursor") this.DEFINITION_SRC = this.createSVGDOMFunc("definition-src") this.DEFS = this.createSVGDOMFunc("defs") this.DESC = this.createSVGDOMFunc("desc") this.ELLIPSE = this.createSVGDOMFunc("ellipse") this.FEBLEND = this.createSVGDOMFunc("feBlend") this.FECOLORMATRIX = this.createSVGDOMFunc("feColorMatrix") this.FECOMPONENTTRANSFER = this.createSVGDOMFunc("feComponentTransfer") this.FECOMPOSITE = this.createSVGDOMFunc("feComposite") this.FECONVOLVEMATRIX = this.createSVGDOMFunc("feConvolveMatrix") this.FEDIFFUSELIGHTING = this.createSVGDOMFunc("feDiffuseLighting") this.FEDISPLACEMENTMAP = this.createSVGDOMFunc("feDisplacementMap") this.FEDISTANTLIGHT = this.createSVGDOMFunc("feDistantLight") this.FEFLOOD = this.createSVGDOMFunc("feFlood") this.FEFUNCA = this.createSVGDOMFunc("feFuncA") this.FEFUNCB = this.createSVGDOMFunc("feFuncB") this.FEFUNCG = this.createSVGDOMFunc("feFuncG") this.FEFUNCR = this.createSVGDOMFunc("feFuncR") this.FEGAUSSIANBLUR = this.createSVGDOMFunc("feGaussianBlur") this.FEIMAGE = this.createSVGDOMFunc("feImage") this.FEMERGE = this.createSVGDOMFunc("feMerge") this.FEMERGENODE = this.createSVGDOMFunc("feMergeNode") this.FEMORPHOLOGY = this.createSVGDOMFunc("feMorphology") this.FEOFFSET = this.createSVGDOMFunc("feOffset") this.FEPOINTLIGHT = this.createSVGDOMFunc("fePointLight") this.FESPECULARLIGHTING = this.createSVGDOMFunc("feSpecularLighting") this.FESPOTLIGHT = this.createSVGDOMFunc("feSpotLight") this.FETILE = this.createSVGDOMFunc("feTile") this.FETURBULENCE = this.createSVGDOMFunc("feTurbulence") this.FILTER = this.createSVGDOMFunc("filter") this.FONT = this.createSVGDOMFunc("font") this.FONT_FACE = this.createSVGDOMFunc("font-face") this.FONT_FACE_FORMAT = this.createSVGDOMFunc("font-face-format") 
    this.FONT_FACE_NAME = this.createSVGDOMFunc("font-face-name")
    this.FONT_FACE_SRC = this.createSVGDOMFunc("font-face-src")
    this.FONT_FACE_URI = this.createSVGDOMFunc("font-face-uri")
    this.FOREIGNOBJECT = this.createSVGDOMFunc("foreignObject")
    this.G = this.createSVGDOMFunc("g")
    this.GLYPH = this.createSVGDOMFunc("glyph")
    this.GLYPHREF = this.createSVGDOMFunc("glyphRef")
    this.HKERN = this.createSVGDOMFunc("hkern")
    this.IMAGE = this.createSVGDOMFunc("image")
    this.LINE = this.createSVGDOMFunc("line")
    this.LINEARGRADIENT = this.createSVGDOMFunc("linearGradient")
    this.MARKER = this.createSVGDOMFunc("marker")
    this.MASK = this.createSVGDOMFunc("mask")
    this.METADATA = this.createSVGDOMFunc("metadata")
    this.MISSING_GLYPH = this.createSVGDOMFunc("missing-glyph")
    this.MPATH = this.createSVGDOMFunc("mpath")
    this.PATH = this.createSVGDOMFunc("path")
    this.PATTERN = this.createSVGDOMFunc("pattern")
    this.POLYGON = this.createSVGDOMFunc("polygon")
    this.POLYLINE = this.createSVGDOMFunc("polyline")
    this.RADIALGRADIENT = this.createSVGDOMFunc("radialGradient")
    this.RECT = this.createSVGDOMFunc("rect")
    this.SCRIPT = this.createSVGDOMFunc("script")
    this.SET = this.createSVGDOMFunc("set")
    this.STOP = this.createSVGDOMFunc("stop")
    this.STYLE = this.createSVGDOMFunc("style")
    this.SVG = this.createSVGDOMFunc("svg")
    this.SWITCH = this.createSVGDOMFunc("switch")
    this.SYMBOL = this.createSVGDOMFunc("symbol")
    this.TEXT = this.createSVGDOMFunc("text")
    this.TEXTPATH = this.createSVGDOMFunc("textPath")
    this.TITLE = this.createSVGDOMFunc("title")
    this.TREF = this.createSVGDOMFunc("tref")
    this.TSPAN = this.createSVGDOMFunc("tspan")
    this.USE = this.createSVGDOMFunc("use")
    this.VIEW = this.createSVGDOMFunc("view")
    this.VKERN = this.createSVGDOMFunc("vkern")
}

// The following line probably isn't necessary since I don't export anything:
// MochiKit.Base._exportSymbols(this, SVGKit);
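/*
    Usage sketch (not part of the library; assumes MochiKit and this file are
    loaded in an XHTML page, and the element id 'container' is hypothetical):

        var svg = new SVGKit(200, 100, 'mysvg');   // new inline 200x100 SVG
        svg.whenReady(function () {
            MochiKit.DOM.getElement('container').appendChild(svg.htmlElement);
            var r = svg.RECT({x: 10, y: 10, width: 80, height: 40, fill: 'steelblue'});
            svg.append(r);
            svg.rotate(r, 15);   // r's transform becomes 'rotate(15)'
        });
*/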
SVGKit/SVGKit.js
this.FONT_FACE_NAME = this.createSVGDOMFunc("font-face-name") this.FONT_FACE_SRC = this.createSVGDOMFunc("font-face-src") this.FONT_FACE_URI = this.createSVGDOMFunc("font-face-uri") this.FOREIGNOBJECT = this.createSVGDOMFunc("foreignObject") this.G = this.createSVGDOMFunc("g") this.GLYPH = this.createSVGDOMFunc("glyph") this.GLYPHREF = this.createSVGDOMFunc("glyphRef") this.HKERN = this.createSVGDOMFunc("hkern") this.IMAGE = this.createSVGDOMFunc("image") this.LINE = this.createSVGDOMFunc("line") this.LINEARGRADIENT = this.createSVGDOMFunc("linearGradient") this.MARKER = this.createSVGDOMFunc("marker") this.MASK = this.createSVGDOMFunc("mask") this.METADATA = this.createSVGDOMFunc("metadata") this.MISSING_GLYPH = this.createSVGDOMFunc("missing-glyph") this.MPATH = this.createSVGDOMFunc("mpath") this.PATH = this.createSVGDOMFunc("path") this.PATTERN = this.createSVGDOMFunc("pattern") this.POLYGON = this.createSVGDOMFunc("polygon") this.POLYLINE = this.createSVGDOMFunc("polyline") this.RADIALGRADIENT = this.createSVGDOMFunc("radialGradient") this.RECT = this.createSVGDOMFunc("rect") this.SCRIPT = this.createSVGDOMFunc("script") this.SET = this.createSVGDOMFunc("set") this.STOP = this.createSVGDOMFunc("stop") this.STYLE = this.createSVGDOMFunc("style") this.SVG = this.createSVGDOMFunc("svg") this.SWITCH = this.createSVGDOMFunc("switch") this.SYMBOL = this.createSVGDOMFunc("symbol") this.TEXT = this.createSVGDOMFunc("text") this.TEXTPATH = this.createSVGDOMFunc("textPath") this.TITLE = this.createSVGDOMFunc("title") this.TREF = this.createSVGDOMFunc("tref") this.TSPAN = this.createSVGDOMFunc("tspan") this.USE = this.createSVGDOMFunc("use") this.VIEW = this.createSVGDOMFunc("view") this.VKERN = this.createSVGDOMFunc("vkern") } // The following line probably isn't necessary since I don't export anything: // MochiKit.Base._exportSymbols(this, SVGKit);
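The rotate, translate, and scale helpers in the file above all share one technique: a regular expression anchored at the end of the transform attribute finds a trailing component of the same kind, folds the new parameters into it, and drops the component entirely when everything cancels to zero; when there is no trailing match, the new component is simply appended. Below is a minimal Java sketch of that merge-or-append idea, restricted to translate. The class name, method name, and the simplified number pattern are illustrative placeholders, not SVGKit API.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

/** Illustrative Java port of SVGKit's trailing-transform merging. */
public class TransformMerge {

    // Matches a trailing translate(x[,y]) component and captures the prefix.
    private static final Pattern TRANSLATE = Pattern.compile(
            "(.*)translate\\(\\s*([-+0-9.eE]+)\\s*,?\\s*([-+0-9.eE]+)?\\s*\\)\\s*$");

    /** Folds (tx, ty) into a trailing translate(), or appends a new one. */
    public static String translate(String oldTransform, double tx, double ty) {
        if (tx == 0 && ty == 0) {
            return oldTransform;
        }
        if (oldTransform == null || oldTransform.isEmpty()) {
            return "translate(" + tx + "," + ty + ")";
        }
        Matcher m = TRANSLATE.matcher(oldTransform);
        if (m.matches()) {
            double newX = Double.parseDouble(m.group(2)) + tx;
            // A one-argument translate(x) means y = 0.
            double newY = (m.group(3) != null ? Double.parseDouble(m.group(3)) : 0) + ty;
            // Dropping the component when it cancels to (0,0) mirrors SVGKit's
            // translate('translate(10,10)', -10, -10) == '' test above.
            return (newX == 0 && newY == 0)
                    ? m.group(1)
                    : m.group(1) + "translate(" + newX + "," + newY + ")";
        }
        return oldTransform + "translate(" + tx + "," + ty + ")";
    }

    public static void main(String[] args) {
        System.out.println(translate("translate(10,10) rotate(20)", 10, 10));
        System.out.println("[" + translate("translate(10,10)", -10, -10) + "]"); // []
    }
}

Anchoring the pattern with a trailing $ is what lets repeated calls collapse into a single component instead of growing the attribute without bound.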
Comment change on location of convert python script git-svn-id: c6f97cf4c8acc6a30836c0e2115847d880ab0465@389 d1cfad4a-9815-0410-8b6e-f025c34e1c2d
SVGKit/SVGKit.js
Comment change on location of convert python script
<ide><path>SVGKit/SVGKit.js
<ide> SVGKit._svgMIME = 'image/svg+xml';
<ide> SVGKit._svgEmptyName = 'empty.svg';
<ide> SVGKit._SVGiKitBaseURI = '';
<del>SVGKit._errorText = "You can't display SVG. Download Firefox 1.5." ;
<del>SVGKit._convert_url = 'http://brainflux.org/cgi-bin/convert_svg.py' // Should be customized to your own server
<add>SVGKit._errorText = "You can't display SVG. Download the latest Firefox!" ;
<add>SVGKit._convert_url = 'http://svgkit.sourceforge.net/cgi-bin/convert_svg.py' // Should be customized to your own server
<ide>
<ide>
<ide> ////////////////////////////
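One detail worth pulling out of the record above: emitXML serializes the DOM without recursion by treating a plain list as its own call stack and pushing each element's closing tag before the children (which go on in reverse order, since the stack is last-in, first-out). The following self-contained Java sketch renders the same trick over a toy element model; Node, emit, and the rest are hypothetical names for illustration, not part of SVGKit or MochiKit.

import java.util.ArrayDeque;
import java.util.Deque;

/** Stack-based (non-recursive) serializer, mirroring emitXML's trick of
 *  pushing the close tag before the children. */
public class StackSerializer {

    static class Node {
        final String name;      // element name, kept case-sensitive as SVG requires
        final Node[] children;
        Node(String name, Node... children) { this.name = name; this.children = children; }
    }

    static String emit(Node root) {
        StringBuilder out = new StringBuilder();
        Deque<Object> stack = new ArrayDeque<>(); // holds Nodes and literal close-tag Strings
        stack.push(root);
        while (!stack.isEmpty()) {
            Object top = stack.pop();
            if (top instanceof String) {          // a previously queued close tag
                out.append((String) top);
                continue;
            }
            Node node = (Node) top;
            if (node.children.length == 0) {
                out.append('<').append(node.name).append("/>");
            } else {
                out.append('<').append(node.name).append('>');
                stack.push("</" + node.name + ">");       // close tag goes on first...
                for (int i = node.children.length - 1; i >= 0; i--) {
                    stack.push(node.children[i]);         // ...then children in reverse
                }
            }
        }
        return out.toString();
    }

    public static void main(String[] args) {
        Node svg = new Node("svg", new Node("g", new Node("rect"), new Node("circle")));
        System.out.println(emit(svg)); // <svg><g><rect/><circle/></g></svg>
    }
}

Because the close tag is pushed before the reversed children, it is popped only after every descendant has been emitted, which is exactly the ordering a recursive serializer would produce.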
Java
mit
930d3b53e587dfbd65cb371817f3a98d59968d05
0
podio/podio-android
/* * Copyright (C) 2014 Copyright Citrix Systems, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to * do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.podio.sdk.client; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.FutureTask; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import android.os.Handler; import android.os.Looper; import com.podio.sdk.PodioException; import com.podio.sdk.ResultListener; import com.podio.sdk.SessionListener; import com.podio.sdk.domain.Session; /** * @author László Urszuly */ public class RequestFuture<T> extends FutureTask<RestResult<T>> { private static final Handler HANDLER = new Handler(Looper.getMainLooper()); private ResultListener<? super T> resultListener; private SessionListener sessionListener; public RequestFuture(Callable<RestResult<T>> callable) { super(callable); } @Override protected void done() { reportResult(sessionListener); reportResult(resultListener); } @Override public RestResult<T> get() throws InterruptedException, ExecutionException { try { return super.get(); } catch (ExecutionException e) { if (e.getCause() instanceof PodioException) { throw (PodioException) e.getCause(); } else { throw e; } } } @Override public RestResult<T> get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { try { return super.get(timeout, unit); } catch (ExecutionException e) { if (e.getCause() instanceof PodioException) { throw (PodioException) e.getCause(); } else { throw e; } } } public RequestFuture<T> withResultListener(ResultListener<? super T> resultListener) { this.resultListener = resultListener; if (isDone()) { reportResult(resultListener); } return this; } public RequestFuture<T> withSessionListener(SessionListener sessionListener) { this.sessionListener = sessionListener; if (isDone()) { reportResult(sessionListener); } return this; } private RestResult<T> getResultNow() { // TODO: Catching the exceptions and returning null is a temporary, // quick-n-dirty fix to resolve immediate blocking issues in QA. A more // solid solution is being designed as we speak. try { return get(); } catch (InterruptedException e) { return null; // throw PodioException.fromThrowable(e); } catch (ExecutionException e) { return null; // if (e.getCause() instanceof PodioException) { // throw (PodioException) e.getCause(); // } else { // throw PodioException.fromThrowable(e); // } } } private void reportResult(final ResultListener<? 
super T> resultListener) { if (resultListener != null) { HANDLER.post(new Runnable() { @Override public void run() { RestResult<T> result = getResultNow(); T item = result.getItem(); resultListener.onRequestPerformed(item); } }); } } private void reportResult(final SessionListener sessionListener) { if (sessionListener != null) { HANDLER.post(new Runnable() { @Override public void run() { RestResult<T> result = getResultNow(); if (result != null && result.hasSession()) { Session session = result.getSession(); sessionListener.onSessionChanged(session); } } }); } } }
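Both get() overrides in the class above use the same cause-unwrapping pattern: FutureTask wraps any failure of the task body in an ExecutionException, so the override inspects the cause and rethrows the SDK's own exception type directly when it finds one. Here is a self-contained sketch of that pattern, with a hypothetical unchecked DomainException standing in for PodioException; the bare rethrow after the cast compiles only because the exception is unchecked.

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;

/** Sketch of the cause-unwrapping pattern: failures thrown inside the task
 *  surface as the domain exception itself rather than arriving wrapped in
 *  an ExecutionException. DomainException is a stand-in, not SDK code. */
public class UnwrappingFuture<T> extends FutureTask<T> {

    public static class DomainException extends RuntimeException {
        public DomainException(String message) { super(message); }
    }

    public UnwrappingFuture(Callable<T> callable) { super(callable); }

    @Override
    public T get() throws InterruptedException, ExecutionException {
        try {
            return super.get();
        } catch (ExecutionException e) {
            if (e.getCause() instanceof DomainException) {
                throw (DomainException) e.getCause(); // unchecked, so no throws clause needed
            }
            throw e;
        }
    }

    public static void main(String[] args) throws Exception {
        UnwrappingFuture<String> future = new UnwrappingFuture<>(() -> {
            throw new DomainException("request failed");
        });
        future.run(); // execute the task body on this thread
        try {
            future.get();
        } catch (DomainException e) {
            System.out.println("caught domain error: " + e.getMessage());
        }
    }
}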
sdk/src/com/podio/sdk/client/RequestFuture.java
/* * Copyright (C) 2014 Copyright Citrix Systems, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to * do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.podio.sdk.client; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.FutureTask; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import android.os.Handler; import android.os.Looper; import com.podio.sdk.PodioException; import com.podio.sdk.ResultListener; import com.podio.sdk.SessionListener; import com.podio.sdk.domain.Session; /** * @author László Urszuly */ public class RequestFuture<T> extends FutureTask<RestResult<T>> { private static final Handler HANDLER = new Handler(Looper.getMainLooper()); private ResultListener<? super T> resultListener; private SessionListener sessionListener; public RequestFuture(Callable<RestResult<T>> callable) { super(callable); } @Override protected void done() { reportResult(sessionListener); reportResult(resultListener); } @Override public RestResult<T> get() throws InterruptedException, ExecutionException { try { return super.get(); } catch (ExecutionException e) { if (e.getCause() instanceof PodioException) { throw (PodioException) e.getCause(); } else { throw e; } } } @Override public RestResult<T> get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { try { return super.get(timeout, unit); } catch (ExecutionException e) { if (e.getCause() instanceof PodioException) { throw (PodioException) e.getCause(); } else { throw e; } } } public RequestFuture<T> withResultListener(ResultListener<? super T> resultListener) { this.resultListener = resultListener; if (isDone()) { reportResult(resultListener); } return this; } public RequestFuture<T> withSessionListener(SessionListener sessionListener) { this.sessionListener = sessionListener; if (isDone()) { reportResult(sessionListener); } return this; } private RestResult<T> getResultNow() { try { return get(); } catch (InterruptedException e) { throw PodioException.fromThrowable(e); } catch (ExecutionException e) { if (e.getCause() instanceof PodioException) { throw (PodioException) e.getCause(); } else { throw PodioException.fromThrowable(e); } } } private void reportResult(final ResultListener<? 
super T> resultListener) { if (resultListener != null) { HANDLER.post(new Runnable() { @Override public void run() { RestResult<T> result = getResultNow(); T item = result.getItem(); resultListener.onRequestPerformed(item); } }); } } private void reportResult(final SessionListener sessionListener) { if (sessionListener != null) { HANDLER.post(new Runnable() { @Override public void run() { RestResult<T> result = getResultNow(); if (result != null && result.hasSession()) { Session session = result.getSession(); sessionListener.onSessionChanged(session); } } }); } } }
Temporary quick-n-dirty fix for the unhandled exception issue. Currently the SDK delivers each and every failure through an Exception. The "crash" described in the related issue seems to be the client failing to handle such a failure. It is, hence, not a crash caused by a malfunctioning SDK, but rather the result of an incomplete implementation in the client. Fixes: #2983
sdk/src/com/podio/sdk/client/RequestFuture.java
Temporary quick-n-dirty fix for the unhandled exception issue.
<ide><path>sdk/src/com/podio/sdk/client/RequestFuture.java
<ide> }
<ide>
<ide> private RestResult<T> getResultNow() {
<add> // TODO: Catching the exceptions and returning null is a temporary,
<add> // quick-n-dirty fix to resolve immediate blocking issues in QA. A more
<add> // solid solution is being designed as we speak.
<ide> try {
<ide> return get();
<ide> } catch (InterruptedException e) {
<del> throw PodioException.fromThrowable(e);
<add> return null;
<add> // throw PodioException.fromThrowable(e);
<ide> } catch (ExecutionException e) {
<del> if (e.getCause() instanceof PodioException) {
<del> throw (PodioException) e.getCause();
<del> } else {
<del> throw PodioException.fromThrowable(e);
<del> }
<add> return null;
<add> // if (e.getCause() instanceof PodioException) {
<add> // throw (PodioException) e.getCause();
<add> // } else {
<add> // throw PodioException.fromThrowable(e);
<add> // }
<ide> }
<ide> }
<ide>
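One consequence of the change in this diff: once getResultNow() may return null, every consumer of the result needs a null guard of the kind the session-listener path already has. The sketch below shows null-tolerant listener dispatch in that spirit; it is a hypothetical illustration, not SDK code.

import java.util.function.Consumer;

/** Hypothetical sketch of null-tolerant dispatch: when a swallowed failure
 *  yields a null result, the listener is simply not invoked rather than
 *  being handed partial data. */
public class NullTolerantDispatch {

    static <T> void report(T result, Consumer<? super T> listener) {
        if (result != null && listener != null) {
            listener.accept(result); // skip the callback entirely on failure
        }
    }

    public static void main(String[] args) {
        report("item-42", item -> System.out.println("delivered: " + item));
        report(null, item -> System.out.println("never printed"));
    }
}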
Java
apache-2.0
18b811aac4313e1ac67dab4048695bd0cf9e40ca
0
apache/avro,apache/avro,apache/avro,apache/avro,apache/avro,apache/avro,apache/avro,apache/avro,apache/avro,apache/avro,apache/avro,apache/avro
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.io.parsing; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.avro.AvroTypeException; import org.apache.avro.Resolver; import org.apache.avro.Schema; import org.apache.avro.Schema.Field; import org.apache.avro.io.Encoder; import org.apache.avro.io.EncoderFactory; import org.apache.avro.util.internal.Accessor; import org.apache.avro.util.internal.Accessor.ResolvingGrammarGeneratorAccessor; import com.fasterxml.jackson.databind.JsonNode; /** * The class that generates a resolving grammar to resolve between two schemas. */ public class ResolvingGrammarGenerator extends ValidatingGrammarGenerator { static { Accessor.setAccessor(new ResolvingGrammarGeneratorAccessor() { @Override protected void encode(Encoder e, Schema s, JsonNode n) throws IOException { ResolvingGrammarGenerator.encode(e, s, n); } }); } /** * Resolves the writer schema <tt>writer</tt> and the reader schema * <tt>reader</tt> and returns the start symbol for the grammar generated. * * @param writer The schema used by the writer * @param reader The schema used by the reader * @return The start symbol for the resolving grammar * @throws IOException */ public final Symbol generate(Schema writer, Schema reader) throws IOException { Resolver.Action r = Resolver.resolve(writer, reader); return Symbol.root(generate(r, new HashMap<>())); } /** * Takes a {@link Resolver.Action} for resolving two schemas and returns the * start symbol for a grammar that implements that resolution. If the action is * for a record and there's already a symbol for that record in <tt>seen</tt>, * then that symbol is returned. Otherwise a new symbol is generated and * returned. * * @param action The resolver to be implemented * @param seen The &lt;Action&gt; to symbol map of start symbols of resolving * grammars so far. 
* @return The start symbol for the resolving grammar * @throws IOException */ private Symbol generate(Resolver.Action action, Map<Object, Symbol> seen) throws IOException { if (action instanceof Resolver.DoNothing) { return simpleGen(action.writer, seen); } else if (action instanceof Resolver.ErrorAction) { return Symbol.error(action.toString()); } else if (action instanceof Resolver.Skip) { return Symbol.skipAction(simpleGen(action.writer, seen)); } else if (action instanceof Resolver.Promote) { return Symbol.resolve(simpleGen(action.writer, seen), simpleGen(action.reader, seen)); } else if (action instanceof Resolver.ReaderUnion) { Resolver.ReaderUnion ru = (Resolver.ReaderUnion) action; Symbol s = generate(ru.actualAction, seen); return Symbol.seq(Symbol.unionAdjustAction(ru.firstMatch, s), Symbol.UNION); } else if (action.writer.getType() == Schema.Type.ARRAY) { Symbol es = generate(((Resolver.Container) action).elementAction, seen); return Symbol.seq(Symbol.repeat(Symbol.ARRAY_END, es), Symbol.ARRAY_START); } else if (action.writer.getType() == Schema.Type.MAP) { Symbol es = generate(((Resolver.Container) action).elementAction, seen); return Symbol.seq(Symbol.repeat(Symbol.MAP_END, es, Symbol.STRING), Symbol.MAP_START); } else if (action.writer.getType() == Schema.Type.UNION) { if (((Resolver.WriterUnion) action).unionEquiv) { return simpleGen(action.reader, seen); } Resolver.Action[] branches = ((Resolver.WriterUnion) action).actions; Symbol[] symbols = new Symbol[branches.length]; String[] labels = new String[branches.length]; int i = 0; for (Resolver.Action branch : branches) { symbols[i] = generate(branch, seen); labels[i] = action.writer.getTypes().get(i).getFullName(); i++; } return Symbol.seq(Symbol.alt(symbols, labels), Symbol.WRITER_UNION_ACTION); } else if (action instanceof Resolver.EnumAdjust) { Resolver.EnumAdjust e = (Resolver.EnumAdjust) action; Object[] adjs = new Object[e.adjustments.length]; for (int i = 0; i < adjs.length; i++) { adjs[i] = (0 <= e.adjustments[i] ? 
new Integer(e.adjustments[i]) : "No match for " + e.writer.getEnumSymbols().get(i)); } return Symbol.seq(Symbol.enumAdjustAction(e.reader.getEnumSymbols().size(), adjs), Symbol.ENUM); } else if (action instanceof Resolver.RecordAdjust) { Symbol result = seen.get(action); if (result == null) { final Resolver.RecordAdjust ra = (Resolver.RecordAdjust) action; int defaultCount = ra.readerOrder.length - ra.firstDefault; int count = 1 + ra.fieldActions.length + 3 * defaultCount; final Symbol[] production = new Symbol[count]; result = Symbol.seq(production); seen.put(action, result); production[--count] = Symbol.fieldOrderAction(ra.readerOrder); final Resolver.Action[] actions = ra.fieldActions; for (Resolver.Action wfa : actions) { production[--count] = generate(wfa, seen); } for (int i = ra.firstDefault; i < ra.readerOrder.length; i++) { final Schema.Field rf = ra.readerOrder[i]; byte[] bb = getBinary(rf.schema(), Accessor.defaultValue(rf)); production[--count] = Symbol.defaultStartAction(bb); production[--count] = simpleGen(rf.schema(), seen); production[--count] = Symbol.DEFAULT_END_ACTION; } } return result; } throw new IllegalArgumentException("Unrecognized Resolver.Action: " + action); } private Symbol simpleGen(Schema s, Map<Object, Symbol> seen) { switch (s.getType()) { case NULL: return Symbol.NULL; case BOOLEAN: return Symbol.BOOLEAN; case INT: return Symbol.INT; case LONG: return Symbol.LONG; case FLOAT: return Symbol.FLOAT; case DOUBLE: return Symbol.DOUBLE; case BYTES: return Symbol.BYTES; case STRING: return Symbol.STRING; case FIXED: return Symbol.seq(Symbol.intCheckAction(s.getFixedSize()), Symbol.FIXED); case ENUM: return Symbol.seq(Symbol.enumAdjustAction(s.getEnumSymbols().size(), null), Symbol.ENUM); case ARRAY: return Symbol.seq(Symbol.repeat(Symbol.ARRAY_END, simpleGen(s.getElementType(), seen)), Symbol.ARRAY_START); case MAP: return Symbol.seq(Symbol.repeat(Symbol.MAP_END, simpleGen(s.getValueType(), seen), Symbol.STRING), Symbol.MAP_START); case UNION: { final List<Schema> subs = s.getTypes(); final Symbol[] symbols = new Symbol[subs.size()]; final String[] labels = new String[subs.size()]; int i = 0; for (Schema b : s.getTypes()) { symbols[i] = simpleGen(b, seen); labels[i++] = b.getFullName(); } return Symbol.seq(Symbol.alt(symbols, labels), Symbol.UNION); } case RECORD: { Symbol result = seen.get(s); if (result == null) { final Symbol[] production = new Symbol[s.getFields().size() + 1]; result = Symbol.seq(production); seen.put(s, result); int i = production.length; production[--i] = Symbol.fieldOrderAction(s.getFields().toArray(new Schema.Field[0])); for (Field f : s.getFields()) { production[--i] = simpleGen(f.schema(), seen); } // FieldOrderAction is needed even though the field-order hasn't changed, // because the _reader_ doesn't know the field order hasn't changed, and // thus it will probably call {@ ResolvingDecoder.fieldOrder} to find out. } return result; } default: throw new IllegalArgumentException("Unexpected schema: " + s); } } private static EncoderFactory factory = new EncoderFactory().configureBufferSize(32); /** * Returns the Avro binary encoded version of <tt>n</tt> according to the schema * <tt>s</tt>. * * @param s The schema for encoding * @param n The Json node that has the value to be encoded. * @return The binary encoded version of <tt>n</tt>. 
* @throws IOException */ private static byte[] getBinary(Schema s, JsonNode n) throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); Encoder e = factory.binaryEncoder(out, null); encode(e, s, n); e.flush(); return out.toByteArray(); } /** * Encodes the given Json node <tt>n</tt> on to the encoder <tt>e</tt> according * to the schema <tt>s</tt>. * * @param e The encoder to encode into. * @param s The schema for the object being encoded. * @param n The Json node to encode. * @throws IOException */ public static void encode(Encoder e, Schema s, JsonNode n) throws IOException { switch (s.getType()) { case RECORD: for (Field f : s.getFields()) { String name = f.name(); JsonNode v = n.get(name); if (v == null) { v = Accessor.defaultValue(f); } if (v == null) { throw new AvroTypeException("No default value for: " + name); } encode(e, f.schema(), v); } break; case ENUM: e.writeEnum(s.getEnumOrdinal(n.textValue())); break; case ARRAY: e.writeArrayStart(); e.setItemCount(n.size()); Schema i = s.getElementType(); for (JsonNode node : n) { e.startItem(); encode(e, i, node); } e.writeArrayEnd(); break; case MAP: e.writeMapStart(); e.setItemCount(n.size()); Schema v = s.getValueType(); for (Iterator<String> it = n.fieldNames(); it.hasNext();) { e.startItem(); String key = it.next(); e.writeString(key); encode(e, v, n.get(key)); } e.writeMapEnd(); break; case UNION: e.writeIndex(0); encode(e, s.getTypes().get(0), n); break; case FIXED: if (!n.isTextual()) throw new AvroTypeException("Non-string default value for fixed: " + n); byte[] bb = n.textValue().getBytes(StandardCharsets.ISO_8859_1); if (bb.length != s.getFixedSize()) { bb = Arrays.copyOf(bb, s.getFixedSize()); } e.writeFixed(bb); break; case STRING: if (!n.isTextual()) throw new AvroTypeException("Non-string default value for string: " + n); e.writeString(n.textValue()); break; case BYTES: if (!n.isTextual()) throw new AvroTypeException("Non-string default value for bytes: " + n); e.writeBytes(n.textValue().getBytes(StandardCharsets.ISO_8859_1)); break; case INT: if (!n.isNumber()) throw new AvroTypeException("Non-numeric default value for int: " + n); e.writeInt(n.intValue()); break; case LONG: if (!n.isNumber()) throw new AvroTypeException("Non-numeric default value for long: " + n); e.writeLong(n.longValue()); break; case FLOAT: if (!n.isNumber()) throw new AvroTypeException("Non-numeric default value for float: " + n); e.writeFloat((float) n.doubleValue()); break; case DOUBLE: if (!n.isNumber()) throw new AvroTypeException("Non-numeric default value for double: " + n); e.writeDouble(n.doubleValue()); break; case BOOLEAN: if (!n.isBoolean()) throw new AvroTypeException("Non-boolean default for boolean: " + n); e.writeBoolean(n.booleanValue()); break; case NULL: if (!n.isNull()) throw new AvroTypeException("Non-null default value for null type: " + n); e.writeNull(); break; } } }
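getBinary() above replays each reader-side default value (a Jackson JsonNode) through a BinaryEncoder so that the resolving grammar can supply it at decode time. For readers unfamiliar with the wire format this produces, here is a standalone sketch of Avro's int/long binary encoding, zig-zag mapping followed by little-endian base-128 varints; encodeLong is an illustrative helper name, not an Avro API.

import java.io.ByteArrayOutputStream;

/** Standalone sketch of the Avro binary encoding behind numeric defaults:
 *  zig-zag mapping followed by little-endian base-128 varints, the same
 *  wire format BinaryEncoder.writeLong emits. */
public class ZigZagVarint {

    static byte[] encodeLong(long n) {
        long z = (n << 1) ^ (n >> 63);            // zig-zag: small magnitudes stay small
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        while ((z & ~0x7FL) != 0) {
            out.write((int) ((z & 0x7F) | 0x80)); // low 7 bits plus a continuation bit
            z >>>= 7;
        }
        out.write((int) z);
        return out.toByteArray();
    }

    public static void main(String[] args) {
        for (byte b : encodeLong(-2)) System.out.printf("%02x ", b & 0xFF); // 03
        System.out.println();
        for (byte b : encodeLong(64)) System.out.printf("%02x ", b & 0xFF); // 80 01
        System.out.println();
    }
}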
lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.io.parsing; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.avro.AvroTypeException; import org.apache.avro.Resolver; import org.apache.avro.Schema; import org.apache.avro.Schema.Field; import org.apache.avro.io.Encoder; import org.apache.avro.io.EncoderFactory; import org.apache.avro.util.internal.Accessor; import org.apache.avro.util.internal.Accessor.ResolvingGrammarGeneratorAccessor; import com.fasterxml.jackson.databind.JsonNode; /** * The class that generates a resolving grammar to resolve between two schemas. */ public class ResolvingGrammarGenerator extends ValidatingGrammarGenerator { static { Accessor.setAccessor(new ResolvingGrammarGeneratorAccessor() { @Override protected void encode(Encoder e, Schema s, JsonNode n) throws IOException { ResolvingGrammarGenerator.encode(e, s, n); } }); } /** * Resolves the writer schema <tt>writer</tt> and the reader schema * <tt>reader</tt> and returns the start symbol for the grammar generated. * * @param writer The schema used by the writer * @param reader The schema used by the reader * @return The start symbol for the resolving grammar * @throws IOException */ public final Symbol generate(Schema writer, Schema reader) throws IOException { Resolver.Action r = Resolver.resolve(writer, reader); return Symbol.root(generate(r, new HashMap<>())); } /** * Takes a {@link Resolver.Action} for resolving two schemas and returns the * start symbol for a grammar that implements that resolution. If the action is * for a record and there's already a symbol for that record in <tt>seen</tt>, * then that symbol is returned. Otherwise a new symbol is generated and * returned. * * @param action The resolver to be implemented * @param seen The &lt;Action&gt; to symbol map of start symbols of resolving * grammars so far. 
* @return The start symbol for the resolving grammar * @throws IOException */ private Symbol generate(Resolver.Action action, Map<Object, Symbol> seen) throws IOException { if (action instanceof Resolver.DoNothing) { return simpleGen(action.writer, seen); } else if (action instanceof Resolver.ErrorAction) { return Symbol.error(action.toString()); } else if (action instanceof Resolver.Skip) { return Symbol.skipAction(simpleGen(action.writer, seen)); } else if (action instanceof Resolver.Promote) { return Symbol.resolve(simpleGen(action.writer, seen), simpleGen(action.reader, seen)); } else if (action instanceof Resolver.ReaderUnion) { Resolver.ReaderUnion ru = (Resolver.ReaderUnion) action; Symbol s = generate(ru.actualAction, seen); return Symbol.seq(Symbol.unionAdjustAction(ru.firstMatch, s), Symbol.UNION); } else if (action.writer.getType() == Schema.Type.ARRAY) { Symbol es = generate(((Resolver.Container) action).elementAction, seen); return Symbol.seq(Symbol.repeat(Symbol.ARRAY_END, es), Symbol.ARRAY_START); } else if (action.writer.getType() == Schema.Type.MAP) { Symbol es = generate(((Resolver.Container) action).elementAction, seen); return Symbol.seq(Symbol.repeat(Symbol.MAP_END, es, Symbol.STRING), Symbol.MAP_START); } else if (action.writer.getType() == Schema.Type.UNION) { if (((Resolver.WriterUnion) action).unionEquiv) { return simpleGen(action.writer, seen); } Resolver.Action[] branches = ((Resolver.WriterUnion) action).actions; Symbol[] symbols = new Symbol[branches.length]; String[] labels = new String[branches.length]; int i = 0; for (Resolver.Action branch : branches) { symbols[i] = generate(branch, seen); labels[i] = action.writer.getTypes().get(i).getFullName(); i++; } return Symbol.seq(Symbol.alt(symbols, labels), Symbol.WRITER_UNION_ACTION); } else if (action instanceof Resolver.EnumAdjust) { Resolver.EnumAdjust e = (Resolver.EnumAdjust) action; Object[] adjs = new Object[e.adjustments.length]; for (int i = 0; i < adjs.length; i++) { adjs[i] = (0 <= e.adjustments[i] ? 
new Integer(e.adjustments[i]) : "No match for " + e.writer.getEnumSymbols().get(i)); } return Symbol.seq(Symbol.enumAdjustAction(e.reader.getEnumSymbols().size(), adjs), Symbol.ENUM); } else if (action instanceof Resolver.RecordAdjust) { Symbol result = seen.get(action); if (result == null) { final Resolver.RecordAdjust ra = (Resolver.RecordAdjust) action; int defaultCount = ra.readerOrder.length - ra.firstDefault; int count = 1 + ra.fieldActions.length + 3 * defaultCount; final Symbol[] production = new Symbol[count]; result = Symbol.seq(production); seen.put(action, result); production[--count] = Symbol.fieldOrderAction(ra.readerOrder); final Resolver.Action[] actions = ra.fieldActions; for (Resolver.Action wfa : actions) { production[--count] = generate(wfa, seen); } for (int i = ra.firstDefault; i < ra.readerOrder.length; i++) { final Schema.Field rf = ra.readerOrder[i]; byte[] bb = getBinary(rf.schema(), Accessor.defaultValue(rf)); production[--count] = Symbol.defaultStartAction(bb); production[--count] = simpleGen(rf.schema(), seen); production[--count] = Symbol.DEFAULT_END_ACTION; } } return result; } throw new IllegalArgumentException("Unrecognized Resolver.Action: " + action); } private Symbol simpleGen(Schema s, Map<Object, Symbol> seen) { switch (s.getType()) { case NULL: return Symbol.NULL; case BOOLEAN: return Symbol.BOOLEAN; case INT: return Symbol.INT; case LONG: return Symbol.LONG; case FLOAT: return Symbol.FLOAT; case DOUBLE: return Symbol.DOUBLE; case BYTES: return Symbol.BYTES; case STRING: return Symbol.STRING; case FIXED: return Symbol.seq(Symbol.intCheckAction(s.getFixedSize()), Symbol.FIXED); case ENUM: return Symbol.seq(Symbol.enumAdjustAction(s.getEnumSymbols().size(), null), Symbol.ENUM); case ARRAY: return Symbol.seq(Symbol.repeat(Symbol.ARRAY_END, simpleGen(s.getElementType(), seen)), Symbol.ARRAY_START); case MAP: return Symbol.seq(Symbol.repeat(Symbol.MAP_END, simpleGen(s.getValueType(), seen), Symbol.STRING), Symbol.MAP_START); case UNION: { final List<Schema> subs = s.getTypes(); final Symbol[] symbols = new Symbol[subs.size()]; final String[] labels = new String[subs.size()]; int i = 0; for (Schema b : s.getTypes()) { symbols[i] = simpleGen(b, seen); labels[i++] = b.getFullName(); } return Symbol.seq(Symbol.alt(symbols, labels), Symbol.UNION); } case RECORD: { Symbol result = seen.get(s); if (result == null) { final Symbol[] production = new Symbol[s.getFields().size() + 1]; result = Symbol.seq(production); seen.put(s, result); int i = production.length; production[--i] = Symbol.fieldOrderAction(s.getFields().toArray(new Schema.Field[0])); for (Field f : s.getFields()) { production[--i] = simpleGen(f.schema(), seen); } // FieldOrderAction is needed even though the field-order hasn't changed, // because the _reader_ doesn't know the field order hasn't changed, and // thus it will probably call {@ ResolvingDecoder.fieldOrder} to find out. } return result; } default: throw new IllegalArgumentException("Unexpected schema: " + s); } } private static EncoderFactory factory = new EncoderFactory().configureBufferSize(32); /** * Returns the Avro binary encoded version of <tt>n</tt> according to the schema * <tt>s</tt>. * * @param s The schema for encoding * @param n The Json node that has the value to be encoded. * @return The binary encoded version of <tt>n</tt>. 
* @throws IOException */ private static byte[] getBinary(Schema s, JsonNode n) throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); Encoder e = factory.binaryEncoder(out, null); encode(e, s, n); e.flush(); return out.toByteArray(); } /** * Encodes the given Json node <tt>n</tt> on to the encoder <tt>e</tt> according * to the schema <tt>s</tt>. * * @param e The encoder to encode into. * @param s The schema for the object being encoded. * @param n The Json node to encode. * @throws IOException */ public static void encode(Encoder e, Schema s, JsonNode n) throws IOException { switch (s.getType()) { case RECORD: for (Field f : s.getFields()) { String name = f.name(); JsonNode v = n.get(name); if (v == null) { v = Accessor.defaultValue(f); } if (v == null) { throw new AvroTypeException("No default value for: " + name); } encode(e, f.schema(), v); } break; case ENUM: e.writeEnum(s.getEnumOrdinal(n.textValue())); break; case ARRAY: e.writeArrayStart(); e.setItemCount(n.size()); Schema i = s.getElementType(); for (JsonNode node : n) { e.startItem(); encode(e, i, node); } e.writeArrayEnd(); break; case MAP: e.writeMapStart(); e.setItemCount(n.size()); Schema v = s.getValueType(); for (Iterator<String> it = n.fieldNames(); it.hasNext();) { e.startItem(); String key = it.next(); e.writeString(key); encode(e, v, n.get(key)); } e.writeMapEnd(); break; case UNION: e.writeIndex(0); encode(e, s.getTypes().get(0), n); break; case FIXED: if (!n.isTextual()) throw new AvroTypeException("Non-string default value for fixed: " + n); byte[] bb = n.textValue().getBytes(StandardCharsets.ISO_8859_1); if (bb.length != s.getFixedSize()) { bb = Arrays.copyOf(bb, s.getFixedSize()); } e.writeFixed(bb); break; case STRING: if (!n.isTextual()) throw new AvroTypeException("Non-string default value for string: " + n); e.writeString(n.textValue()); break; case BYTES: if (!n.isTextual()) throw new AvroTypeException("Non-string default value for bytes: " + n); e.writeBytes(n.textValue().getBytes(StandardCharsets.ISO_8859_1)); break; case INT: if (!n.isNumber()) throw new AvroTypeException("Non-numeric default value for int: " + n); e.writeInt(n.intValue()); break; case LONG: if (!n.isNumber()) throw new AvroTypeException("Non-numeric default value for long: " + n); e.writeLong(n.longValue()); break; case FLOAT: if (!n.isNumber()) throw new AvroTypeException("Non-numeric default value for float: " + n); e.writeFloat((float) n.doubleValue()); break; case DOUBLE: if (!n.isNumber()) throw new AvroTypeException("Non-numeric default value for double: " + n); e.writeDouble(n.doubleValue()); break; case BOOLEAN: if (!n.isBoolean()) throw new AvroTypeException("Non-boolean default for boolean: " + n); e.writeBoolean(n.booleanValue()); break; case NULL: if (!n.isNull()) throw new AvroTypeException("Non-null default value for null type: " + n); e.writeNull(); break; } } }
AVRO-2702: ResolvingGrammarGenerator Union to use reader schema instead of writer schema
lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
AVRO-2702: ResolvingGrammarGenerator Union to use reader schema instead of writer schema
<ide><path>lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
<ide>
<ide> } else if (action.writer.getType() == Schema.Type.UNION) {
<ide> if (((Resolver.WriterUnion) action).unionEquiv) {
<del> return simpleGen(action.writer, seen);
<add> return simpleGen(action.reader, seen);
<ide> }
<ide> Resolver.Action[] branches = ((Resolver.WriterUnion) action).actions;
<ide> Symbol[] symbols = new Symbol[branches.length];
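The one-line fix in this diff changes which side's schema seeds the grammar once the two unions are judged equivalent. The underlying concern is that a union value is written as a branch index in the writer's order and must be reinterpreted against the reader's branch list. The toy remapping below illustrates just that idea; the simplification to plain name lists is deliberate, and real Avro resolution also matches branch structure, not only names.

import java.util.Arrays;
import java.util.List;

/** Toy illustration of union resolution: the writer records a branch index
 *  in its own order, and the reader re-maps it into its own branch list. */
public class UnionRemap {

    static int remap(int writerIndex, List<String> writerBranches, List<String> readerBranches) {
        String branch = writerBranches.get(writerIndex);
        int readerIndex = readerBranches.indexOf(branch);
        if (readerIndex < 0) {
            throw new IllegalArgumentException("No reader branch for " + branch);
        }
        return readerIndex;
    }

    public static void main(String[] args) {
        List<String> writer = Arrays.asList("null", "string");
        List<String> reader = Arrays.asList("string", "null"); // same set, different order
        System.out.println(remap(1, writer, reader)); // writer's "string" is reader's 0
    }
}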
Java
apache-2.0
e10cefb9793c50535e887dda3c844fd17d5df882
0
lefloh/ozark,lefloh/ozark,Daniel-Dos/ozark,mvc-spec/ozark,mvc-spec/ozark,dmaidaniuk/ozark,lefloh/ozark,Daniel-Dos/ozark,chkal/ozark,chkal/ozark,dmaidaniuk/ozark,dmaidaniuk/ozark,chkal/ozark,dmaidaniuk/ozark,mvc-spec/ozark,Daniel-Dos/ozark,dmaidaniuk/ozark
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright (c) 2014-2015 Oracle and/or its affiliates. All rights reserved. * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common Development * and Distribution License("CDDL") (collectively, the "License"). You * may not use this file except in compliance with the License. You can * obtain a copy of the License at * http://glassfish.java.net/public/CDDL+GPL_1_1.html * or packager/legal/LICENSE.txt. See the License for the specific * language governing permissions and limitations under the License. * * When distributing the software, include this License Header Notice in each * file and include the License file at packager/legal/LICENSE.txt. * * GPL Classpath Exception: * Oracle designates this particular file as subject to the "Classpath" * exception as provided by Oracle in the GPL Version 2 section of the License * file that accompanied this code. * * Modifications: * If applicable, add the following below the License Header, with the fields * enclosed by brackets [] replaced by your own identifying information: * "Portions Copyright [year] [name of copyright owner]" * * Contributor(s): * If you wish your version of this file to be governed by only the CDDL or * only the GPL Version 2, indicate your decision by adding "[Contributor] * elects to include this software in this distribution under the [CDDL or GPL * Version 2] license." If you don't indicate a single choice of license, a * recipient has the option to distribute your version of this file under * either the CDDL, the GPL Version 2 or to extend the choice of license to * its licensees as provided above. However, if you add GPL Version 2 code * and therefore, elected the GPL Version 2 license, then the option applies * only if the new code is made subject to such option by the copyright * holder. */ package com.oracle.ozark.sample; import javax.ws.rs.ApplicationPath; import javax.ws.rs.core.Application; import java.util.HashSet; import java.util.Set; /** * Class MyApplication. * * @author Santiago Pericas-Geertsen */ @ApplicationPath("resources") public class MyApplication extends Application { @Override public Set<Class<?>> getClasses() { final Set<Class<?>> set = new HashSet<>(); set.add(BookController.class); return set; } }
test/book-cdi/src/main/java/com/oracle/ozark/sample/MyApplication.java
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright (c) 2014-2015 Oracle and/or its affiliates. All rights reserved. * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common Development * and Distribution License("CDDL") (collectively, the "License"). You * may not use this file except in compliance with the License. You can * obtain a copy of the License at * http://glassfish.java.net/public/CDDL+GPL_1_1.html * or packager/legal/LICENSE.txt. See the License for the specific * language governing permissions and limitations under the License. * * When distributing the software, include this License Header Notice in each * file and include the License file at packager/legal/LICENSE.txt. * * GPL Classpath Exception: * Oracle designates this particular file as subject to the "Classpath" * exception as provided by Oracle in the GPL Version 2 section of the License * file that accompanied this code. * * Modifications: * If applicable, add the following below the License Header, with the fields * enclosed by brackets [] replaced by your own identifying information: * "Portions Copyright [year] [name of copyright owner]" * * Contributor(s): * If you wish your version of this file to be governed by only the CDDL or * only the GPL Version 2, indicate your decision by adding "[Contributor] * elects to include this software in this distribution under the [CDDL or GPL * Version 2] license." If you don't indicate a single choice of license, a * recipient has the option to distribute your version of this file under * either the CDDL, the GPL Version 2 or to extend the choice of license to * its licensees as provided above. However, if you add GPL Version 2 code * and therefore, elected the GPL Version 2 license, then the option applies * only if the new code is made subject to such option by the copyright * holder. */ package com.oracle.ozark.sample; import javax.ws.rs.ApplicationPath; import javax.ws.rs.core.Application; import java.util.HashSet; import java.util.Set; /** * Class MyApplication. * * @author Santiago Pericas-Geertsen */ @ApplicationPath("resources") public class MyApplication extends Application { @Override public Set<Class<?>> getClasses() { final Set<Class<?>> set = new HashSet<>(); set.add(BookController.class); set.add(BookController.class); return set; } }
Removed repeated line.
test/book-cdi/src/main/java/com/oracle/ozark/sample/MyApplication.java
Removed repeated line.
<ide><path>test/book-cdi/src/main/java/com/oracle/ozark/sample/MyApplication.java
<ide> public Set<Class<?>> getClasses() {
<ide> final Set<Class<?>> set = new HashSet<>();
<ide> set.add(BookController.class);
<del> set.add(BookController.class);
<ide> return set;
<ide> }
<ide> }
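A side note on why the duplicated line was cosmetic rather than a bug: Application#getClasses returns a Set, and HashSet silently ignores a second insertion of an equal element, so the duplicate add never produced a duplicate registration. A short demonstration:

import java.util.HashSet;
import java.util.Set;

/** Shows that adding the same element to a HashSet twice is a no-op. */
public class DedupDemo {
    public static void main(String[] args) {
        Set<Class<?>> set = new HashSet<>();
        System.out.println(set.add(String.class)); // true  - newly added
        System.out.println(set.add(String.class)); // false - already present
        System.out.println(set.size());            // 1
    }
}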
Java
apache-2.0
a635f9fc85d86c714ddfa860cfc96c180083671c
0
APriestman/autopsy,rcordovano/autopsy,maxrp/autopsy,wschaeferB/autopsy,eXcomm/autopsy,dgrove727/autopsy,rcordovano/autopsy,sidheshenator/autopsy,rcordovano/autopsy,karlmortensen/autopsy,sidheshenator/autopsy,rcordovano/autopsy,narfindustries/autopsy,esaunders/autopsy,mhmdfy/autopsy,millmanorama/autopsy,eXcomm/autopsy,karlmortensen/autopsy,dgrove727/autopsy,esaunders/autopsy,millmanorama/autopsy,wschaeferB/autopsy,APriestman/autopsy,karlmortensen/autopsy,narfindustries/autopsy,wschaeferB/autopsy,APriestman/autopsy,millmanorama/autopsy,esaunders/autopsy,sidheshenator/autopsy,APriestman/autopsy,maxrp/autopsy,APriestman/autopsy,narfindustries/autopsy,millmanorama/autopsy,sidheshenator/autopsy,eXcomm/autopsy,esaunders/autopsy,eXcomm/autopsy,wschaeferB/autopsy,mhmdfy/autopsy,rcordovano/autopsy,APriestman/autopsy,karlmortensen/autopsy,maxrp/autopsy,maxrp/autopsy,rcordovano/autopsy,esaunders/autopsy,dgrove727/autopsy,mhmdfy/autopsy,mhmdfy/autopsy,wschaeferB/autopsy,APriestman/autopsy
/* * Autopsy Forensic Browser * * Copyright 2013 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.report; import java.awt.Dimension; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.io.File; import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.HashSet; import java.util.logging.Level; import javax.swing.JDialog; import javax.swing.JFrame; import javax.swing.SwingWorker; import org.openide.filesystems.FileUtil; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.EscapeUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.StopWatch; import org.sleuthkit.autopsy.report.ReportProgressPanel.ReportStatus; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardArtifactTag; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; /** * Instances of this class use GeneralReportModules, TableReportModules and * FileReportModules to generate a report. If desired, displayProgressPanels() * can be called to show report generation progress using ReportProgressPanel * objects displayed using a dialog box. 
*/ public class ReportGenerator { private static final Logger logger = Logger.getLogger(ReportGenerator.class.getName()); private Case currentCase = Case.getCurrentCase(); private SleuthkitCase skCase = currentCase.getSleuthkitCase(); private Map<TableReportModule, ReportProgressPanel> tableProgress; private Map<GeneralReportModule, ReportProgressPanel> generalProgress; private Map<FileReportModule, ReportProgressPanel> fileProgress; private String reportPath; private ReportGenerationPanel panel = new ReportGenerationPanel(); static final String REPORTS_DIR = "Reports"; ReportGenerator(Map<TableReportModule, Boolean> tableModuleStates, Map<GeneralReportModule, Boolean> generalModuleStates, Map<FileReportModule, Boolean> fileListModuleStates) { // Create the root reports directory path of the form: <CASE DIRECTORY>/Reports/<Case name> <Timestamp>/ DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss"); Date date = new Date(); String dateNoTime = dateFormat.format(date); this.reportPath = currentCase.getCaseDirectory() + File.separator + REPORTS_DIR + File.separator + currentCase.getName() + " " + dateNoTime + File.separator; // Create the root reports directory. try { FileUtil.createFolder(new File(this.reportPath)); } catch (IOException ex) { logger.log(Level.SEVERE, "Failed to make report folder, may be unable to generate reports.", ex); } // Initialize the progress panels generalProgress = new HashMap<>(); tableProgress = new HashMap<>(); fileProgress = new HashMap<>(); setupProgressPanels(tableModuleStates, generalModuleStates, fileListModuleStates); } /** * Create a ReportProgressPanel for each report generation module selected by the user. * * @param tableModuleStates The enabled/disabled state of each TableReportModule * @param generalModuleStates The enabled/disabled state of each GeneralReportModule * @param fileListModuleStates The enabled/disabled state of each FileReportModule */ private void setupProgressPanels(Map<TableReportModule, Boolean> tableModuleStates, Map<GeneralReportModule, Boolean> generalModuleStates, Map<FileReportModule, Boolean> fileListModuleStates) { if (null != tableModuleStates) { for (Entry<TableReportModule, Boolean> entry : tableModuleStates.entrySet()) { if (entry.getValue()) { TableReportModule module = entry.getKey(); String moduleFilePath = module.getFilePath(); if (moduleFilePath != null) { tableProgress.put(module, panel.addReport(module.getName(), reportPath + moduleFilePath)); } else { tableProgress.put(module, panel.addReport(module.getName(), null)); } } } } if (null != generalModuleStates) { for (Entry<GeneralReportModule, Boolean> entry : generalModuleStates.entrySet()) { if (entry.getValue()) { GeneralReportModule module = entry.getKey(); String moduleFilePath = module.getFilePath(); if (moduleFilePath != null) { generalProgress.put(module, panel.addReport(module.getName(), reportPath + moduleFilePath)); } else { generalProgress.put(module, panel.addReport(module.getName(), null)); } } } } if (null != fileListModuleStates) { for(Entry<FileReportModule, Boolean> entry : fileListModuleStates.entrySet()) { if (entry.getValue()) { FileReportModule module = entry.getKey(); String moduleFilePath = module.getFilePath(); if (moduleFilePath != null) { fileProgress.put(module, panel.addReport(module.getName(), reportPath + moduleFilePath)); } else { fileProgress.put(module, panel.addReport(module.getName(), null)); } } } } } /** * Display the progress panels to the user, and add actions to close the parent dialog. 
*/ public void displayProgressPanels() { final JDialog dialog = new JDialog(new JFrame(), true); dialog.setDefaultCloseOperation(JDialog.DO_NOTHING_ON_CLOSE); dialog.setTitle("Report Generation Progress..."); dialog.add(this.panel); dialog.pack(); panel.addCloseAction(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { dialog.dispose(); } }); dialog.addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent e) { panel.close(); } }); Dimension screenDimension = Toolkit.getDefaultToolkit().getScreenSize(); int w = dialog.getSize().width; int h = dialog.getSize().height; // set the location of the popUp Window on the center of the screen dialog.setLocation((screenDimension.width - w) / 2, (screenDimension.height - h) / 2); dialog.setVisible(true); } /** * Run the GeneralReportModules using a SwingWorker. */ public void generateGeneralReports() { GeneralReportsWorker worker = new GeneralReportsWorker(); worker.execute(); } /** * Run the TableReportModules using a SwingWorker. * * @param artifactTypeSelections the enabled/disabled state of the artifact types to be included in the report * @param tagSelections the enabled/disabled state of the tag names to be included in the report */ public void generateBlackboardArtifactsReports(Map<ARTIFACT_TYPE, Boolean> artifactTypeSelections, Map<String, Boolean> tagNameSelections) { if (!tableProgress.isEmpty() && null != artifactTypeSelections) { TableReportsWorker worker = new TableReportsWorker(artifactTypeSelections, tagNameSelections); worker.execute(); } } /** * Run the FileReportModules using a SwingWorker. * * @param enabledInfo the Information that should be included about each file * in the report. */ public void generateFileListReports(Map<FileReportDataTypes, Boolean> enabledInfo) { if (!fileProgress.isEmpty() && null != enabledInfo) { List<FileReportDataTypes> enabled = new ArrayList<>(); for (Entry<FileReportDataTypes, Boolean> e : enabledInfo.entrySet()) { if(e.getValue()) { enabled.add(e.getKey()); } } FileReportsWorker worker = new FileReportsWorker(enabled); worker.execute(); } } /** * SwingWorker to run GeneralReportModules. */ private class GeneralReportsWorker extends SwingWorker<Integer, Integer> { @Override protected Integer doInBackground() throws Exception { for (Entry<GeneralReportModule, ReportProgressPanel> entry : generalProgress.entrySet()) { GeneralReportModule module = entry.getKey(); if (generalProgress.get(module).getStatus() != ReportStatus.CANCELED) { module.generateReport(reportPath, generalProgress.get(module)); } } return 0; } } /** * SwingWorker to run FileReportModules. 
     */
    private class FileReportsWorker extends SwingWorker<Integer, Integer> {
        private List<FileReportDataTypes> enabledInfo = Arrays.asList(FileReportDataTypes.values());
        private List<FileReportModule> fileModules = new ArrayList<>();

        FileReportsWorker(List<FileReportDataTypes> enabled) {
            enabledInfo = enabled;
            for (Entry<FileReportModule, ReportProgressPanel> entry : fileProgress.entrySet()) {
                fileModules.add(entry.getKey());
            }
        }

        @Override
        protected Integer doInBackground() throws Exception {
            for (FileReportModule module : fileModules) {
                ReportProgressPanel progress = fileProgress.get(module);
                if (progress.getStatus() != ReportStatus.CANCELED) {
                    progress.start();
                    progress.updateStatusLabel("Querying database...");
                }
            }

            List<AbstractFile> files = getFiles();
            int numFiles = files.size();
            for (FileReportModule module : fileModules) {
                module.startReport(reportPath);
                module.startTable(enabledInfo);
                fileProgress.get(module).setIndeterminate(false);
                fileProgress.get(module).setMaximumProgress(numFiles);
            }

            int i = 0;
            // Add files to report.
            for (AbstractFile file : files) {
                // Check to see if any reports have been cancelled.
                if (fileModules.isEmpty()) {
                    break;
                }
                // Remove cancelled reports; otherwise add the file to the report.
                Iterator<FileReportModule> iter = fileModules.iterator();
                while (iter.hasNext()) {
                    FileReportModule module = iter.next();
                    ReportProgressPanel progress = fileProgress.get(module);
                    if (progress.getStatus() == ReportStatus.CANCELED) {
                        iter.remove();
                    } else {
                        module.addRow(file, enabledInfo);
                        progress.increment();
                        if ((i % 100) == 0) {
                            progress.updateStatusLabel("Now processing " + file.getName());
                        }
                    }
                }
                i++;
            }

            for (FileReportModule module : fileModules) {
                module.endTable();
                module.endReport();
                fileProgress.get(module).complete();
            }

            return 0;
        }

        /**
         * Get all files in the image.
         * @return the files, excluding directories; an empty list on error
         */
        private List<AbstractFile> getFiles() {
            List<AbstractFile> absFiles;
            try {
                // meta_type 2 filters out directory entries.
                absFiles = skCase.findAllFilesWhere("NOT meta_type = 2");
                return absFiles;
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "Failed to get all files in the image.", ex);
                return Collections.<AbstractFile>emptyList();
            }
        }
    }

    /**
     * SwingWorker to run TableReportModules to report on blackboard artifacts,
     * content tags, and blackboard artifact tags.
     */
    private class TableReportsWorker extends SwingWorker<Integer, Integer> {
        private List<TableReportModule> tableModules = new ArrayList<>();
        private List<ARTIFACT_TYPE> artifactTypes = new ArrayList<>();
        private HashSet<String> tagNamesFilter = new HashSet<>();

        TableReportsWorker(Map<ARTIFACT_TYPE, Boolean> artifactTypeSelections, Map<String, Boolean> tagNameSelections) {
            // Get the report modules selected by the user.
            for (Entry<TableReportModule, ReportProgressPanel> entry : tableProgress.entrySet()) {
                tableModules.add(entry.getKey());
            }

            // Get the artifact types selected by the user.
            for (Entry<ARTIFACT_TYPE, Boolean> entry : artifactTypeSelections.entrySet()) {
                if (entry.getValue()) {
                    artifactTypes.add(entry.getKey());
                }
            }

            // Get the tag names selected by the user and make a tag names filter.
            if (null != tagNameSelections) {
                for (Entry<String, Boolean> entry : tagNameSelections.entrySet()) {
                    if (entry.getValue()) {
                        tagNamesFilter.add(entry.getKey());
                    }
                }
            }
        }

        @Override
        protected Integer doInBackground() throws Exception {
            // Start the progress indicators for each active TableReportModule.
            for (TableReportModule module : tableModules) {
                ReportProgressPanel progress = tableProgress.get(module);
                if (progress.getStatus() != ReportStatus.CANCELED) {
                    module.startReport(reportPath);
                    progress.start();
                    progress.setIndeterminate(false);
                    progress.setMaximumProgress(ARTIFACT_TYPE.values().length + 2); // +2 for content and blackboard artifact tags
                }
            }

            makeBlackboardArtifactTables();
            makeContentTagsTables();
            makeBlackboardArtifactTagsTables();

            for (TableReportModule module : tableModules) {
                tableProgress.get(module).complete();
                module.endReport();
            }

            return 0;
        }

        private void makeBlackboardArtifactTables() {
            // Make a comment string describing the tag names filter in effect.
            StringBuilder comment = new StringBuilder();
            if (!tagNamesFilter.isEmpty()) {
                comment.append("This report only includes results tagged with: ");
                comment.append(makeCommaSeparatedList(tagNamesFilter));
            }

            // Add a table to the report for every enabled blackboard artifact type.
            for (ARTIFACT_TYPE type : artifactTypes) {
                // Check for cancellation.
                removeCancelledTableReportModules();
                if (tableModules.isEmpty()) {
                    return;
                }

                for (TableReportModule module : tableModules) {
                    tableProgress.get(module).updateStatusLabel("Now processing " + type.getDisplayName() + "...");
                }

                // Keyword hits and hashset hit artifacts get special handling.
                if (type.equals(ARTIFACT_TYPE.TSK_KEYWORD_HIT)) {
                    writeKeywordHits(tableModules, comment.toString(), tagNamesFilter);
                    continue;
                } else if (type.equals(ARTIFACT_TYPE.TSK_HASHSET_HIT)) {
                    writeHashsetHits(tableModules, comment.toString(), tagNamesFilter);
                    continue;
                }

                List<ArtifactData> artifacts = getFilteredArtifacts(type, tagNamesFilter);

                if (artifacts.isEmpty()) {
                    continue;
                }

                // The most efficient way to sort all the Artifacts is to add them to a List, and then
                // sort that List based off a Comparator. Adding to a TreeMap/Set/List sorts the list
                // each time an element is added, which adds unnecessary overhead if we only need it sorted once.
                Collections.sort(artifacts);

                // Get the column headers appropriate for the artifact type.
                /* @@@ BC: Seems like a better design here would be to have a method that
                 * takes in the artifact as an argument and returns the attributes. We then use that
                 * to make the headers and to make each row afterwards so that we don't have artifact-specific
                 * logic in both getArtifactTableColumnHeaders and ArtifactData.getRow()
                 */
                List<String> columnHeaders = getArtifactTableColumnHeaders(type.getTypeID());
                if (columnHeaders == null) {
                    // @@@ Hack to prevent system from hanging. Better solution is to merge all attributes into a single column or analyze the artifacts to find out how many are needed.
                    MessageNotifyUtil.Notify.show("Skipping artifact type " + type + " in reports", "Unknown columns to report on", MessageNotifyUtil.MessageType.ERROR);
                    continue;
                }

                for (TableReportModule module : tableModules) {
                    module.startDataType(type.getDisplayName(), comment.toString());
                    module.startTable(columnHeaders);
                }

                boolean msgSent = false;
                for (ArtifactData artifactData : artifacts) {
                    // Add the row data to all of the reports.
                    for (TableReportModule module : tableModules) {
                        // Get the row data for this type of artifact.
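                        // Note: getRow() computes the row values once and caches them, so this
                        // call is cheap even though compareTo() already used it while sorting.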
                        List<String> rowData = artifactData.getRow();
                        if (rowData.isEmpty()) {
                            if (msgSent == false) {
                                MessageNotifyUtil.Notify.show("Skipping artifact rows for type " + type + " in reports", "Unknown columns to report on", MessageNotifyUtil.MessageType.ERROR);
                                msgSent = true;
                            }
                            continue;
                        }
                        module.addRow(rowData);
                    }
                }

                // Finish up this data type
                for (TableReportModule module : tableModules) {
                    tableProgress.get(module).increment();
                    module.endTable();
                    module.endDataType();
                }
            }
        }

        private void makeContentTagsTables() {
            // Check for cancellation.
            removeCancelledTableReportModules();
            if (tableModules.isEmpty()) {
                return;
            }

            // Get the content tags.
            List<ContentTag> tags;
            try {
                tags = Case.getCurrentCase().getServices().getTagsManager().getAllContentTags();
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "Failed to get content tags", ex);
                return;
            }

            // Tell the modules reporting on content tags is beginning.
            for (TableReportModule module : tableModules) {
                // @@@ This casting is a tricky little workaround to allow the HTML report module to slip in a content hyperlink.
                // @@@ Also, using the obsolete ARTIFACT_TYPE.TSK_TAG_FILE is an expedient hack.
                tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_TAG_FILE.getDisplayName() + "...");
                ArrayList<String> columnHeaders = new ArrayList<>(Arrays.asList("File", "Tag", "Comment"));
                StringBuilder comment = new StringBuilder();
                if (!tagNamesFilter.isEmpty()) {
                    comment.append("This report only includes files tagged with: ");
                    comment.append(makeCommaSeparatedList(tagNamesFilter));
                }
                if (module instanceof ReportHTML) {
                    ReportHTML htmlReportModule = (ReportHTML)module;
                    htmlReportModule.startDataType(ARTIFACT_TYPE.TSK_TAG_FILE.getDisplayName(), comment.toString());
                    htmlReportModule.startContentTagsTable(columnHeaders);
                } else {
                    module.startDataType(ARTIFACT_TYPE.TSK_TAG_FILE.getDisplayName(), comment.toString());
                    module.startTable(columnHeaders);
                }
            }

            // Give the modules the rows for the content tags.
            for (ContentTag tag : tags) {
                if (passesTagNamesFilter(tag.getName().getDisplayName())) {
                    ArrayList<String> rowData = new ArrayList<>(Arrays.asList(tag.getContent().getName(), tag.getName().getDisplayName(), tag.getComment()));
                    for (TableReportModule module : tableModules) {
                        // @@@ This casting is a tricky little workaround to allow the HTML report module to slip in a content hyperlink.
                        if (module instanceof ReportHTML) {
                            ReportHTML htmlReportModule = (ReportHTML)module;
                            htmlReportModule.addRowWithTaggedContentHyperlink(rowData, tag);
                        } else {
                            module.addRow(rowData);
                        }
                    }
                }
            }

            // Tell the modules reporting on content tags is ended.
            for (TableReportModule module : tableModules) {
                tableProgress.get(module).increment();
                module.endTable();
                module.endDataType();
            }
        }

        private void makeBlackboardArtifactTagsTables() {
            // Check for cancellation.
            removeCancelledTableReportModules();
            if (tableModules.isEmpty()) {
                return;
            }

            List<BlackboardArtifactTag> tags;
            try {
                tags = Case.getCurrentCase().getServices().getTagsManager().getAllBlackboardArtifactTags();
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "Failed to get blackboard artifact tags", ex);
                return;
            }

            // Tell the modules reporting on blackboard artifact tags data type is beginning.
            // @@@ Using the obsolete ARTIFACT_TYPE.TSK_TAG_ARTIFACT is an expedient hack.
            for (TableReportModule module : tableModules) {
                tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getDisplayName() + "...");
                StringBuilder comment = new StringBuilder();
                if (!tagNamesFilter.isEmpty()) {
                    comment.append("This report only includes results tagged with: ");
                    comment.append(makeCommaSeparatedList(tagNamesFilter));
                }
                module.startDataType(ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getDisplayName(), comment.toString());
                module.startTable(new ArrayList<>(Arrays.asList("Result Type", "Tag", "Comment", "Source File")));
            }

            // Give the modules the rows for the blackboard artifact tags.
            for (BlackboardArtifactTag tag : tags) {
                if (passesTagNamesFilter(tag.getName().getDisplayName())) {
                    for (TableReportModule module : tableModules) {
                        module.addRow(new ArrayList<>(Arrays.asList(tag.getArtifact().getArtifactTypeName(), tag.getName().getDisplayName(), tag.getComment(), tag.getContent().getName())));
                    }
                }
            }

            // Tell the modules reporting on blackboard artifact tags is ended.
            for (TableReportModule module : tableModules) {
                tableProgress.get(module).increment();
                module.endTable();
                module.endDataType();
            }
        }

        boolean passesTagNamesFilter(String tagName) {
            return tagNamesFilter.isEmpty() || tagNamesFilter.contains(tagName);
        }

        void removeCancelledTableReportModules() {
            Iterator<TableReportModule> iter = tableModules.iterator();
            while (iter.hasNext()) {
                TableReportModule module = iter.next();
                if (tableProgress.get(module).getStatus() == ReportStatus.CANCELED) {
                    iter.remove();
                }
            }
        }
    }

    // @@@ Should move the methods specific to TableReportsWorker into that scope.
    private boolean failsTagFilter(HashSet<String> tagNames, HashSet<String> tagsNamesFilter)
    {
        if (null == tagsNamesFilter || tagsNamesFilter.isEmpty()) {
            return false;
        }

        HashSet<String> filteredTagNames = new HashSet<>(tagNames);
        filteredTagNames.retainAll(tagsNamesFilter);
        return filteredTagNames.isEmpty();
    }

    /**
     * Get a List of the artifacts and data of the given type that pass the given Tag Filter.
     *
     * @param type The artifact type to get
     * @param tagNamesFilter The tag names that should be included.
     * @return a list of the filtered artifacts and their data
     */
    private List<ArtifactData> getFilteredArtifacts(ARTIFACT_TYPE type, HashSet<String> tagNamesFilter) {
        List<ArtifactData> artifacts = new ArrayList<>();
        try {
            for (BlackboardArtifact artifact : skCase.getBlackboardArtifacts(type)) {
                List<BlackboardArtifactTag> tags = Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact);
                HashSet<String> uniqueTagNames = new HashSet<>();
                for (BlackboardArtifactTag tag : tags) {
                    uniqueTagNames.add(tag.getName().getDisplayName());
                }
                if (failsTagFilter(uniqueTagNames, tagNamesFilter)) {
                    continue;
                }
                try {
                    artifacts.add(new ArtifactData(artifact, skCase.getBlackboardAttributes(artifact), uniqueTagNames));
                } catch (TskCoreException ex) {
                    logger.log(Level.SEVERE, "Failed to get Blackboard Attributes when generating report.", ex);
                }
            }
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "Failed to get Blackboard Artifacts when generating report.", ex);
        }
        return artifacts;
    }

    /**
     * Write the keyword hits to the provided TableReportModules.
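     *
     * <p>Sketch of the calls made below for each module (this is the nesting
     * produced, not additional API):</p>
     * <pre>{@code
     * startDataType(TSK_KEYWORD_HIT display name, comment)
     *   addSetIndex(lists)
     *   startSet(list)             // one per keyword list
     *     addSetElement(keyword)   // one per keyword in the list
     *       startTable(headers)
     *         addRow(preview, source file, tags)
     * endDataType()
     * }</pre>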
     * @param tableModules modules to report on
     * @param comment a comment to be included with the report data type
     * @param tagNamesFilter tag names for filtering the keyword hits reported
     */
    @SuppressWarnings("deprecation")
    private void writeKeywordHits(List<TableReportModule> tableModules, String comment, HashSet<String> tagNamesFilter) {
        ResultSet listsRs = null;
        try {
            // Query for keyword lists
            listsRs = skCase.runQuery("SELECT att.value_text AS list " +
                                      "FROM blackboard_attributes AS att, blackboard_artifacts AS art " +
                                      "WHERE att.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + " " +
                                          "AND art.artifact_type_id = " + ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID() + " " +
                                          "AND att.artifact_id = art.artifact_id " +
                                      "GROUP BY list");
            List<String> lists = new ArrayList<>();
            while (listsRs.next()) {
                String list = listsRs.getString("list");
                if (list.isEmpty()) {
                    list = "User Searches";
                }
                lists.add(list);
            }

            // Start the keyword hits data type and give each module the keyword lists as a set index.
            for (TableReportModule module : tableModules) {
                module.startDataType(ARTIFACT_TYPE.TSK_KEYWORD_HIT.getDisplayName(), comment);
                module.addSetIndex(lists);
                tableProgress.get(module).updateStatusLabel("Now processing "
                        + ARTIFACT_TYPE.TSK_KEYWORD_HIT.getDisplayName() + "...");
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Failed to query keyword lists.", ex);
        } finally {
            if (listsRs != null) {
                try {
                    skCase.closeRunQuery(listsRs);
                } catch (SQLException ex) {
                }
            }
        }

        ResultSet rs = null;
        try {
            // Query for keywords
            rs = skCase.runQuery("SELECT art.artifact_id, art.obj_id, att1.value_text AS keyword, att2.value_text AS preview, att3.value_text AS list, f.name AS name " +
                                 "FROM blackboard_artifacts AS art, blackboard_attributes AS att1, blackboard_attributes AS att2, blackboard_attributes AS att3, tsk_files AS f " +
                                 "WHERE (att1.artifact_id = art.artifact_id) " +
                                 "AND (att2.artifact_id = art.artifact_id) " +
                                 "AND (att3.artifact_id = art.artifact_id) " +
                                 "AND (f.obj_id = art.obj_id) " +
                                 "AND (att1.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID() + ") " +
                                 "AND (att2.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID() + ") " +
                                 "AND (att3.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + ") " +
                                 "AND (art.artifact_type_id = " + ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID() + ") " +
                                 "ORDER BY list, keyword, name");
            String currentKeyword = "";
            String currentList = "";
            while (rs.next()) {
                // Check to see if all the TableReportModules have been canceled.
                if (tableModules.isEmpty()) {
                    break;
                }
                Iterator<TableReportModule> iter = tableModules.iterator();
                while (iter.hasNext()) {
                    TableReportModule module = iter.next();
                    if (tableProgress.get(module).getStatus() == ReportStatus.CANCELED) {
                        iter.remove();
                    }
                }

                // Get any tags that are associated with this artifact and apply the tag filter.
                HashSet<String> uniqueTagNames = new HashSet<>();
                ResultSet tagNameRows = skCase.runQuery("SELECT display_name FROM tag_names WHERE artifact_id = " + rs.getLong("artifact_id"));
                while (tagNameRows.next()) {
                    uniqueTagNames.add(tagNameRows.getString("display_name"));
                }
                skCase.closeRunQuery(tagNameRows);
                if (failsTagFilter(uniqueTagNames, tagNamesFilter)) {
                    continue;
                }
                String tagsList = makeCommaSeparatedList(uniqueTagNames);

                Long objId = rs.getLong("obj_id");
                String keyword = rs.getString("keyword");
                String preview = rs.getString("preview");
                String list = rs.getString("list");
                String uniquePath = "";

                try {
                    uniquePath = skCase.getAbstractFileById(objId).getUniquePath();
                } catch (TskCoreException ex) {
                    logger.log(Level.WARNING, "Failed to get Abstract File by ID.", ex);
                }

                // If the lists aren't the same, we've started a new list
                if ((!list.equals(currentList) && !list.isEmpty()) || (list.isEmpty() && !currentList.equals("User Searches"))) {
                    if (!currentList.isEmpty()) {
                        for (TableReportModule module : tableModules) {
                            module.endTable();
                            module.endSet();
                        }
                    }
                    currentList = list.isEmpty() ? "User Searches" : list;
                    currentKeyword = ""; // reset the current keyword because it's a new list
                    for (TableReportModule module : tableModules) {
                        module.startSet(currentList);
                        tableProgress.get(module).updateStatusLabel("Now processing "
                                + ARTIFACT_TYPE.TSK_KEYWORD_HIT.getDisplayName() + " (" + currentList + ")...");
                    }
                }
                if (!keyword.equals(currentKeyword)) {
                    if (!currentKeyword.isEmpty()) {
                        for (TableReportModule module : tableModules) {
                            module.endTable();
                        }
                    }
                    currentKeyword = keyword;
                    for (TableReportModule module : tableModules) {
                        module.addSetElement(currentKeyword);
                        module.startTable(getArtifactTableColumnHeaders(ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()));
                    }
                }

                String escapedPreview = EscapeUtil.escapeHtml(preview);
                for (TableReportModule module : tableModules) {
                    module.addRow(Arrays.asList(new String[] {escapedPreview.replaceAll("<!", ""), uniquePath, tagsList}));
                }
            }

            // Finish the current data type
            for (TableReportModule module : tableModules) {
                tableProgress.get(module).increment();
                module.endDataType();
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Failed to query keywords.", ex);
        } finally {
            if (rs != null) {
                try {
                    skCase.closeRunQuery(rs);
                } catch (SQLException ex) {
                }
            }
        }
    }

    /**
     * Write the hash set hits to the provided TableReportModules.
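     *
     * <p>The structure mirrors {@code writeKeywordHits}: one set per hash set
     * name, each holding a single table of (File, Size, Tags) rows.</p>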
     * @param tableModules modules to report on
     * @param comment a comment to be included with the report data type
     * @param tagNamesFilter tag names for filtering the hash set hits reported
     */
    @SuppressWarnings("deprecation")
    private void writeHashsetHits(List<TableReportModule> tableModules, String comment, HashSet<String> tagNamesFilter) {
        ResultSet listsRs = null;
        try {
            // Query for hashsets
            listsRs = skCase.runQuery("SELECT att.value_text AS list " +
                                      "FROM blackboard_attributes AS att, blackboard_artifacts AS art " +
                                      "WHERE att.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + " " +
                                          "AND art.artifact_type_id = " + ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID() + " " +
                                          "AND att.artifact_id = art.artifact_id " +
                                      "GROUP BY list");
            List<String> lists = new ArrayList<>();
            while (listsRs.next()) {
                lists.add(listsRs.getString("list"));
            }

            // Start the hashset hits data type and give each module the hash set names as a set index.
            for (TableReportModule module : tableModules) {
                module.startDataType(ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName(), comment);
                module.addSetIndex(lists);
                tableProgress.get(module).updateStatusLabel("Now processing "
                        + ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName() + "...");
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Failed to query hashset lists.", ex);
        } finally {
            if (listsRs != null) {
                try {
                    skCase.closeRunQuery(listsRs);
                } catch (SQLException ex) {
                }
            }
        }

        ResultSet rs = null;
        try {
            // Query for hashset hits
            rs = skCase.runQuery("SELECT art.artifact_id, art.obj_id, att.value_text AS setname, f.name AS name, f.size AS size " +
                                 "FROM blackboard_artifacts AS art, blackboard_attributes AS att, tsk_files AS f " +
                                 "WHERE (att.artifact_id = art.artifact_id) " +
                                 "AND (f.obj_id = art.obj_id) " +
                                 "AND (att.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + ") " +
                                 "AND (art.artifact_type_id = " + ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID() + ") " +
                                 "ORDER BY setname, name, size");
            String currentSet = "";
            while (rs.next()) {
                // Check to see if all the TableReportModules have been canceled.
                if (tableModules.isEmpty()) {
                    break;
                }
                Iterator<TableReportModule> iter = tableModules.iterator();
                while (iter.hasNext()) {
                    TableReportModule module = iter.next();
                    if (tableProgress.get(module).getStatus() == ReportStatus.CANCELED) {
                        iter.remove();
                    }
                }

                // Get any tags that are associated with this artifact and apply the tag filter.
                HashSet<String> uniqueTagNames = new HashSet<>();
                ResultSet tagNameRows = skCase.runQuery("SELECT display_name FROM tag_names WHERE artifact_id = " + rs.getLong("artifact_id"));
                while (tagNameRows.next()) {
                    uniqueTagNames.add(tagNameRows.getString("display_name"));
                }
                skCase.closeRunQuery(tagNameRows);
                if (failsTagFilter(uniqueTagNames, tagNamesFilter)) {
                    continue;
                }
                String tagsList = makeCommaSeparatedList(uniqueTagNames);

                Long objId = rs.getLong("obj_id");
                String set = rs.getString("setname");
                String size = rs.getString("size");
                String uniquePath = "";

                try {
                    uniquePath = skCase.getAbstractFileById(objId).getUniquePath();
                } catch (TskCoreException ex) {
                    logger.log(Level.WARNING, "Failed to get Abstract File by ID.", ex);
                }

                // If the sets aren't the same, we've started a new set
                if (!set.equals(currentSet)) {
                    if (!currentSet.isEmpty()) {
                        for (TableReportModule module : tableModules) {
                            module.endTable();
                            module.endSet();
                        }
                    }
                    currentSet = set;
                    for (TableReportModule module : tableModules) {
                        module.startSet(currentSet);
                        module.startTable(getArtifactTableColumnHeaders(ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()));
                        tableProgress.get(module).updateStatusLabel("Now processing "
                                + ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName() + " (" + currentSet + ")...");
                    }
                }

                // Add a row for this hit to every module.
                for (TableReportModule module : tableModules) {
                    module.addRow(Arrays.asList(new String[] {uniquePath, size, tagsList}));
                }
            }

            // Finish the current data type
            for (TableReportModule module : tableModules) {
                tableProgress.get(module).increment();
                module.endDataType();
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Failed to query hashset hits.", ex);
        } finally {
            if (rs != null) {
                try {
                    skCase.closeRunQuery(rs);
                } catch (SQLException ex) {
                }
            }
        }
    }

    /**
     * For a given artifact type ID, return the list of the row titles we're reporting on.
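     *
     * <p>For example (per the switch below, with "Tags" always appended):</p>
     * <pre>{@code
     * getArtifactTableColumnHeaders(ARTIFACT_TYPE.TSK_RECENT_OBJECT.getTypeID())
     * // -> ["Path", "Date/Time", "Source File", "Tags"]
     * }</pre>
     * <p>Returns {@code null} for artifact types with no column mapping.</p>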
* * @param artifactTypeId artifact type ID * @return List<String> row titles */ private List<String> getArtifactTableColumnHeaders(int artifactTypeId) { ArrayList<String> columnHeaders; BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(artifactTypeId); switch (type) { case TSK_WEB_BOOKMARK: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"URL", "Title", "Date Created", "Program", "Source File"})); break; case TSK_WEB_COOKIE: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"URL", "Date/Time", "Name", "Value", "Program", "Source File"})); break; case TSK_WEB_HISTORY: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"URL", "Date Accessed", "Referrer", "Title", "Program", "Source File"})); break; case TSK_WEB_DOWNLOAD: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Destination", "Source URL", "Date Accessed", "Program", "Source File"})); break; case TSK_RECENT_OBJECT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Path", "Date/Time", "Source File"})); break; case TSK_INSTALLED_PROG: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Program Name", "Install Date/Time", "Source File"})); break; case TSK_KEYWORD_HIT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Preview", "Source File"})); break; case TSK_HASHSET_HIT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"File", "Size"})); break; case TSK_DEVICE_ATTACHED: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Name", "Device ID", "Date/Time", "Source File"})); break; case TSK_WEB_SEARCH_QUERY: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Text", "Domain", "Date Accessed", "Program Name", "Source File"})); break; case TSK_METADATA_EXIF: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Date Taken", "Device Manufacturer", "Device Model", "Latitude", "Longitude", "Source File"})); break; case TSK_CONTACT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Person Name", "Phone Number", "Phone Number (Home)", "Phone Number (Office)", "Phone Number (Mobile)", "Email", "Source File" })); break; case TSK_MESSAGE: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Message Type", "Direction", "Date/Time", "From Phone Number", "From Email", "To Phone Number", "To Email", "Subject", "Text", "Source File" })); break; case TSK_CALLLOG: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Person Name", "Phone Number", "Date/Time", "Direction", "Source File" })); break; case TSK_CALENDAR_ENTRY: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Calendar Entry Type", "Description", "Start Date/Time", "End Date/Time", "Location", "Source File" })); break; case TSK_SPEED_DIAL_ENTRY: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Short Cut", "Person Name", "Phone Number", "Source File" })); break; case TSK_BLUETOOTH_PAIRING: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Device Name", "Device Address", "Date/Time", "Source File" })); break; case TSK_GPS_TRACKPOINT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_GPS_BOOKMARK: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_GPS_LAST_KNOWN_LOCATION: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", 
"Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_GPS_SEARCH: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_SERVICE_ACCOUNT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Category", "User ID", "Password", "Person Name", "App Name", "URL", "App Path", "Description", "ReplyTo Address", "Mail Server", "Source File" })); break; case TSK_TOOL_OUTPUT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Program Name", "Text", "Source File"})); break; default: return null; } columnHeaders.add("Tags"); return columnHeaders; } /** * Map all BlackboardAttributes' values in a list of BlackboardAttributes to each attribute's attribute * type ID, using module's dateToString method for date/time conversions if a module is supplied. * * @param attList list of BlackboardAttributes to be mapped * @param module the TableReportModule the mapping is for * @return Map<Integer, String> of the BlackboardAttributes mapped to their attribute type ID */ public Map<Integer, String> getMappedAttributes(List<BlackboardAttribute> attList, TableReportModule... module) { Map<Integer, String> attributes = new HashMap<>(); int size = ATTRIBUTE_TYPE.values().length; for (int n = 0; n <= size; n++) { attributes.put(n, ""); } for (BlackboardAttribute tempatt : attList) { String value = ""; Integer type = tempatt.getAttributeTypeID(); if (type.equals(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_MODIFIED.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_SENT.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_RCVD.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_START.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_END.getTypeID()) ) { if (module.length > 0) { value = module[0].dateToString(tempatt.getValueLong()); } else { SimpleDateFormat sdf = new java.text.SimpleDateFormat("yyyy/MM/dd HH:mm:ss"); value = sdf.format(new java.util.Date((tempatt.getValueLong() * 1000))); } } else if(type.equals(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())) { value = Double.toString(tempatt.getValueDouble()); } else { value = tempatt.getValueString(); } if (value == null) { value = ""; } value = EscapeUtil.escapeHtml(value); attributes.put(type, value); } return attributes; } /** * Converts a collection of strings into a single string of comma-separated items * * @param items A collection of strings * @return A string of comma-separated items */ private String makeCommaSeparatedList(Collection<String> items) { String list = ""; for (Iterator<String> iterator = items.iterator(); iterator.hasNext(); ) { list += iterator.next() + (iterator.hasNext() ? ", " : ""); } return list; } /** * Given a tsk_file's obj_id, return the unique path of that file. * * @param objId tsk_file obj_id * @return String unique path */ private String getFileUniquePath(long objId) { try { return skCase.getAbstractFileById(objId).getUniquePath(); } catch (TskCoreException ex) { logger.log(Level.WARNING, "Failed to get Abstract File by ID.", ex); } return ""; } /** * Container class that holds data about an Artifact to eliminate duplicate * calls to the Sleuthkit database. 
*/ private class ArtifactData implements Comparable<ArtifactData> { private BlackboardArtifact artifact; private List<BlackboardAttribute> attributes; private HashSet<String> tags; private List<String> rowData = null; ArtifactData(BlackboardArtifact artifact, List<BlackboardAttribute> attrs, HashSet<String> tags) { this.artifact = artifact; this.attributes = attrs; this.tags = tags; } public BlackboardArtifact getArtifact() { return artifact; } public List<BlackboardAttribute> getAttributes() { return attributes; } public HashSet<String> getTags() { return tags; } public long getArtifactID() { return artifact.getArtifactID(); } public long getObjectID() { return artifact.getObjectID(); } /** * Compares ArtifactData objects by the first attribute they have in * common in their List<BlackboardAttribute>. * * If all attributes are the same, they are assumed duplicates and are * compared by their artifact id. Should only be used with attributes * of the same type. */ @Override public int compareTo(ArtifactData otherArtifactData) { List<String> thisRow = getRow(); List<String> otherRow = otherArtifactData.getRow(); for (int i = 0; i < thisRow.size(); i++) { int compare = thisRow.get(i).compareTo(otherRow.get(i)); if (compare != 0) { return compare; } } // If all attributes are the same, they're most likely duplicates so sort by artifact ID return ((Long) this.getArtifactID()).compareTo((Long) otherArtifactData.getArtifactID()); } /** * Get the values for each row in the table report. */ public List<String> getRow() { if (rowData == null) { try { rowData = getOrderedRowDataAsStrings(); } catch (TskCoreException ex) { logger.log(Level.WARNING, "Core exception while generating row data for artifact report.", ex); rowData = Collections.<String>emptyList(); } } return rowData; } /** * Get a list of Strings with all the row values for the Artifact in the * correct order to be written to the report. 
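         *
         * <p>The value order here must match the header order produced by
         * {@code getArtifactTableColumnHeaders} for the same artifact type;
         * both end with the comma-separated tag list (the "Tags" column).</p>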
* * @return List<String> row values * @throws TskCoreException */ private List<String> getOrderedRowDataAsStrings() throws TskCoreException { Map<Integer, String> mappedAttributes = getMappedAttributes(); List<String> orderedRowData = new ArrayList<>(); BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(getArtifact().getArtifactTypeID()); switch (type) { case TSK_WEB_BOOKMARK: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_WEB_COOKIE: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_WEB_HISTORY: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_WEB_DOWNLOAD: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_RECENT_OBJECT: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_INSTALLED_PROG: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_DEVICE_ATTACHED: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_WEB_SEARCH_QUERY: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_METADATA_EXIF: 
orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_CONTACT: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_HOME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_OFFICE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_MOBILE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_MESSAGE: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_MESSAGE_TYPE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DIRECTION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_FROM.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_TO.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_SUBJECT.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_CALLLOG: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DIRECTION.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_CALENDAR_ENTRY: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_CALENDAR_ENTRY_TYPE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_START.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_END.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_SPEED_DIAL_ENTRY: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_SHORTCUT.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_BLUETOOTH_PAIRING: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); 
orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_GPS_TRACKPOINT: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_GPS_BOOKMARK: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_GPS_LAST_KNOWN_LOCATION: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_GPS_SEARCH: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_SERVICE_ACCOUNT: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_CATEGORY.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_USER_ID.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PASSWORD.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_REPLYTO.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_SERVER_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_TOOL_OUTPUT: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); 
                    orderedRowData.add(getFileUniquePath(getObjectID()));
                    break;
            }
            orderedRowData.add(makeCommaSeparatedList(getTags()));
            return orderedRowData;
        }

        /**
         * Returns a mapping of Attribute Type ID to the String representation
         * of an Attribute Value.
         */
        private Map<Integer, String> getMappedAttributes() {
            return ReportGenerator.this.getMappedAttributes(attributes);
        }

        /**
         * Get a BlackboardArtifact.
         *
         * @param artifactId an artifact id
         * @return the BlackboardArtifact associated with the artifact id, or null on error
         */
        private BlackboardArtifact getArtifactByID(long artifactId) {
            try {
                return skCase.getBlackboardArtifact(artifactId);
            } catch (TskCoreException ex) {
                logger.log(Level.WARNING, "Failed to get blackboard artifact by ID.", ex);
            }
            return null;
        }
    }
}
Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java
/* * Autopsy Forensic Browser * * Copyright 2013 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.report; import java.awt.Dimension; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.io.File; import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.HashSet; import java.util.logging.Level; import javax.swing.JDialog; import javax.swing.JFrame; import javax.swing.SwingWorker; import org.openide.filesystems.FileUtil; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.EscapeUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.StopWatch; import org.sleuthkit.autopsy.report.ReportProgressPanel.ReportStatus; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardArtifactTag; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.ContentTag; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; /** * Instances of this class use GeneralReportModules, TableReportModules and * FileReportModules to generate a report. If desired, displayProgressPanels() * can be called to show report generation progress using ReportProgressPanel * objects displayed using a dialog box. 
*/ public class ReportGenerator { private static final Logger logger = Logger.getLogger(ReportGenerator.class.getName()); private Case currentCase = Case.getCurrentCase(); private SleuthkitCase skCase = currentCase.getSleuthkitCase(); private Map<TableReportModule, ReportProgressPanel> tableProgress; private Map<GeneralReportModule, ReportProgressPanel> generalProgress; private Map<FileReportModule, ReportProgressPanel> fileProgress; private String reportPath; private ReportGenerationPanel panel = new ReportGenerationPanel(); static final String REPORTS_DIR = "Reports"; ReportGenerator(Map<TableReportModule, Boolean> tableModuleStates, Map<GeneralReportModule, Boolean> generalModuleStates, Map<FileReportModule, Boolean> fileListModuleStates) { // Create the root reports directory path of the form: <CASE DIRECTORY>/Reports/<Case name> <Timestamp>/ DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss"); Date date = new Date(); String dateNoTime = dateFormat.format(date); this.reportPath = currentCase.getCaseDirectory() + File.separator + REPORTS_DIR + File.separator + currentCase.getName() + " " + dateNoTime + File.separator; // Create the root reports directory. try { FileUtil.createFolder(new File(this.reportPath)); } catch (IOException ex) { logger.log(Level.SEVERE, "Failed to make report folder, may be unable to generate reports.", ex); } // Initialize the progress panels generalProgress = new HashMap<>(); tableProgress = new HashMap<>(); fileProgress = new HashMap<>(); setupProgressPanels(tableModuleStates, generalModuleStates, fileListModuleStates); } /** * Create a ReportProgressPanel for each report generation module selected by the user. * * @param tableModuleStates The enabled/disabled state of each TableReportModule * @param generalModuleStates The enabled/disabled state of each GeneralReportModule * @param fileListModuleStates The enabled/disabled state of each FileReportModule */ private void setupProgressPanels(Map<TableReportModule, Boolean> tableModuleStates, Map<GeneralReportModule, Boolean> generalModuleStates, Map<FileReportModule, Boolean> fileListModuleStates) { if (null != tableModuleStates) { for (Entry<TableReportModule, Boolean> entry : tableModuleStates.entrySet()) { if (entry.getValue()) { TableReportModule module = entry.getKey(); String moduleFilePath = module.getFilePath(); if (moduleFilePath != null) { tableProgress.put(module, panel.addReport(module.getName(), reportPath + moduleFilePath)); } else { tableProgress.put(module, panel.addReport(module.getName(), null)); } } } } if (null != generalModuleStates) { for (Entry<GeneralReportModule, Boolean> entry : generalModuleStates.entrySet()) { if (entry.getValue()) { GeneralReportModule module = entry.getKey(); String moduleFilePath = module.getFilePath(); if (moduleFilePath != null) { generalProgress.put(module, panel.addReport(module.getName(), reportPath + moduleFilePath)); } else { generalProgress.put(module, panel.addReport(module.getName(), null)); } } } } if (null != fileListModuleStates) { for(Entry<FileReportModule, Boolean> entry : fileListModuleStates.entrySet()) { if (entry.getValue()) { FileReportModule module = entry.getKey(); String moduleFilePath = module.getFilePath(); if (moduleFilePath != null) { fileProgress.put(module, panel.addReport(module.getName(), reportPath + moduleFilePath)); } else { fileProgress.put(module, panel.addReport(module.getName(), null)); } } } } } /** * Display the progress panels to the user, and add actions to close the parent dialog. 
*/ public void displayProgressPanels() { final JDialog dialog = new JDialog(new JFrame(), true); dialog.setDefaultCloseOperation(JDialog.DO_NOTHING_ON_CLOSE); dialog.setTitle("Report Generation Progress..."); dialog.add(this.panel); dialog.pack(); panel.addCloseAction(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { dialog.dispose(); } }); dialog.addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent e) { panel.close(); } }); Dimension screenDimension = Toolkit.getDefaultToolkit().getScreenSize(); int w = dialog.getSize().width; int h = dialog.getSize().height; // set the location of the popUp Window on the center of the screen dialog.setLocation((screenDimension.width - w) / 2, (screenDimension.height - h) / 2); dialog.setVisible(true); } /** * Run the GeneralReportModules using a SwingWorker. */ public void generateGeneralReports() { GeneralReportsWorker worker = new GeneralReportsWorker(); worker.execute(); } /** * Run the TableReportModules using a SwingWorker. * * @param artifactTypeSelections the enabled/disabled state of the artifact types to be included in the report * @param tagSelections the enabled/disabled state of the tag names to be included in the report */ public void generateBlackboardArtifactsReports(Map<ARTIFACT_TYPE, Boolean> artifactTypeSelections, Map<String, Boolean> tagNameSelections) { if (!tableProgress.isEmpty() && null != artifactTypeSelections) { TableReportsWorker worker = new TableReportsWorker(artifactTypeSelections, tagNameSelections); worker.execute(); } } /** * Run the FileReportModules using a SwingWorker. * * @param enabledInfo the Information that should be included about each file * in the report. */ public void generateFileListReports(Map<FileReportDataTypes, Boolean> enabledInfo) { if (!fileProgress.isEmpty() && null != enabledInfo) { List<FileReportDataTypes> enabled = new ArrayList<>(); for (Entry<FileReportDataTypes, Boolean> e : enabledInfo.entrySet()) { if(e.getValue()) { enabled.add(e.getKey()); } } FileReportsWorker worker = new FileReportsWorker(enabled); worker.execute(); } } /** * SwingWorker to run GeneralReportModules. */ private class GeneralReportsWorker extends SwingWorker<Integer, Integer> { @Override protected Integer doInBackground() throws Exception { for (Entry<GeneralReportModule, ReportProgressPanel> entry : generalProgress.entrySet()) { GeneralReportModule module = entry.getKey(); if (generalProgress.get(module).getStatus() != ReportStatus.CANCELED) { module.generateReport(reportPath, generalProgress.get(module)); } } return 0; } } /** * SwingWorker to run FileReportModules. 
*/ private class FileReportsWorker extends SwingWorker<Integer, Integer> { private List<FileReportDataTypes> enabledInfo = Arrays.asList(FileReportDataTypes.values()); private List<FileReportModule> fileModules = new ArrayList<>(); FileReportsWorker(List<FileReportDataTypes> enabled) { enabledInfo = enabled; for (Entry<FileReportModule, ReportProgressPanel> entry : fileProgress.entrySet()) { fileModules.add(entry.getKey()); } } @Override protected Integer doInBackground() throws Exception { for (FileReportModule module : fileModules) { ReportProgressPanel progress = fileProgress.get(module); if (progress.getStatus() != ReportStatus.CANCELED) { progress.start(); progress.updateStatusLabel("Querying database..."); } } List<AbstractFile> files = getFiles(); int numFiles = files.size(); for (FileReportModule module : fileModules) { module.startReport(reportPath); module.startTable(enabledInfo); fileProgress.get(module).setIndeterminate(false); fileProgress.get(module).setMaximumProgress(numFiles); } int i = 0; // Add files to report. for (AbstractFile file : files) { // Check to see if any reports have been cancelled. if (fileModules.isEmpty()) { break; } // Remove cancelled reports, add files to report otherwise. Iterator<FileReportModule> iter = fileModules.iterator(); while (iter.hasNext()) { FileReportModule module = iter.next(); ReportProgressPanel progress = fileProgress.get(module); if (progress.getStatus() == ReportStatus.CANCELED) { iter.remove(); } else { module.addRow(file, enabledInfo); progress.increment(); } if ((i % 100) == 0) { progress.updateStatusLabel("Now processing " + file.getName()); } } i++; } for (FileReportModule module : fileModules) { module.endTable(); module.endReport(); fileProgress.get(module).complete(); } return 0; } /** * Get all files in the image. * @return */ private List<AbstractFile> getFiles() { List<AbstractFile> absFiles; try { SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase(); absFiles = skCase.findAllFilesWhere("NOT meta_type = 2"); return absFiles; } catch (TskCoreException ex) { // TODO return Collections.EMPTY_LIST; } } } /** * SwingWorker to run TableReportModules to report on blackboard artifacts, * content tags, and blackboard artifact tags. */ private class TableReportsWorker extends SwingWorker<Integer, Integer> { private List<TableReportModule> tableModules = new ArrayList<>(); private List<ARTIFACT_TYPE> artifactTypes = new ArrayList<>(); private HashSet<String> tagNamesFilter = new HashSet<>(); TableReportsWorker(Map<ARTIFACT_TYPE, Boolean> artifactTypeSelections, Map<String, Boolean> tagNameSelections) { // Get the report modules selected by the user. for (Entry<TableReportModule, ReportProgressPanel> entry : tableProgress.entrySet()) { tableModules.add(entry.getKey()); } // Get the artifact types selected by the user. for (Entry<ARTIFACT_TYPE, Boolean> entry : artifactTypeSelections.entrySet()) { if (entry.getValue()) { artifactTypes.add(entry.getKey()); } } // Get the tag names selected by the user and make a tag names filter. if (null != tagNameSelections) { for (Entry<String, Boolean> entry : tagNameSelections.entrySet()) { if (entry.getValue() == true) { tagNamesFilter.add(entry.getKey()); } } } } @Override protected Integer doInBackground() throws Exception { // Start the progress indicators for each active TableReportModule. 
for (TableReportModule module : tableModules) { ReportProgressPanel progress = tableProgress.get(module); if (progress.getStatus() != ReportStatus.CANCELED) { module.startReport(reportPath); progress.start(); progress.setIndeterminate(false); progress.setMaximumProgress(ARTIFACT_TYPE.values().length + 2); // +2 for content and blackboard artifact tags } } makeBlackboardArtifactTables(); makeContentTagsTables(); makeBlackboardArtifactTagsTables(); for (TableReportModule module : tableModules) { tableProgress.get(module).complete(); module.endReport(); } return 0; } private void makeBlackboardArtifactTables() { // Make a comment string describing the tag names filter in effect. StringBuilder comment = new StringBuilder(); if (!tagNamesFilter.isEmpty()) { comment.append("This report only includes results tagged with: "); comment.append(makeCommaSeparatedList(tagNamesFilter)); } // Add a table to the report for every enabled blackboard artifact type. for (ARTIFACT_TYPE type : artifactTypes) { // Check for cancellaton. removeCancelledTableReportModules(); if (tableModules.isEmpty()) { return; } for (TableReportModule module : tableModules) { tableProgress.get(module).updateStatusLabel("Now processing " + type.getDisplayName() + "..."); } // Keyword hits and hashset hit artifacts get sepcial handling. if (type.equals(ARTIFACT_TYPE.TSK_KEYWORD_HIT)) { writeKeywordHits(tableModules, comment.toString(), tagNamesFilter); continue; } else if (type.equals(ARTIFACT_TYPE.TSK_HASHSET_HIT)) { writeHashsetHits(tableModules, comment.toString(), tagNamesFilter); continue; } StopWatch stopwatch = new StopWatch(); stopwatch.start(); List<ArtifactData> unsortedArtifacts = getFilteredArtifacts(type, tagNamesFilter); stopwatch.stop(); System.out.println("Number of Artifacts:\t" + unsortedArtifacts.size()); System.out.println("getFilteredArtifacts:\t" + stopwatch.getElapsedTime()); if (unsortedArtifacts.isEmpty()) { continue; } // The most efficient way to sort all the Artifacts is to add them to a List, and then // sort that List based off a Comparator. Adding to a TreeMap/Set/List sorts the list // each time an element is added, which adds unnecessary overhead if we only need it sorted once. stopwatch.reset(); stopwatch.start(); Collections.sort(unsortedArtifacts); stopwatch.stop(); System.out.println("Collections.sort:\t" + stopwatch.getElapsedTime()); // Get the column headers appropriate for the artifact type. /* @@@ BC: Seems like a better design here would be to have a method that * takes in the artifact as an argument and returns the attributes. We then use that * to make the headers and to make each row afterwards so that we don't have artifact-specific * logic in both getArtifactTableCoumnHeaders and ArtifactData.getRow() */ List<String> columnHeaders = getArtifactTableColumnHeaders(type.getTypeID()); if (columnHeaders == null) { // @@@ Hack to prevent system from hanging. Better solution is to merge all attributes into a single column or analyze the artifacts to find out how many are needed. MessageNotifyUtil.Notify.show("Skipping artifact type " + type + " in reports", "Unknown columns to report on", MessageNotifyUtil.MessageType.ERROR); continue; } for (TableReportModule module : tableModules) { module.startDataType(type.getDisplayName(), comment.toString()); module.startTable(columnHeaders); } stopwatch.reset(); stopwatch.start(); boolean msgSent = false; for(ArtifactData artifactData : unsortedArtifacts) { // Add the row data to all of the reports. 
                    for (TableReportModule module : tableModules) {
                        // Get the row data for this type of artifact.
                        List<String> rowData = artifactData.getRow();
                        if (rowData.isEmpty()) {
                            if (msgSent == false) {
                                MessageNotifyUtil.Notify.show("Skipping artifact rows for type " + type + " in reports", "Unknown columns to report on", MessageNotifyUtil.MessageType.ERROR);
                                msgSent = true;
                            }
                            continue;
                        }
                        
                        module.addRow(rowData);
                    }
                }
                
                // Finish up this data type
                for (TableReportModule module : tableModules) {
                    tableProgress.get(module).increment();
                    module.endTable();
                    module.endDataType();
                }
            }
        }
        
        private void makeContentTagsTables() {
            // Check for cancellation.
            removeCancelledTableReportModules();
            if (tableModules.isEmpty()) {
                return;
            }
            
            // Get the content tags.
            List<ContentTag> tags;
            try {
                tags = Case.getCurrentCase().getServices().getTagsManager().getAllContentTags();
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "failed to get content tags", ex);
                return;
            }
            
            // Tell the modules that reporting on content tags is beginning.
            for (TableReportModule module : tableModules) {
                // @@@ This casting is a tricky little workaround to allow the HTML report module to slip in a content hyperlink.
                // @@@ Also, using the obsolete ARTIFACT_TYPE.TSK_TAG_FILE is an expedient hack.
                tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_TAG_FILE.getDisplayName() + "...");
                
                ArrayList<String> columnHeaders = new ArrayList<>(Arrays.asList("File", "Tag", "Comment"));
                StringBuilder comment = new StringBuilder();
                if (!tagNamesFilter.isEmpty()) {
                    comment.append("This report only includes files tagged with: ");
                    comment.append(makeCommaSeparatedList(tagNamesFilter));
                }
                
                if (module instanceof ReportHTML) {
                    ReportHTML htmlReportModule = (ReportHTML)module;
                    htmlReportModule.startDataType(ARTIFACT_TYPE.TSK_TAG_FILE.getDisplayName(), comment.toString());
                    htmlReportModule.startContentTagsTable(columnHeaders);
                } else {
                    module.startDataType(ARTIFACT_TYPE.TSK_TAG_FILE.getDisplayName(), comment.toString());
                    module.startTable(columnHeaders);
                }
            }
            
            // Give the modules the rows for the content tags.
            for (ContentTag tag : tags) {
                if (passesTagNamesFilter(tag.getName().getDisplayName())) {
                    ArrayList<String> rowData = new ArrayList<>(Arrays.asList(tag.getContent().getName(), tag.getName().getDisplayName(), tag.getComment()));
                    for (TableReportModule module : tableModules) {
                        // @@@ This casting is a tricky little workaround to allow the HTML report module to slip in a content hyperlink.
                        if (module instanceof ReportHTML) {
                            ReportHTML htmlReportModule = (ReportHTML)module;
                            htmlReportModule.addRowWithTaggedContentHyperlink(rowData, tag);
                        } else {
                            module.addRow(rowData);
                        }
                    }
                }
            }
            
            // Tell the modules that content tags reporting has ended.
            for (TableReportModule module : tableModules) {
                tableProgress.get(module).increment();
                module.endTable();
                module.endDataType();
            }
        }
        
        private void makeBlackboardArtifactTagsTables() {
            // Check for cancellation.
            removeCancelledTableReportModules();
            if (tableModules.isEmpty()) {
                return;
            }
            
            List<BlackboardArtifactTag> tags;
            try {
                tags = Case.getCurrentCase().getServices().getTagsManager().getAllBlackboardArtifactTags();
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "failed to get blackboard artifact tags", ex);
                return;
            }
            
            // Tell the modules that reporting on the blackboard artifact tags data type is beginning.
            // @@@ Using the obsolete ARTIFACT_TYPE.TSK_TAG_ARTIFACT is an expedient hack.
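            // Only the obsolete type's display name is used below (for the progress label
            // and the section header); no artifacts of that type are actually queried.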
            for (TableReportModule module : tableModules) {
                tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getDisplayName() + "...");
                
                StringBuilder comment = new StringBuilder();
                if (!tagNamesFilter.isEmpty()) {
                    comment.append("This report only includes results tagged with: ");
                    comment.append(makeCommaSeparatedList(tagNamesFilter));
                }
                module.startDataType(ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getDisplayName(), comment.toString());
                module.startTable(new ArrayList<>(Arrays.asList("Result Type", "Tag", "Comment", "Source File")));
            }
            
            // Give the modules the rows for the blackboard artifact tags.
            for (BlackboardArtifactTag tag : tags) {
                if (passesTagNamesFilter(tag.getName().getDisplayName())) {
                    for (TableReportModule module : tableModules) {
                        module.addRow(new ArrayList<>(Arrays.asList(tag.getArtifact().getArtifactTypeName(), tag.getName().getDisplayName(), tag.getComment(), tag.getContent().getName())));
                    }
                }
            }
            
            // Tell the modules that blackboard artifact tags reporting has ended.
            for (TableReportModule module : tableModules) {
                tableProgress.get(module).increment();
                module.endTable();
                module.endDataType();
            }
        }
        
        boolean passesTagNamesFilter(String tagName) {
            return tagNamesFilter.isEmpty() || tagNamesFilter.contains(tagName);
        }
        
        void removeCancelledTableReportModules() {
            Iterator<TableReportModule> iter = tableModules.iterator();
            while (iter.hasNext()) {
                TableReportModule module = iter.next();
                if (tableProgress.get(module).getStatus() == ReportStatus.CANCELED) {
                    iter.remove();
                }
            }
        }
    }
    
    /// @@@ Should move the methods specific to TableReportsWorker into that scope.
    private Boolean failsTagFilter(HashSet<String> tagNames, HashSet<String> tagsNamesFilter) {
        if (null == tagsNamesFilter || tagsNamesFilter.isEmpty()) {
            return false;
        }

        HashSet<String> filteredTagNames = new HashSet<>(tagNames);
        filteredTagNames.retainAll(tagsNamesFilter);
        return filteredTagNames.isEmpty();
    }
    
    /**
     * Get a List of the artifacts and data of the given type that pass the given Tag Filter.
     * 
     * @param type The artifact type to get
     * @param tagNamesFilter The tag names that should be included.
     * @return a list of the filtered artifacts.
     */
    private List<ArtifactData> getFilteredArtifacts(ARTIFACT_TYPE type, HashSet<String> tagNamesFilter) {
        List<ArtifactData> artifacts = new ArrayList<>();
        try {
            for (BlackboardArtifact artifact : skCase.getBlackboardArtifacts(type)) {
                List<BlackboardArtifactTag> tags = Case.getCurrentCase().getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact);
                HashSet<String> uniqueTagNames = new HashSet<>();
                for (BlackboardArtifactTag tag : tags) {
                    uniqueTagNames.add(tag.getName().getDisplayName());
                }
                if (failsTagFilter(uniqueTagNames, tagNamesFilter)) {
                    continue;
                }
                try {
                    artifacts.add(new ArtifactData(artifact, skCase.getBlackboardAttributes(artifact), uniqueTagNames));
                } catch (TskCoreException ex) {
                    logger.log(Level.SEVERE, "Failed to get Blackboard Attributes when generating report.", ex);
                }
            }
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "Failed to get Blackboard Artifacts when generating report.", ex);
        }
        return artifacts;
    }
    
    /**
     * Write the keyword hits to the provided TableReportModules.
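     * Hits are grouped by keyword list and then by keyword, so each list becomes
     * a set in the report and each keyword becomes a table within that set.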
     * @param tableModules modules to report on
     * @param comment comment to be displayed at the top of the data type
     * @param tagNamesFilter tag names filter currently in effect
     */
    @SuppressWarnings("deprecation")
    private void writeKeywordHits(List<TableReportModule> tableModules, String comment, HashSet<String> tagNamesFilter) {
        ResultSet listsRs = null;
        try {
            // Query for keyword lists
            listsRs = skCase.runQuery("SELECT att.value_text AS list " +
                                      "FROM blackboard_attributes AS att, blackboard_artifacts AS art " +
                                      "WHERE att.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + " " +
                                      "AND art.artifact_type_id = " + ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID() + " " +
                                      "AND att.artifact_id = art.artifact_id " +
                                      "GROUP BY list");
            List<String> lists = new ArrayList<>();
            while(listsRs.next()) {
                String list = listsRs.getString("list");
                if(list.isEmpty()) {
                    list = "User Searches";
                }
                lists.add(list);
            }
            
            // Make keyword data type and give them set index
            for (TableReportModule module : tableModules) {
                module.startDataType(ARTIFACT_TYPE.TSK_KEYWORD_HIT.getDisplayName(), comment);
                module.addSetIndex(lists);
                tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_KEYWORD_HIT.getDisplayName() + "...");
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Failed to query keyword lists.", ex);
        } finally {
            if (listsRs != null) {
                try {
                    skCase.closeRunQuery(listsRs);
                } catch (SQLException ex) {
                    // Ignore errors closing the query.
                }
            }
        }
        
        ResultSet rs = null;
        try {
            // Query for keywords
            rs = skCase.runQuery("SELECT art.artifact_id, art.obj_id, att1.value_text AS keyword, att2.value_text AS preview, att3.value_text AS list, f.name AS name " +
                                 "FROM blackboard_artifacts AS art, blackboard_attributes AS att1, blackboard_attributes AS att2, blackboard_attributes AS att3, tsk_files AS f " +
                                 "WHERE (att1.artifact_id = art.artifact_id) " +
                                 "AND (att2.artifact_id = art.artifact_id) " +
                                 "AND (att3.artifact_id = art.artifact_id) " +
                                 "AND (f.obj_id = art.obj_id) " +
                                 "AND (att1.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID() + ") " +
                                 "AND (att2.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID() + ") " +
                                 "AND (att3.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + ") " +
                                 "AND (art.artifact_type_id = " + ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID() + ") " +
                                 "ORDER BY list, keyword, name");
            String currentKeyword = "";
            String currentList = "";
            while (rs.next()) {
                // Check to see if all the TableReportModules have been canceled
                if (tableModules.isEmpty()) {
                    break;
                }
                Iterator<TableReportModule> iter = tableModules.iterator();
                while (iter.hasNext()) {
                    TableReportModule module = iter.next();
                    if (tableProgress.get(module).getStatus() == ReportStatus.CANCELED) {
                        iter.remove();
                    }
                }
                
                // Get any tags that are associated with this artifact and apply the tag filter.
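                // An empty filter admits every hit; otherwise the hit is skipped unless at
                // least one of its tag names appears in the filter (see failsTagFilter()).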
                HashSet<String> uniqueTagNames = new HashSet<>();
                ResultSet tagNameRows = skCase.runQuery("SELECT display_name FROM tag_names WHERE artifact_id = " + rs.getLong("artifact_id"));
                while (tagNameRows.next()) {
                    uniqueTagNames.add(tagNameRows.getString("display_name"));
                }
                skCase.closeRunQuery(tagNameRows); // Close the per-artifact query so result sets are not leaked.
                if (failsTagFilter(uniqueTagNames, tagNamesFilter)) {
                    continue;
                }
                String tagsList = makeCommaSeparatedList(uniqueTagNames);
                
                Long objId = rs.getLong("obj_id");
                String keyword = rs.getString("keyword");
                String preview = rs.getString("preview");
                String list = rs.getString("list");
                String uniquePath = "";

                try {
                    uniquePath = skCase.getAbstractFileById(objId).getUniquePath();
                } catch (TskCoreException ex) {
                    logger.log(Level.WARNING, "Failed to get Abstract File by ID.", ex);
                }

                // If the lists aren't the same, we've started a new list
                if((!list.equals(currentList) && !list.isEmpty()) || (list.isEmpty() && !currentList.equals("User Searches"))) {
                    if(!currentList.isEmpty()) {
                        for (TableReportModule module : tableModules) {
                            module.endTable();
                            module.endSet();
                        }
                    }
                    currentList = list.isEmpty() ? "User Searches" : list;
                    currentKeyword = ""; // reset the current keyword because it's a new list
                    for (TableReportModule module : tableModules) {
                        module.startSet(currentList);
                        tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_KEYWORD_HIT.getDisplayName() + " (" + currentList + ")...");
                    }
                }
                if (!keyword.equals(currentKeyword)) {
                    if(!currentKeyword.equals("")) {
                        for (TableReportModule module : tableModules) {
                            module.endTable();
                        }
                    }
                    currentKeyword = keyword;
                    for (TableReportModule module : tableModules) {
                        module.addSetElement(currentKeyword);
                        module.startTable(getArtifactTableColumnHeaders(ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()));
                    }
                }
                
                String previewreplace = EscapeUtil.escapeHtml(preview);
                for (TableReportModule module : tableModules) {
                    module.addRow(Arrays.asList(new String[] {previewreplace.replaceAll("<!", ""), uniquePath, tagsList}));
                }
            }
            
            // Finish the current data type
            for (TableReportModule module : tableModules) {
                tableProgress.get(module).increment();
                module.endDataType();
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Failed to query keywords.", ex);
        } finally {
            if (rs != null) {
                try {
                    skCase.closeRunQuery(rs);
                } catch (SQLException ex) {
                    // Ignore errors closing the query.
                }
            }
        }
    }
    
    /**
     * Write the hash set hits to the provided TableReportModules.
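     * Hits are grouped by hash set name, so each set becomes a table in the report.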
     * @param tableModules modules to report on
     * @param comment comment to be displayed at the top of the data type
     * @param tagNamesFilter tag names filter currently in effect
     */
    @SuppressWarnings("deprecation")
    private void writeHashsetHits(List<TableReportModule> tableModules, String comment, HashSet<String> tagNamesFilter) {
        ResultSet listsRs = null;
        try {
            // Query for hashsets
            listsRs = skCase.runQuery("SELECT att.value_text AS list " +
                                      "FROM blackboard_attributes AS att, blackboard_artifacts AS art " +
                                      "WHERE att.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + " " +
                                      "AND art.artifact_type_id = " + ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID() + " " +
                                      "AND att.artifact_id = art.artifact_id " +
                                      "GROUP BY list");
            List<String> lists = new ArrayList<>();
            while(listsRs.next()) {
                lists.add(listsRs.getString("list"));
            }
            
            for (TableReportModule module : tableModules) {
                module.startDataType(ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName(), comment);
                module.addSetIndex(lists);
                tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName() + "...");
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Failed to query hashset lists.", ex);
        } finally {
            if (listsRs != null) {
                try {
                    skCase.closeRunQuery(listsRs);
                } catch (SQLException ex) {
                    // Ignore errors closing the query.
                }
            }
        }
        
        ResultSet rs = null;
        try {
            // Query for hashset hits
            rs = skCase.runQuery("SELECT art.artifact_id, art.obj_id, att.value_text AS setname, f.name AS name, f.size AS size " +
                                 "FROM blackboard_artifacts AS art, blackboard_attributes AS att, tsk_files AS f " +
                                 "WHERE (att.artifact_id = art.artifact_id) " +
                                 "AND (f.obj_id = art.obj_id) " +
                                 "AND (att.attribute_type_id = " + ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID() + ") " +
                                 "AND (art.artifact_type_id = " + ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID() + ") " +
                                 "ORDER BY setname, name, size");
            String currentSet = "";
            while (rs.next()) {
                // Check to see if all the TableReportModules have been canceled
                if (tableModules.isEmpty()) {
                    break;
                }
                Iterator<TableReportModule> iter = tableModules.iterator();
                while (iter.hasNext()) {
                    TableReportModule module = iter.next();
                    if (tableProgress.get(module).getStatus() == ReportStatus.CANCELED) {
                        iter.remove();
                    }
                }
                
                // Get any tags that are associated with this artifact and apply the tag filter.
                HashSet<String> uniqueTagNames = new HashSet<>();
                ResultSet tagNameRows = skCase.runQuery("SELECT display_name FROM tag_names WHERE artifact_id = " + rs.getLong("artifact_id"));
                while (tagNameRows.next()) {
                    uniqueTagNames.add(tagNameRows.getString("display_name"));
                }
                skCase.closeRunQuery(tagNameRows); // Close the per-artifact query so result sets are not leaked.
                if (failsTagFilter(uniqueTagNames, tagNamesFilter)) {
                    continue;
                }
                String tagsList = makeCommaSeparatedList(uniqueTagNames);
                
                Long objId = rs.getLong("obj_id");
                String set = rs.getString("setname");
                String size = rs.getString("size");
                String uniquePath = "";
                
                try {
                    uniquePath = skCase.getAbstractFileById(objId).getUniquePath();
                } catch (TskCoreException ex) {
                    logger.log(Level.WARNING, "Failed to get Abstract File by ID.", ex);
                }

                // If the sets aren't the same, we've started a new set
                if(!set.equals(currentSet)) {
                    if(!currentSet.isEmpty()) {
                        for (TableReportModule module : tableModules) {
                            module.endTable();
                            module.endSet();
                        }
                    }
                    currentSet = set;
                    for (TableReportModule module : tableModules) {
                        module.startSet(currentSet);
                        module.startTable(getArtifactTableColumnHeaders(ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()));
                        tableProgress.get(module).updateStatusLabel("Now processing " + ARTIFACT_TYPE.TSK_HASHSET_HIT.getDisplayName() + " (" + currentSet + ")...");
                    }
                }
                
                // Add a row for this hit to every module
                for (TableReportModule module : tableModules) {
                    module.addRow(Arrays.asList(new String[] {uniquePath, size, tagsList}));
                }
            }
            
            // Finish the current data type
            for (TableReportModule module : tableModules) {
                tableProgress.get(module).increment();
                module.endDataType();
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Failed to query hashset hits.", ex);
        } finally {
            if (rs != null) {
                try {
                    skCase.closeRunQuery(rs);
                } catch (SQLException ex) {
                    // Ignore errors closing the query.
                }
            }
        }
    }
    
    /**
     * For a given artifact type ID, return the list of the row titles we're reporting on.
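     * A "Tags" column is appended to every header list, and null is returned for
     * artifact types that have no fixed column set.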
* * @param artifactTypeId artifact type ID * @return List<String> row titles */ private List<String> getArtifactTableColumnHeaders(int artifactTypeId) { ArrayList<String> columnHeaders; BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(artifactTypeId); switch (type) { case TSK_WEB_BOOKMARK: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"URL", "Title", "Date Created", "Program", "Source File"})); break; case TSK_WEB_COOKIE: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"URL", "Date/Time", "Name", "Value", "Program", "Source File"})); break; case TSK_WEB_HISTORY: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"URL", "Date Accessed", "Referrer", "Title", "Program", "Source File"})); break; case TSK_WEB_DOWNLOAD: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Destination", "Source URL", "Date Accessed", "Program", "Source File"})); break; case TSK_RECENT_OBJECT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Path", "Date/Time", "Source File"})); break; case TSK_INSTALLED_PROG: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Program Name", "Install Date/Time", "Source File"})); break; case TSK_KEYWORD_HIT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Preview", "Source File"})); break; case TSK_HASHSET_HIT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"File", "Size"})); break; case TSK_DEVICE_ATTACHED: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Name", "Device ID", "Date/Time", "Source File"})); break; case TSK_WEB_SEARCH_QUERY: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Text", "Domain", "Date Accessed", "Program Name", "Source File"})); break; case TSK_METADATA_EXIF: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Date Taken", "Device Manufacturer", "Device Model", "Latitude", "Longitude", "Source File"})); break; case TSK_CONTACT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Person Name", "Phone Number", "Phone Number (Home)", "Phone Number (Office)", "Phone Number (Mobile)", "Email", "Source File" })); break; case TSK_MESSAGE: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Message Type", "Direction", "Date/Time", "From Phone Number", "From Email", "To Phone Number", "To Email", "Subject", "Text", "Source File" })); break; case TSK_CALLLOG: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Person Name", "Phone Number", "Date/Time", "Direction", "Source File" })); break; case TSK_CALENDAR_ENTRY: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Calendar Entry Type", "Description", "Start Date/Time", "End Date/Time", "Location", "Source File" })); break; case TSK_SPEED_DIAL_ENTRY: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Short Cut", "Person Name", "Phone Number", "Source File" })); break; case TSK_BLUETOOTH_PAIRING: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Device Name", "Device Address", "Date/Time", "Source File" })); break; case TSK_GPS_TRACKPOINT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_GPS_BOOKMARK: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_GPS_LAST_KNOWN_LOCATION: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", 
"Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_GPS_SEARCH: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Latitude", "Longitude", "Altitude", "Name", "Location Address", "Date/Time", "Source File" })); break; case TSK_SERVICE_ACCOUNT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Category", "User ID", "Password", "Person Name", "App Name", "URL", "App Path", "Description", "ReplyTo Address", "Mail Server", "Source File" })); break; case TSK_TOOL_OUTPUT: columnHeaders = new ArrayList<>(Arrays.asList(new String[] {"Program Name", "Text", "Source File"})); break; default: return null; } columnHeaders.add("Tags"); return columnHeaders; } /** * Map all BlackboardAttributes' values in a list of BlackboardAttributes to each attribute's attribute * type ID, using module's dateToString method for date/time conversions if a module is supplied. * * @param attList list of BlackboardAttributes to be mapped * @param module the TableReportModule the mapping is for * @return Map<Integer, String> of the BlackboardAttributes mapped to their attribute type ID */ public Map<Integer, String> getMappedAttributes(List<BlackboardAttribute> attList, TableReportModule... module) { Map<Integer, String> attributes = new HashMap<>(); int size = ATTRIBUTE_TYPE.values().length; for (int n = 0; n <= size; n++) { attributes.put(n, ""); } for (BlackboardAttribute tempatt : attList) { String value = ""; Integer type = tempatt.getAttributeTypeID(); if (type.equals(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_MODIFIED.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_SENT.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_RCVD.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_START.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_DATETIME_END.getTypeID()) ) { if (module.length > 0) { value = module[0].dateToString(tempatt.getValueLong()); } else { SimpleDateFormat sdf = new java.text.SimpleDateFormat("yyyy/MM/dd HH:mm:ss"); value = sdf.format(new java.util.Date((tempatt.getValueLong() * 1000))); } } else if(type.equals(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID()) || type.equals(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())) { value = Double.toString(tempatt.getValueDouble()); } else { value = tempatt.getValueString(); } if (value == null) { value = ""; } value = EscapeUtil.escapeHtml(value); attributes.put(type, value); } return attributes; } /** * Converts a collection of strings into a single string of comma-separated items * * @param items A collection of strings * @return A string of comma-separated items */ private String makeCommaSeparatedList(Collection<String> items) { String list = ""; for (Iterator<String> iterator = items.iterator(); iterator.hasNext(); ) { list += iterator.next() + (iterator.hasNext() ? ", " : ""); } return list; } /** * Given a tsk_file's obj_id, return the unique path of that file. * * @param objId tsk_file obj_id * @return String unique path */ private String getFileUniquePath(long objId) { try { return skCase.getAbstractFileById(objId).getUniquePath(); } catch (TskCoreException ex) { logger.log(Level.WARNING, "Failed to get Abstract File by ID.", ex); } return ""; } /** * Container class that holds data about an Artifact to eliminate duplicate * calls to the Sleuthkit database. 
*/ private class ArtifactData implements Comparable<ArtifactData> { private BlackboardArtifact artifact; private List<BlackboardAttribute> attributes; private HashSet<String> tags; private List<String> rowData = null; ArtifactData(BlackboardArtifact artifact, List<BlackboardAttribute> attrs, HashSet<String> tags) { this.artifact = artifact; this.attributes = attrs; this.tags = tags; } public BlackboardArtifact getArtifact() { return artifact; } public List<BlackboardAttribute> getAttributes() { return attributes; } public HashSet<String> getTags() { return tags; } public long getArtifactID() { return artifact.getArtifactID(); } public long getObjectID() { return artifact.getObjectID(); } /** * Compares ArtifactData objects by the first attribute they have in * common in their List<BlackboardAttribute>. * * If all attributes are the same, they are assumed duplicates and are * compared by their artifact id. Should only be used with attributes * of the same type. */ @Override public int compareTo(ArtifactData otherArtifactData) { List<String> thisRow = getRow(); List<String> otherRow = otherArtifactData.getRow(); for (int i = 0; i < thisRow.size(); i++) { int compare = thisRow.get(i).compareTo(otherRow.get(i)); if (compare != 0) { return compare; } } // If all attributes are the same, they're most likely duplicates so sort by artifact ID return ((Long) this.getArtifactID()).compareTo((Long) otherArtifactData.getArtifactID()); } /** * Get the values for each row in the table report. */ public List<String> getRow() { if (rowData == null) { try { rowData = getOrderedRowDataAsStrings(); } catch (TskCoreException ex) { logger.log(Level.WARNING, "Core exception while generating row data for artifact report.", ex); rowData = Collections.<String>emptyList(); } } return rowData; } /** * Get a list of Strings with all the row values for the Artifact in the * correct order to be written to the report. 
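         * The value order must match the column headers returned by
         * getArtifactTableColumnHeaders() for this artifact's type.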
* * @return List<String> row values * @throws TskCoreException */ private List<String> getOrderedRowDataAsStrings() throws TskCoreException { Map<Integer, String> mappedAttributes = getMappedAttributes(); List<String> orderedRowData = new ArrayList<>(); BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(getArtifact().getArtifactTypeID()); switch (type) { case TSK_WEB_BOOKMARK: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_WEB_COOKIE: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_WEB_HISTORY: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_WEB_DOWNLOAD: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_RECENT_OBJECT: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_INSTALLED_PROG: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_DEVICE_ATTACHED: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_WEB_SEARCH_QUERY: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_METADATA_EXIF: 
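                    // EXIF rows pair the camera make/model attributes with the embedded GPS coordinates.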
orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_CONTACT: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_HOME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_OFFICE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_MOBILE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_MESSAGE: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_MESSAGE_TYPE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DIRECTION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_FROM.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_TO.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_SUBJECT.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_CALLLOG: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DIRECTION.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_CALENDAR_ENTRY: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_CALENDAR_ENTRY_TYPE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_START.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME_END.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_SPEED_DIAL_ENTRY: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_SHORTCUT.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_BLUETOOTH_PAIRING: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); 
orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_GPS_TRACKPOINT: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_GPS_BOOKMARK: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_GPS_LAST_KNOWN_LOCATION: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_GPS_SEARCH: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_LOCATION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_SERVICE_ACCOUNT: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_CATEGORY.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_USER_ID.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PASSWORD.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_URL.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PATH.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_DESCRIPTION.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_EMAIL_REPLYTO.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_SERVER_NAME.getTypeID())); orderedRowData.add(getFileUniquePath(getObjectID())); break; case TSK_TOOL_OUTPUT: orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID())); orderedRowData.add(mappedAttributes.get(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID())); 
                    orderedRowData.add(getFileUniquePath(getObjectID()));
                    break;
            }
            orderedRowData.add(makeCommaSeparatedList(getTags()));
            return orderedRowData;
        }
        
        /**
         * Returns a mapping of Attribute Type ID to the String representation
         * of an Attribute Value.
         */
        private Map<Integer,String> getMappedAttributes() {
            return ReportGenerator.this.getMappedAttributes(attributes);
        }
        
        /**
         * Get a BlackboardArtifact.
         * 
         * @param artifactId an artifact ID
         * @return The BlackboardArtifact associated with the artifact ID
         */
        private BlackboardArtifact getArtifactByID(long artifactId) {
            try {
                return skCase.getBlackboardArtifact(artifactId);
            } catch (TskCoreException ex) {
                logger.log(Level.WARNING, "Failed to get blackboard artifact by ID.", ex);
            }
            return null;
        }
    }
}
Removed stopwatch and debug printlns. Conflicts: Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java
Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java
Removed stopwatch and debug printlns.
<ide><path>Core/src/org/sleuthkit/autopsy/report/ReportGenerator.java
<ide>                     continue;
<ide>                 }
<ide> 
<del>                StopWatch stopwatch = new StopWatch();
<del>                stopwatch.start();
<ide>                 List<ArtifactData> unsortedArtifacts = getFilteredArtifacts(type, tagNamesFilter);
<del>                stopwatch.stop();
<del>                System.out.println("Number of Artifacts:\t" + unsortedArtifacts.size());
<del>                System.out.println("getFilteredArtifacts:\t" + stopwatch.getElapsedTime());
<ide> 
<ide>                 if (unsortedArtifacts.isEmpty()) {
<ide>                     continue;
<ide>                 // The most efficient way to sort all the Artifacts is to add them to a List, and then
<ide>                 // sort that List based off a Comparator. Adding to a TreeMap/Set/List sorts the list
<ide>                 // each time an element is added, which adds unnecessary overhead if we only need it sorted once.
<del>                stopwatch.reset();
<del>                stopwatch.start();
<ide>                 Collections.sort(unsortedArtifacts);
<del>                stopwatch.stop();
<del>                System.out.println("Collections.sort:\t" + stopwatch.getElapsedTime());
<ide> 
<ide>                 // Get the column headers appropriate for the artifact type.
<ide>                 /* @@@ BC: Seems like a better design here would be to have a method that
<ide>                     module.startTable(columnHeaders);
<ide>                 }
<ide> 
<del>                stopwatch.reset();
<del>                stopwatch.start();
<ide>                 boolean msgSent = false;
<ide>                 for(ArtifactData artifactData : unsortedArtifacts) {
<ide>                     // Add the row data to all of the reports.
<ide>                     module.addRow(rowData);
<ide>                 }
<ide>             }
<del> 
<add> 
<ide>             // Finish up this data type
<ide>             for (TableReportModule module : tableModules) {
<ide>                 tableProgress.get(module).increment();
JavaScript
mit
807432e85782474766c7f411a2fce9b10a68b8af
0
zubairq/gosharedata,zubairq/yazz,zubairq/yazz,zubairq/gosharedata
#!/usr/bin/env node

// Entry point for the Yazz Pilot server process.
var startNodeServer = false
const path = require("path");
const url = require('url');
var fork = require('child_process');
var fs = require('fs');
var ip = require('ip');
var isWin = /^win/.test(process.platform);
var isLinux = /^linux/.test(process.platform);
var isMac = /^darwin/.test(process.platform);
var mainNodeProcessStarted = false;
var restRoutes = new Object()
var envVars = new Object()
var systemReady = false;
var ls = require('ls-sync');
var rimraf = require("rimraf");
var pidusage = require("pidusage");
var mkdirp = require('mkdirp')
var rmdir = require('rmdir-sync');
var uuidv1 = require('uuid/v1');
var express = require('express')
var http = require('http')
var https = require('https');
var app = express()
var expressWs = require('express-ws')(app);

outputDebug("__filename: " + __filename)
outputDebug("__dirname: " + __dirname)
outputDebug("Platform: " + process.platform)

// Copy the platform-specific prebuilt SQLite binding into place.
if (isWin) {
    outputDebug("Creating Windows driver")
    mkdirp.sync('node_modules\\sqlite3\\lib/binding\\node-v72-win32-x64');
    var srcNodeJsFile = path.join(__dirname,'..\\node_sqlite3_win64.rename')
    outputDebug("srcNodeJsFile: " + srcNodeJsFile)
    fs.copyFileSync(  srcNodeJsFile,
                      path.join(__dirname,'..\\node_modules\\sqlite3\\lib\\binding\\node-v72-win32-x64\\node_sqlite3.node'),
                      );
} else if (isLinux) {
    outputDebug("Creating Linux driver")
    mkdirp.sync('node_modules/sqlite3/lib/binding/node-v64-linux-x64');
    var srcNodeJsFile = path.join(__dirname,'../node_sqlite3_linux64.rename')
    outputDebug("srcNodeJsFile: " + srcNodeJsFile)
    fs.copyFileSync(  srcNodeJsFile,
                      path.join(__dirname,'../node_modules/sqlite3/lib/binding/node-v64-linux-x64/node_sqlite3.node'),
                      );
} else if (isMac) {
    outputDebug("Creating Mac driver")
    mkdirp.sync('node_modules/sqlite3/lib/binding/node-v64-darwin-x64');
    var srcNodeJsFile = path.join(__filename,'../../node_sqlite3_macos64.rename')
    outputDebug("srcNodeJsFile: " + srcNodeJsFile)
    fs.copyFileSync(  srcNodeJsFile,
                      path.join(__dirname,'../node_modules/sqlite3/lib/binding/node-v64-darwin-x64/node_sqlite3.node'),
                      );
} else {
    outputDebug("Error, unsupported platform: " + process.platform)
}

var request = require("request");
var db_helper = require("./db_helper")
var perf = require('./perf')
var compression = require('compression')
var dns = require('dns');
var program = require('commander');
var bodyParser = require('body-parser');
var multer = require('multer');
var cors = require('cors')
var saveHelper = require('./save_helpers')
var isDocker = require('is-docker');
var sqlite3 = require('sqlite3');
var os = require('os')
var username = "Unknown user";
var Keycloak = require('keycloak-connect');
var session = require('express-session');
var memoryStore = new session.MemoryStore();
var kk = {
  "realm": "yazz",
  "auth-server-url": "http://127.0.0.1:8080/auth",
  "ssl-required": "external",
  "resource": "yazz",
  "public-client": true,
  "confidential-port": 0
}
var sessObj = session({
  secret: 'some secret',
  resave: false,
  saveUninitialized: true,
  store: memoryStore
})
var keycloak = new Keycloak({ store: memoryStore }, kk);

var upload
var dbPath = null
var dbsearch = null
var userData = null
var port;
var hostaddress;
hostaddress = "0.0.0.0" // ip.address();
var hostaddressintranet;
hostaddressintranet = ip.address();
port = 80
var f = 0
var started = false
var visifile
var socket = null
var io = null;
var forkedProcesses = new Object();
var timeout =
0; var port; var typeOfSystem; var centralHostAddress; var centralHostPort; var stmt2 = null; var stmt3 = null; var setIn = null; var stopScan = false; var inScan = false; var numberOfSecondsAliveCheck = 60; var serverwebsockets = []; var portrange = 3000 var requestClientInternalHostAddress = ''; var requestClientInternalPort = -1; var requestClientPublicIp = ''; var requestClientPublicHostName = ''; var locked; var useHttps; var serverProtocol = "http"; var privateKey; var publicCertificate; var caCertificate1; var caCertificate2; var caCertificate3; var requestClientPublicIp; var hostcount = 0; var queuedResponses = new Object(); var queuedResponseSeqNum = 1; var alreadyOpen = false; var executionProcessCount = 6; app.use(compression()) app.use(sessObj); app.use(keycloak.middleware({ logout: '/c', admin: '/ad' })); var inmemcalc = false var totalMem = 0 var returnedmemCount = 0 var allForked=[] const apiMetrics = require('prometheus-api-metrics'); app.use(apiMetrics()) const Prometheus = require('prom-client'); const yazzMemoryUsageMetric = new Prometheus.Gauge({ name: 'yazz_total_memory_bytes', help: 'Total Memory Usage' }); const yazzProcessMainMemoryUsageMetric = new Prometheus.Gauge({ name: 'yazz_node_process_main_memory_bytes', help: 'Memory Usage for Yazz NodeJS process "main"' }); var stdin = process.openStdin(); var data = ""; stdin.on('data', function(chunk) { data += chunk; }); stdin.on('end', function() { console.log("DATA:\n" + data + "\nEND DATA"); }); if (process.argv.length > 1) { program .version('0.0.1') .option('-t, --type [type]', 'Add the specified type of app (client/server) [type]', 'client') .option('-p, --port [port]', 'Which port should I listen on? (default 80) [port]', parseInt) .option('-h, --host [host]', 'Server address of the central host (default yazz.com) [host]', 'yazz.com') .option('-l, --locked [locked]', 'Allow server to be locked/unlocked on start up (default true) [locked]', 'true') .option('-d, --debug [debug]', 'Allow to run NodeJS in debug mode (default false) [debug]', 'false') .option('-z, --showdebug [showdebug]', 'Allow to show debug info (default false) [showdebug]', 'false') .option('-k, --showprogress [showprogress]', 'Allow to show progress when starting Pilot (default false) [showprogress]', 'false') .option('-j, --showstats [showstats]', 'Allow to show stats debug info (default false) [showstats]', 'false') .option('-i, --statsinterval [statsinterval]', 'Allow to show debug info every x seconds (default 10 seconds) [statsinterval]', 10) .option('-a, --virtualprocessors [virtualprocessors]', 'How many virtual processors to run (default 6 processors) [virtualprocessors]', 6) .option('-m, --maxprocessesretry [maxprocessesretry]', 'Number of processes to retry when all cores are busy (default 10 processes) [maxprocessesretry]', 10) .option('-n, --maxJobProcessDurationMs [maxJobProcessDurationMs]', 'Maximum time to wait for a job to complete (default 10000 ms) [maxJobProcessDurationMs]', 10000) .option('-s, --hostport [hostport]', 'Server port of the central host (default 80) [hostport]', parseInt) .option('-x, --deleteonexit [deleteonexit]', 'Delete database files on exit (default true) [deleteonexit]', 'true') .option('-y, --deleteonstartup [deleteonstartup]', 'Delete database files on startup (default false) [deleteonstartup]', 'false') .option('-a, --runapp [runapp]', 'Run the app with ID as the homepage (default not set) [runapp]', null) .option('-u, --loadjsurl [loadjsurl]', 'Load the following JS from a URL (default not set) 
[loadjsurl]', null) .option('-f, --loadjsfile [loadjsfile]', 'Load the following JS from a file (default not set) [loadjsfile]', null) .option('-z, --loadjscode [loadjscode]', 'Load the following JS from the command line (default not set) [loadjscode]', null) .option('-b, --runhtml [runhtml]', 'Run using a local HTML page as the homepage (default not set) [runhtml]', null) .option('-q, --https [https]', 'Run using a HTTPS (default is http) [https]', 'false') .option('-v, --private [private]', 'Private HTTPS key [private]', null) .option('-c, --public [public]', 'Public HTTPS certificate [public]', null) .option('-e, --cacert1 [cacert1]', 'Public HTTPS CA certificate 1 [cacert1]', null) .option('-f, --cacert2 [cacert2]', 'Public HTTPS CA certificate 2 [cacert2]', null) .option('-g, --cacert3 [cacert3]', 'Public HTTPS CA certificate 3 [cacert3]', null) .option('-u, --usehost [usehost]', 'Use host name [usehost]', null) .parse(process.argv); } else { program.type = 'client' program.host = 'yazz.com' program.locked = 'true' program.debug = 'false' program.deleteonexit = 'true' program.deleteonstartup = 'false' program.runapp = null program.loadjsurl = null program.loadjsfile = null program.runhtml = null program.https = 'false' program.usehost = null } var semver = require('semver') var showProgress = false if (program.showprogress == 'true') { showProgress = true; } var showDebug = false function outputDebug(text) { if (showDebug) { console.log(text); } else { if (showProgress) { process.stdout.write("."); } } }; if (program.showdebug == 'true') { showDebug = true; } outputDebug(" showDebug: " + showDebug); var showStats = false if (program.showstats == 'true') { showStats = true; } outputDebug(" showStats: " + showStats ); var statsInterval = -1 if (program.statsinterval > 0) { statsInterval = program.statsinterval; } outputDebug(" statsInterval: " + statsInterval ); if (program.virtualprocessors > 0) { executionProcessCount = program.virtualprocessors; } outputDebug(" executionProcessCount: " + executionProcessCount ); var maxProcessesCountToRetry = 10 if (program.maxprocessesretry > 0) { maxProcessesCountToRetry = program.maxprocessesretry; } outputDebug(" maxProcessesCountToRetry: " + maxProcessesCountToRetry ); var maxJobProcessDurationMs = 10000 if (program.maxJobProcessDurationMs > 0) { maxJobProcessDurationMs = program.maxJobProcessDurationMs; } outputDebug(" maxJobProcessDurationMs: " + maxJobProcessDurationMs ); var listOfEnvs = process.env var envNames = Object.keys(listOfEnvs) for (var i=0 ;i< envNames.length; i++){ let envName = envNames[i].replace(/[^a-zA-Z0-9]/g,'_'); outputDebug("Env var " + envName + ": " + listOfEnvs[envName]) envVars[envName] = listOfEnvs[envName] } if (isValidObject(envVars.virtualprocessors)) { executionProcessCount = envVars.virtualprocessors } function isValidObject(variable){ if ((typeof variable !== 'undefined') && (variable != null)) { return true } return false } outputDebug("process.env.OPENSHIFT_NODEJS_IP:= " + process.env.OPENSHIFT_NODEJS_IP) if (process.env.OPENSHIFT_NODEJS_IP) { username = "node" } else { username = "node" //if (isValidObject(os) && isValidObject(os.userInfo()) && isValidObject(os.userInfo().username)) { // username = os.userInfo().username.toLowerCase(); //} } var LOCAL_HOME = process.env.HOME outputDebug('LOCAL_HOME:' + LOCAL_HOME); // // We set the HOME environment variable if we are running in OpenShift // outputDebug('DOCKER CHECK...'); if (isDocker()) { outputDebug('Running inside a Linux container'); } else { 
outputDebug('NOT running inside a Linux container'); } if (!isValidObject(LOCAL_HOME) || (LOCAL_HOME == "/")) { LOCAL_HOME = "/home/node" } outputDebug('Starting services'); var debug = false; outputDebug("NodeJS version: " + process.versions.node); if (semver.gt(process.versions.node, '6.9.0')) { outputDebug("NodeJS version > 6.9 " ); } if (program.debug == 'true') { debug = true; outputDebug(" debug: true" ); } else { outputDebug(" debug: false" ); }; var deleteOnExit = (program.deleteonexit == 'true'); outputDebug("deleteOnExit: " + deleteOnExit) var deleteOnStartup = (program.deleteonstartup == 'true'); outputDebug("deleteOnStartup: " + deleteOnStartup) locked = (program.locked == 'true'); useHttps = (program.https == 'true'); if (useHttps) { serverProtocol = "https" } privateKey = program.private; publicCertificate = program.public; caCertificate1 = program.cacert1; caCertificate2 = program.cacert2; caCertificate3 = program.cacert3; var useHost = program.usehost; if (useHost) { hostaddress = useHost outputDebug("USE Host: " + useHost) } port = program.port; var runapp = program.runapp; var runhtml = program.runhtml; var loadjsurl = program.loadjsurl; var loadjsfile = program.loadjsfile; var loadjscode = program.loadjscode; if (!isNumber(port)) { port = 80; if (useHttps) { port = 443; } }; outputDebug('Yazz node local hostname: ' + ip.address() + ' ') setupVisifileParams(); function setUpChildListeners(processName, fileName, debugPort) { forkedProcesses[processName].on('close', function() { if (!shuttingDown) { console.log("Child process " + processName + " exited.. restarting... ") var stmtInsertProcessError = dbsearch.prepare( ` insert into system_process_errors ( id, timestamp, process, status, base_component_id, event, system_code_id, args, error_message ) values ( ?, ?, ?, ?, ?, ?, ?, ?, ? 
);`) dbsearch.serialize(function() { dbsearch.run("begin exclusive transaction"); var newId = uuidv1() stmtInsertProcessError.run( newId, new Date().getTime(), processName, "KILLED", null, null, null, null, null ) dbsearch.run("commit"); stmtInsertProcessError.finalize(); }) setupForkedProcess(processName, fileName, debugPort) } }); forkedProcesses[processName].on('message', (msg) => { //console.log("message from child: " + JSON.stringify(msg,null,2)) //console.log("message type from child: " + JSON.stringify(msg.message_type,null,2)) if (msg.message_type == "return_test_fork") { //console.log('Message from child', msg); sendOverWebSockets({ type: "test_fork", value: "Counter: " + msg.counter + ", count data_states from sqlite: " + msg.sqlite }); } else if (msg.message_type == "save_code") { forkedProcesses["forked"].send({ message_type: "save_code", base_component_id: msg.base_component_id, parent_hash: msg.parent_hash, code: msg.code, options: msg.options }); } else if (msg.message_type == "add_rest_api") { outputDebug("add_rest_api called") var newFunction = async function (req, res) { var params = req.query; var url = req.originalUrl; var body = req.body; var promise = new Promise(async function(returnFn) { var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[ seqNum ] = function(value) { returnFn(value) } outputDebug(" msg.base_component_id: " + msg.base_component_id); outputDebug(" seqNum: " + seqNum); forkedProcesses["forked"].send({ message_type: "callDriverMethod", find_component: { method_name: msg.base_component_id, driver_name: msg.base_component_id } , args: { params: params, body: body, url: url } , seq_num_parent: null, seq_num_browser: null, seq_num_local: seqNum, }); }) var ret = await promise if (ret.value) { res.writeHead(200, {'Content-Type': 'application/json'}); res.end(JSON.stringify( ret.value )); } else if (ret.error) { res.writeHead(200, {'Content-Type': 'application/json'}); res.end(JSON.stringify( {error: ret.error} )); } else { res.writeHead(200, {'Content-Type': 'application/json'}); res.end(JSON.stringify( {error: "Unknown problem occurred"} )); } } // end of function def for newFunction if (!isValidObject(restRoutes[msg.route])) { if (msg.rest_method == "POST") { app.post( '/' + msg.route + '/*' , async function(req, res){ await ((restRoutes[msg.route])(req,res)) }) app.post( '/' + msg.route , async function(req, res){ await ((restRoutes[msg.route])(req,res)) }) } else { app.get( '/' + msg.route + '/*' , async function(req, res){ await ((restRoutes[msg.route])(req,res)) }) app.get( '/' + msg.route , async function(req, res){ await ((restRoutes[msg.route])(req,res)) }) } } restRoutes[msg.route] = newFunction } else if (msg.message_type == "createdTablesInChild") { forkedProcesses["forked"].send({ message_type: "setUpSql" }); forkedProcesses["forked"].send({ message_type: "greeting" , hello: 'world' }); outputDebug("mainNodeProcessStarted: " + mainNodeProcessStarted) if (!mainNodeProcessStarted) { mainNodeProcessStarted = true getPort() } } else if (msg.message_type == "drivers_loaded_by_child") { //-------------------------------------------------------- // open the app in a web browser //-------------------------------------------------------- checkForJSLoaded(); if (typeOfSystem == 'client') { var localClientUrl = serverProtocol + '://' + hostaddress + ":" + port; var remoteServerUrl = 'http://' + centralHostAddress + ":" + centralHostPort + "/visifile/list_intranet_servers.html?time=" + new Date().getTime(); request({ uri: 
remoteServerUrl, method: "GET", timeout: 10000, agent: false, followRedirect: true, maxRedirects: 10 }, function(error, response, body) { if (error) { //console.log("Error opening central server: " + error); if (!alreadyOpen) { alreadyOpen = true; } } else { if (!alreadyOpen) { alreadyOpen = true; //open(remoteServerUrl); } } }); } else if (typeOfSystem == 'server') { if (!alreadyOpen) { alreadyOpen = true; //open('http://' + hostaddress + ":" + port + "/visifile/list_intranet_servers.html?time=" + new Date().getTime()); } } console.log(` YYYYYYY YYYYYYY Y:::::Y Y:::::Y Y:::::Y Y:::::Y Y::::::Y Y::::::Y YYY:::::Y Y:::::YYY aaaaaaaaaaaaa zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz Y:::::Y Y:::::Y a::::::::::::a z:::::::::::::::zz:::::::::::::::z Y:::::Y:::::Y aaaaaaaaa:::::a z::::::::::::::z z::::::::::::::z Y:::::::::Y a::::a zzzzzzzz::::::z zzzzzzzz::::::z Y:::::::Y aaaaaaa:::::a z::::::z z::::::z Y:::::Y aa::::::::::::a z::::::z z::::::z Y:::::Y a::::aaaa::::::a z::::::z z::::::z Y:::::Y a::::a a:::::a z::::::z z::::::z Y:::::Y a::::a a:::::a z::::::zzzzzzzz z::::::zzzzzzzz YYYY:::::YYYY a:::::aaaa::::::a z::::::::::::::z z::::::::::::::z Y:::::::::::Y a::::::::::aa:::az:::::::::::::::zz:::::::::::::::z YYYYYYYYYYYYY aaaaaaaaaa aaaazzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz PPPPPPPPPPPPPPPPP iiii lllllll tttt P::::::::::::::::P i::::i l:::::l ttt:::t P::::::PPPPPP:::::P iiii l:::::l t:::::t PP:::::P P:::::P l:::::l t:::::t P::::P P:::::Piiiiiii l::::l ooooooooooo ttttttt:::::ttttttt P::::P P:::::Pi:::::i l::::l oo:::::::::::oo t:::::::::::::::::t P::::PPPPPP:::::P i::::i l::::l o:::::::::::::::ot:::::::::::::::::t P:::::::::::::PP i::::i l::::l o:::::ooooo:::::otttttt:::::::tttttt P::::PPPPPPPPP i::::i l::::l o::::o o::::o t:::::t P::::P i::::i l::::l o::::o o::::o t:::::t P::::P i::::i l::::l o::::o o::::o t:::::t P::::P i::::i l::::l o::::o o::::o t:::::t tttttt PP::::::PP i::::::il::::::lo:::::ooooo:::::o t::::::tttt:::::t P::::::::P i::::::il::::::lo:::::::::::::::o tt::::::::::::::t P::::::::P i::::::il::::::l oo:::::::::::oo tt:::::::::::tt PPPPPPPPPP iiiiiiiillllllll ooooooooooo ttttttttttt `) console.log("\nRunning " + executionProcessCount + " virtual processors"); console.log("\nYazz Pilot started on:"); console.log("Network Host Address: " + hostaddressintranet) console.log("Local Machine Address: " + serverProtocol + "://" + hostaddress + ':' + port); systemReady = true } else if (msg.message_type == "ipc_child_returning_uploaded_app_as_file_in_child_response") { console.log("uploaded_app_as_file_in_child: " + JSON.stringify(msg)) // ______ // Server --1 data item--> Browser // ______ // sendOverWebSockets({ type: "uploaded_app_as_file_from_server", code_id: msg.code_id, base_component_id: msg.base_component_id, client_file_upload_id: msg.client_file_upload_id }); } else if (msg.message_type == "database_setup_in_child") { if (msg.child_process_name == "forkedExeScheduler") { forkedProcesses["forkedExeScheduler"].send({ message_type: "setUpSql" }); } if (msg.child_process_name.startsWith("forkedExeProcess")) { forkedProcesses[msg.child_process_name].send({ message_type: "setUpSql" }); forkedProcesses["forkedExeScheduler"].send({ message_type: "startNode", node_id: msg.child_process_name, child_process_id: forkedProcesses[msg.child_process_name].pid, started: new Date() }); } } else if (msg.message_type == "getResultReturned") { var newres = queuedResponses[ msg.seqNum ] newres.writeHead(200, {'Content-Type': 'text/plain'}); newres.end(JSON.stringify(msg.result)); newres = null; } else if 
(msg.message_type == "return_add_local_driver_results_msg") { //console.log("6 - return_get_search_results: " + msg.returned); var rett = eval("(" + msg.success + ")"); var newCallbackFn = queuedResponses[ msg.seq_num_local ] if (msg.result ) { newCallbackFn(msg.result) } else { newCallbackFn({ error: msg.error }) } newres = null; } else if (msg.message_type == "processor_free") { forkedProcesses["forkedExeScheduler"].send({ message_type: "processor_free", child_process_name: msg.child_process_name }); } else if (msg.message_type == "execute_code_in_exe_child_process") { //console.log("6 - return_get_all_table: " ); forkedProcesses[msg.child_process_name].send({ message_type: "execute_code", code: msg.code, callback_index: msg.callback_index, code_id: msg.code_id, args: msg.args, call_id: msg.call_id, on_condition: msg.on_condition, base_component_id: msg.base_component_id }); } else if (msg.message_type == "function_call_request") { //console.log("6 - return_get_all_table: " ); forkedProcesses["forkedExeScheduler"].send({ message_type: "function_call_request", child_process_name: msg.child_process_name, find_component: msg.find_component, args: msg.args, callback_index: msg.callback_index, caller_call_id: msg.caller_call_id }); } else if (msg.message_type == "function_call_response") { //console.log("*** function_call_response: " + JSON.stringify(msg,null,2)) forkedProcesses["forkedExeScheduler"].send({ message_type: "function_call_response", child_process_name: msg.child_process_name, driver_name: msg.driver_name, method_name: msg.method_name, result: msg.result, callback_index: msg.callback_index, called_call_id: msg.called_call_id }); } else if (msg.message_type == "return_response_to_function_caller") { //console.log("*) Electron.js got response for " + msg.child_process_name); //console.log("*) "+ msg.result) if (msg.child_process_name) { forkedProcesses[msg.child_process_name].send({ message_type: "return_response_to_function_caller", callback_index: msg.callback_index, result: msg.result }); } } else if (msg.message_type == "return_get_all_table") { //console.log("6 - return_get_all_table: " ); var newres = queuedResponses[ msg.seq_num ] newres.writeHead(200, {'Content-Type': 'text/plain'}); newres.end(msg.result); newres = null; } else if (msg.message_type == "returnIntranetServers") { var newres = queuedResponses[ msg.seq_num ] newres.writeHead(200, {'Content-Type': 'text/plain'}); if (msg.returned) { newres.end( JSON.stringify( { allServers: msg.returned, intranetPublicIp: msg.requestClientPublicIp}) ); } else { //console.log( "8: " + msg.error ); newres.end(JSON.stringify( { allServers: [], intranetPublicIp: msg.requestClientPublicIp}) ); } newres = null; } else if (msg.message_type == "returnIntranetServers_json") { var newres = queuedResponses[ msg.seq_num ] newres.writeHead(200, {'Content-Type': 'application/json'}); var result = { list: [], links: {"self": { "href": "/start" }}, } if (msg.returned) { result.links.servers = {} result.intranetPublicIp = msg.requestClientPublicIp result.error = false result.count = msg.returned.length if (msg.returned.length > 0) { result.main_user = msg.returned[0].client_user_name result.main = msg.returned[0].internal_host + ":" + msg.returned[0].internal_port result.main_url = serverProtocol + "://" + msg.returned[0].internal_host + ":" + msg.returned[0].internal_port + "/home" } for (var i =0 ; i< msg.returned.length; i ++) { var addr = msg.returned[i].internal_host + ":" + msg.returned[i].internal_port result.list.push( addr ) 
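// Each server is also published as a HAL-style link keyed by its
// "host:port" address, so a caller can follow links.servers[addr].href
// straight to that server's /home page, e.g. (illustrative values only):
//   "10.0.0.5:3000": { "href": "http://10.0.0.5:3000/home", "user": "anne" }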
result.links.servers[addr] = {"href": serverProtocol + "://" + addr + "/home" , "user": msg.returned[i].client_user_name} } newres.end(JSON.stringify(result)); } else { newres.end(JSON.stringify( { allServers: [], error: true}) ); } newres = null; } else if (msg.message_type == "returnClientConnect") { //console.log("6: returnClientConnect") //console.log("6.1: " + msg) //console.log("7: " + msg.returned) var newres = queuedResponses[ msg.seq_num ] if (msg.returned) { newres.writeHead(200, {'Content-Type': 'text/plain'}); newres.end( JSON.stringify( JSON.stringify({ connected: msg.returned })) ); } newres = null; // ______ // Subprocess --1 data item--> Server // ______ // } else if (msg.message_type == "subprocess_returns_data_item_to_server") { //console.log("6: return_query_item") //console.log("6.1: " + msg) //console.log("7: " + msg.returned) var new_ws = queuedResponses[ msg.seq_num ] if (msg.returned) { // ______ // Server --1 data item--> Browser // ______ // sendToBrowserViaWebSocket( new_ws, { type: "client_data_item_received_from_server", data_item: msg.returned }); } } else if (msg.message_type == "ipc_child_returning_find_results") { // console.log(" .......3: " + msg.results); //console.log("6: return_query_items_ended") //console.log("6.1: " + msg) var new_ws = queuedResponses[ msg.seq_num ] sendToBrowserViaWebSocket( new_ws , { type: "ws_to_browser_find_results", results: msg.results }); //new_ws = null; } else if (msg.message_type == "ipc_child_returning_callDriverMethod_response") { //console.log(" .......3: " + JSON.stringify(msg,null,2)); //console.log("6: return_query_items_ended") //console.log("6.1: " + msg) var new_ws = queuedResponses[ msg.seq_num_parent ] if (msg.result) { if (msg.result.code) { var tr = msg.result.code msg.result.code = tr } } sendToBrowserViaWebSocket( new_ws , { type: "ws_to_browser_callDriverMethod_results", value: msg.result, seq_num: msg.seq_num_browser }); //new_ws = null; } else if (msg.message_type == "subprocess_alerts_data_done_to_server") { //console.log("6: return_query_items_ended") //console.log("6.1: " + msg) var new_ws = queuedResponses[ msg.seq_num ] sendToBrowserViaWebSocket( new_ws, { type: "server_alerts_data_done_to_browser" }); //new_ws = null; } // // }); } function setupForkedProcess( processName, fileName, debugPort ) { var debugArgs =[]; if (debug) { if (semver.gte(process.versions.node, '6.9.0')) { debugArgs = ['--inspect=' + debugPort]; } else { debugArgs = ['--debug=' + debugPort]; }; }; var forkedProcessPath if (isWin) { forkedProcessPath = path.join(__dirname, '..\\src\\' + fileName) } else { forkedProcessPath = path.join(__dirname, '../src/' + fileName) } forkedProcesses[ processName ] = fork.fork(forkedProcessPath, [], {execArgv: debugArgs}); setUpChildListeners(processName, fileName, debugPort); if (processName == "forked") { forkedProcesses["forked"].send({ message_type: "init" , user_data_path: userData, child_process_name: "forked", show_debug: showDebug, show_progress: showProgress }); forkedProcesses["forked"].send({ message_type: "createTables" }); } if (processName == "forkedExeScheduler") { forkedProcesses["forkedExeScheduler"].send({ message_type: "init" , user_data_path: userData, child_process_name: "forkedExeScheduler", max_processes_count_to_retry: maxProcessesCountToRetry, max_job_process_duration_ms: maxJobProcessDurationMs, show_debug: showDebug, show_progress: showProgress }); } for (var i=0;i<executionProcessCount; i++ ) { var exeProcName = "forkedExeProcess" + i if (processName == exeProcName) 
{ forkedProcesses[exeProcName].send({ message_type: "init" , user_data_path: userData, child_process_name: exeProcName, show_debug: showDebug, show_progress: showProgress }); } } outputDebug("Started subprocess '" + processName + "' ") } function setupMainChildProcess() { setupForkedProcess("forked", "child.js", 40003) } function setupChildProcesses() { setupForkedProcess("forkedExeScheduler", "exeScheduler.js", 40004) for (var i=0;i<executionProcessCount; i++ ) { var exeProcName = "forkedExeProcess" + i setupForkedProcess(exeProcName, "exeProcess.js", 40100 + i) } } function sendOverWebSockets(data) { var ll = serverwebsockets.length; //console.log('send to sockets Count: ' + JSON.stringify(serverwebsockets.length)); for (var i =0 ; i < ll; i++ ) { var sock = serverwebsockets[i]; sock.emit(data.type,data); //console.log(' sock ' + i + ': ' + JSON.stringify(sock.readyState)); } } function isNumber(n) { return !isNaN(parseFloat(n)) && isFinite(n); } function setupVisifileParams() { typeOfSystem = program.type; centralHostAddress = program.host; centralHostPort = program.hostport; if (!isNumber(centralHostPort)) {centralHostPort = 80;}; if (!(typeOfSystem == 'client' || typeOfSystem == 'server')) { console.log('-------* Invalid system type: ' + typeOfSystem); process.exit(); }; outputDebug('-------* System type: ' + typeOfSystem); outputDebug('-------* Port: ' + port); outputDebug('-------* Central host: ' + centralHostAddress); outputDebug('-------* Central host port: ' + centralHostPort); outputDebug( ip.address() ); //console.log('addr: '+ ip.address()); //hostaddress = ip.address(); } outputDebug("process.platform = " + process.platform) if (process.platform === "win32") { var rl = require("readline").createInterface({ input: process.stdin, output: process.stdout }); rl.on("SIGINT", function () { shutDown(); process.exit(); }); } if (isWin) { outputDebug("Running as Windows") var localappdata = process.env.LOCALAPPDATA userData = path.join(localappdata, '/Yazz/') } else { outputDebug("Running as Linux/Mac") userData = path.join(LOCAL_HOME, 'Yazz') } dbPath = path.join(userData, username + '.visi') if (deleteOnStartup) { outputDebug("deleting dir :" + userData) if (userData.length > 6) { deleteYazzDataV2(userData) } } var uploadPath = path.join(userData, 'uploads/') outputDebug("LOCAL_HOME: " + LOCAL_HOME) outputDebug("userData: " + userData) outputDebug("uploadPath: " + uploadPath) upload = multer( { dest: uploadPath}); rmdir("uploads"); mkdirp.sync(path.join(userData, 'uploads')); mkdirp.sync(path.join(userData, 'files')); mkdirp.sync(path.join(userData, 'apps')); mkdirp.sync(path.join(userData, 'app_dbs')); outputDebug('process.env.LOCALAPPDATA: ' + JSON.stringify(localappdata ,null,2)) outputDebug("Local home data path: " + LOCAL_HOME) outputDebug("userData: " + JSON.stringify(userData ,null,2)) outputDebug("process.env keys: " + Object.keys(process.env)) dbsearch = new sqlite3.Database(dbPath); dbsearch.run("PRAGMA journal_mode=WAL;") var nodeConsole = require('console'); var myConsole = new nodeConsole.Console(process.stdout, process.stderr); //var index = require(path.resolve('src/index.js')) setupMainChildProcess(); var shuttingDown = false; process.on('exit', function() { shutDown(); }); process.on('quit', function() { shutDown(); }); process.on("SIGINT", function () { shutDown(); process.exit() }); function shutDown() { console.log(" shutDown() called") if (!shuttingDown) { shuttingDown = true; if (dbsearch) { outputDebug("Database closing...") dbsearch.run("PRAGMA 
wal_checkpoint;") dbsearch.close(function(err){ outputDebug("...database closed") visifile = null }) } if (forkedProcesses["forked"]) { outputDebug("Killed Process forked") forkedProcesses["forked"].kill(); } if (forkedProcesses["forkedExeScheduler"]) { outputDebug("Killed Exe Scheduler process") forkedProcesses["forkedExeScheduler"].kill(); } for (var i = 0; i < executionProcessCount; i++ ) { var exeProcName = "forkedExeProcess" + i forkedProcesses[exeProcName].kill(); outputDebug("Killed Process " + exeProcName) } if (visifile){ visifile.removeAllListeners('close'); //visifile.close(); if (visifile.globalShortcut) { //visifile.globalShortcut.unregisterAll(); } } outputDebug("deleteOnExit =" + deleteOnExit) if (deleteOnExit) { outputDebug("deleting dir :" + userData) if (userData.length > 6) { if (isWin) { deleteYazzDataWindows(userData) } else { deleteYazzData(userData) } } } } } function deleteYazzDataWindows(dddd) { console.log("deleteYazzDataWindows") if (dddd.length > 6) { var ff = 'timeout 8 && rd /s /q "' + dddd + '"' outputDebug(ff) fork.exec(ff , function(err, stdout, stderr) { if (err) { // node couldn't execute the command return; } }) } } function deleteYazzDataV2(dddd) { outputDebug("----------------------------------") outputDebug("Before delete :" + ls(dddd)) outputDebug("----------------------------------") rimraf.sync(path.join(dddd, 'uploads/')); rimraf.sync(path.join(dddd, 'files/')); rimraf.sync(path.join(dddd, 'apps/')); rimraf.sync(path.join(dddd, 'app_dbs/')); rimraf.sync(path.join(dddd, '*.visi')); rimraf.sync(path.join(dddd, '*.visi*')); outputDebug("----------------------------------") outputDebug("After delete :" + ls(dddd)) outputDebug("----------------------------------") } function deleteYazzData(dddd) { fork.exec('sleep 3 && cd "' + dddd + '" && rm -rf app_dbs apps uploads files *.visi*', function(err, stdout, stderr) { if (err) { // node couldn't execute the command return; } }) } var httpServer = null; function getPort () { outputDebug('** called getPort v2') if (useHttps) { var caCerts = readCerts() var certOptions = { key: fs.readFileSync(privateKey, 'utf8'), cert: fs.readFileSync(publicCertificate, 'utf8'), ca: caCerts } certOptions.requestCert = true certOptions.rejectUnauthorized = false httpServer = https.createServer(certOptions,app) } else { httpServer = http.createServer(app) } httpServer.listen(port, ip.address(), function (err) { outputDebug('trying port: ' + port + ' ') httpServer.once('close', function () { }) httpServer.close() httpServer = null; }) httpServer.on('error', function (err) { outputDebug('Couldnt connect on port ' + port + '...') if (port < portrange) { port = portrange }; outputDebug('... 
trying port ' + port) portrange += 1 getPort() }) httpServer.on('listening', function (err) { outputDebug('Can connect on ' + ip.address() + ':' + port + ' :) ') forkedProcesses["forked"].send({ message_type: "host_and_port" , child_process_name: "forked", ip: hostaddress, port: port }); startServices() setupChildProcesses(); }) } function checkForJSLoaded() { if (isValidObject(envVars.loadjsurl)) { loadjsurl = envVars.loadjsurl } // // load JS code from file // if (isValidObject(envVars.loadjsfile)) { loadjsfile = envVars.loadjsfile } //console.log("process.argv.length : " + process.argv.length ) //console.log("process.argv[2] : " + process.argv[2] ) if ((process.argv[2]) && (process.argv[2].endsWith(".js") || process.argv[2].endsWith(".pilot") )) { loadjsfile = process.argv[2] } else if ((process.argv[2]) && (!process.argv[2].startsWith("--"))) { loadjscode = process.argv[2] outputDebug("load code: " + loadjscode ) } if (isValidObject(envVars.loadjscode)) { loadjscode = envVars.loadjscode } if (isValidObject(loadjsurl)) { var jsUrl = loadjsurl https.get(jsUrl, (resp) => { var data = ''; // A chunk of data has been recieved. resp.on('data', (chunk) => { data += chunk; }); // The whole response has been received. Print out the result. resp.on('end', () => { //console.log("code:" + data); var baseComponentIdForUrl = saveHelper.getValueOfCodeString(data, "base_component_id") outputDebug("baseComponentIdForUrl:" + baseComponentIdForUrl); if (!isValidObject(baseComponentIdForUrl)) { baseComponentIdForUrl = loadjsurl.replace(/[^A-Z0-9]/ig, "_"); } var jsCode = data outputDebug("*********** Trying to load loadjsurl code *************") forkedProcesses["forked"].send({ message_type: "save_code", base_component_id: baseComponentIdForUrl, parent_hash: null, code: data, options: { make_public: true, save_html: true } }); runapp = baseComponentIdForUrl }); }).on("error", (err) => { outputDebug("Error: " + err.message); }); } else if (isValidObject(loadjsfile)) { var jsFile = loadjsfile var data2 = fs.readFileSync(jsFile).toString() var baseComponentIdForFile = saveHelper.getValueOfCodeString(data2, "base_component_id") if (!isValidObject(baseComponentIdForFile)) { baseComponentIdForFile = loadjsfile.replace(/[^A-Z0-9]/ig, "_"); } //console.log("code from file:" + data2); //console.log("*********** Trying to load loadjsfile code *************") forkedProcesses["forked"].send({ message_type: "save_code", base_component_id: baseComponentIdForFile, parent_hash: null, code: data2, options: { make_public: true, save_html: true } }); runapp = baseComponentIdForFile } else if (isValidObject(loadjscode)) { console.log("loadjscode ...") var data2 = loadjscode var baseComponentIdForCode = saveHelper.getValueOfCodeString(data2, "base_component_id") outputDebug("baseComponentIdForCode:" + baseComponentIdForCode); if (!isValidObject(baseComponentIdForCode)) { baseComponentIdForCode = "code_" + (("" + Math.random()).replace(/[^A-Z0-9]/ig, "_")); outputDebug("baseComponentIdForFile:" + baseComponentIdForCode); } //console.log("code:" + data2); outputDebug("*********** Trying to load loadjscode code *************") forkedProcesses["forked"].send({ message_type: "save_code", base_component_id: baseComponentIdForCode, parent_hash: null, code: data2, options: { make_public: true, save_html: true } }); runapp = baseComponentIdForCode } } function mkdirSync(dirPath) { try { mkdirp.sync(dirPath) } catch (err) { //if (err.code !== 'EEXIST') throw err } } function outputToConsole(text) { var c = console; c.log(text); } 
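//
// Note on id derivation in checkForJSLoaded() above: when loaded code
// carries no explicit base_component_id, one is derived from the URL or
// file name by replacing every non-alphanumeric character with "_",
// so, for example, "https://x.com/a.js" becomes "https___x_com_a_js".
//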
function copyFileSync( source, target ) {
    var targetFile = target;

    //if target is a directory a new file with the same name will be created
    if ( fs.existsSync( target ) ) {
        if ( fs.lstatSync( target ).isDirectory() ) {
            targetFile = path.join( target, path.basename( source ) );
        }
    }
    fs.writeFileSync(targetFile, fs.readFileSync(source));
}

function copyFolderRecursiveSync( source, target ) {
    //console.log('copy from: '+ source + ' to ' + target);
    var files = [];

    //check if folder needs to be created or integrated
    var targetFolder = path.join( target, path.basename( source ) );
    if ( !fs.existsSync( targetFolder ) ) {
        fs.mkdirSync( targetFolder );
    }

    //copy
    if ( fs.lstatSync( source ).isDirectory() ) {
        files = fs.readdirSync( source );
        files.forEach( function ( file ) {
            var curSource = path.join( source, file );
            if ( fs.lstatSync( curSource ).isDirectory() ) {
                copyFolderRecursiveSync( curSource, targetFolder );
            } else {
                copyFileSync( curSource, targetFolder );
                //console.log('copying: ' + targetFolder);
            }
        } );
    }
}

// ============================================================
// This sends a message to a specific websocket
// ============================================================
function sendToBrowserViaWebSocket(aws, msg) {
    aws.emit(msg.type,msg);
}

function isLocalMachine(req) {
    if ((req.ip == '127.0.0.1') || (hostaddress == req.ip) || (hostaddress == "0.0.0.0")) { // this is the correct line to use
    //if (req.ip == '127.0.0.1') { // this is used for debugging only so that we can deny access from the local machine
        return true;
    };
    return false;
}

//------------------------------------------------------------------------------
// test if allowed
//------------------------------------------------------------------------------
function canAccess(req,res) {
    if (!locked) {
        return true;
    };
    if (isLocalMachine(req) ) {
        return true;
    };
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end("Sorry but access to " + username + "'s data is not allowed. Please ask " + username + " to unlock their Yazz account");
    return false;
};

function extractHostname(url) {
    var hostname;
    //find & remove protocol (http, ftp, etc.) and get hostname
    if (url.indexOf("://") > -1) {
        hostname = url.split('/')[2];
    } else {
        hostname = url.split('/')[0];
    }

    //find & remove port number
    hostname = hostname.split(':')[0];
    //find & remove "?"
    hostname = hostname.split('?')[0];
    return hostname;
}

function extractRootDomain(url) {
    var domain = extractHostname(url),
        splitArr = domain.split('.'),
        arrLen = splitArr.length;

    //extracting the root domain here
    if (arrLen > 2) {
        domain = splitArr[arrLen - 2] + '.'
+ splitArr[arrLen - 1]; } return domain; } function findViafromString(inp) { if (inp == null) { return ""; } var ll = inp.split(' '); for (var i=0; i< ll.length ; i++){ if (ll[i] != null) { if (ll[i].indexOf(":") != -1) { return extractRootDomain(ll[i]); } } } return ""; } function aliveCheckFn() { var urlToConnectTo = "http://" + centralHostAddress + ":" + centralHostPort + '/client_connect'; //console.log('-------* urlToConnectTo: ' + urlToConnectTo); //console.log('trying to connect to central server...'); request({ uri: urlToConnectTo, method: "GET", timeout: 10000, agent: false, followRedirect: true, maxRedirects: 10, qs: { requestClientInternalHostAddress: hostaddress , requestClientInternalPort: port , clientUsername: username } }, function(error, response, body) { //console.log('Error: ' + error); if (response) { if (response.statusCode == '403') { //console.log('403 received, not allowed through firewall for ' + urlToConnectTo); //open("http://" + centralHostAddress + ":" + centralHostPort); } else { ////console.log('response: ' + JSON.stringify(response)); ////console.log(body); } } }); }; function runOnPageExists(req, res, homepage) { if (fs.existsSync(homepage)) { if (typeOfSystem == 'client') { if (!canAccess(req,res)) { return; } res.end(fs.readFileSync(homepage)); } } else { setTimeout(function() { runOnPageExists(req, res, homepage) },3000) } } function getRoot(req, res, next) { hostcount++; //console.log("Host: " + req.headers.host + ", " + hostcount); //console.log("Full URL: " + req.protocol + '://' + req.get('host') + req.originalUrl); var homepage = path.join(__dirname, '../public/go.html') var homepageUrl = serverProtocol + '://yazz.com/visifile/index.html?time=' + new Date().getTime() if (req.headers.host) { if (req.query.goto) { outputDebug("*** FOUND goto") res.end(fs.readFileSync(homepage)); return } if (req.query.embed) { outputDebug("*** FOUND embed") res.end(fs.readFileSync(homepage)); return } if (req.headers.host.toLowerCase().endsWith('yazz.com')) { res.writeHead(301, {Location: homepageUrl } ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('dannea.com')) { res.writeHead(301, {Location: homepageUrl } ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('canlabs.com')) { res.writeHead(301, {Location: 'http://canlabs.com/canlabs/index.html'} ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('gosharedata.com')) { res.writeHead(301, {Location: homepageUrl } ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('visifile.com')) { res.writeHead(301, {Location: homepageUrl } ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('visifiles.com')) { res.writeHead(301, {Location: homepageUrl} ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('appshare.co')) { res.writeHead(301, {Location: homepageUrl } ); res.end(); return; }; }; if (isValidObject(envVars.YAZZ_RUN_APP)) { runapp = envVars.YAZZ_RUN_APP } if (runhtml && (!req.query.goto) && (!req.query.embed)) { homepage = runhtml runOnPageExists(req,res,homepage) return } else if (runapp && (!req.query.goto) && (!req.query.embed)) { homepage = path.join( userData, 'apps/' + runapp + '.html' ) runOnPageExists(req,res,homepage) return } else if (loadjsurl && (!req.query.goto) && (!req.query.embed)) { homepage = path.join( userData, 'apps/' + runapp + '.html' ) runOnPageExists(req,res,homepage) return } else if (loadjsfile && (!req.query.goto) && (!req.query.embed)) { homepage = path.join( userData, 'apps/' + runapp + 
'.html' )
            runOnPageExists(req,res,homepage)
            return
        } else if (loadjscode && (!req.query.goto) && (!req.query.embed)) {
            homepage = path.join( userData, 'apps/' + runapp + '.html' )
            runOnPageExists(req,res,homepage)
            return
        } else {
            homepage = path.join( userData, 'apps/homepage.html' )
            runOnPageExists(req,res,homepage)
            return
        }
        outputDebug("Serving: " + homepage)
}

function getEditApp(req, res) {
    hostcount++;

    // I don't know why socket.io calls .map files here
    if (req.path.endsWith(".map")) {
        return
    }
    var parts = req.path.split('/');
    var lastSegment = parts.pop() || parts.pop();
    outputDebug("URL PATH: " + lastSegment);
    //console.log("Full URL: " + req.protocol + '://' + req.get('host') + req.originalUrl);

    //
    // send the edit page
    //
    var homepage = path.join(__dirname, '../public/go.html')
    var baseComponentId = lastSegment
    var newStaticFileContent = fs.readFileSync(homepage)
    newStaticFileContent = newStaticFileContent.toString().replace("var editAppShareApp = null",
                                                                   "var editAppShareApp = '" + baseComponentId + "'")
    res.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'});
    res.end(newStaticFileContent);
}

function testFirewall(req, res) {
    var tracking_id = url.parse(req.url, true).query.tracking_id;
    var server = url.parse(req.url, true).query.server;
    //console.log(JSON.stringify(tracking_id,null,2));
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end(JSON.stringify({ got_through_firewall: tracking_id ,
                             server: server,
                             username: username,
                             locked: locked }));
};

function websocketFn(ws) {
    serverwebsockets.push(ws);
    sendToBrowserViaWebSocket(ws, {type: "socket_connected"});
    sendOverWebSockets({ type: "env_vars",
                         value: envVars });
    //console.log('Socket connected : ' + serverwebsockets.length);
    sendOverWebSockets({ type: "network_ip_address_intranet",
                         value: hostaddressintranet });

    ws.on('message', function(msg) {
        var receivedMessage = eval("(" + msg + ")");
        //console.log(" 1- Server received message: " + JSON.stringify(receivedMessage));

        // if we get the message "server_get_all_queries" from the web browser
        if (receivedMessage.message_type == "server_get_all_queries") {
            var seqNum = queuedResponseSeqNum;
            queuedResponseSeqNum ++;
            queuedResponses[seqNum] = ws;
            //console.log(" 2 ");
            forkedProcesses["forked"].send({ message_type: "get_all_queries",
                                             seq_num: seqNum });

        } else if (receivedMessage.message_type == "loadUiComponent") {
            //console.log("***** } else if (msg.message_type == loadUiComponent) ")
            var componentIds = receivedMessage.find_components.base_component_ids
            dbsearch.serialize( function() {
                var stmt = dbsearch.all(
                    "SELECT * FROM system_code WHERE base_component_id in " +
                    "(" + componentIds.map(function(){ return "?"
}).join(",") + " )" +
                    " and code_tag = 'LATEST' ",
                    componentIds
                    ,
                    function(err, results) {
                        if (results) {
                            if (results.length > 0) {
                                var codeId = results[0].id
                                dbsearch.all(
                                    "SELECT dependency_name FROM app_dependencies where code_id = ?; ",
                                    codeId,
                                    function(err, results2) {
                                        results[0].libs = results2
                                        sendToBrowserViaWebSocket( ws, { type: "server_returns_loadUiComponent_to_browser",
                                                                         seq_num: receivedMessage.seq_num,
                                                                         record: JSON.stringify(results,null,2),
                                                                         args: JSON.stringify(receivedMessage.args,null,2),
                                                                         test: 1 });
                                    })
                            }
                        }
                    })
            }, sqlite3.OPEN_READONLY)

        //             ______
        //   Browser --Send me your data--> Server
        //             ______
        //
        } else if (receivedMessage.message_type == "edit_static_app") {
            outputDebug("*** server got message from static app: edit_static_app")
            var sql_data = receivedMessage.sql_data
            var code_fn = receivedMessage.code_fn
            forkedProcesses["forked"].send({ message_type: "save_code_from_upload",
                                             base_component_id: receivedMessage.base_component_id,
                                             parent_hash: null,
                                             code: code_fn,
                                             client_file_upload_id: -1,
                                             options: {save_html: true, fast_forward_database_to_latest_revision: true},
                                             sqlite_data: sql_data });
            sendToBrowserViaWebSocket( ws, { type: "edit_static_app_url"
                                             , url: receivedMessage.host_editor_address + "/edit/" + receivedMessage.base_component_id
                                             , size_of_db: "" + (sql_data?sql_data.length:0)
                                             , code_fn: "" + (code_fn?code_fn.length:0)
                                           });

        //             ______
        //   Browser --Send me your data--> Server
        //             ______
        //
        } else if (receivedMessage.message_type == "browser_asks_server_for_data") {
            var seqNum = queuedResponseSeqNum;
            queuedResponseSeqNum ++;
            queuedResponses[seqNum] = ws;

            //            ______
            //   Server --Send me your data--> Subprocess
            //            ______
            //
            forkedProcesses["forked"].send({ message_type: "server_asks_subprocess_for_data",
                                             seq_num: seqNum });

        } else if (receivedMessage.message_type == "browser_asks_server_for_apps") {
            // outputDebug("******************* browser_asks_server_for_apps *******************")
            findLatestVersionOfApps( function(results) {
                // outputDebug(JSON.stringify(results,null,2))
                sendToBrowserViaWebSocket( ws, { type: "vf_app_names",
                                                 results: results });
            })

        // --------------------------------------------------------------------
        //
        //                         callDriverMethod
        //
        //   "callDriverMethod" is used to call server side apps/code.
        //
        //
        //
        // --------------------------------------------------------------------
        } else if (receivedMessage.message_type == "callDriverMethod") {
            // Use an integer counter to identify whoever was
            // calling the server function (in this case a web browser with
            // a web socket). We need to do this as there may be several
            // web browsers connected to this one server
            var seqNum = queuedResponseSeqNum;
            queuedResponseSeqNum ++;
            queuedResponses[ seqNum ] = ws;
            //console.log(" .......1 Electron callDriverMethod: " + JSON.stringify(receivedMessage,null,2));
            forkedProcesses["forked"].send({ message_type: "callDriverMethod",
                                             find_component: receivedMessage.find_component,
                                             args: receivedMessage.args,
                                             seq_num_parent: seqNum,
                                             seq_num_browser: receivedMessage.seqNum });
        }
    });
};

function file_uploadSingleFn(req, res) {
    //console.log('----- file_uploadSingle --------------');
    //console.log(req.file);
    //console.log("**FILE** " + JSON.stringify(Object.keys(req)));
    //console.log('-------------------------------------------------------------------------------------');
    //console.log('-------------------------------------------------------------------------------------');
    //console.log('-------------------------------------------------------------------------------------');
    //console.log(JSON.stringify(req.files.length));
    //console.log("client_file_upload_id: " + JSON.stringify(req.body.client_file_upload_id,null,2))
    var client_file_upload_id = req.body.client_file_upload_id
    //console.log("**client_file_upload_id** " + JSON.stringify(client_file_upload_id));
    //console.log( " next: " + JSON.stringify(next));
    res.status( 200 ).send( req.file );
    //console.log('Loading saved Creator app' );
    var ifile = req.file
    //console.log(" " + JSON.stringify(ifile));
    var ext = ifile.originalname.split('.').pop();
    ext = ext.toLowerCase();
    //console.log('Ext: ' + ext);
    if ((ext == "html") || (ext == "htm")) {
        var localp2;
        localp2 = path.join(userData, 'uploads/' + ifile.filename);
        var localp = localp2 + '.' + ext;
        fs.renameSync(localp2, localp);
        var readIn = fs.readFileSync(localp).toString()
        //console.log('');
        //console.log('Local saved path: ' + localp);
        var indexStart = readIn.indexOf("/*APP_START*/")
        var indexEnd = readIn.indexOf("/*APP_END*/")
        //console.log(`indexStart: ${indexStart}`)
        //console.log(`indexEnd: ${indexEnd}`)
        if ((indexStart > 0) && (indexEnd > 0)) {
            indexStart += 13 + 10
            indexEnd -= 2
            var tts = readIn.substring(indexStart,indexEnd)
            //console.log(tts)
            var ytr = unescape(tts)
            outputDebug("SENDING FROM UPLOAD___=+++****")
            var bci = saveHelper.getValueOfCodeString(ytr, "base_component_id")
            var indexStart = readIn.indexOf("/*APP_START*/")
            var indexEnd = readIn.indexOf("/*APP_END*/")
            var indexOfSqliteData = readIn.indexOf("var sqlitedata = '")
            var indexOfSqliteDataEnd = readIn.indexOf("'//sqlitedata")
            var sqlitedatafromupload = null
            if ((indexOfSqliteData != -1) && (indexOfSqliteDataEnd != -1)) {
                sqlitedatafromupload = readIn.substring( indexOfSqliteData + 18, indexOfSqliteDataEnd)
            }
            forkedProcesses["forked"].send({ message_type: "save_code_from_upload",
                                             base_component_id: bci,
                                             parent_hash: null,
                                             code: ytr,
                                             client_file_upload_id: client_file_upload_id,
                                             options: {save_html: true, fast_forward_database_to_latest_revision: true},
                                             sqlite_data: sqlitedatafromupload });
        }
    } else if ((ext == "js") || (ext == "yazz") || (ext == "pilot")) {
        var localp2;
        localp2 = path.join(userData, 'uploads/' + ifile.filename);
        var localp = localp2 + '.' + ext;
        fs.renameSync(localp2, localp);
        var readIn = fs.readFileSync(localp).toString()
        var bci = saveHelper.getValueOfCodeString(readIn, "base_component_id")
        forkedProcesses["forked"].send({ message_type: "save_code_from_upload",
                                         base_component_id: bci,
                                         parent_hash: null,
                                         code: readIn,
                                         client_file_upload_id: client_file_upload_id,
                                         options: {save_html: true, fast_forward_database_to_latest_revision: false},
                                         sqlite_data: "" });
    } else {
        outputDebug('Ignoring file ');
    }
};

function file_uploadFn(req, res, next) {
    //console.log('-------------------------------------------------------------------------------------');
    //console.log('-------------------------------------------------------------------------------------');
    //console.log('-------------------------------------------------------------------------------------');
    //console.log('-------------------------------------------------------------------------------------');
    //console.log('-------------------------------------------------------------------------------------');
    //console.log(JSON.stringify(req.files.length));
    //console.log("client_file_upload_id: " + JSON.stringify(req.body.client_file_upload_id,null,2))
    var client_file_upload_id = req.body.client_file_upload_id
    //console.log("**FILES** " + JSON.stringify(req.files));
    //console.log( " next: " + JSON.stringify(next));
    //console.log('......................................................................................');
    //console.log('......................................................................................');
    //console.log('......................................................................................');
    //console.log('......................................................................................');
    //console.log('......................................................................................');
    res.status( 200 ).send( req.files );
    var ll = req.files.length;
    for (var i = 0; i < ll ; i ++) {
        //console.log('Loading saved Creator app' );
        var ifile = req.files[i];
        //console.log(" " + JSON.stringify(ifile));
        var ext = ifile.originalname.split('.').pop();
        ext = ext.toLowerCase();
        //console.log('Ext: ' + ext);
        if ((ext == "html") || (ext == "htm")) {
            var localp2;
            localp2 = path.join(userData, 'uploads/' + ifile.filename);
            var localp = localp2 + '.' + ext;
            fs.renameSync(localp2, localp);
            var readIn = fs.readFileSync(localp).toString()
            //console.log('');
            //console.log('Local saved path: ' + localp);
            var indexStart = readIn.indexOf("/*APP_START*/")
            var indexEnd = readIn.indexOf("/*APP_END*/")
            //console.log(`indexStart: ${indexStart}`)
            //console.log(`indexEnd: ${indexEnd}`)
            if ((indexStart > 0) && (indexEnd > 0)) {
                indexStart += 13 + 10
                indexEnd -= 2
                var tts = readIn.substring(indexStart,indexEnd)
                //console.log(tts)
                var ytr = unescape(tts)
                outputDebug("SENDING FROM UPLOAD___=+++****")
                var bci = saveHelper.getValueOfCodeString(ytr, "base_component_id")
                var indexStart = readIn.indexOf("/*APP_START*/")
                var indexEnd = readIn.indexOf("/*APP_END*/")
                var indexOfSqliteData = readIn.indexOf("var sqlitedata = '")
                var indexOfSqliteDataEnd = readIn.indexOf("'//sqlitedata")
                var sqlitedatafromupload = null
                if ((indexOfSqliteData != -1) && (indexOfSqliteDataEnd != -1)) {
                    sqlitedatafromupload = readIn.substring( indexOfSqliteData + 18, indexOfSqliteDataEnd)
                }
                forkedProcesses["forked"].send({ message_type: "save_code_from_upload",
                                                 base_component_id: bci,
                                                 parent_hash: null,
                                                 code: ytr,
                                                 client_file_upload_id: client_file_upload_id,
                                                 options: {save_html: true, fast_forward_database_to_latest_revision: true},
                                                 sqlite_data: sqlitedatafromupload });
            }
        } else if ((ext == "js") || (ext == "yazz") || (ext == "pilot")) {
            var localp2;
            localp2 = path.join(userData, 'uploads/' + ifile.filename);
            var localp = localp2 + '.' + ext;
            fs.renameSync(localp2, localp);
            var readIn = fs.readFileSync(localp).toString()
            var bci = saveHelper.getValueOfCodeString(readIn, "base_component_id")
            forkedProcesses["forked"].send({ message_type: "save_code_from_upload",
                                             base_component_id: bci,
                                             parent_hash: null,
                                             code: readIn,
                                             client_file_upload_id: client_file_upload_id,
                                             options: {save_html: true, fast_forward_database_to_latest_revision: false},
                                             sqlite_data: "" });
        } else {
            outputDebug('Ignoring file ');
        }
    }
};

function code_uploadFn(req, res) {
    forkedProcesses["forked"].send({ message_type: "save_code_from_upload",
                                     parent_hash: null,
                                     code: "function(args) { /* rest_api('test3') */ return {ab: 163}}",
                                     options: {save_html: true},
                                     sqlite_data: "" });
};

function send_client_detailsFn(req, res) {
    ////console.log('in send_client_details: ' + JSON.stringify(req,null,2));
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end(JSON.stringify({ returned: 'some data ',
                             server: hostaddress,
                             port: port,
                             username: username,
                             locked: locked,
                             localIp: req.ip,
                             isLocalMachine: isLocalMachine(req) }));
}

function lockFn(req, res) {
    if ((req.query.locked == "TRUE") || (req.query.locked == "true")) {
        locked = true;
    } else {
        locked = false;
    }
    ////console.log('in lock: ' + JSON.stringify(req,null,2));
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end(JSON.stringify({locked: locked}));
}

//------------------------------------------------------------------------------
// This is called by the central server to get the details of the last
// client that connected to the central server
//------------------------------------------------------------------------------
function get_connectFn(req, res) {
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end( JSON.stringify(
        { requestClientInternalHostAddress: requestClientInternalHostAddress ,
          requestClientInternalPort: requestClientInternalPort ,
          requestClientPublicIp: requestClientPublicIp ,
          requestClientPublicHostName: requestClientPublicHostName ,
          version: 31 } ));
}

function add_new_connectionFn(req, res) {
    var params = req.body;
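    // Hand the posted connection details straight to the "forked" child
    // over IPC; the HTTP caller is acknowledged immediately below, so the
    // response does not wait for the child to store anything.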
forkedProcesses["forked"].send({ message_type: "addNewConnection" , params: params}); res.writeHead(200, {'Content-Type': 'text/plain'}); res.end(JSON.stringify({done: "ok"}))}; function add_new_queryFn(req, res) { var params = req.body; forkedProcesses["forked"].send({ message_type: "addNewQuery" , params: params}); res.writeHead(200, {'Content-Type': 'text/plain'}); res.end(JSON.stringify({done: "ok"}))}; function keycloakProtector(params) { return function(req,res,next) { next() return var appName2=null if (params.compIdFromReqFn) { appName2 = params.compIdFromReqFn(req) } dbsearch.serialize( function() { var stmt = dbsearch.all( "SELECT code FROM system_code where base_component_id = ? and code_tag = ?; ", appName2, "LATEST", function(err, results) { if (results.length == 0) { outputDebug("Could not find component : " + appName2) } else { outputDebug("Found code for : " + appName2) var fileC = results[0].code.toString() //console.log("Code : " + fileC) var sscode = saveHelper.getValueOfCodeString(fileC,"keycloak",")//keycloak") //console.log("sscode:" + sscode) if (sscode) { //var ssval = eval( "(" + sscode + ")") //console.log("keycloak: " + JSON.stringify(sscode,null,2)) keycloak.protect()(req, res, next) } else { next() } } }) }, sqlite3.OPEN_READONLY) } } //------------------------------------------------------------ // This starts all the system services //------------------------------------------------------------ function startServices() { if (useHttps) { var app2 = express() var newhttp = http.createServer(app2); app2.use(compression()) app2.get('/', function (req, res, next) { return getRoot(req, res, next); }) app2.get('*', function(req, res) { if (req.headers.host.toLowerCase().endsWith('canlabs.com')) { outputDebug("path: " + req.path) var rty = req.path if (req.path == "/canlabs") { rty = "/canlabs/index.html" } var fileNameRead = path.join(__dirname, '../public' + rty) res.end(fs.readFileSync(fileNameRead)); } else if ( req.path.indexOf(".well-known") != -1 ) { var fileNameRead = path.join(__dirname, '../public' + req.path) res.end(fs.readFileSync(fileNameRead)); } else { outputDebug("Redirect HTTP to HTTPS") res.redirect('https://' + req.headers.host + req.url); } }) newhttp.listen(80); } app.use(compression()) app.use(cors({ origin: '*' })); app.use(function (req, res, next) { // Website you wish to allow to connect res.header('Access-Control-Allow-Origin', '*'); // Request methods you wish to allow res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE'); // Request headers you wish to allow res.header('Access-Control-Allow-Headers', 'X-Requested-With,content-type'); // Set to true if you need the website to include cookies in the requests sent // to the API (e.g. 
in case you use sessions) res.setHeader('Access-Control-Allow-Credentials', false); // Pass to next layer of middleware next(); }); //------------------------------------------------------------------------------ // Show the default page for the different domains //------------------------------------------------------------------------------ app.get('/', function (req, res, next) { return getRoot(req, res, next); }) app.get('/live-check',(req,res)=> { outputDebug("Live check passed") res.send ("Live check passed"); }); app.get('/readiness-check',(req,res)=> { if (systemReady) { outputDebug("Readiness check passed") res.send ("Readiness check passed"); } else { outputDebug("Readiness check failed") res.status(500).send('Readiness check did not pass'); } }); //------------------------------------------------------------------------------ // Allow an app to be edited //------------------------------------------------------------------------------ app.get('/edit/*', function (req, res) { return getEditApp(req, res); }) app.use("/files", express.static(path.join(userData, '/files/'))); function getAppNameFromHtml() { } function getBaseComponentIdFromRequest(req){ var parts = req.path.split('/'); var appHtmlFile = parts.pop() || parts.pop(); var appName = appHtmlFile.split('.').slice(0, -1).join('.') return appName } //app.get('/app/*', keycloakProtector({compIdFromReqFn: getBaseComponentIdFromRequest}), function (req, res, next) { app.get('/app/*', function (req, res, next) { if (req.kauth) { outputDebug('Keycloak details from server:') outputDebug(req.kauth.grant) } var parts = req.path.split('/'); var appHtmlFile = parts.pop() || parts.pop(); //console.log("appHtemlFile: " + appHtmlFile); var appName = appHtmlFile.split('.').slice(0, -1).join('.') //console.log("appName: " + appName); //console.log("path: " + path); var appFilePath = path.join(userData, 'apps/' + appHtmlFile) var fileC2 = fs.readFileSync(appFilePath, 'utf8').toString() res.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'}); res.end(fileC2); }) //app.use("/app_dbs", express.static(path.join(userData, '/app_dbs/'))); app.use("/public/aframe_fonts", express.static(path.join(__dirname, '../public/aframe_fonts'))); app.use( express.static(path.join(__dirname, '../public/'))) app.use(bodyParser.json()); // support json encoded bodies app.use(bodyParser.urlencoded({ extended: true })); // support encoded bodies //------------------------------------------------------------------------------ // test_firewall //------------------------------------------------------------------------------ app.get('/test_firewall', function (req, res) { return testFirewall(req,res); }); //------------------------------------------------------------------------------ // get_intranet_servers //------------------------------------------------------------------------------ app.get('/get_intranet_servers', function (req, res) { //console.log("1 - get_intranet_servers: " + req.ip) //console.log("1.1 - get_intranet_servers: " + Object.keys(req.headers)) var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[seqNum] = res; //console.log("2") forkedProcesses["forked"].send({ message_type: "get_intranet_servers", seq_num: seqNum, requestClientPublicIp: req.ip , numberOfSecondsAliveCheck: numberOfSecondsAliveCheck, requestVia: findViafromString(req.headers.via) }); }); app.post('/file_upload_single', upload.single( 'uploadfilefromhomepage' ), function (req, res, next) { return file_uploadSingleFn(req, res, next); }); 
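    // The single-file route above uses multer's upload.single() and reads
    // req.file; the multi-file route below uses upload.array('file') and
    // loops over req.files. Both funnel into save_code_from_upload
    // messages sent to the "forked" child process.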
app.post('/file_upload', upload.array( 'file' ), function (req, res, next) { return file_uploadFn(req, res, next); }); app.get('/code_upload', function (req, res, next) { code_uploadFn(req, res); //zzz res.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'}); res.end("Done"); }); app.get('/send_client_details', function (req, res) { return send_client_detailsFn(req, res); }) app.get('/lock', function (req, res) { return lockFn(req, res); }) process.on('uncaughtException', function (err) { outputDebug(err); }) //------------------------------------------------------------------------------ // This is called by the central server to get the details of the last // client that connected tp the central server //------------------------------------------------------------------------------ app.get('/get_connect', function (req, res) { return get_connectFn(req, res); }) //app.enable('trust proxy') app.get('/get_all_table', function (req, res) { var tableName = url.parse(req.url, true).query.tableName; var fields = url.parse(req.url, true).query.fields; //console.log("1 - get_all_table ,tableName: " + tableName) //console.log(" get_all_table ,fields: " + fields) var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[seqNum] = res; //console.log("2 - get_search_results") forkedProcesses["forked"].send({ message_type: "get_all_tables", seq_num: seqNum, table_name: tableName, fields: fields }); }); app.post('/add_new_connection', function (req, res) { return add_new_connectionFn(req, res) }); app.post('/add_new_query',function (req, res) { return add_new_queryFn(req, res) }); //------------------------------------------------------------------------------ // run on the central server only // // This is where the client sends its details to the central server //------------------------------------------------------------------------------ app.get('/client_connect', function (req, res) { //console.log("1 - client_connect: ") var queryData = url.parse(req.url, true).query; var requestClientInternalHostAddress = req.query.requestClientInternalHostAddress; //console.log(" requestClientInternalHostAddress: " + requestClientInternalHostAddress) var requestClientInternalPort = req.query.requestClientInternalPort; //console.log(" requestClientInternalPort: " + requestClientInternalPort) var requestVia = findViafromString(req.headers.via); //console.log(" requestVia: " + requestVia) var requestClientPublicIp = req.ip; //console.log(" requestClientPublicIp: " + requestClientPublicIp) var clientUsername = req.query.clientUsername; //console.log(" clientUsername: " + clientUsername) //requestClientPublicHostName = req.headers['x-forwarded-for'] || req.connection.remoteAddress; var requestClientPublicHostName = "req keys::" + Object.keys(req) + ", VIA::" + req.headers.via + ", raw::" + JSON.stringify(req.rawHeaders); //console.log(" requestClientPublicHostName: " + requestClientPublicHostName) var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[seqNum] = res; //console.log("2") forkedProcesses["forked"].send({ message_type: "client_connect", seq_num: seqNum, requestClientInternalHostAddress: requestClientInternalHostAddress, requestClientInternalPort: requestClientInternalPort, requestVia: requestVia, requestClientPublicIp: requestClientPublicIp, clientUsername: clientUsername, requestClientPublicHostName: requestClientPublicHostName }); }) //------------------------------------------------------------------------------ // start the web server 
//------------------------------------------------------------------------------ if (useHttps) { var caCerts = readCerts() var certOptions = { key: fs.readFileSync(privateKey, 'utf8'), cert: fs.readFileSync(publicCertificate, 'utf8'), ca: caCerts } certOptions.requestCert = true certOptions.rejectUnauthorized = false httpServer = https.createServer(certOptions,app) } else { httpServer = http.createServer(app) } socket = require('socket.io') httpServer.listen(port, hostaddress, function () { outputDebug("****HOST=" + hostaddress + "HOST****\n"); outputDebug("****PORT=" + port+ "PORT****\n"); outputDebug(typeOfSystem + ' started on port ' + port + ' with local folder at ' + process.cwd() + ' and __dirname = ' + __dirname+ "\n"); // // We dont listen on websockets here with socket.io as often they stop working!!! // Crazy, I know!!!! So we removed websockets from the list of transports below // io = socket.listen(httpServer, { log: false, agent: false, origins: '*:*', transports: ['htmlfile', 'xhr-polling', 'jsonp-polling', 'polling'] }); io.on('connection', function (sck) { var connt = JSON.stringify(sck.conn.transport,null,2); websocketFn(sck) }); }) //console.log('addr: '+ hostaddress + ":" + port); //aliveCheckFn(); if (typeOfSystem == 'client') { //setInterval(aliveCheckFn ,numberOfSecondsAliveCheck * 1000); } setTimeout(function(){ forkedProcesses["forked"].send({message_type: 'setUpPredefinedComponents'}); },1000) } function findLatestVersionOfApps( callbackFn) { dbsearch.serialize( function() { var stmt = dbsearch.all( "SELECT id,base_component_id,display_name, component_options FROM system_code where component_scope = ? and code_tag = ?; ", "app", "LATEST", function(err, results) { if (results.length > 0) { callbackFn(results) } else { callbackFn(null) } }) }, sqlite3.OPEN_READONLY) } function findDriversWithMethodLike(methodName, callbackFn) { dbsearch.serialize( function() { var stmt = dbsearch.all( "SELECT base_component_id FROM system_code where on_condition like '%" + methodName + "%'; ", function(err, results) { if (results.length > 0) { callbackFn(results) } else { callbackFn(null) } }) }, sqlite3.OPEN_READONLY) } function bytesToMb(bytes) { return (bytes / 1024 ) / 1024 } function getChildMem(childProcessName,stats) { var memoryused = 0 if (stats) { memoryused = stats.memory ; totalMem += memoryused } if (showStats) { outputDebug(`${childProcessName}: ${Math.round(bytesToMb(memoryused) * 100) / 100} MB`); } } function usePid(childProcessName,childprocess) { pidusage(childprocess.pid, function (err, stats) { getChildMem(childProcessName,stats) returnedmemCount ++ if (returnedmemCount == allForked.length) { if (showStats) { outputDebug("------------------------------------") outputDebug(" TOTAL MEM = " + bytesToMb(totalMem) + " MB") outputDebug("------------------------------------") } inmemcalc = false yazzMemoryUsageMetric.set(totalMem) } }); } if (statsInterval > 0) { setInterval(function(){ if (!inmemcalc) { inmemcalc = true totalMem = 0 const used = process.memoryUsage().heapUsed ; totalMem += used yazzProcessMainMemoryUsageMetric.set(used) if (showStats) { outputDebug(`Main: ${Math.round( bytesToMb(used) * 100) / 100} MB`); } allForked = Object.keys(forkedProcesses) returnedmemCount = 0 for (var ttt=0; ttt< allForked.length; ttt++) { var childProcessName = allForked[ttt] const childprocess = forkedProcesses[childProcessName] usePid(childProcessName,childprocess) } } },(statsInterval * 1000)) } function readCerts() { outputDebug("Checking CA certs" ) 
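    // Whatever is gathered here is later passed as the "ca" chain to
    // https.createServer() (see getPort() and startServices()); any CA
    // cert path that was not supplied on the command line is skipped.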
outputDebug("-----------------" ) outputDebug("" ) outputDebug("CA Cert 1 = " + caCertificate1) outputDebug("CA Cert 2 = " + caCertificate2) outputDebug("CA Cert 3 = " + caCertificate3) outputDebug("" ) outputDebug("" ) let caCertsRet = [] if (caCertificate1) { outputDebug("CA Cert 1 = " + caCertificate1) var fff = fs.readFileSync(caCertificate1, 'utf8') outputDebug(" = " + fff) caCertsRet.push(fff) } if (caCertificate2) { outputDebug("CA Cert 2 = " + caCertificate2) var fff = fs.readFileSync(caCertificate2, 'utf8') outputDebug(" = " + fff) caCertsRet.push(fff) } if (caCertificate3) { outputDebug("CA Cert 3 = " + caCertificate3) var fff = fs.readFileSync(caCertificate3, 'utf8') outputDebug(" = " + fff) caCertsRet.push(fff) } return caCertsRet }
src/electron.js
#!/usr/bin/env node // Module to control application life. // Module to create native browser window. var startNodeServer = false const path = require("path"); const url = require('url'); var fork = require('child_process'); var fs = require('fs'); var ip = require('ip'); var isWin = /^win/.test(process.platform); var isLinux = /^linux/.test(process.platform); var isMac = /^darwin/.test(process.platform); var mainNodeProcessStarted = false; var restRoutes = new Object() var envVars = new Object() var systemReady = false; var ls = require('ls-sync'); var rimraf = require("rimraf"); var pidusage = require("pidusage"); var fs = require('fs'); var mkdirp = require('mkdirp') var rmdir = require('rmdir-sync'); var uuidv1 = require('uuid/v1'); var fork = require('child_process'); var express = require('express') var http = require('http') var https = require('https'); var app = express() var expressWs = require('express-ws')(app); console.log("__filename: " + __filename) console.log("__dirname: " + __dirname) console.log("Platform: " + process.platform) if (isWin) { console.log("Creating Windows driver") mkdirp.sync('node_modules\\sqlite3\\lib/binding\\node-v72-win32-x64'); var srcNodeJsFile = path.join(__dirname,'..\\node_sqlite3_win64.rename') console.log("srcNodeJsFile: " + srcNodeJsFile) fs.copyFileSync( srcNodeJsFile, path.join(__dirname,'..\\node_modules\\sqlite3\\lib\\binding\\node-v72-win32-x64\\node_sqlite3.node'), ); } else if (isLinux) { console.log("Creating Linux driver") mkdirp.sync('node_modules/sqlite3/lib/binding/node-v64-linux-x64'); var srcNodeJsFile = path.join(__dirname,'../node_sqlite3_linux64.rename') console.log("srcNodeJsFile: " + srcNodeJsFile) fs.copyFileSync( srcNodeJsFile, path.join(__dirname,'../node_modules/sqlite3/lib/binding/node-v64-linux-x64/node_sqlite3.node'), ); } else if (isMac) { console.log("Creating Mac driver") mkdirp.sync('node_modules/sqlite3/lib/binding/node-v64-darwin-x64'); var srcNodeJsFile = path.join(__filename,'../../node_sqlite3_macos64.rename') console.log("srcNodeJsFile: " + srcNodeJsFile) fs.copyFileSync( srcNodeJsFile, path.join(__dirname,'../node_modules/sqlite3/lib/binding/node-v64-darwin-x64/node_sqlite3.node'), ); } else { console.log("Error, unsupported platform: " + process.platform) } var request = require("request"); var db_helper = require("./db_helper") var perf = require('./perf') var compression = require('compression') var dns = require('dns'); var program = require('commander'); var bodyParser = require('body-parser'); var multer = require('multer'); var cors = require('cors') var saveHelper = require('./save_helpers') var isDocker = require('is-docker'); var sqlite3 = require('sqlite3'); var os = require('os') var username = "Unknown user"; var Keycloak = require('keycloak-connect'); var session = require('express-session'); var memoryStore = new session.MemoryStore(); var kk = { "realm": "yazz", "auth-server-url": "http://127.0.0.1:8080/auth", "ssl-required": "external", "resource": "yazz", "public-client": true, "confidential-port": 0 } var sessObj = session({ secret: 'some secret', resave: false, saveUninitialized: true, store: memoryStore }) var keycloak = new Keycloak({ store: memoryStore },kk); var upload var dbPath = null var dbsearch = null var userData = null var port; var hostaddress; hostaddress = "0.0.0.0"//ip.address(); var hostaddressintranet; hostaddressintranet = ip.address(); port = 80 var f = 0 var started = false var visifile var socket = null var io = null; var forkedProcesses = new Object(); var timeout = 
0; var port; var typeOfSystem; var centralHostAddress; var centralHostPort; var stmt2 = null; var stmt3 = null; var setIn = null; var stopScan = false; var inScan = false; var numberOfSecondsAliveCheck = 60; var serverwebsockets = []; var portrange = 3000 var requestClientInternalHostAddress = ''; var requestClientInternalPort = -1; var requestClientPublicIp = ''; var requestClientPublicHostName = ''; var locked; var useHttps; var serverProtocol = "http"; var privateKey; var publicCertificate; var caCertificate1; var caCertificate2; var caCertificate3; var requestClientPublicIp; var hostcount = 0; var queuedResponses = new Object(); var queuedResponseSeqNum = 1; var alreadyOpen = false; var executionProcessCount = 6; app.use(compression()) app.use(sessObj); app.use(keycloak.middleware({ logout: '/c', admin: '/ad' })); var inmemcalc = false var totalMem = 0 var returnedmemCount = 0 var allForked=[] const apiMetrics = require('prometheus-api-metrics'); app.use(apiMetrics()) const Prometheus = require('prom-client'); const yazzMemoryUsageMetric = new Prometheus.Gauge({ name: 'yazz_total_memory_bytes', help: 'Total Memory Usage' }); const yazzProcessMainMemoryUsageMetric = new Prometheus.Gauge({ name: 'yazz_node_process_main_memory_bytes', help: 'Memory Usage for Yazz NodeJS process "main"' }); var stdin = process.openStdin(); var data = ""; stdin.on('data', function(chunk) { data += chunk; }); stdin.on('end', function() { console.log("DATA:\n" + data + "\nEND DATA"); }); if (process.argv.length > 1) { program .version('0.0.1') .option('-t, --type [type]', 'Add the specified type of app (client/server) [type]', 'client') .option('-p, --port [port]', 'Which port should I listen on? (default 80) [port]', parseInt) .option('-h, --host [host]', 'Server address of the central host (default yazz.com) [host]', 'yazz.com') .option('-l, --locked [locked]', 'Allow server to be locked/unlocked on start up (default true) [locked]', 'true') .option('-d, --debug [debug]', 'Allow to run NodeJS in debug mode (default false) [debug]', 'false') .option('-z, --showdebug [showdebug]', 'Allow to show debug info (default false) [showdebug]', 'false') .option('-k, --showprogress [showprogress]', 'Allow to show progress when starting Pilot (default false) [showprogress]', 'false') .option('-j, --showstats [showstats]', 'Allow to show stats debug info (default false) [showstats]', 'false') .option('-i, --statsinterval [statsinterval]', 'Allow to show debug info every x seconds (default 10 seconds) [statsinterval]', 10) .option('-a, --virtualprocessors [virtualprocessors]', 'How many virtual processors to run (default 6 processors) [virtualprocessors]', 6) .option('-m, --maxprocessesretry [maxprocessesretry]', 'Number of processes to retry when all cores are busy (default 10 processes) [maxprocessesretry]', 10) .option('-n, --maxJobProcessDurationMs [maxJobProcessDurationMs]', 'Maximum time to wait for a job to complete (default 10000 ms) [maxJobProcessDurationMs]', 10000) .option('-s, --hostport [hostport]', 'Server port of the central host (default 80) [hostport]', parseInt) .option('-x, --deleteonexit [deleteonexit]', 'Delete database files on exit (default true) [deleteonexit]', 'true') .option('-y, --deleteonstartup [deleteonstartup]', 'Delete database files on startup (default false) [deleteonstartup]', 'false') .option('-a, --runapp [runapp]', 'Run the app with ID as the homepage (default not set) [runapp]', null) .option('-u, --loadjsurl [loadjsurl]', 'Load the following JS from a URL (default not set) 
[loadjsurl]', null) .option('-f, --loadjsfile [loadjsfile]', 'Load the following JS from a file (default not set) [loadjsfile]', null) .option('-z, --loadjscode [loadjscode]', 'Load the following JS from the command line (default not set) [loadjscode]', null) .option('-b, --runhtml [runhtml]', 'Run using a local HTML page as the homepage (default not set) [runhtml]', null) .option('-q, --https [https]', 'Run using a HTTPS (default is http) [https]', 'false') .option('-v, --private [private]', 'Private HTTPS key [private]', null) .option('-c, --public [public]', 'Public HTTPS certificate [public]', null) .option('-e, --cacert1 [cacert1]', 'Public HTTPS CA certificate 1 [cacert1]', null) .option('-f, --cacert2 [cacert2]', 'Public HTTPS CA certificate 2 [cacert2]', null) .option('-g, --cacert3 [cacert3]', 'Public HTTPS CA certificate 3 [cacert3]', null) .option('-u, --usehost [usehost]', 'Use host name [usehost]', null) .parse(process.argv); } else { program.type = 'client' program.host = 'yazz.com' program.locked = 'true' program.debug = 'false' program.deleteonexit = 'true' program.deleteonstartup = 'false' program.runapp = null program.loadjsurl = null program.loadjsfile = null program.runhtml = null program.https = 'false' program.usehost = null } var semver = require('semver') var showProgress = false if (program.showprogress == 'true') { showProgress = true; } var showDebug = false function outputDebug(text) { if (showDebug) { console.log(text); } else { if (showProgress) { process.stdout.write("."); } } }; if (program.showdebug == 'true') { showDebug = true; } outputDebug(" showDebug: " + showDebug); var showStats = false if (program.showstats == 'true') { showStats = true; } outputDebug(" showStats: " + showStats ); var statsInterval = -1 if (program.statsinterval > 0) { statsInterval = program.statsinterval; } outputDebug(" statsInterval: " + statsInterval ); if (program.virtualprocessors > 0) { executionProcessCount = program.virtualprocessors; } outputDebug(" executionProcessCount: " + executionProcessCount ); var maxProcessesCountToRetry = 10 if (program.maxprocessesretry > 0) { maxProcessesCountToRetry = program.maxprocessesretry; } outputDebug(" maxProcessesCountToRetry: " + maxProcessesCountToRetry ); var maxJobProcessDurationMs = 10000 if (program.maxJobProcessDurationMs > 0) { maxJobProcessDurationMs = program.maxJobProcessDurationMs; } outputDebug(" maxJobProcessDurationMs: " + maxJobProcessDurationMs ); var listOfEnvs = process.env var envNames = Object.keys(listOfEnvs) for (var i=0 ;i< envNames.length; i++){ let envName = envNames[i].replace(/[^a-zA-Z0-9]/g,'_'); outputDebug("Env var " + envName + ": " + listOfEnvs[envName]) envVars[envName] = listOfEnvs[envName] } if (isValidObject(envVars.virtualprocessors)) { executionProcessCount = envVars.virtualprocessors } function isValidObject(variable){ if ((typeof variable !== 'undefined') && (variable != null)) { return true } return false } outputDebug("process.env.OPENSHIFT_NODEJS_IP:= " + process.env.OPENSHIFT_NODEJS_IP) if (process.env.OPENSHIFT_NODEJS_IP) { username = "node" } else { username = "node" //if (isValidObject(os) && isValidObject(os.userInfo()) && isValidObject(os.userInfo().username)) { // username = os.userInfo().username.toLowerCase(); //} } var LOCAL_HOME = process.env.HOME outputDebug('LOCAL_HOME:' + LOCAL_HOME); // // We set the HOME environment variable if we are running in OpenShift // outputDebug('DOCKER CHECK...'); if (isDocker()) { outputDebug('Running inside a Linux container'); } else { 
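// A few illustrative ways of starting Pilot with the flags parsed above
// (the entry-point name, port numbers and values here are examples only,
// not part of the original source):
//
//   node app.js --port 3000 --type client --showdebug true
//   node app.js --virtualprocessors 8 --statsinterval 5 --showstats true
//   virtualprocessors=8 node app.js     (same effect via an environment variable)
//
// Note that some short flags (-z, -f, -a, -u) are declared twice in the
// option list above, so the long-form flags are the reliable way to set
// --loadjscode, --cacert2, --runapp and --usehost.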
outputDebug('NOT running inside a Linux container'); } if (!isValidObject(LOCAL_HOME) || (LOCAL_HOME == "/")) { LOCAL_HOME = "/home/node" } outputDebug('Starting services'); var debug = false; outputDebug("NodeJS version: " + process.versions.node); if (semver.gt(process.versions.node, '6.9.0')) { outputDebug("NodeJS version > 6.9 " ); } if (program.debug == 'true') { debug = true; outputDebug(" debug: true" ); } else { outputDebug(" debug: false" ); }; var deleteOnExit = (program.deleteonexit == 'true'); outputDebug("deleteOnExit: " + deleteOnExit) var deleteOnStartup = (program.deleteonstartup == 'true'); outputDebug("deleteOnStartup: " + deleteOnStartup) locked = (program.locked == 'true'); useHttps = (program.https == 'true'); if (useHttps) { serverProtocol = "https" } privateKey = program.private; publicCertificate = program.public; caCertificate1 = program.cacert1; caCertificate2 = program.cacert2; caCertificate3 = program.cacert3; var useHost = program.usehost; if (useHost) { hostaddress = useHost console.log("USE Host: " + useHost) } port = program.port; var runapp = program.runapp; var runhtml = program.runhtml; var loadjsurl = program.loadjsurl; var loadjsfile = program.loadjsfile; var loadjscode = program.loadjscode; if (!isNumber(port)) { port = 80; if (useHttps) { port = 443; } }; outputDebug('Yazz node local hostname: ' + ip.address() + ' ') setupVisifileParams(); function setUpChildListeners(processName, fileName, debugPort) { forkedProcesses[processName].on('close', function() { if (!shuttingDown) { console.log("Child process " + processName + " exited.. restarting... ") var stmtInsertProcessError = dbsearch.prepare( ` insert into system_process_errors ( id, timestamp, process, status, base_component_id, event, system_code_id, args, error_message ) values ( ?, ?, ?, ?, ?, ?, ?, ?, ? 
);`) dbsearch.serialize(function() { dbsearch.run("begin exclusive transaction"); var newId = uuidv1() stmtInsertProcessError.run( newId, new Date().getTime(), processName, "KILLED", null, null, null, null, null ) dbsearch.run("commit"); stmtInsertProcessError.finalize(); }) setupForkedProcess(processName, fileName, debugPort) } }); forkedProcesses[processName].on('message', (msg) => { //console.log("message from child: " + JSON.stringify(msg,null,2)) //console.log("message type from child: " + JSON.stringify(msg.message_type,null,2)) if (msg.message_type == "return_test_fork") { //console.log('Message from child', msg); sendOverWebSockets({ type: "test_fork", value: "Counter: " + msg.counter + ", count data_states from sqlite: " + msg.sqlite }); } else if (msg.message_type == "save_code") { forkedProcesses["forked"].send({ message_type: "save_code", base_component_id: msg.base_component_id, parent_hash: msg.parent_hash, code: msg.code, options: msg.options }); } else if (msg.message_type == "add_rest_api") { outputDebug("add_rest_api called") var newFunction = async function (req, res) { var params = req.query; var url = req.originalUrl; var body = req.body; var promise = new Promise(async function(returnFn) { var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[ seqNum ] = function(value) { returnFn(value) } console.log(" msg.base_component_id: " + msg.base_component_id); console.log(" seqNum: " + seqNum); forkedProcesses["forked"].send({ message_type: "callDriverMethod", find_component: { method_name: msg.base_component_id, driver_name: msg.base_component_id } , args: { params: params, body: body, url: url } , seq_num_parent: null, seq_num_browser: null, seq_num_local: seqNum, }); }) var ret = await promise if (ret.value) { res.writeHead(200, {'Content-Type': 'application/json'}); res.end(JSON.stringify( ret.value )); } else if (ret.error) { res.writeHead(200, {'Content-Type': 'application/json'}); res.end(JSON.stringify( {error: ret.error} )); } else { res.writeHead(200, {'Content-Type': 'application/json'}); res.end(JSON.stringify( {error: "Unknown problem occurred"} )); } } // end of function def for newFunction if (!isValidObject(restRoutes[msg.route])) { if (msg.rest_method == "POST") { app.post( '/' + msg.route + '/*' , async function(req, res){ await ((restRoutes[msg.route])(req,res)) }) app.post( '/' + msg.route , async function(req, res){ await ((restRoutes[msg.route])(req,res)) }) } else { app.get( '/' + msg.route + '/*' , async function(req, res){ await ((restRoutes[msg.route])(req,res)) }) app.get( '/' + msg.route , async function(req, res){ await ((restRoutes[msg.route])(req,res)) }) } } restRoutes[msg.route] = newFunction } else if (msg.message_type == "createdTablesInChild") { forkedProcesses["forked"].send({ message_type: "setUpSql" }); forkedProcesses["forked"].send({ message_type: "greeting" , hello: 'world' }); outputDebug("mainNodeProcessStarted: " + mainNodeProcessStarted) if (!mainNodeProcessStarted) { mainNodeProcessStarted = true getPort() } } else if (msg.message_type == "drivers_loaded_by_child") { //-------------------------------------------------------- // open the app in a web browser //-------------------------------------------------------- checkForJSLoaded(); if (typeOfSystem == 'client') { var localClientUrl = serverProtocol + '://' + hostaddress + ":" + port; var remoteServerUrl = 'http://' + centralHostAddress + ":" + centralHostPort + "/visifile/list_intranet_servers.html?time=" + new Date().getTime(); request({ uri: 
remoteServerUrl, method: "GET", timeout: 10000, agent: false, followRedirect: true, maxRedirects: 10 }, function(error, response, body) { if (error) { //console.log("Error opening central server: " + error); if (!alreadyOpen) { alreadyOpen = true; } } else { if (!alreadyOpen) { alreadyOpen = true; //open(remoteServerUrl); } } }); } else if (typeOfSystem == 'server') { if (!alreadyOpen) { alreadyOpen = true; //open('http://' + hostaddress + ":" + port + "/visifile/list_intranet_servers.html?time=" + new Date().getTime()); } } console.log(` YYYYYYY YYYYYYY Y:::::Y Y:::::Y Y:::::Y Y:::::Y Y::::::Y Y::::::Y YYY:::::Y Y:::::YYY aaaaaaaaaaaaa zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz Y:::::Y Y:::::Y a::::::::::::a z:::::::::::::::zz:::::::::::::::z Y:::::Y:::::Y aaaaaaaaa:::::a z::::::::::::::z z::::::::::::::z Y:::::::::Y a::::a zzzzzzzz::::::z zzzzzzzz::::::z Y:::::::Y aaaaaaa:::::a z::::::z z::::::z Y:::::Y aa::::::::::::a z::::::z z::::::z Y:::::Y a::::aaaa::::::a z::::::z z::::::z Y:::::Y a::::a a:::::a z::::::z z::::::z Y:::::Y a::::a a:::::a z::::::zzzzzzzz z::::::zzzzzzzz YYYY:::::YYYY a:::::aaaa::::::a z::::::::::::::z z::::::::::::::z Y:::::::::::Y a::::::::::aa:::az:::::::::::::::zz:::::::::::::::z YYYYYYYYYYYYY aaaaaaaaaa aaaazzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz PPPPPPPPPPPPPPPPP iiii lllllll tttt P::::::::::::::::P i::::i l:::::l ttt:::t P::::::PPPPPP:::::P iiii l:::::l t:::::t PP:::::P P:::::P l:::::l t:::::t P::::P P:::::Piiiiiii l::::l ooooooooooo ttttttt:::::ttttttt P::::P P:::::Pi:::::i l::::l oo:::::::::::oo t:::::::::::::::::t P::::PPPPPP:::::P i::::i l::::l o:::::::::::::::ot:::::::::::::::::t P:::::::::::::PP i::::i l::::l o:::::ooooo:::::otttttt:::::::tttttt P::::PPPPPPPPP i::::i l::::l o::::o o::::o t:::::t P::::P i::::i l::::l o::::o o::::o t:::::t P::::P i::::i l::::l o::::o o::::o t:::::t P::::P i::::i l::::l o::::o o::::o t:::::t tttttt PP::::::PP i::::::il::::::lo:::::ooooo:::::o t::::::tttt:::::t P::::::::P i::::::il::::::lo:::::::::::::::o tt::::::::::::::t P::::::::P i::::::il::::::l oo:::::::::::oo tt:::::::::::tt PPPPPPPPPP iiiiiiiillllllll ooooooooooo ttttttttttt `) console.log("\nRunning " + executionProcessCount + " virtual processors"); console.log("\nYazz Pilot started on:"); console.log("Network Host Address: " + hostaddressintranet) console.log("Local Machine Address: " + serverProtocol + "://" + hostaddress + ':' + port); systemReady = true } else if (msg.message_type == "ipc_child_returning_uploaded_app_as_file_in_child_response") { console.log("uploaded_app_as_file_in_child: " + JSON.stringify(msg)) // ______ // Server --1 data item--> Browser // ______ // sendOverWebSockets({ type: "uploaded_app_as_file_from_server", code_id: msg.code_id, base_component_id: msg.base_component_id, client_file_upload_id: msg.client_file_upload_id }); } else if (msg.message_type == "database_setup_in_child") { if (msg.child_process_name == "forkedExeScheduler") { forkedProcesses["forkedExeScheduler"].send({ message_type: "setUpSql" }); } if (msg.child_process_name.startsWith("forkedExeProcess")) { forkedProcesses[msg.child_process_name].send({ message_type: "setUpSql" }); forkedProcesses["forkedExeScheduler"].send({ message_type: "startNode", node_id: msg.child_process_name, child_process_id: forkedProcesses[msg.child_process_name].pid, started: new Date() }); } } else if (msg.message_type == "getResultReturned") { var newres = queuedResponses[ msg.seqNum ] newres.writeHead(200, {'Content-Type': 'text/plain'}); newres.end(JSON.stringify(msg.result)); newres = null; } else if 
(msg.message_type == "return_add_local_driver_results_msg") { //console.log("6 - return_get_search_results: " + msg.returned); var rett = eval("(" + msg.success + ")"); var newCallbackFn = queuedResponses[ msg.seq_num_local ] if (msg.result ) { newCallbackFn(msg.result) } else { newCallbackFn({ error: msg.error }) } newres = null; } else if (msg.message_type == "processor_free") { forkedProcesses["forkedExeScheduler"].send({ message_type: "processor_free", child_process_name: msg.child_process_name }); } else if (msg.message_type == "execute_code_in_exe_child_process") { //console.log("6 - return_get_all_table: " ); forkedProcesses[msg.child_process_name].send({ message_type: "execute_code", code: msg.code, callback_index: msg.callback_index, code_id: msg.code_id, args: msg.args, call_id: msg.call_id, on_condition: msg.on_condition, base_component_id: msg.base_component_id }); } else if (msg.message_type == "function_call_request") { //console.log("6 - return_get_all_table: " ); forkedProcesses["forkedExeScheduler"].send({ message_type: "function_call_request", child_process_name: msg.child_process_name, find_component: msg.find_component, args: msg.args, callback_index: msg.callback_index, caller_call_id: msg.caller_call_id }); } else if (msg.message_type == "function_call_response") { //console.log("*** function_call_response: " + JSON.stringify(msg,null,2)) forkedProcesses["forkedExeScheduler"].send({ message_type: "function_call_response", child_process_name: msg.child_process_name, driver_name: msg.driver_name, method_name: msg.method_name, result: msg.result, callback_index: msg.callback_index, called_call_id: msg.called_call_id }); } else if (msg.message_type == "return_response_to_function_caller") { //console.log("*) Electron.js got response for " + msg.child_process_name); //console.log("*) "+ msg.result) if (msg.child_process_name) { forkedProcesses[msg.child_process_name].send({ message_type: "return_response_to_function_caller", callback_index: msg.callback_index, result: msg.result }); } } else if (msg.message_type == "return_get_all_table") { //console.log("6 - return_get_all_table: " ); var newres = queuedResponses[ msg.seq_num ] newres.writeHead(200, {'Content-Type': 'text/plain'}); newres.end(msg.result); newres = null; } else if (msg.message_type == "returnIntranetServers") { var newres = queuedResponses[ msg.seq_num ] newres.writeHead(200, {'Content-Type': 'text/plain'}); if (msg.returned) { newres.end( JSON.stringify( { allServers: msg.returned, intranetPublicIp: msg.requestClientPublicIp}) ); } else { //console.log( "8: " + msg.error ); newres.end(JSON.stringify( { allServers: [], intranetPublicIp: msg.requestClientPublicIp}) ); } newres = null; } else if (msg.message_type == "returnIntranetServers_json") { var newres = queuedResponses[ msg.seq_num ] newres.writeHead(200, {'Content-Type': 'application/json'}); var result = { list: [], links: {"self": { "href": "/start" }}, } if (msg.returned) { result.links.servers = {} result.intranetPublicIp = msg.requestClientPublicIp result.error = false result.count = msg.returned.length if (msg.returned.length > 0) { result.main_user = msg.returned[0].client_user_name result.main = msg.returned[0].internal_host + ":" + msg.returned[0].internal_port result.main_url = serverProtocol + "://" + msg.returned[0].internal_host + ":" + msg.returned[0].internal_port + "/home" } for (var i =0 ; i< msg.returned.length; i ++) { var addr = msg.returned[i].internal_host + ":" + msg.returned[i].internal_port result.list.push( addr ) 
result.links.servers[addr] = {"href": serverProtocol + "://" + addr + "/home" , "user": msg.returned[i].client_user_name} } newres.end(JSON.stringify(result)); } else { newres.end(JSON.stringify( { allServers: [], error: true}) ); } newres = null; } else if (msg.message_type == "returnClientConnect") { //console.log("6: returnClientConnect") //console.log("6.1: " + msg) //console.log("7: " + msg.returned) var newres = queuedResponses[ msg.seq_num ] if (msg.returned) { newres.writeHead(200, {'Content-Type': 'text/plain'}); newres.end( JSON.stringify( JSON.stringify({ connected: msg.returned })) ); } newres = null; // ______ // Subprocess --1 data item--> Server // ______ // } else if (msg.message_type == "subprocess_returns_data_item_to_server") { //console.log("6: return_query_item") //console.log("6.1: " + msg) //console.log("7: " + msg.returned) var new_ws = queuedResponses[ msg.seq_num ] if (msg.returned) { // ______ // Server --1 data item--> Browser // ______ // sendToBrowserViaWebSocket( new_ws, { type: "client_data_item_received_from_server", data_item: msg.returned }); } } else if (msg.message_type == "ipc_child_returning_find_results") { // console.log(" .......3: " + msg.results); //console.log("6: return_query_items_ended") //console.log("6.1: " + msg) var new_ws = queuedResponses[ msg.seq_num ] sendToBrowserViaWebSocket( new_ws , { type: "ws_to_browser_find_results", results: msg.results }); //new_ws = null; } else if (msg.message_type == "ipc_child_returning_callDriverMethod_response") { //console.log(" .......3: " + JSON.stringify(msg,null,2)); //console.log("6: return_query_items_ended") //console.log("6.1: " + msg) var new_ws = queuedResponses[ msg.seq_num_parent ] if (msg.result) { if (msg.result.code) { var tr = msg.result.code msg.result.code = tr } } sendToBrowserViaWebSocket( new_ws , { type: "ws_to_browser_callDriverMethod_results", value: msg.result, seq_num: msg.seq_num_browser }); //new_ws = null; } else if (msg.message_type == "subprocess_alerts_data_done_to_server") { //console.log("6: return_query_items_ended") //console.log("6.1: " + msg) var new_ws = queuedResponses[ msg.seq_num ] sendToBrowserViaWebSocket( new_ws, { type: "server_alerts_data_done_to_browser" }); //new_ws = null; } // // }); } function setupForkedProcess( processName, fileName, debugPort ) { var debugArgs =[]; if (debug) { if (semver.gte(process.versions.node, '6.9.0')) { debugArgs = ['--inspect=' + debugPort]; } else { debugArgs = ['--debug=' + debugPort]; }; }; var forkedProcessPath if (isWin) { forkedProcessPath = path.join(__dirname, '..\\src\\' + fileName) } else { forkedProcessPath = path.join(__dirname, '../src/' + fileName) } forkedProcesses[ processName ] = fork.fork(forkedProcessPath, [], {execArgv: debugArgs}); setUpChildListeners(processName, fileName, debugPort); if (processName == "forked") { forkedProcesses["forked"].send({ message_type: "init" , user_data_path: userData, child_process_name: "forked", show_debug: showDebug, show_progress: showProgress }); forkedProcesses["forked"].send({ message_type: "createTables" }); } if (processName == "forkedExeScheduler") { forkedProcesses["forkedExeScheduler"].send({ message_type: "init" , user_data_path: userData, child_process_name: "forkedExeScheduler", max_processes_count_to_retry: maxProcessesCountToRetry, max_job_process_duration_ms: maxJobProcessDurationMs, show_debug: showDebug, show_progress: showProgress }); } for (var i=0;i<executionProcessCount; i++ ) { var exeProcName = "forkedExeProcess" + i if (processName == exeProcName) 
{ forkedProcesses[exeProcName].send({ message_type: "init" , user_data_path: userData, child_process_name: exeProcName, show_debug: showDebug, show_progress: showProgress }); } } outputDebug("Started subprocess '" + processName + "' ") } function setupMainChildProcess() { setupForkedProcess("forked", "child.js", 40003) } function setupChildProcesses() { setupForkedProcess("forkedExeScheduler", "exeScheduler.js", 40004) for (var i=0;i<executionProcessCount; i++ ) { var exeProcName = "forkedExeProcess" + i setupForkedProcess(exeProcName, "exeProcess.js", 40100 + i) } } function sendOverWebSockets(data) { var ll = serverwebsockets.length; //console.log('send to sockets Count: ' + JSON.stringify(serverwebsockets.length)); for (var i =0 ; i < ll; i++ ) { var sock = serverwebsockets[i]; sock.emit(data.type,data); //console.log(' sock ' + i + ': ' + JSON.stringify(sock.readyState)); } } function isNumber(n) { return !isNaN(parseFloat(n)) && isFinite(n); } function setupVisifileParams() { typeOfSystem = program.type; centralHostAddress = program.host; centralHostPort = program.hostport; if (!isNumber(centralHostPort)) {centralHostPort = 80;}; if (!(typeOfSystem == 'client' || typeOfSystem == 'server')) { console.log('-------* Invalid system type: ' + typeOfSystem); process.exit(); }; outputDebug('-------* System type: ' + typeOfSystem); outputDebug('-------* Port: ' + port); outputDebug('-------* Central host: ' + centralHostAddress); outputDebug('-------* Central host port: ' + centralHostPort); outputDebug( ip.address() ); //console.log('addr: '+ ip.address()); //hostaddress = ip.address(); } outputDebug("process.platform = " + process.platform) if (process.platform === "win32") { var rl = require("readline").createInterface({ input: process.stdin, output: process.stdout }); rl.on("SIGINT", function () { shutDown(); process.exit(); }); } if (isWin) { console.log("Running as Windows") var localappdata = process.env.LOCALAPPDATA userData = path.join(localappdata, '/Yazz/') } else { outputDebug("Running as Linux/Mac") userData = path.join(LOCAL_HOME, 'Yazz') } dbPath = path.join(userData, username + '.visi') if (deleteOnStartup) { console.log("deleting dir :" + userData) if (userData.length > 6) { deleteYazzDataV2(userData) } } var uploadPath = path.join(userData, 'uploads/') outputDebug("LOCAL_HOME: " + LOCAL_HOME) outputDebug("userData: " + userData) outputDebug("uploadPath: " + uploadPath) upload = multer( { dest: uploadPath}); rmdir("uploads"); mkdirp.sync(path.join(userData, 'uploads')); mkdirp.sync(path.join(userData, 'files')); mkdirp.sync(path.join(userData, 'apps')); mkdirp.sync(path.join(userData, 'app_dbs')); outputDebug('process.env.LOCALAPPDATA: ' + JSON.stringify(localappdata ,null,2)) outputDebug("Local home data path: " + LOCAL_HOME) outputDebug("userData: " + JSON.stringify(userData ,null,2)) outputDebug("process.env keys: " + Object.keys(process.env)) dbsearch = new sqlite3.Database(dbPath); dbsearch.run("PRAGMA journal_mode=WAL;") var nodeConsole = require('console'); var myConsole = new nodeConsole.Console(process.stdout, process.stderr); //var index = require(path.resolve('src/index.js')) setupMainChildProcess(); var shuttingDown = false; process.on('exit', function() { shutDown(); }); process.on('quit', function() { shutDown(); }); process.on("SIGINT", function () { shutDown(); process.exit() }); function shutDown() { console.log(" shutDown() called") if (!shuttingDown) { shuttingDown = true; if (dbsearch) { console.log("Database closing...") dbsearch.run("PRAGMA 
wal_checkpoint;") dbsearch.close(function(err){ console.log("...database closed") visifile = null }) } if (forkedProcesses["forked"]) { console.log("Killed Process forked") forkedProcesses["forked"].kill(); } if (forkedProcesses["forkedExeScheduler"]) { console.log("Killed Exe Scheduler process") forkedProcesses["forkedExeScheduler"].kill(); } for (var i = 0; i < executionProcessCount; i++ ) { var exeProcName = "forkedExeProcess" + i forkedProcesses[exeProcName].kill(); console.log("Killed Process " + exeProcName) } if (visifile){ visifile.removeAllListeners('close'); //visifile.close(); if (visifile.globalShortcut) { //visifile.globalShortcut.unregisterAll(); } } console.log("deleteOnExit =" + deleteOnExit) if (deleteOnExit) { console.log("deleting dir :" + userData) if (userData.length > 6) { if (isWin) { deleteYazzDataWindows(userData) } else { deleteYazzData(userData) } } } } } function deleteYazzDataWindows(dddd) { console.log("deleteYazzDataWindows") if (dddd.length > 6) { var ff = 'timeout 8 && rd /s /q "' + dddd + '"' console.log(ff) fork.exec(ff , function(err, stdout, stderr) { if (err) { // node couldn't execute the command return; } }) } } function deleteYazzDataV2(dddd) { console.log("----------------------------------") console.log("Before delete :" + ls(dddd)) console.log("----------------------------------") rimraf.sync(path.join(dddd, 'uploads/')); rimraf.sync(path.join(dddd, 'files/')); rimraf.sync(path.join(dddd, 'apps/')); rimraf.sync(path.join(dddd, 'app_dbs/')); rimraf.sync(path.join(dddd, '*.visi')); rimraf.sync(path.join(dddd, '*.visi*')); console.log("----------------------------------") console.log("After delete :" + ls(dddd)) console.log("----------------------------------") } function deleteYazzData(dddd) { fork.exec('sleep 3 && cd "' + dddd + '" && rm -rf app_dbs apps uploads files *.visi*', function(err, stdout, stderr) { if (err) { // node couldn't execute the command return; } }) } var httpServer = null; function getPort () { outputDebug('** called getPort v2') if (useHttps) { var caCerts = readCerts() var certOptions = { key: fs.readFileSync(privateKey, 'utf8'), cert: fs.readFileSync(publicCertificate, 'utf8'), ca: caCerts } certOptions.requestCert = true certOptions.rejectUnauthorized = false httpServer = https.createServer(certOptions,app) } else { httpServer = http.createServer(app) } httpServer.listen(port, ip.address(), function (err) { outputDebug('trying port: ' + port + ' ') httpServer.once('close', function () { }) httpServer.close() httpServer = null; }) httpServer.on('error', function (err) { outputDebug('Couldnt connect on port ' + port + '...') if (port < portrange) { port = portrange }; outputDebug('... 
trying port ' + port) portrange += 1 getPort() }) httpServer.on('listening', function (err) { outputDebug('Can connect on ' + ip.address() + ':' + port + ' :) ') forkedProcesses["forked"].send({ message_type: "host_and_port" , child_process_name: "forked", ip: hostaddress, port: port }); startServices() setupChildProcesses(); }) } function checkForJSLoaded() { if (isValidObject(envVars.loadjsurl)) { loadjsurl = envVars.loadjsurl } // // load JS code from file // if (isValidObject(envVars.loadjsfile)) { loadjsfile = envVars.loadjsfile } //console.log("process.argv.length : " + process.argv.length ) //console.log("process.argv[2] : " + process.argv[2] ) if ((process.argv[2]) && (process.argv[2].endsWith(".js") || process.argv[2].endsWith(".pilot") )) { loadjsfile = process.argv[2] } else if ((process.argv[2]) && (!process.argv[2].startsWith("--"))) { loadjscode = process.argv[2] console.log("load code: " + loadjscode ) } if (isValidObject(envVars.loadjscode)) { loadjscode = envVars.loadjscode } if (isValidObject(loadjsurl)) { var jsUrl = loadjsurl https.get(jsUrl, (resp) => { var data = ''; // A chunk of data has been recieved. resp.on('data', (chunk) => { data += chunk; }); // The whole response has been received. Print out the result. resp.on('end', () => { //console.log("code:" + data); var baseComponentIdForUrl = saveHelper.getValueOfCodeString(data, "base_component_id") console.log("baseComponentIdForUrl:" + baseComponentIdForUrl); if (!isValidObject(baseComponentIdForUrl)) { baseComponentIdForUrl = loadjsurl.replace(/[^A-Z0-9]/ig, "_"); } var jsCode = data console.log("*********** Trying to load loadjsurl code *************") forkedProcesses["forked"].send({ message_type: "save_code", base_component_id: baseComponentIdForUrl, parent_hash: null, code: data, options: { make_public: true, save_html: true } }); runapp = baseComponentIdForUrl }); }).on("error", (err) => { console.log("Error: " + err.message); }); } else if (isValidObject(loadjsfile)) { var jsFile = loadjsfile var data2 = fs.readFileSync(jsFile).toString() var baseComponentIdForFile = saveHelper.getValueOfCodeString(data2, "base_component_id") if (!isValidObject(baseComponentIdForFile)) { baseComponentIdForFile = loadjsfile.replace(/[^A-Z0-9]/ig, "_"); } //console.log("code from file:" + data2); //console.log("*********** Trying to load loadjsfile code *************") forkedProcesses["forked"].send({ message_type: "save_code", base_component_id: baseComponentIdForFile, parent_hash: null, code: data2, options: { make_public: true, save_html: true } }); runapp = baseComponentIdForFile } else if (isValidObject(loadjscode)) { console.log("loadjscode ...") var data2 = loadjscode var baseComponentIdForCode = saveHelper.getValueOfCodeString(data2, "base_component_id") console.log("baseComponentIdForCode:" + baseComponentIdForCode); if (!isValidObject(baseComponentIdForCode)) { baseComponentIdForCode = "code_" + (("" + Math.random()).replace(/[^A-Z0-9]/ig, "_")); console.log("baseComponentIdForFile:" + baseComponentIdForCode); } //console.log("code:" + data2); console.log("*********** Trying to load loadjscode code *************") forkedProcesses["forked"].send({ message_type: "save_code", base_component_id: baseComponentIdForCode, parent_hash: null, code: data2, options: { make_public: true, save_html: true } }); runapp = baseComponentIdForCode } } function mkdirSync(dirPath) { try { mkdirp.sync(dirPath) } catch (err) { //if (err.code !== 'EEXIST') throw err } } function outputToConsole(text) { var c = console; c.log(text); } 
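// Example of the fallback id derivation used in checkForJSLoaded() above when
// loaded code does not declare a base_component_id: every character of the
// URL or file name that is not a letter or digit is replaced with an
// underscore, so the same source always maps to the same app id, e.g.
//
//   "https://example.com/apps/demo.js".replace(/[^A-Z0-9]/ig, "_")
//   // => "https___example_com_apps_demo_js"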
function copyFileSync( source, target ) {

    var targetFile = target;

    //if target is a directory a new file with the same name will be created
    if ( fs.existsSync( target ) ) {
        if ( fs.lstatSync( target ).isDirectory() ) {
            targetFile = path.join( target, path.basename( source ) );
        }
    }

    fs.writeFileSync(targetFile, fs.readFileSync(source));
}

function copyFolderRecursiveSync( source, target ) {
    //console.log('copy from: '+ source + ' to ' + target);
    var files = [];

    //check if folder needs to be created or integrated
    var targetFolder = path.join( target, path.basename( source ) );
    if ( !fs.existsSync( targetFolder ) ) {
        fs.mkdirSync( targetFolder );
    }

    //copy
    if ( fs.lstatSync( source ).isDirectory() ) {
        files = fs.readdirSync( source );
        files.forEach( function ( file ) {
            var curSource = path.join( source, file );
            if ( fs.lstatSync( curSource ).isDirectory() ) {
                copyFolderRecursiveSync( curSource, targetFolder );
            } else {
                copyFileSync( curSource, targetFolder );
                //console.log('copying: ' + targetFolder);
            }
        } );
    }
}

// ============================================================
//  This sends a message to a specific websocket
// ============================================================
function sendToBrowserViaWebSocket(aws, msg) {
    aws.emit(msg.type,msg);
}

function isLocalMachine(req) {
    if ((req.ip == '127.0.0.1') || (hostaddress == req.ip) || (hostaddress == "0.0.0.0")) { // this is the correct line to use
    //if (req.ip == '127.0.0.1') { // this is used for debugging only so that we can deny access from the local machine
        return true;
    };
    return false;
}

//------------------------------------------------------------------------------
// test if allowed
//------------------------------------------------------------------------------
function canAccess(req,res) {
    if (!locked) {
        return true;
    };
    if (isLocalMachine(req) ) {
        return true;
    };
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end("Sorry but access to " + username + "'s data is not allowed. Please ask " + username + " to unlock their Yazz account");
    return false;
};

function extractHostname(url) {
    var hostname;
    //find & remove protocol (http, ftp, etc.) and get hostname
    if (url.indexOf("://") > -1) {
        hostname = url.split('/')[2];
    } else {
        hostname = url.split('/')[0];
    }

    //find & remove port number
    hostname = hostname.split(':')[0];
    //find & remove "?"
    hostname = hostname.split('?')[0];

    return hostname;
}

function extractRootDomain(url) {
    var domain = extractHostname(url),
        splitArr = domain.split('.'),
        arrLen = splitArr.length;

    //extracting the root domain here
    if (arrLen > 2) {
        domain = splitArr[arrLen - 2] + '.'
+ splitArr[arrLen - 1]; } return domain; } function findViafromString(inp) { if (inp == null) { return ""; } var ll = inp.split(' '); for (var i=0; i< ll.length ; i++){ if (ll[i] != null) { if (ll[i].indexOf(":") != -1) { return extractRootDomain(ll[i]); } } } return ""; } function aliveCheckFn() { var urlToConnectTo = "http://" + centralHostAddress + ":" + centralHostPort + '/client_connect'; //console.log('-------* urlToConnectTo: ' + urlToConnectTo); //console.log('trying to connect to central server...'); request({ uri: urlToConnectTo, method: "GET", timeout: 10000, agent: false, followRedirect: true, maxRedirects: 10, qs: { requestClientInternalHostAddress: hostaddress , requestClientInternalPort: port , clientUsername: username } }, function(error, response, body) { //console.log('Error: ' + error); if (response) { if (response.statusCode == '403') { //console.log('403 received, not allowed through firewall for ' + urlToConnectTo); //open("http://" + centralHostAddress + ":" + centralHostPort); } else { ////console.log('response: ' + JSON.stringify(response)); ////console.log(body); } } }); }; function runOnPageExists(req, res, homepage) { if (fs.existsSync(homepage)) { if (typeOfSystem == 'client') { if (!canAccess(req,res)) { return; } res.end(fs.readFileSync(homepage)); } } else { setTimeout(function() { runOnPageExists(req, res, homepage) },3000) } } function getRoot(req, res, next) { hostcount++; //console.log("Host: " + req.headers.host + ", " + hostcount); //console.log("Full URL: " + req.protocol + '://' + req.get('host') + req.originalUrl); var homepage = path.join(__dirname, '../public/go.html') var homepageUrl = serverProtocol + '://yazz.com/visifile/index.html?time=' + new Date().getTime() if (req.headers.host) { if (req.query.goto) { console.log("*** FOUND goto") res.end(fs.readFileSync(homepage)); return } if (req.query.embed) { console.log("*** FOUND embed") res.end(fs.readFileSync(homepage)); return } if (req.headers.host.toLowerCase().endsWith('yazz.com')) { res.writeHead(301, {Location: homepageUrl } ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('dannea.com')) { res.writeHead(301, {Location: homepageUrl } ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('canlabs.com')) { res.writeHead(301, {Location: 'http://canlabs.com/canlabs/index.html'} ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('gosharedata.com')) { res.writeHead(301, {Location: homepageUrl } ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('visifile.com')) { res.writeHead(301, {Location: homepageUrl } ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('visifiles.com')) { res.writeHead(301, {Location: homepageUrl} ); res.end(); return; }; if (req.headers.host.toLowerCase().endsWith('appshare.co')) { res.writeHead(301, {Location: homepageUrl } ); res.end(); return; }; }; if (isValidObject(envVars.YAZZ_RUN_APP)) { runapp = envVars.YAZZ_RUN_APP } if (runhtml && (!req.query.goto) && (!req.query.embed)) { homepage = runhtml runOnPageExists(req,res,homepage) return } else if (runapp && (!req.query.goto) && (!req.query.embed)) { homepage = path.join( userData, 'apps/' + runapp + '.html' ) runOnPageExists(req,res,homepage) return } else if (loadjsurl && (!req.query.goto) && (!req.query.embed)) { homepage = path.join( userData, 'apps/' + runapp + '.html' ) runOnPageExists(req,res,homepage) return } else if (loadjsfile && (!req.query.goto) && (!req.query.embed)) { homepage = path.join( userData, 'apps/' + runapp + 
'.html' ) runOnPageExists(req,res,homepage) return } else if (loadjscode && (!req.query.goto) && (!req.query.embed)) { homepage = path.join( userData, 'apps/' + runapp + '.html' ) runOnPageExists(req,res,homepage) return } else { homepage = path.join( userData, 'apps/homepage.html' ) runOnPageExists(req,res,homepage) return } console.log("Serving: " + homepage) } function getEditApp(req, res) { hostcount++; // I dont know why sockets.io calls .map files here if (req.path.endsWith(".map")) { return } var parts = req.path.split('/'); var lastSegment = parts.pop() || parts.pop(); console.log("URL PATH: " + lastSegment); //console.log("Full URL: " + req.protocol + '://' + req.get('host') + req.originalUrl); // // send the edit page // var homepage = path.join(__dirname, '../public/go.html') var baseComponentId = lastSegment var newStaticFileContent = fs.readFileSync(homepage) newStaticFileContent = newStaticFileContent.toString().replace("var editAppShareApp = null", "var editAppShareApp = '" + baseComponentId + "'") res.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'}); res.end(newStaticFileContent); } function testFirewall(req, res) { var tracking_id = url.parse(req.url, true).query.tracking_id; var server = url.parse(req.url, true).query.server; //console.log(JSON.stringify(tracking_id,null,2)); res.writeHead(200, {'Content-Type': 'text/plain'}); res.end(JSON.stringify({ got_through_firewall: tracking_id , server: server, username: username, locked: locked })); }; function websocketFn(ws) { serverwebsockets.push(ws); sendToBrowserViaWebSocket(ws, {type: "socket_connected"}); sendOverWebSockets({ type: "env_vars", value: envVars }); //console.log('Socket connected : ' + serverwebsockets.length); sendOverWebSockets({ type: "network_ip_address_intranet", value: hostaddressintranet }); ws.on('message', function(msg) { var receivedMessage = eval("(" + msg + ")"); //console.log(" 1- Server recieved message: " + JSON.stringify(receivedMessage)); // if we get the message "server_get_all_queries" from the web browser if (receivedMessage.message_type == "server_get_all_queries") { var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[seqNum] = ws; //console.log(" 2 "); forkedProcesses["forked"].send({ message_type: "get_all_queries", seq_num: seqNum }); } else if (receivedMessage.message_type == "loadUiComponent") { //console.log("***** } else if (msg.message_type == loadUiComponent) ") var componentIds = receivedMessage.find_components.base_component_ids dbsearch.serialize( function() { var stmt = dbsearch.all( "SELECT * FROM system_code WHERE base_component_id in " + "(" + componentIds.map(function(){ return "?" 
}).join(",") + " )" + " and code_tag = 'LATEST' ", componentIds , function(err, results) { if (results) { if (results.length > 0) { var codeId = results[0].id dbsearch.all( "SELECT dependency_name FROM app_dependencies where code_id = ?; ", codeId, function(err, results2) { results[0].libs = results2 sendToBrowserViaWebSocket( ws, { type: "server_returns_loadUiComponent_to_browser", seq_num: receivedMessage.seq_num, record: JSON.stringify(results,null,2), args: JSON.stringify(receivedMessage.args,null,2), test: 1 }); }) } } }) }, sqlite3.OPEN_READONLY) // ______ // Browser --Send me your data--> Server // ______ // } else if (receivedMessage.message_type == "edit_static_app") { console.log("*** server got message from static app: edit_static_app") var sql_data = receivedMessage.sql_data var code_fn = receivedMessage.code_fn forkedProcesses["forked"].send({ message_type: "save_code_from_upload", base_component_id: receivedMessage.base_component_id, parent_hash: null, code: code_fn, client_file_upload_id: -1, options: {save_html: true, fast_forward_database_to_latest_revision: true}, sqlite_data: sql_data }); sendToBrowserViaWebSocket( ws, { type: "edit_static_app_url" , url: receivedMessage.host_editor_address + "/edit/" + receivedMessage.base_component_id , size_of_db: "" + (sql_data?sql_data.length:0) , code_fn: "" + (code_fn?code_fn.length:0) }); // ______ // Browser --Send me your data--> Server // ______ // } else if (receivedMessage.message_type == "browser_asks_server_for_data") { var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[seqNum] = ws; // ______ // Server --Send me your data--> Subprocess // ______ // forkedProcesses["forked"].send({ message_type: "server_asks_subprocess_for_data", seq_num: seqNum }); } else if (receivedMessage.message_type == "browser_asks_server_for_data") { var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[seqNum] = ws; // ______ // Server --Send me your data--> Subprocess // ______ // forkedProcesses["forked"].send({ message_type: "server_asks_subprocess_for_data", seq_num: seqNum }); } else if (receivedMessage.message_type == "browser_asks_server_for_apps") { // console.log("******************* browser_asks_server_for_apps *******************") findLatestVersionOfApps( function(results) { // console.log(JSON.stringify(results,null,2)) sendToBrowserViaWebSocket( ws, { type: "vf_app_names", results: results }); }) // -------------------------------------------------------------------- // // callDriverMethod // // "callDriverMethod" is used to call server side apps/code. // // // // -------------------------------------------------------------------- } else if (receivedMessage.message_type == "callDriverMethod") { // Use an integer counter to identify whoever was // calling the server function (in this case a web browser with // a web socket). 
We need to do this as there may be several // web browsers connected to this one server var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[ seqNum ] = ws; //console.log(" .......1 Electron callDriverMethod: " + JSON.stringify(receivedMessage,null,2)); forkedProcesses["forked"].send({ message_type: "callDriverMethod", find_component: receivedMessage.find_component, args: receivedMessage.args, seq_num_parent: seqNum, seq_num_browser: receivedMessage.seqNum }); } });}; function file_uploadSingleFn(req, res) { //console.log('----- file_uploadSingle --------------'); //console.log(req.file); //console.log("**FILE** " + JSON.stringify(Object.keys(req))); //console.log('-------------------------------------------------------------------------------------'); //console.log('-------------------------------------------------------------------------------------'); //console.log('-------------------------------------------------------------------------------------'); //console.log(JSON.stringify(req.files.length)); //console.log("client_file_upload_id: " + JSON.stringify(req.body.client_file_upload_id,null,2)) var client_file_upload_id = req.body.client_file_upload_id //console.log("**client_file_upload_id** " + JSON.stringify(client_file_upload_id)); //console.log( " next: " + JSON.stringify(next)); res.status( 200 ).send( req.file ); //console.log('Loading saved Creator app' ); var ifile = req.file //console.log(" " + JSON.stringify(ifile)); var ext = ifile.originalname.split('.').pop(); ext = ext.toLowerCase(); //console.log('Ext: ' + ext); if ((ext == "html") || (ext == "html")) { var localp2; localp2 = path.join(userData, 'uploads/' + ifile.filename); var localp = localp2 + '.' + ext; fs.renameSync(localp2, localp); var readIn = fs.readFileSync(localp).toString() //console.log(''); //console.log('Local saved path: ' + localp); var indexStart = readIn.indexOf("/*APP_START*/") var indexEnd = readIn.indexOf("/*APP_END*/") //console.log(`indexStart: ${indexStart}`) //console.log(`indexEnd: ${indexEnd}`) if ((indexStart > 0) && (indexEnd > 0)) { indexStart += 13 + 10 indexEnd -= 2 var tts = readIn.substring(indexStart,indexEnd) //console.log(tts) var ytr = unescape(tts) console.log("SENDING FROM UPLOAD___=+++****") var bci = saveHelper.getValueOfCodeString(ytr, "base_component_id") var indexStart = readIn.indexOf("/*APP_START*/") var indexEnd = readIn.indexOf("/*APP_END*/") var indexOfSqliteData = readIn.indexOf("var sqlitedata = '") var indexOfSqliteDataEnd = readIn.indexOf("'//sqlitedata") var sqlitedatafromupload = null if ((indexOfSqliteData != -1) && (indexOfSqliteDataEnd != -1)) { sqlitedatafromupload = readIn.substring( indexOfSqliteData + 18, indexOfSqliteDataEnd) } forkedProcesses["forked"].send({ message_type: "save_code_from_upload", base_component_id: bci, parent_hash: null, code: ytr, client_file_upload_id: client_file_upload_id, options: {save_html: true, fast_forward_database_to_latest_revision: true}, sqlite_data: sqlitedatafromupload }); } } else if ((ext == "js") || (ext == "yazz") || (ext == "pilot")) { var localp2; localp2 = path.join(userData, 'uploads/' + ifile.filename); var localp = localp2 + '.' 
+ ext; fs.renameSync(localp2, localp); var readIn = fs.readFileSync(localp).toString() var bci = saveHelper.getValueOfCodeString(readIn, "base_component_id") forkedProcesses["forked"].send({ message_type: "save_code_from_upload", base_component_id: bci, parent_hash: null, code: readIn, client_file_upload_id: client_file_upload_id, options: {save_html: true, fast_forward_database_to_latest_revision: false}, sqlite_data: "" }); } else { console.log('Ignoring file '); } }; function file_uploadFn(req, res, next) { //console.log('-------------------------------------------------------------------------------------'); //console.log('-------------------------------------------------------------------------------------'); //console.log('-------------------------------------------------------------------------------------'); //console.log('-------------------------------------------------------------------------------------'); //console.log('-------------------------------------------------------------------------------------'); //console.log(JSON.stringify(req.files.length)); //console.log("client_file_upload_id: " + JSON.stringify(req.body.client_file_upload_id,null,2)) var client_file_upload_id = req.body.client_file_upload_id //console.log("**FILES** " + JSON.stringify(req.files)); //console.log( " next: " + JSON.stringify(next)); //console.log('......................................................................................'); //console.log('......................................................................................'); //console.log('......................................................................................'); //console.log('......................................................................................'); //console.log('......................................................................................'); res.status( 200 ).send( req.files ); var ll = req.files.length; for (var i = 0; i < ll ; i ++) { //console.log('Loading saved Creator app' ); var ifile = req.files[i]; //console.log(" " + JSON.stringify(ifile)); var ext = ifile.originalname.split('.').pop(); ext = ext.toLowerCase(); //console.log('Ext: ' + ext); if ((ext == "html") || (ext == "html")) { var localp2; localp2 = path.join(userData, 'uploads/' + ifile.filename); var localp = localp2 + '.' 
+ ext;
        fs.renameSync(localp2, localp);
        var readIn = fs.readFileSync(localp).toString()
        //console.log('');
        //console.log('Local saved path: ' + localp);

        var indexStart = readIn.indexOf("/*APP_START*/")
        var indexEnd = readIn.indexOf("/*APP_END*/")
        //console.log(`indexStart: ${indexStart}`)
        //console.log(`indexEnd: ${indexEnd}`)
        if ((indexStart > 0) && (indexEnd > 0)) {
            indexStart += 13 + 10
            indexEnd -= 2
            var tts = readIn.substring(indexStart,indexEnd)
            //console.log(tts)
            var ytr = unescape(tts)
            console.log("SENDING FROM UPLOAD___=+++****")
            var bci = saveHelper.getValueOfCodeString(ytr, "base_component_id")

            var indexStart = readIn.indexOf("/*APP_START*/")
            var indexEnd = readIn.indexOf("/*APP_END*/")
            var indexOfSqliteData = readIn.indexOf("var sqlitedata = '")
            var indexOfSqliteDataEnd = readIn.indexOf("'//sqlitedata")
            var sqlitedatafromupload = null
            if ((indexOfSqliteData != -1) && (indexOfSqliteDataEnd != -1)) {
                sqlitedatafromupload = readIn.substring( indexOfSqliteData + 18, indexOfSqliteDataEnd)
            }

            forkedProcesses["forked"].send({
                message_type: "save_code_from_upload",
                base_component_id: bci,
                parent_hash: null,
                code: ytr,
                client_file_upload_id: client_file_upload_id,
                options: {save_html: true, fast_forward_database_to_latest_revision: true},
                sqlite_data: sqlitedatafromupload
            });
        }

    } else if ((ext == "js") || (ext == "yazz") || (ext == "pilot")) {
        var localp2;
        localp2 = path.join(userData, 'uploads/' + ifile.filename);
        var localp = localp2 + '.' + ext;
        fs.renameSync(localp2, localp);
        var readIn = fs.readFileSync(localp).toString()
        var bci = saveHelper.getValueOfCodeString(readIn, "base_component_id")
        forkedProcesses["forked"].send({
            message_type: "save_code_from_upload",
            base_component_id: bci,
            parent_hash: null,
            code: readIn,
            client_file_upload_id: client_file_upload_id,
            options: {save_html: true, fast_forward_database_to_latest_revision: false},
            sqlite_data: ""
        });

    } else {
        console.log('Ignoring file ');
    }
    }
};

function code_uploadFn(req, res) {
    forkedProcesses["forked"].send({
        message_type: "save_code_from_upload",
        parent_hash: null,
        code: "function(args) { /* rest_api('test3') */ return {ab: 163}}",
        options: {save_html: true},
        sqlite_data: ""
    });
};

function send_client_detailsFn(req, res) {
    ////console.log('in send_client_details: ' + JSON.stringify(req,null,2));
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end(JSON.stringify({
        returned: 'some data ',
        server: hostaddress,
        port: port,
        username: username,
        locked: locked,
        localIp: req.ip,
        isLocalMachine: isLocalMachine(req)
    }));
}

function lockFn(req, res) {
    if ((req.query.locked == "TRUE") || (req.query.locked == "true")) {
        locked = true;
    } else {
        locked = false;
    }
    ////console.log('in lock: ' + JSON.stringify(req,null,2));
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end(JSON.stringify({locked: locked}));
}

//------------------------------------------------------------------------------
//  This is called by the central server to get the details of the last
//  client that connected to the central server
//------------------------------------------------------------------------------
function get_connectFn(req, res) {
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end( JSON.stringify(
        {
            requestClientInternalHostAddress: requestClientInternalHostAddress ,
            requestClientInternalPort: requestClientInternalPort ,
            requestClientPublicIp: requestClientPublicIp ,
            requestClientPublicHostName: requestClientPublicHostName ,
            version: 31
        }
    ));
}

function add_new_connectionFn(req, res) {
    var params = req.body;
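    // The HTTP layer does not process the connection details itself: they are
    // forwarded to the "forked" child process below, and the request is
    // acknowledged immediately with {done: "ok"} rather than waiting for the
    // child to finish.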
forkedProcesses["forked"].send({ message_type: "addNewConnection" , params: params}); res.writeHead(200, {'Content-Type': 'text/plain'}); res.end(JSON.stringify({done: "ok"}))}; function add_new_queryFn(req, res) { var params = req.body; forkedProcesses["forked"].send({ message_type: "addNewQuery" , params: params}); res.writeHead(200, {'Content-Type': 'text/plain'}); res.end(JSON.stringify({done: "ok"}))}; function keycloakProtector(params) { return function(req,res,next) { next() return var appName2=null if (params.compIdFromReqFn) { appName2 = params.compIdFromReqFn(req) } dbsearch.serialize( function() { var stmt = dbsearch.all( "SELECT code FROM system_code where base_component_id = ? and code_tag = ?; ", appName2, "LATEST", function(err, results) { if (results.length == 0) { console.log("Could not find component : " + appName2) } else { console.log("Found code for : " + appName2) var fileC = results[0].code.toString() //console.log("Code : " + fileC) var sscode = saveHelper.getValueOfCodeString(fileC,"keycloak",")//keycloak") //console.log("sscode:" + sscode) if (sscode) { //var ssval = eval( "(" + sscode + ")") //console.log("keycloak: " + JSON.stringify(sscode,null,2)) keycloak.protect()(req, res, next) } else { next() } } }) }, sqlite3.OPEN_READONLY) } } //------------------------------------------------------------ // This starts all the system services //------------------------------------------------------------ function startServices() { if (useHttps) { var app2 = express() var newhttp = http.createServer(app2); app2.use(compression()) app2.get('/', function (req, res, next) { return getRoot(req, res, next); }) app2.get('*', function(req, res) { if (req.headers.host.toLowerCase().endsWith('canlabs.com')) { console.log("path: " + req.path) var rty = req.path if (req.path == "/canlabs") { rty = "/canlabs/index.html" } var fileNameRead = path.join(__dirname, '../public' + rty) res.end(fs.readFileSync(fileNameRead)); } else if ( req.path.indexOf(".well-known") != -1 ) { var fileNameRead = path.join(__dirname, '../public' + req.path) res.end(fs.readFileSync(fileNameRead)); } else { console.log("Redirect HTTP to HTTPS") res.redirect('https://' + req.headers.host + req.url); } }) newhttp.listen(80); } app.use(compression()) app.use(cors({ origin: '*' })); app.use(function (req, res, next) { // Website you wish to allow to connect res.header('Access-Control-Allow-Origin', '*'); // Request methods you wish to allow res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE'); // Request headers you wish to allow res.header('Access-Control-Allow-Headers', 'X-Requested-With,content-type'); // Set to true if you need the website to include cookies in the requests sent // to the API (e.g. 
in case you use sessions) res.setHeader('Access-Control-Allow-Credentials', false); // Pass to next layer of middleware next(); }); //------------------------------------------------------------------------------ // Show the default page for the different domains //------------------------------------------------------------------------------ app.get('/', function (req, res, next) { return getRoot(req, res, next); }) app.get('/live-check',(req,res)=> { console.log("Live check passed") res.send ("Live check passed"); }); app.get('/readiness-check',(req,res)=> { if (systemReady) { console.log("Readiness check passed") res.send ("Readiness check passed"); } else { console.log("Readiness check failed") res.status(500).send('Readiness check did not pass'); } }); //------------------------------------------------------------------------------ // Allow an app to be edited //------------------------------------------------------------------------------ app.get('/edit/*', function (req, res) { return getEditApp(req, res); }) app.use("/files", express.static(path.join(userData, '/files/'))); function getAppNameFromHtml() { } function getBaseComponentIdFromRequest(req){ var parts = req.path.split('/'); var appHtmlFile = parts.pop() || parts.pop(); var appName = appHtmlFile.split('.').slice(0, -1).join('.') return appName } //app.get('/app/*', keycloakProtector({compIdFromReqFn: getBaseComponentIdFromRequest}), function (req, res, next) { app.get('/app/*', function (req, res, next) { if (req.kauth) { console.log('Keycloak details from server:') console.log(req.kauth.grant) } var parts = req.path.split('/'); var appHtmlFile = parts.pop() || parts.pop(); //console.log("appHtemlFile: " + appHtmlFile); var appName = appHtmlFile.split('.').slice(0, -1).join('.') //console.log("appName: " + appName); //console.log("path: " + path); var appFilePath = path.join(userData, 'apps/' + appHtmlFile) var fileC2 = fs.readFileSync(appFilePath, 'utf8').toString() res.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'}); res.end(fileC2); }) //app.use("/app_dbs", express.static(path.join(userData, '/app_dbs/'))); app.use("/public/aframe_fonts", express.static(path.join(__dirname, '../public/aframe_fonts'))); app.use( express.static(path.join(__dirname, '../public/'))) app.use(bodyParser.json()); // support json encoded bodies app.use(bodyParser.urlencoded({ extended: true })); // support encoded bodies //------------------------------------------------------------------------------ // test_firewall //------------------------------------------------------------------------------ app.get('/test_firewall', function (req, res) { return testFirewall(req,res); }); //------------------------------------------------------------------------------ // get_intranet_servers //------------------------------------------------------------------------------ app.get('/get_intranet_servers', function (req, res) { //console.log("1 - get_intranet_servers: " + req.ip) //console.log("1.1 - get_intranet_servers: " + Object.keys(req.headers)) var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[seqNum] = res; //console.log("2") forkedProcesses["forked"].send({ message_type: "get_intranet_servers", seq_num: seqNum, requestClientPublicIp: req.ip , numberOfSecondsAliveCheck: numberOfSecondsAliveCheck, requestVia: findViafromString(req.headers.via) }); }); app.post('/file_upload_single', upload.single( 'uploadfilefromhomepage' ), function (req, res, next) { return file_uploadSingleFn(req, res, next); }); 
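// Example: uploading a single app file from the command line (host and port
// are illustrative; the form field names match the multer route above):
//
//   curl -F "uploadfilefromhomepage=@myapp.html" \
//        -F "client_file_upload_id=42" \
//        http://localhost:80/file_upload_single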
app.post('/file_upload', upload.array( 'file' ), function (req, res, next) { return file_uploadFn(req, res, next); }); app.get('/code_upload', function (req, res, next) { code_uploadFn(req, res); //zzz res.writeHead(200, {'Content-Type': 'text/html; charset=utf-8'}); res.end("Done"); }); app.get('/send_client_details', function (req, res) { return send_client_detailsFn(req, res); }) app.get('/lock', function (req, res) { return lockFn(req, res); }) process.on('uncaughtException', function (err) { console.log(err); }) //------------------------------------------------------------------------------ // This is called by the central server to get the details of the last // client that connected tp the central server //------------------------------------------------------------------------------ app.get('/get_connect', function (req, res) { return get_connectFn(req, res); }) //app.enable('trust proxy') app.get('/get_all_table', function (req, res) { var tableName = url.parse(req.url, true).query.tableName; var fields = url.parse(req.url, true).query.fields; //console.log("1 - get_all_table ,tableName: " + tableName) //console.log(" get_all_table ,fields: " + fields) var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[seqNum] = res; //console.log("2 - get_search_results") forkedProcesses["forked"].send({ message_type: "get_all_tables", seq_num: seqNum, table_name: tableName, fields: fields }); }); app.post('/add_new_connection', function (req, res) { return add_new_connectionFn(req, res) }); app.post('/add_new_query',function (req, res) { return add_new_queryFn(req, res) }); //------------------------------------------------------------------------------ // run on the central server only // // This is where the client sends its details to the central server //------------------------------------------------------------------------------ app.get('/client_connect', function (req, res) { //console.log("1 - client_connect: ") var queryData = url.parse(req.url, true).query; var requestClientInternalHostAddress = req.query.requestClientInternalHostAddress; //console.log(" requestClientInternalHostAddress: " + requestClientInternalHostAddress) var requestClientInternalPort = req.query.requestClientInternalPort; //console.log(" requestClientInternalPort: " + requestClientInternalPort) var requestVia = findViafromString(req.headers.via); //console.log(" requestVia: " + requestVia) var requestClientPublicIp = req.ip; //console.log(" requestClientPublicIp: " + requestClientPublicIp) var clientUsername = req.query.clientUsername; //console.log(" clientUsername: " + clientUsername) //requestClientPublicHostName = req.headers['x-forwarded-for'] || req.connection.remoteAddress; var requestClientPublicHostName = "req keys::" + Object.keys(req) + ", VIA::" + req.headers.via + ", raw::" + JSON.stringify(req.rawHeaders); //console.log(" requestClientPublicHostName: " + requestClientPublicHostName) var seqNum = queuedResponseSeqNum; queuedResponseSeqNum ++; queuedResponses[seqNum] = res; //console.log("2") forkedProcesses["forked"].send({ message_type: "client_connect", seq_num: seqNum, requestClientInternalHostAddress: requestClientInternalHostAddress, requestClientInternalPort: requestClientInternalPort, requestVia: requestVia, requestClientPublicIp: requestClientPublicIp, clientUsername: clientUsername, requestClientPublicHostName: requestClientPublicHostName }); }) //------------------------------------------------------------------------------ // start the web server 
//------------------------------------------------------------------------------
if (useHttps) {
    var caCerts = readCerts()
    var certOptions = {
        key: fs.readFileSync(privateKey, 'utf8'),
        cert: fs.readFileSync(publicCertificate, 'utf8'),
        ca: caCerts
    }
    // Ask clients for a certificate, but do not reject connections without one
    certOptions.requestCert = true
    certOptions.rejectUnauthorized = false
    httpServer = https.createServer(certOptions, app)
} else {
    httpServer = http.createServer(app)
}
socket = require('socket.io')
httpServer.listen(port, hostaddress, function () {
    outputDebug("****HOST=" + hostaddress + "HOST****\n");
    outputDebug("****PORT=" + port + "PORT****\n");
    outputDebug(typeOfSystem + ' started on port ' + port + ' with local folder at ' + process.cwd() + ' and __dirname = ' + __dirname + "\n");

    //
    // We don't listen on websockets here with socket.io as they often stop working!!!
    // Crazy, I know!!!! So we removed websockets from the list of transports below
    //
    io = socket.listen(httpServer, {
        log: false,
        agent: false,
        origins: '*:*',
        transports: ['htmlfile', 'xhr-polling', 'jsonp-polling', 'polling']
    });
    io.on('connection', function (sck) {
        var connt = JSON.stringify(sck.conn.transport, null, 2);
        websocketFn(sck)
    });
})
//console.log('addr: '+ hostaddress + ":" + port);
//aliveCheckFn();
if (typeOfSystem == 'client') {
    //setInterval(aliveCheckFn ,numberOfSecondsAliveCheck * 1000);
}
setTimeout(function () {
    forkedProcesses["forked"].send({message_type: 'setUpPredefinedComponents'});
}, 1000)
}

function findLatestVersionOfApps(callbackFn) {
    dbsearch.serialize(
        function () {
            dbsearch.all(
                "SELECT id,base_component_id,display_name, component_options FROM system_code where component_scope = ? and code_tag = ?; ",
                "app",
                "LATEST",
                function (err, results) {
                    // guard against query errors, where results is undefined
                    if (results && results.length > 0) {
                        callbackFn(results)
                    } else {
                        callbackFn(null)
                    }
                })
        }, sqlite3.OPEN_READONLY)
}

function findDriversWithMethodLike(methodName, callbackFn) {
    dbsearch.serialize(
        function () {
            // bind methodName as a parameter instead of concatenating it into
            // the SQL string, which was open to injection
            dbsearch.all(
                "SELECT base_component_id FROM system_code where on_condition like ?; ",
                '%' + methodName + '%',
                function (err, results) {
                    if (results && results.length > 0) {
                        callbackFn(results)
                    } else {
                        callbackFn(null)
                    }
                })
        }, sqlite3.OPEN_READONLY)
}

function bytesToMb(bytes) {
    return (bytes / 1024) / 1024
}

function getChildMem(childProcessName, stats) {
    var memoryused = 0
    if (stats) {
        memoryused = stats.memory;
        totalMem += memoryused
    }
    if (showStats) {
        console.log(`${childProcessName}: ${Math.round(bytesToMb(memoryused) * 100) / 100} MB`);
    }
}

function usePid(childProcessName, childprocess) {
    pidusage(childprocess.pid, function (err, stats) {
        getChildMem(childProcessName, stats)
        returnedmemCount++
        if (returnedmemCount == allForked.length) {
            if (showStats) {
                console.log("------------------------------------")
                console.log("   TOTAL MEM = " + bytesToMb(totalMem) + " MB")
                console.log("------------------------------------")
            }
            inmemcalc = false
            yazzMemoryUsageMetric.set(totalMem)
        }
    });
}

if (statsInterval > 0) {
    setInterval(function () {
        if (!inmemcalc) {
            inmemcalc = true
            totalMem = 0
            const used = process.memoryUsage().heapUsed;
            totalMem += used
            yazzProcessMainMemoryUsageMetric.set(used)
            if (showStats) {
                console.log(`Main: ${Math.round(bytesToMb(used) * 100) / 100} MB`);
            }
            allForked = Object.keys(forkedProcesses)
            returnedmemCount = 0
            for (var ttt = 0; ttt < allForked.length; ttt++) {
                var childProcessName = allForked[ttt]
                const childprocess = forkedProcesses[childProcessName]
                usePid(childProcessName, childprocess)
            }
        }
    }, (statsInterval * 1000))
}

function readCerts() {
    console.log("Checking CA certs")
console.log("-----------------" ) console.log("" ) console.log("CA Cert 1 = " + caCertificate1) console.log("CA Cert 2 = " + caCertificate2) console.log("CA Cert 3 = " + caCertificate3) console.log("" ) console.log("" ) let caCertsRet = [] if (caCertificate1) { console.log("CA Cert 1 = " + caCertificate1) var fff = fs.readFileSync(caCertificate1, 'utf8') console.log(" = " + fff) caCertsRet.push(fff) } if (caCertificate2) { console.log("CA Cert 2 = " + caCertificate2) var fff = fs.readFileSync(caCertificate2, 'utf8') console.log(" = " + fff) caCertsRet.push(fff) } if (caCertificate3) { console.log("CA Cert 3 = " + caCertificate3) var fff = fs.readFileSync(caCertificate3, 'utf8') console.log(" = " + fff) caCertsRet.push(fff) } return caCertsRet }
Adding debug code
src/electron.js
Adding debug code
<ide><path>src/electron.js <ide> <ide> <ide> var expressWs = require('express-ws')(app); <del>console.log("__filename: " + __filename) <del>console.log("__dirname: " + __dirname) <del> <del> <del>console.log("Platform: " + process.platform) <add>outputDebug("__filename: " + __filename) <add>outputDebug("__dirname: " + __dirname) <add> <add> <add>outputDebug("Platform: " + process.platform) <ide> <ide> if (isWin) { <del> console.log("Creating Windows driver") <add> outputDebug("Creating Windows driver") <ide> mkdirp.sync('node_modules\\sqlite3\\lib/binding\\node-v72-win32-x64'); <ide> var srcNodeJsFile = path.join(__dirname,'..\\node_sqlite3_win64.rename') <del> console.log("srcNodeJsFile: " + srcNodeJsFile) <add> outputDebug("srcNodeJsFile: " + srcNodeJsFile) <ide> fs.copyFileSync( <ide> srcNodeJsFile, <ide> path.join(__dirname,'..\\node_modules\\sqlite3\\lib\\binding\\node-v72-win32-x64\\node_sqlite3.node'), <ide> <ide> <ide> } else if (isLinux) { <del> console.log("Creating Linux driver") <add> outputDebug("Creating Linux driver") <ide> mkdirp.sync('node_modules/sqlite3/lib/binding/node-v64-linux-x64'); <ide> var srcNodeJsFile = path.join(__dirname,'../node_sqlite3_linux64.rename') <del> console.log("srcNodeJsFile: " + srcNodeJsFile) <add> outputDebug("srcNodeJsFile: " + srcNodeJsFile) <ide> fs.copyFileSync( <ide> srcNodeJsFile, <ide> path.join(__dirname,'../node_modules/sqlite3/lib/binding/node-v64-linux-x64/node_sqlite3.node'), <ide> <ide> <ide> } else if (isMac) { <del> console.log("Creating Mac driver") <add> outputDebug("Creating Mac driver") <ide> mkdirp.sync('node_modules/sqlite3/lib/binding/node-v64-darwin-x64'); <ide> <ide> var srcNodeJsFile = path.join(__filename,'../../node_sqlite3_macos64.rename') <del> console.log("srcNodeJsFile: " + srcNodeJsFile) <add> outputDebug("srcNodeJsFile: " + srcNodeJsFile) <ide> fs.copyFileSync( <ide> srcNodeJsFile, <ide> path.join(__dirname,'../node_modules/sqlite3/lib/binding/node-v64-darwin-x64/node_sqlite3.node'), <ide> <ide> <ide> } else { <del> console.log("Error, unsupported platform: " + process.platform) <add> outputDebug("Error, unsupported platform: " + process.platform) <ide> } <ide> <ide> <ide> <ide> if (useHost) { <ide> hostaddress = useHost <del> console.log("USE Host: " + useHost) <add> outputDebug("USE Host: " + useHost) <ide> } <ide> <ide> } <ide> <ide> <del> console.log("    msg.base_component_id: " + msg.base_component_id); <del> console.log("    seqNum: " + seqNum); <add> outputDebug("    msg.base_component_id: " + msg.base_component_id); <add> outputDebug("    seqNum: " + seqNum); <ide> forkedProcesses["forked"].send({ <ide> message_type: "callDriverMethod", <ide> find_component: { <ide> <ide> <ide> if (isWin) { <del> console.log("Running as Windows") <add> outputDebug("Running as Windows") <ide> var localappdata = process.env.LOCALAPPDATA <ide> userData = path.join(localappdata, '/Yazz/') <ide> } else { <ide> <ide> <ide> if (deleteOnStartup) { <del> console.log("deleting dir :" + userData) <add> outputDebug("deleting dir :" + userData) <ide> if (userData.length > 6) { <ide> deleteYazzDataV2(userData) <ide> } <ide> shuttingDown = true; <ide> <ide> if (dbsearch) { <del> console.log("Database closing...") <add> outputDebug("Database closing...") <ide> dbsearch.run("PRAGMA wal_checkpoint;") <ide> dbsearch.close(function(err){ <del> console.log("...database closed") <add> outputDebug("...database closed") <ide> visifile = null <ide> <ide> }) <ide> } <ide> <ide> if (forkedProcesses["forked"]) { <del> console.log("Killed Process forked")
<add> outputDebug("Killed Process forked") <ide> forkedProcesses["forked"].kill(); <ide> } <ide> if (forkedProcesses["forkedExeScheduler"]) { <del> console.log("Killed Exe Scheduler process") <add> outputDebug("Killed Exe Scheduler process") <ide> forkedProcesses["forkedExeScheduler"].kill(); <ide> } <ide> <ide> for (var i = 0; i < executionProcessCount; i++ ) { <ide> var exeProcName = "forkedExeProcess" + i <ide> forkedProcesses[exeProcName].kill(); <del> console.log("Killed Process " + exeProcName) <add> outputDebug("Killed Process " + exeProcName) <ide> } <ide> if (visifile){ <ide> visifile.removeAllListeners('close'); <ide> } <ide> } <ide> <del> console.log("deleteOnExit =" + deleteOnExit) <add> outputDebug("deleteOnExit =" + deleteOnExit) <ide> if (deleteOnExit) { <ide> <del> console.log("deleting dir :" + userData) <add> outputDebug("deleting dir :" + userData) <ide> if (userData.length > 6) { <ide> if (isWin) { <ide> deleteYazzDataWindows(userData) <ide> console.log("deleteYazzDataWindows") <ide> if (dddd.length > 6) { <ide> var ff = 'timeout 8 && rd /s /q "' + dddd + '"' <del> console.log(ff) <add> outputDebug(ff) <ide> fork.exec(ff <ide> , <ide> function(err, stdout, stderr) { <ide> <ide> <ide> function deleteYazzDataV2(dddd) { <del> console.log("----------------------------------") <del> console.log("Before delete :" + ls(dddd)) <del> console.log("----------------------------------") <add> outputDebug("----------------------------------") <add> outputDebug("Before delete :" + ls(dddd)) <add> outputDebug("----------------------------------") <ide> <ide> rimraf.sync(path.join(dddd, 'uploads/')); <ide> rimraf.sync(path.join(dddd, 'files/')); <ide> rimraf.sync(path.join(dddd, '*.visi')); <ide> rimraf.sync(path.join(dddd, '*.visi*')); <ide> <del> console.log("----------------------------------") <del> console.log("After delete :" + ls(dddd)) <del> console.log("----------------------------------") <add> outputDebug("----------------------------------") <add> outputDebug("After delete :" + ls(dddd)) <add> outputDebug("----------------------------------") <ide> } <ide> <ide> function deleteYazzData(dddd) { <ide> loadjsfile = process.argv[2] <ide> } else if ((process.argv[2]) && (!process.argv[2].startsWith("--"))) { <ide> loadjscode = process.argv[2] <del> console.log("load code: " + loadjscode ) <add> outputDebug("load code: " + loadjscode ) <ide> } <ide> <ide> <ide> resp.on('end', () => { <ide> //console.log("code:" + data); <ide> var baseComponentIdForUrl = saveHelper.getValueOfCodeString(data, "base_component_id") <del> console.log("baseComponentIdForUrl:" + baseComponentIdForUrl); <add> outputDebug("baseComponentIdForUrl:" + baseComponentIdForUrl); <ide> if (!isValidObject(baseComponentIdForUrl)) { <ide> baseComponentIdForUrl = loadjsurl.replace(/[^A-Z0-9]/ig, "_"); <ide> } <ide> var jsCode = data <del> console.log("*********** Trying to load loadjsurl code *************") <add> outputDebug("*********** Trying to load loadjsurl code *************") <ide> forkedProcesses["forked"].send({ <ide> message_type: "save_code", <ide> base_component_id: baseComponentIdForUrl, <ide> }); <ide> <ide> }).on("error", (err) => { <del> console.log("Error: " + err.message); <add> outputDebug("Error: " + err.message); <ide> }); <ide> <ide> } else if (isValidObject(loadjsfile)) { <ide> console.log("loadjscode ...") <ide> var data2 = loadjscode <ide> var baseComponentIdForCode = saveHelper.getValueOfCodeString(data2, "base_component_id") <del> console.log("baseComponentIdForCode:" + 
baseComponentIdForCode); <add> outputDebug("baseComponentIdForCode:" + baseComponentIdForCode); <ide> if (!isValidObject(baseComponentIdForCode)) { <ide> baseComponentIdForCode = "code_" + (("" + Math.random()).replace(/[^A-Z0-9]/ig, "_")); <del> console.log("baseComponentIdForFile:" + baseComponentIdForCode); <add> outputDebug("baseComponentIdForFile:" + baseComponentIdForCode); <ide> } <ide> <ide> //console.log("code:" + data2); <del> console.log("*********** Trying to load loadjscode code *************") <add> outputDebug("*********** Trying to load loadjscode code *************") <ide> forkedProcesses["forked"].send({ <ide> message_type: "save_code", <ide> base_component_id: baseComponentIdForCode, <ide> var homepageUrl = serverProtocol + '://yazz.com/visifile/index.html?time=' + new Date().getTime() <ide> if (req.headers.host) { <ide> if (req.query.goto) { <del> console.log("*** FOUND goto") <add> outputDebug("*** FOUND goto") <ide> res.end(fs.readFileSync(homepage)); <ide> return <ide> } <ide> if (req.query.embed) { <del> console.log("*** FOUND embed") <add> outputDebug("*** FOUND embed") <ide> res.end(fs.readFileSync(homepage)); <ide> return <ide> } <ide> runOnPageExists(req,res,homepage) <ide> return <ide> } <del> console.log("Serving: " + homepage) <add> outputDebug("Serving: " + homepage) <ide> <ide> <ide> } <ide> var parts = req.path.split('/'); <ide> var lastSegment = parts.pop() || parts.pop(); <ide> <del> console.log("URL PATH: " + lastSegment); <add> outputDebug("URL PATH: " + lastSegment); <ide> //console.log("Full URL: " + req.protocol + '://' + req.get('host') + req.originalUrl); <ide> <ide> <ide> // ______ <ide> // <ide> } else if (receivedMessage.message_type == "edit_static_app") { <del> console.log("*** server got message from static app: edit_static_app") <add> outputDebug("*** server got message from static app: edit_static_app") <ide> var sql_data = receivedMessage.sql_data <ide> var code_fn = receivedMessage.code_fn <ide> <ide> <ide> } else if (receivedMessage.message_type == "browser_asks_server_for_apps") { <ide> <del> // console.log("******************* browser_asks_server_for_apps *******************") <add> // outputDebug("******************* browser_asks_server_for_apps *******************") <ide> findLatestVersionOfApps( function(results) { <del> // console.log(JSON.stringify(results,null,2)) <add> // outputDebug(JSON.stringify(results,null,2)) <ide> <ide> sendToBrowserViaWebSocket( ws, <ide> { <ide> var tts = readIn.substring(indexStart,indexEnd) <ide> //console.log(tts) <ide> var ytr = unescape(tts) <del> console.log("SENDING FROM UPLOAD___=+++****") <add> outputDebug("SENDING FROM UPLOAD___=+++****") <ide> var bci = saveHelper.getValueOfCodeString(ytr, "base_component_id") <ide> <ide> var indexStart = readIn.indexOf("/*APP_START*/") <ide> }); <ide> <ide> } else { <del> console.log('Ignoring file '); <add> outputDebug('Ignoring file '); <ide> <ide> } <ide> <ide> var tts = readIn.substring(indexStart,indexEnd) <ide> //console.log(tts) <ide> var ytr = unescape(tts) <del> console.log("SENDINF FROM UPLAOD___=+++****") <add> outputDebug("SENDINF FROM UPLAOD___=+++****") <ide> var bci = saveHelper.getValueOfCodeString(ytr, "base_component_id") <ide> <ide> var indexStart = readIn.indexOf("/*APP_START*/") <ide> }); <ide> <ide> } else { <del> console.log('Ignoring file '); <add> outputDebug('Ignoring file '); <ide> <ide> } <ide> <ide> function(err, results) <ide> { <ide> if (results.length == 0) { <del> console.log("Could not find component : " + appName2) <add> 
outputDebug("Could not find component : " + appName2) <ide> } else { <del> console.log("Found code for : " + appName2) <add> outputDebug("Found code for : " + appName2) <ide> var fileC = results[0].code.toString() <ide> //console.log("Code : " + fileC) <ide> <ide> <ide> app2.get('*', function(req, res) { <ide> if (req.headers.host.toLowerCase().endsWith('canlabs.com')) { <del> console.log("path: " + req.path) <add> outputDebug("path: " + req.path) <ide> <ide> var rty = req.path <ide> if (req.path == "/canlabs") { <ide> <ide> <ide> } else { <del> console.log("Redirect HTTP to HTTPS") <add> outputDebug("Redirect HTTP to HTTPS") <ide> res.redirect('https://' + req.headers.host + req.url); <ide> } <ide> }) <ide> <ide> <ide> app.get('/live-check',(req,res)=> { <del> console.log("Live check passed") <add> outputDebug("Live check passed") <ide> res.send ("Live check passed"); <ide> }); <ide> app.get('/readiness-check',(req,res)=> { <ide> if (systemReady) { <del> console.log("Readiness check passed") <add> outputDebug("Readiness check passed") <ide> res.send ("Readiness check passed"); <ide> } else { <del> console.log("Readiness check failed") <add> outputDebug("Readiness check failed") <ide> res.status(500).send('Readiness check did not pass'); <ide> } <ide> }); <ide> //app.get('/app/*', keycloakProtector({compIdFromReqFn: getBaseComponentIdFromRequest}), function (req, res, next) { <ide> app.get('/app/*', function (req, res, next) { <ide> if (req.kauth) { <del> console.log('Keycloak details from server:') <del> console.log(req.kauth.grant) <add> outputDebug('Keycloak details from server:') <add> outputDebug(req.kauth.grant) <ide> } <ide> var parts = req.path.split('/'); <ide> var appHtmlFile = parts.pop() || parts.pop(); <ide> <ide> <ide> process.on('uncaughtException', function (err) { <del> console.log(err); <add> outputDebug(err); <ide> }) <ide> <ide> <ide> totalMem += memoryused <ide> } <ide> if (showStats) { <del> console.log(`${childProcessName}: ${Math.round(bytesToMb(memoryused) * 100) / 100} MB`); <add> outputDebug(`${childProcessName}: ${Math.round(bytesToMb(memoryused) * 100) / 100} MB`); <ide> } <ide> } <ide> <ide> returnedmemCount ++ <ide> if (returnedmemCount == allForked.length) { <ide> if (showStats) { <del> console.log("------------------------------------") <del> console.log(" TOTAL MEM = " + bytesToMb(totalMem) + " MB") <del> console.log("------------------------------------") <add> outputDebug("------------------------------------") <add> outputDebug(" TOTAL MEM = " + bytesToMb(totalMem) + " MB") <add> outputDebug("------------------------------------") <ide> } <ide> inmemcalc = false <ide> yazzMemoryUsageMetric.set(totalMem) <ide> totalMem += used <ide> yazzProcessMainMemoryUsageMetric.set(used) <ide> if (showStats) { <del> console.log(`Main: ${Math.round( bytesToMb(used) * 100) / 100} MB`); <add> outputDebug(`Main: ${Math.round( bytesToMb(used) * 100) / 100} MB`); <ide> } <ide> allForked = Object.keys(forkedProcesses) <ide> returnedmemCount = 0 <ide> <ide> <ide> function readCerts() { <del> console.log("Checking CA certs" ) <del> console.log("-----------------" ) <del> console.log("" ) <del> console.log("CA Cert 1 = " + caCertificate1) <del> console.log("CA Cert 2 = " + caCertificate2) <del> console.log("CA Cert 3 = " + caCertificate3) <del> console.log("" ) <del> console.log("" ) <add> outputDebug("Checking CA certs" ) <add> outputDebug("-----------------" ) <add> outputDebug("" ) <add> outputDebug("CA Cert 1 = " + caCertificate1) <add> outputDebug("CA Cert 2 = " + 
caCertificate2) <add> outputDebug("CA Cert 3 = " + caCertificate3) <add> outputDebug("" ) <add> outputDebug("" ) <ide> <ide> <ide> let caCertsRet = [] <ide> if (caCertificate1) { <del> console.log("CA Cert 1 = " + caCertificate1) <add> outputDebug("CA Cert 1 = " + caCertificate1) <ide> var fff = fs.readFileSync(caCertificate1, 'utf8') <del> console.log(" = " + fff) <add> outputDebug(" = " + fff) <ide> caCertsRet.push(fff) <ide> } <ide> if (caCertificate2) { <del> console.log("CA Cert 2 = " + caCertificate2) <add> outputDebug("CA Cert 2 = " + caCertificate2) <ide> var fff = fs.readFileSync(caCertificate2, 'utf8') <del> console.log(" = " + fff) <add> outputDebug(" = " + fff) <ide> caCertsRet.push(fff) <ide> } <ide> if (caCertificate3) { <del> console.log("CA Cert 3 = " + caCertificate3) <add> outputDebug("CA Cert 3 = " + caCertificate3) <ide> var fff = fs.readFileSync(caCertificate3, 'utf8') <del> console.log(" = " + fff) <add> outputDebug(" = " + fff) <ide> caCertsRet.push(fff) <ide> } <ide> return caCertsRet
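// A minimal stand-in for the outputDebug wrapper the diff above migrates to,
// assuming a module-level debugOn flag; the real implementation lives
// elsewhere in electron.js and may differ:
//
//     function outputDebug(text) {
//         if (debugOn) { console.log(text) }
//     }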
Java
agpl-3.0
46b50a03652a56e518fbd3eef6e33595996e7d4b
0
a-gogo/agogo,liimaorg/liima,liimaorg/liima,a-gogo/agogo,liimaorg/liima,liimaorg/liima,liimaorg/liima,a-gogo/agogo,liimaorg/liima,a-gogo/agogo,a-gogo/agogo,a-gogo/agogo,liimaorg/liima,liimaorg/liima,a-gogo/agogo
/*
 * AMW - Automated Middleware allows you to manage the configurations of
 * your Java EE applications on an unlimited number of different environments
 * with various versions, including the automated deployment of those apps.
 * Copyright (C) 2013-2016 by Puzzle ITC
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package ch.puzzle.itc.mobiliar.business.resourcegroup.control;

import ch.puzzle.itc.mobiliar.business.auditview.control.AuditService;
import ch.puzzle.itc.mobiliar.business.domain.commons.CommonDomainService;
import ch.puzzle.itc.mobiliar.business.environment.entity.ContextDependency;
import ch.puzzle.itc.mobiliar.business.foreignable.control.ForeignableService;
import ch.puzzle.itc.mobiliar.business.foreignable.entity.ForeignableOwner;
import ch.puzzle.itc.mobiliar.business.foreignable.entity.ForeignableOwnerViolationException;
import ch.puzzle.itc.mobiliar.business.function.entity.AmwFunctionEntity;
import ch.puzzle.itc.mobiliar.business.property.entity.PropertyDescriptorEntity;
import ch.puzzle.itc.mobiliar.business.property.entity.PropertyEntity;
import ch.puzzle.itc.mobiliar.business.property.entity.PropertyTagEntity;
import ch.puzzle.itc.mobiliar.business.property.entity.PropertyTagType;
import ch.puzzle.itc.mobiliar.business.releasing.entity.ReleaseEntity;
import ch.puzzle.itc.mobiliar.business.resourcegroup.boundary.ResourceLocator;
import ch.puzzle.itc.mobiliar.business.resourcegroup.entity.ResourceContextEntity;
import ch.puzzle.itc.mobiliar.business.resourcegroup.entity.ResourceEntity;
import ch.puzzle.itc.mobiliar.business.resourcegroup.entity.ResourceFactory;
import ch.puzzle.itc.mobiliar.business.resourcegroup.entity.ResourceGroupEntity;
import ch.puzzle.itc.mobiliar.business.resourcerelation.entity.AbstractResourceRelationEntity;
import ch.puzzle.itc.mobiliar.business.resourcerelation.entity.ConsumedResourceRelationEntity;
import ch.puzzle.itc.mobiliar.business.resourcerelation.entity.ProvidedResourceRelationEntity;
import ch.puzzle.itc.mobiliar.business.resourcerelation.entity.ResourceRelationContextEntity;
import ch.puzzle.itc.mobiliar.business.softlinkRelation.control.SoftlinkRelationService;
import ch.puzzle.itc.mobiliar.business.softlinkRelation.entity.SoftlinkRelationEntity;
import ch.puzzle.itc.mobiliar.business.template.entity.TemplateDescriptorEntity;
import ch.puzzle.itc.mobiliar.common.exception.AMWException;

import javax.inject.Inject;
import javax.persistence.EntityManager;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * This service is used to copy resources ("templating") and to create new releases.<br/>
 * There are certain differences between creating a copy and creating a release, which is why
 * {@link ch.puzzle.itc.mobiliar.business.resourcegroup.control.CopyResourceDomainService.CopyMode}
 * specifies which of the two variants is to be executed.<br/>
 * <h1>General</h1>
 * <ul>
 * <li>The instances must be of the same type</li>
 * <li>Existing values are overwritten</li>
 * <li>Data on the target resource that does not exist on the origin resource is kept.</li>
 * </ul>
 * <h1>What is copied?</h1>
 * <table>
 * <tr>
 * <th></th>
 * <th>Copy</th>
 * <th>Release</th>
 * </tr>
 * <tr>
 * <td>Properties and their values</td>
 * <td>yes</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Instance properties and their values</td>
 * <td>yes</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Instance templates</td>
 * <td>yes</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Consumed relations</td>
 * <td>yes, except when the slave resource is an application, since applications can only be consumed
 * by an application server</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Provided relations</td>
 * <td>no</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Properties on relations</td>
 * <td>yes</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Templates on relations</td>
 * <td>yes</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>AppServerRelations</td>
 * <td>yes, except when the slave resource is an application, since applications can only be consumed
 * by an application server; the same applies to all AppServerRelations that have an application as
 * their direct or superordinate parent.</td>
 * <td></td>
 * </tr>
 * <tr>
 * <td>Import name</td>
 * <td>no</td>
 * <td>no</td>
 * </tr>
 * <tr>
 * <td>Shakedown tests</td>
 * <td>no</td>
 * <td>no</td>
 * </tr>
 * </table>
 *
 * @author cweber
 */
public class CopyResourceDomainService {

    @Inject
    private EntityManager entityManager;

    @Inject
    CommonDomainService commonDomainService;

    @Inject
    ForeignableService foreignableService;

    @Inject
    SoftlinkRelationService softlinkService;

    @Inject
    ResourceLocator resourceLocator;

    @Inject
    AuditService auditService;

    public enum CopyMode {
        COPY, RELEASE, MAIA_PREDECESSOR
    }

    public CopyResourceResult copyFromOriginToTargetResource(ResourceEntity origin, ResourceEntity target,
            ForeignableOwner actingOwner) throws ForeignableOwnerViolationException, AMWException {
        if (target == null) {
            throw new RuntimeException("Target resource should not be null for copy action");
        } else {
            return doCopyResourceAndSave(new CopyUnit(origin, target, CopyMode.COPY, actingOwner));
        }
    }

    /**
     * @param origin - the resource to create a new release from
     * @param release - the release to create
     */
    public CopyResourceResult createReleaseFromOriginResource(ResourceEntity origin, ReleaseEntity release,
            ForeignableOwner actingOwner) throws ForeignableOwnerViolationException, AMWException {
        ResourceEntity target = commonDomainService.getResourceEntityByGroupAndRelease(origin.getResourceGroup().getId(), release.getId());
        if (target == null) {
            target = ResourceFactory.createNewResourceForOwner(origin.getResourceGroup(), actingOwner);
            target.setRelease(release);
        }
        return doCopyResourceAndSave(new CopyUnit(origin, target, CopyMode.RELEASE, actingOwner));
    }

    public CopyResourceResult copyFromPredecessorToSuccessorResource(ResourceEntity predecessor,
            ResourceEntity successor, ForeignableOwner actingOwner) throws ForeignableOwnerViolationException, AMWException {
        if (successor == null) {
            throw new RuntimeException("Successor resource should not be null for copy predecessor action");
        } else {
            return doCopyResourceAndSave(new CopyUnit(predecessor, successor, CopyMode.MAIA_PREDECESSOR, actingOwner));
        }
    }

    /**
     * Persists the target
     *
     * @return result if copy was successful, contains a list with error messages if copy fails
     */
    protected CopyResourceResult doCopyResourceAndSave(CopyUnit copyUnit) throws
ForeignableOwnerViolationException { int targetHashCodeBeforeChange = copyUnit.getTargetResource() != null ? copyUnit.getTargetResource().foreignableFieldHashCode() : 0; // do copy copyUnit.getOriginResource().getCopy(copyUnit.getTargetResource(), copyUnit); copyConsumedMasterRelations(copyUnit); if (copyUnit.getMode() != CopyMode.MAIA_PREDECESSOR) { copyConsumedSlaveRelations(copyUnit); } copyResourceContexts(copyUnit); copyProvidedMasterRelations(copyUnit); if (copyUnit.getMode() != CopyMode.MAIA_PREDECESSOR) { copyProvidedSlaveRelations(copyUnit); } copyFunctions(copyUnit); copySoftlinkRelation(copyUnit); // do save if (copyUnit.getResult().isSuccess()) { // check if only decorable fields on resource changed when changing owner is different from resource owner foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), targetHashCodeBeforeChange, copyUnit.getTargetResource()); auditService.storeIdInThreadLocalForAuditLog(copyUnit.getTargetResource()); entityManager.persist(copyUnit.getTargetResource()); } copyUnit.getResult().setTargetResource(copyUnit.getTargetResource()); return copyUnit.getResult(); } protected void copyConsumedMasterRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { Set<ConsumedResourceRelationEntity> targetConsumedMasterRel = copyUnit.getTargetResource() .getConsumedMasterRelations() != null ? copyUnit.getTargetResource() .getConsumedMasterRelations() : new HashSet<ConsumedResourceRelationEntity>(); Set<ConsumedResourceRelationEntity> originConsumedMasterRel = copyUnit.getOriginResource() .getConsumedMasterRelations(); copyConsumedResourceRelationEntities(originConsumedMasterRel, targetConsumedMasterRel, copyUnit); copyUnit.getTargetResource().setConsumedMasterRelations(targetConsumedMasterRel); } protected void copyConsumedSlaveRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { Set<ConsumedResourceRelationEntity> targetConsumedSlaveRel = copyUnit.getTargetResource() .getConsumedSlaveRelations() != null ? 
copyUnit.getTargetResource() .getConsumedSlaveRelations() : new HashSet<ConsumedResourceRelationEntity>(); Set<ConsumedResourceRelationEntity> originConsumedSlaveRel = copyUnit.getOriginResource() .getConsumedSlaveRelations(); if (copyUnit.getMode() == CopyMode.RELEASE) { copyConsumedResourceRelationEntities(originConsumedSlaveRel, targetConsumedSlaveRel, copyUnit); copyUnit.getTargetResource().setConsumedSlaveRelations(targetConsumedSlaveRel); } else if (originConsumedSlaveRel != null) { for (ConsumedResourceRelationEntity consumed : originConsumedSlaveRel) { copyUnit.getResult().addSkippedConsumedRelation(copyUnit.getOriginResource().getId(), consumed.getMasterResource().getName(), consumed.getSlaveResource().getName(), consumed.getIdentifier(), consumed.getMasterResource().getResourceType().getName(), consumed.getSlaveResource().getResourceType().getName()); } } } /** * iterate and copy */ protected void copyConsumedResourceRelationEntities(Set<ConsumedResourceRelationEntity> origins, Set<ConsumedResourceRelationEntity> targets, CopyUnit copyUnit) throws ForeignableOwnerViolationException { Map<String, ConsumedResourceRelationEntity> targetMap = new HashMap<>(); // prepare map with identifier as key and list of runtime relations Set<ConsumedResourceRelationEntity> runtimeRelations = new HashSet<>(); for (ConsumedResourceRelationEntity target : targets) { if (target.getSlaveResource() != null) { if (target.getSlaveResource().getResourceType().isRuntimeType()) { runtimeRelations.add(target); } targetMap.put(target.buildIdentifer(), target); } } if (origins != null) { for (ConsumedResourceRelationEntity origin : origins) { // If a runtime already exists and another runtime is copied, we overwrite the previous // TODO: runtime - write test if (shallReplaceRuntime(runtimeRelations, origin)) { for (ConsumedResourceRelationEntity rel : runtimeRelations) { targets.remove(rel); entityManager.remove(rel); } runtimeRelations.clear(); } String key = origin.buildIdentifer(); ConsumedResourceRelationEntity target = targetMap.get(key); int consumedResourceRelationForeignableHashCodeBeforeChange = target != null ? target.foreignableFieldHashCode() : 0; target = origin.getCopy(target, copyUnit); if (target != null) { copyResourceRelationContexts(origin.getContexts(), target, copyUnit); foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), consumedResourceRelationForeignableHashCodeBeforeChange, target); targets.add(target); } } } } private boolean shallReplaceRuntime(Set<ConsumedResourceRelationEntity> originalRelations, ConsumedResourceRelationEntity relationToCheck) { if (originalRelations.isEmpty() || !relationToCheck.getSlaveResource().getResourceType().isRuntimeType()) { return false; } ResourceGroupEntity originalRuntime = originalRelations.iterator().next().getSlaveResource() .getResourceGroup(); ResourceGroupEntity newRuntime = relationToCheck.getSlaveResource().getResourceGroup(); return !originalRuntime.getId().equals(newRuntime.getId()); } protected void copyProvidedMasterRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { Set<ProvidedResourceRelationEntity> targetProvidedResRels = copyUnit.getTargetResource() .getProvidedMasterRelations() != null ? 
copyUnit.getTargetResource() .getProvidedMasterRelations() : new HashSet<ProvidedResourceRelationEntity>(); Set<ProvidedResourceRelationEntity> originProvidedResRels = copyUnit.getOriginResource() .getProvidedMasterRelations(); if (copyUnit.getMode() == CopyMode.RELEASE || copyUnit.getMode() == CopyMode.MAIA_PREDECESSOR) { copyProvidedResourceRelationEntities(originProvidedResRels, targetProvidedResRels, copyUnit); copyUnit.getTargetResource().setProvidedMasterRelations(targetProvidedResRels); } else if (originProvidedResRels != null) { for (ProvidedResourceRelationEntity prov : originProvidedResRels) { copyUnit.getResult().addSkippedProvidedRelation(copyUnit.getOriginResource().getId(), prov.getMasterResource().getName(), prov.getSlaveResource().getName(), prov.getIdentifier(), prov.getMasterResource().getResourceType().getName(), prov.getSlaveResource().getResourceType().getName()); } } } protected void copyProvidedSlaveRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { Set<ProvidedResourceRelationEntity> targetProvidedSlaveRels = copyUnit.getTargetResource() .getProvidedSlaveRelations() != null ? copyUnit.getTargetResource() .getProvidedSlaveRelations() : new HashSet<ProvidedResourceRelationEntity>(); Set<ProvidedResourceRelationEntity> originProvidedSlaveRels = copyUnit.getOriginResource() .getProvidedSlaveRelations(); if (copyUnit.getMode() == CopyMode.RELEASE) { copyProvidedResourceRelationEntities(originProvidedSlaveRels, targetProvidedSlaveRels, copyUnit); copyUnit.getTargetResource().setProvidedSlaveRelations(targetProvidedSlaveRels); } else if (originProvidedSlaveRels != null) { for (ProvidedResourceRelationEntity prov : originProvidedSlaveRels) { copyUnit.getResult().addSkippedProvidedRelation(copyUnit.getOriginResource().getId(), prov.getMasterResource().getName(), prov.getSlaveResource().getName(), prov.getIdentifier(), prov.getMasterResource().getResourceType().getName(), prov.getSlaveResource().getResourceType().getName()); } } } protected SoftlinkRelationEntity copySoftlinkRelation(CopyUnit copyUnit) throws ForeignableOwnerViolationException { SoftlinkRelationEntity originSoftlink = copyUnit.getOriginResource().getSoftlinkRelation(); SoftlinkRelationEntity targetSoftlink = copyUnit.getTargetResource().getSoftlinkRelation(); if (originSoftlink != null) { int softlinkRelationForeignableHashCodeBeforeChange = targetSoftlink != null ? targetSoftlink.foreignableFieldHashCode() : 0; targetSoftlink = originSoftlink.getCopy(targetSoftlink, copyUnit); foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), softlinkRelationForeignableHashCodeBeforeChange, targetSoftlink); softlinkService.setSoftlinkRelation(copyUnit.getTargetResource(), targetSoftlink); } return targetSoftlink; } /** * iterate and copy */ protected void copyProvidedResourceRelationEntities(Set<ProvidedResourceRelationEntity> origins, Set<ProvidedResourceRelationEntity> targets, CopyUnit copyUnit) throws ForeignableOwnerViolationException { Map<String, ProvidedResourceRelationEntity> targetMap = new HashMap<>(); // prepare map with identifier as key for (ProvidedResourceRelationEntity target : targets) { targetMap.put(target.buildIdentifer(), target); } if (origins != null) { for (ProvidedResourceRelationEntity origin : origins) { String key = origin.buildIdentifer(); ProvidedResourceRelationEntity target = targetMap.get(origin.buildIdentifer()); int providedResourceRelationForeignableHashCodeBeforeChange = target != null ? 
target.foreignableFieldHashCode() : 0; target = origin.getCopy(target, copyUnit); if (target != null) { copyResourceRelationContexts(origin.getContexts(), target, copyUnit); foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), providedResourceRelationForeignableHashCodeBeforeChange, target); if (!targetMap.containsKey(key)) { targets.add(target); } } } } } /** * iterate and copy */ protected void copyResourceContexts(CopyUnit copyUnit) throws ForeignableOwnerViolationException { Set<ResourceContextEntity> targets = copyUnit.getTargetResource().getContexts(); Set<ResourceContextEntity> origins = copyUnit.getOriginResource().getContexts(); // prepare map with contextId as key Map<Integer, ResourceContextEntity> targetsMap = new HashMap<>(); if (targets != null) { for (ResourceContextEntity target : targets) { targetsMap.put(target.getContext().getId(), target); } } if (origins != null) { // 1. copy descriptors for all contexts Map<String, PropertyDescriptorEntity> allPropertyDescriptorsMap = new HashMap<>(); for (ResourceContextEntity origin : origins) { Integer key = origin.getContext().getId(); ResourceContextEntity target = targetsMap.containsKey(key) ? targetsMap.get(key) : new ResourceContextEntity(); allPropertyDescriptorsMap.putAll(copyPropertyDescriptors(origin.getPropertyDescriptors(), target.getPropertyDescriptors(), target, copyUnit)); if (!targetsMap.containsKey(key)) { copyUnit.getTargetResource().addContext(target); } targetsMap.put(key, target); } // 2. copy context with properties for (ResourceContextEntity origin : origins) { Integer key = origin.getContext().getId(); ResourceContextEntity target = targetsMap.get(key); copyContextDependency(origin, target, copyUnit, allPropertyDescriptorsMap); target.setContextualizedObject(copyUnit.getTargetResource()); } } } /** * iterate and copy */ protected void copyResourceRelationContexts(Set<ResourceRelationContextEntity> origins, AbstractResourceRelationEntity targetResRel, CopyUnit copyUnit) throws ForeignableOwnerViolationException { // prepare map with contextId as key Set<ResourceRelationContextEntity> targets = targetResRel.getContexts(); Map<Integer, ResourceRelationContextEntity> targetsMap = new HashMap<>(); if (targets != null) { for (ResourceRelationContextEntity target : targets) { targetsMap.put(target.getContext().getId(), target); } } if (origins != null) { // 1. copy descriptors for all contexts Map<String, PropertyDescriptorEntity> allPropertyDescriptorsMap = new HashMap<>(); for (ResourceRelationContextEntity origin : origins) { Integer key = origin.getContext().getId(); ResourceRelationContextEntity target = targetsMap.containsKey(key) ? 
targetsMap.get(key) : new ResourceRelationContextEntity(); allPropertyDescriptorsMap.putAll(copyPropertyDescriptors(origin.getPropertyDescriptors(), target.getPropertyDescriptors(), target, copyUnit)); if (!targetsMap.containsKey(key)) { targetResRel.addContext(target); } targetsMap.put(key, target); } if (copyUnit.getMode() == CopyMode.MAIA_PREDECESSOR && targetResRel.getSlaveResource() != null && (resourceLocator.hasResourceConsumableSoftlinkType(targetResRel.getSlaveResource()) || resourceLocator .hasResourceProvidableSoftlinkType(targetResRel.getSlaveResource()))) { // propertyValue from relations has to be copied if PropertyDescriptor exists on target (successor) for (ResourceContextEntity resourceContextEntity : copyUnit.getTargetResource().getContexts()) { for (PropertyDescriptorEntity propertyDescriptorEntity : resourceContextEntity.getPropertyDescriptors()) { String key = createDescriptorKey(propertyDescriptorEntity); allPropertyDescriptorsMap.put(key, propertyDescriptorEntity); } } // add PropertyDescriptor from ProvidedMasterRelations for (ProvidedResourceRelationEntity providedResourceRelationEntity : copyUnit.getTargetResource().getProvidedMasterRelations()) { addRelationPropertyDescriptors(allPropertyDescriptorsMap, providedResourceRelationEntity); } // add PropertyDescriptor from ConsumedMasterRelations for (ConsumedResourceRelationEntity consumedResourceRelationEntity : copyUnit.getTargetResource().getConsumedMasterRelations()) { addRelationPropertyDescriptors(allPropertyDescriptorsMap, consumedResourceRelationEntity); } } // do copy for all contexts for (ResourceRelationContextEntity origin : origins) { Integer key = origin.getContext().getId(); ResourceRelationContextEntity target = targetsMap.containsKey(key) ? targetsMap.get(key) : new ResourceRelationContextEntity(); copyContextDependency(origin, target, copyUnit, allPropertyDescriptorsMap); target.setContextualizedObject(targetResRel); } } } private <T extends AbstractResourceRelationEntity> void addRelationPropertyDescriptors(Map<String, PropertyDescriptorEntity> allPropertyDescriptorsMap, T relationEntity) { for (ResourceContextEntity resourceContextEntity : relationEntity.getSlaveResource().getContexts()) { if (resourceContextEntity.getPropertyDescriptors() != null) { for (PropertyDescriptorEntity propertyDescriptorEntity : resourceContextEntity.getPropertyDescriptors()) { String key = createDescriptorKey(propertyDescriptorEntity); allPropertyDescriptorsMap.put(key, propertyDescriptorEntity); } } } } /** * iterate and copy */ protected Map<String, PropertyDescriptorEntity> copyPropertyDescriptors( Set<PropertyDescriptorEntity> origins, Set<PropertyDescriptorEntity> targets, ContextDependency<?> targetContextDependency, CopyUnit copyUnit) throws ForeignableOwnerViolationException { // prepare map with propertyName and isTesting as key Map<String, PropertyDescriptorEntity> targetsMap = new HashMap<>(); if (targets != null) { for (PropertyDescriptorEntity target : targets) { String key = createDescriptorKey(target); targetsMap.put(key, target); } } if (origins != null) { for (PropertyDescriptorEntity origin : origins) { String key = createDescriptorKey(origin); PropertyDescriptorEntity targetDescriptor = targetsMap.get(key); // Predecessor Mode only copy AMW Owned Elements if (CopyMode.MAIA_PREDECESSOR.equals(copyUnit.getMode())) { if (ForeignableOwner.AMW.equals(origin.getOwner())) { copyPropertyDescriptor(targetContextDependency, copyUnit, targetsMap, origin, key, targetDescriptor); } } else { 
copyPropertyDescriptor(targetContextDependency, copyUnit, targetsMap, origin, key, targetDescriptor);
                }
            }
        }
        return targetsMap;
    }

    private void copyPropertyDescriptor(ContextDependency<?> targetContextDependency, CopyUnit copyUnit,
            Map<String, PropertyDescriptorEntity> targetsMap, PropertyDescriptorEntity origin, String key,
            PropertyDescriptorEntity targetDescriptor) throws ForeignableOwnerViolationException {
        int propertyDescriptorForeignableHashCodeBeforeChange = targetDescriptor != null ? targetDescriptor.foreignableFieldHashCode() : 0;
        PropertyDescriptorEntity target = origin.getCopy(targetDescriptor, copyUnit);
        copyTags(origin, target);
        foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), propertyDescriptorForeignableHashCodeBeforeChange, target);
        if (!targetsMap.containsKey(key)) {
            targetContextDependency.addPropertyDescriptor(target);
        }
        targetsMap.put(key, target);
    }

    protected String createDescriptorKey(PropertyDescriptorEntity desc) {
        return desc.getPropertyName() + "_" + String.valueOf(desc.isTesting());
    }

    protected void copyTags(PropertyDescriptorEntity origin, PropertyDescriptorEntity target) {
        Set<String> tagNames = new HashSet<>();
        for (PropertyTagEntity targetTag : target.getPropertyTags()) {
            tagNames.add(targetTag.getName());
        }
        for (PropertyTagEntity originTag : origin.getPropertyTags()) {
            if (!tagNames.contains(originTag.getName())) {
                PropertyTagEntity copy = new PropertyTagEntity();
                copy.setName(originTag.getName());
                copy.setTagType(PropertyTagType.LOCAL);
                target.addPropertyTag(copy);
            }
        }
    }

    /**
     * Copies values from origin to target contextDependency (with properties and templates).
     */
    protected ContextDependency<?> copyContextDependency(ContextDependency<?> origin, ContextDependency<?> target,
            CopyUnit copyUnit, Map<String, PropertyDescriptorEntity> allTargetDescriptors) {
        // context
        target.setContext(origin.getContext());

        // properties
        Set<PropertyEntity> properties = copyProperties(origin.getProperties(), allTargetDescriptors,
                target.getProperties(), copyUnit);
        for (PropertyEntity property : properties) {
            target.addProperty(property);
        }

        // templates
        Set<TemplateDescriptorEntity> templates = copyTemplates(origin.getTemplates(), target.getTemplates(),
                copyUnit);
        for (TemplateDescriptorEntity template : templates) {
            target.addTemplate(template);
        }
        return target;
    }

    /**
     * <ul>
     * <li>The identifier between the target propertyDescriptor and the origin propertyDescriptor is the
     * propertyName (= technicalKey)</li>
     * <li>If a propertyDescriptor of the target resource already has a propertyValue, this value will not be
     * overwritten.</li>
     * </ul>
     *
     * @param origins all properties of the origin resource for one context
     * @param targetPropDescriptorMap map with all propertyDescriptors of the targetResource (after copy), with the propertyName (= technicalKey) as key
     * @param targetProperties all properties of the target resource for one context
     */
    protected Set<PropertyEntity> copyProperties(Set<PropertyEntity> origins,
            Map<String, PropertyDescriptorEntity> targetPropDescriptorMap, Set<PropertyEntity> targetProperties,
            CopyUnit copyUnit) {
        Map<Integer, PropertyEntity> existingPropertiesByDescriptorId = new HashMap<>();
        if (targetProperties != null) {
            for (PropertyEntity existingProperty : targetProperties) {
                if (existingProperty.getDescriptor() != null && existingProperty.getDescriptor().getId() != null) {
                    existingPropertiesByDescriptorId.put(existingProperty.getDescriptor().getId(), existingProperty);
                }
            }
        }
        Set<PropertyEntity> targets = new HashSet<>();
        if (origins != null) {
            for (PropertyEntity origin : origins) {
                // If a property exists on this context for the same descriptor, we define it as the
                // target property...
                PropertyEntity targetProperty = existingPropertiesByDescriptorId.get(origin.getDescriptor().getId());
                PropertyDescriptorEntity targetDescriptor = null;
                if (targetProperty == null) {
                    // If it can't be found, it's possible that we have copied the target descriptor.
                    // Let's look for it.
                    String key = createDescriptorKey(origin.getDescriptor());
                    targetDescriptor = targetPropDescriptorMap.get(key);
                    if (targetDescriptor != null) {
                        // If a property is already defined for the existing descriptor, we update this
                        // value...
                        targetProperty = existingPropertiesByDescriptorId.get(targetDescriptor.getId());
                    }
                }
                if (CopyMode.MAIA_PREDECESSOR == copyUnit.getMode() && targetDescriptor == null) {
                    // do not add a property for a null descriptor in predecessor mode
                } else {
                    if (targetProperty == null) {
                        // If no property for the found property descriptor exists, we create a new one...
                        PropertyEntity target = origin.getCopy(null, copyUnit);
                        // targetDescriptor is null for properties on resource types or relations
                        if (targetDescriptor != null) {
                            target.setDescriptor(targetDescriptor);
                        }
                        targets.add(target);
                    } else {
                        // otherwise, we merge the new value with the old property entity
                        targets.add(mergePropertyEntity(origin, targetProperty));
                    }
                }
            }
        }
        return targets;
    }

    /**
     * Merges the value of the original property into the target property.
     */
    protected PropertyEntity mergePropertyEntity(PropertyEntity origin, PropertyEntity target) {
        target.setValue(origin.getValue());
        return target;
    }

    /**
     * Existing templates in the target will be overwritten! <br/>
     * All templates will be copied in {@link CopyMode#MAIA_PREDECESSOR} mode.
     */
    protected Set<TemplateDescriptorEntity> copyTemplates(Set<TemplateDescriptorEntity> origins,
            Set<TemplateDescriptorEntity> targets, CopyUnit copyUnit) {
        Map<String, TemplateDescriptorEntity> targetTemplatesMap = new HashMap<>();
        if (targets != null) {
            for (TemplateDescriptorEntity t : targets) {
                String key = t.getName() + String.valueOf(t.isTesting());
                targetTemplatesMap.put(key, t);
            }
        }
        if (origins != null) {
            for (TemplateDescriptorEntity origin : origins) {
                String key = origin.getName() + String.valueOf(origin.isTesting());
                targetTemplatesMap.put(key, origin.getCopy(targetTemplatesMap.get(key), copyUnit));
            }
        }
        return new HashSet<>(targetTemplatesMap.values());
    }

    /**
     * Existing functions in target won't be overwritten.
<br/> * Functions are always owned by {@link ForeignableOwner#AMW}, all functions will be copied in {@link CopyMode#MAIA_PREDECESSOR}. */ protected void copyFunctions(CopyUnit copyUnit) { Set<String> targetFunctions = new HashSet<>(); for (AmwFunctionEntity targetFct : copyUnit.getTargetResource().getFunctions()) { targetFunctions.add(targetFct.getName()); } for (AmwFunctionEntity origFct : copyUnit.getOriginResource().getFunctions()) { if (!targetFunctions.contains(origFct.getName())) { copyUnit.getTargetResource().addFunction(origFct.getCopy(null, copyUnit)); } } } }
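/*
 * A minimal usage sketch, not part of the original class: per the semantics
 * documented above, a RELEASE copy carries provided relations over, while a
 * plain COPY skips them. The injected service and the entity lookups are
 * assumed to exist in the caller's context.
 *
 *     CopyResourceDomainService copyService = ...;  // @Inject in a CDI/EJB bean
 *     ResourceEntity origin = ...;                  // resource to release
 *     ReleaseEntity nextRelease = ...;              // release to create
 *     CopyResourceResult result = copyService
 *             .createReleaseFromOriginResource(origin, nextRelease, ForeignableOwner.AMW);
 *     if (!result.isSuccess()) {
 *         // the result carries the error messages collected during the copy
 *     }
 */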
AMW_business/src/main/java/ch/puzzle/itc/mobiliar/business/resourcegroup/control/CopyResourceDomainService.java
/*
 * AMW - Automated Middleware allows you to manage the configurations of
 * your Java EE applications on an unlimited number of different environments
 * with various versions, including the automated deployment of those apps.
 * Copyright (C) 2013-2016 by Puzzle ITC
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package ch.puzzle.itc.mobiliar.business.resourcegroup.control;

import ch.puzzle.itc.mobiliar.business.domain.commons.CommonDomainService;
import ch.puzzle.itc.mobiliar.business.environment.entity.ContextDependency;
import ch.puzzle.itc.mobiliar.business.foreignable.control.ForeignableService;
import ch.puzzle.itc.mobiliar.business.foreignable.entity.ForeignableOwner;
import ch.puzzle.itc.mobiliar.business.foreignable.entity.ForeignableOwnerViolationException;
import ch.puzzle.itc.mobiliar.business.function.entity.AmwFunctionEntity;
import ch.puzzle.itc.mobiliar.business.property.control.PropertyTagEditingService;
import ch.puzzle.itc.mobiliar.business.property.entity.PropertyDescriptorEntity;
import ch.puzzle.itc.mobiliar.business.property.entity.PropertyEntity;
import ch.puzzle.itc.mobiliar.business.property.entity.PropertyTagEntity;
import ch.puzzle.itc.mobiliar.business.property.entity.PropertyTagType;
import ch.puzzle.itc.mobiliar.business.releasing.entity.ReleaseEntity;
import ch.puzzle.itc.mobiliar.business.resourcegroup.boundary.ResourceLocator;
import ch.puzzle.itc.mobiliar.business.resourcegroup.entity.ResourceContextEntity;
import ch.puzzle.itc.mobiliar.business.resourcegroup.entity.ResourceEntity;
import ch.puzzle.itc.mobiliar.business.resourcegroup.entity.ResourceFactory;
import ch.puzzle.itc.mobiliar.business.resourcegroup.entity.ResourceGroupEntity;
import ch.puzzle.itc.mobiliar.business.resourcerelation.entity.AbstractResourceRelationEntity;
import ch.puzzle.itc.mobiliar.business.resourcerelation.entity.ConsumedResourceRelationEntity;
import ch.puzzle.itc.mobiliar.business.resourcerelation.entity.ProvidedResourceRelationEntity;
import ch.puzzle.itc.mobiliar.business.resourcerelation.entity.ResourceRelationContextEntity;
import ch.puzzle.itc.mobiliar.business.softlinkRelation.control.SoftlinkRelationService;
import ch.puzzle.itc.mobiliar.business.softlinkRelation.entity.SoftlinkRelationEntity;
import ch.puzzle.itc.mobiliar.business.template.entity.TemplateDescriptorEntity;
import ch.puzzle.itc.mobiliar.common.exception.AMWException;

import javax.inject.Inject;
import javax.persistence.EntityManager;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * This service is used to copy resources ("templating") and to create new releases.<br/>
 * There are certain differences between creating a copy and creating a release, which is why
 * {@link ch.puzzle.itc.mobiliar.business.resourcegroup.control.CopyResourceDomainService.CopyMode}
 * specifies which of the two variants is to be executed.<br/>
 * <h1>General</h1>
 * <ul>
 * <li>The instances must be of the same type</li>
 * <li>Existing values are overwritten</li>
 * <li>Data on the target resource that does not exist on the origin resource is kept.</li>
 * </ul>
 * <h1>What is copied?</h1>
 * <table>
 * <tr>
 * <th></th>
 * <th>Copy</th>
 * <th>Release</th>
 * </tr>
 * <tr>
 * <td>Properties and their values</td>
 * <td>yes</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Instance properties and their values</td>
 * <td>yes</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Instance templates</td>
 * <td>yes</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Consumed relations</td>
 * <td>yes, except when the slave resource is an application, since applications can only be consumed
 * by an application server</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Provided relations</td>
 * <td>no</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Properties on relations</td>
 * <td>yes</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>Templates on relations</td>
 * <td>yes</td>
 * <td>yes</td>
 * </tr>
 * <tr>
 * <td>AppServerRelations</td>
 * <td>yes, except when the slave resource is an application, since applications can only be consumed
 * by an application server; the same applies to all AppServerRelations that have an application as
 * their direct or superordinate parent.</td>
 * <td></td>
 * </tr>
 * <tr>
 * <td>Import name</td>
 * <td>no</td>
 * <td>no</td>
 * </tr>
 * <tr>
 * <td>Shakedown tests</td>
 * <td>no</td>
 * <td>no</td>
 * </tr>
 * </table>
 *
 * @author cweber
 */
public class CopyResourceDomainService {

    @Inject
    private EntityManager entityManager;

    @Inject
    PropertyTagEditingService propertyTagEditingService;

    @Inject
    CommonDomainService commonDomainService;

    @Inject
    ForeignableService foreignableService;

    @Inject
    SoftlinkRelationService softlinkService;

    @Inject
    ResourceLocator resourceLocator;

    public enum CopyMode {
        COPY, RELEASE, MAIA_PREDECESSOR;
    }

    /**
     * @param origin
     * @param target
     * @param actingOwner
     * @return
     * @throws ForeignableOwnerViolationException
     * @throws AMWException
     */
    public CopyResourceResult copyFromOriginToTargetResource(ResourceEntity origin, ResourceEntity target,
            ForeignableOwner actingOwner) throws ForeignableOwnerViolationException, AMWException {
        if (target == null) {
            throw new RuntimeException("Target resource should not be null for copy action");
        } else {
            return doCopyResourceAndSave(new CopyUnit(origin, target, CopyMode.COPY, actingOwner));
        }
    }

    /**
     * @param origin
     *             - the resource to create a new release from
     * @param release
     *             - the release to create
     * @return
     */
    public CopyResourceResult createReleaseFromOriginResource(ResourceEntity origin, ReleaseEntity release,
            ForeignableOwner actingOwner) throws ForeignableOwnerViolationException, AMWException {
        ResourceEntity target = commonDomainService.getResourceEntityByGroupAndRelease(origin
                .getResourceGroup().getId(), release.getId());
        if (target == null) {
            target = ResourceFactory.createNewResourceForOwner(origin.getResourceGroup(), actingOwner);
            target.setRelease(release);
        }
        return doCopyResourceAndSave(new CopyUnit(origin, target, CopyMode.RELEASE, actingOwner));
    }

    public CopyResourceResult copyFromPredecessorToSuccessorResource(ResourceEntity predecessor,
            ResourceEntity successor, ForeignableOwner actingOwner) throws ForeignableOwnerViolationException, AMWException {
        if (successor == null) {
            throw new RuntimeException("Successor resource should not be null for copy predecessor action");
        } else {
            return doCopyResourceAndSave(new CopyUnit(predecessor, successor, CopyMode.MAIA_PREDECESSOR, actingOwner));
        }
    }

    /**
* @param copyUnit * @return result if copy was successful, contains a list with error messages if copy fails */ protected CopyResourceResult doCopyResourceAndSave(CopyUnit copyUnit) throws ForeignableOwnerViolationException { int targetHashCodeBeforeChange = copyUnit.getTargetResource() != null ? copyUnit.getTargetResource().foreignableFieldHashCode() : 0; // do copy copyUnit.getOriginResource().getCopy(copyUnit.getTargetResource(), copyUnit); copyConsumedMasterRelations(copyUnit); if(copyUnit.getMode() != CopyMode.MAIA_PREDECESSOR){ copyConsumedSlaveRelations(copyUnit); } copyResourceContexts(copyUnit); copyProvidedMasterRelations(copyUnit); if(copyUnit.getMode() != CopyMode.MAIA_PREDECESSOR) { copyProvidedSlaveRelations(copyUnit); } copyFunctions(copyUnit); copySoftlinkRelation(copyUnit); // do save if (copyUnit.getResult().isSuccess()) { // check if only decorable fields on resource changed when changing owner is different from resource owner foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), targetHashCodeBeforeChange, copyUnit.getTargetResource()); entityManager.persist(copyUnit.getTargetResource()); } copyUnit.getResult().setTargetResource(copyUnit.getTargetResource()); return copyUnit.getResult(); } protected void copyConsumedMasterRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { Set<ConsumedResourceRelationEntity> targetConsumedMasterRel = copyUnit.getTargetResource() .getConsumedMasterRelations() != null ? copyUnit.getTargetResource() .getConsumedMasterRelations() : new HashSet<ConsumedResourceRelationEntity>(); Set<ConsumedResourceRelationEntity> originConsumedMasterRel = copyUnit.getOriginResource() .getConsumedMasterRelations(); copyConsumedResourceRelationEntities(originConsumedMasterRel, targetConsumedMasterRel, copyUnit); copyUnit.getTargetResource().setConsumedMasterRelations(targetConsumedMasterRel); } protected void copyConsumedSlaveRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { Set<ConsumedResourceRelationEntity> targetConsumedSlaveRel = copyUnit.getTargetResource() .getConsumedSlaveRelations() != null ? 
copyUnit.getTargetResource() .getConsumedSlaveRelations() : new HashSet<ConsumedResourceRelationEntity>(); Set<ConsumedResourceRelationEntity> originConsumedSlaveRel = copyUnit.getOriginResource() .getConsumedSlaveRelations(); if (copyUnit.getMode() == CopyMode.RELEASE) { copyConsumedResourceRelationEntities(originConsumedSlaveRel, targetConsumedSlaveRel, copyUnit); copyUnit.getTargetResource().setConsumedSlaveRelations(targetConsumedSlaveRel); } else if (originConsumedSlaveRel != null) { for (ConsumedResourceRelationEntity consumed : originConsumedSlaveRel) { copyUnit.getResult().addSkippedConsumedRelation(copyUnit.getOriginResource().getId(), consumed.getMasterResource().getName(), consumed.getSlaveResource().getName(), consumed.getIdentifier(), consumed.getMasterResource().getResourceType().getName(), consumed.getSlaveResource().getResourceType().getName()); } } } /** * iterate and copy * * @param copyUnit */ protected void copyConsumedResourceRelationEntities(Set<ConsumedResourceRelationEntity> origins, Set<ConsumedResourceRelationEntity> targets, CopyUnit copyUnit) throws ForeignableOwnerViolationException { Map<String, ConsumedResourceRelationEntity> targetMap = new HashMap<>(); // prepare map with identifier as key and list of runtime relations Set<ConsumedResourceRelationEntity> runtimeRelations = new HashSet<>(); for (ConsumedResourceRelationEntity target : targets) { if (target.getSlaveResource() != null) { if (target.getSlaveResource().getResourceType().isRuntimeType()) { runtimeRelations.add(target); } targetMap.put(target.buildIdentifer(), target); } } if (origins != null) { for (ConsumedResourceRelationEntity origin : origins) { // If a runtime already exists and another runtime is copied, we overwrite the previous // TODO: runtime - write test if (shallReplaceRuntime(runtimeRelations, origin)) { for (ConsumedResourceRelationEntity rel : runtimeRelations) { targets.remove(rel); entityManager.remove(rel); } runtimeRelations.clear(); } String key = origin.buildIdentifer(); ConsumedResourceRelationEntity target = targetMap.get(key); int consumedResourceRelationForeignableHashCodeBeforeChange = target != null ? target.foreignableFieldHashCode() : 0; target = origin.getCopy(target, copyUnit); if(target != null) { copyResourceRelationContexts(origin.getContexts(), target, copyUnit); foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), consumedResourceRelationForeignableHashCodeBeforeChange, target); targets.add(target); } } } } private boolean shallReplaceRuntime(Set<ConsumedResourceRelationEntity> originalRelations, ConsumedResourceRelationEntity relationToCheck) { if (originalRelations.isEmpty() || !relationToCheck.getSlaveResource().getResourceType().isRuntimeType()) { return false; } ResourceGroupEntity originalRuntime = originalRelations.iterator().next().getSlaveResource() .getResourceGroup(); ResourceGroupEntity newRuntime = relationToCheck.getSlaveResource().getResourceGroup(); return !originalRuntime.getId().equals(newRuntime.getId()); } protected void copyProvidedMasterRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { Set<ProvidedResourceRelationEntity> targetProvidedResRels = copyUnit.getTargetResource() .getProvidedMasterRelations() != null ? 
copyUnit.getTargetResource() .getProvidedMasterRelations() : new HashSet<ProvidedResourceRelationEntity>(); Set<ProvidedResourceRelationEntity> originProvidedResRels = copyUnit.getOriginResource() .getProvidedMasterRelations(); if (copyUnit.getMode() == CopyMode.RELEASE || copyUnit.getMode() == CopyMode.MAIA_PREDECESSOR) { copyProvidedResourceRelationEntities(originProvidedResRels, targetProvidedResRels, copyUnit); copyUnit.getTargetResource().setProvidedMasterRelations(targetProvidedResRels); } else if (originProvidedResRels != null) { for (ProvidedResourceRelationEntity prov : originProvidedResRels) { copyUnit.getResult().addSkippedProvidedRelation(copyUnit.getOriginResource().getId(), prov.getMasterResource().getName(), prov.getSlaveResource().getName(), prov.getIdentifier(), prov.getMasterResource().getResourceType().getName(), prov.getSlaveResource().getResourceType().getName()); } } } protected void copyProvidedSlaveRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { Set<ProvidedResourceRelationEntity> targetProvidedSlaveRels = copyUnit.getTargetResource() .getProvidedSlaveRelations() != null ? copyUnit.getTargetResource() .getProvidedSlaveRelations() : new HashSet<ProvidedResourceRelationEntity>(); Set<ProvidedResourceRelationEntity> originProvidedSlaveRels = copyUnit.getOriginResource() .getProvidedSlaveRelations(); if (copyUnit.getMode() == CopyMode.RELEASE) { copyProvidedResourceRelationEntities(originProvidedSlaveRels, targetProvidedSlaveRels, copyUnit); copyUnit.getTargetResource().setProvidedSlaveRelations(targetProvidedSlaveRels); } else if (originProvidedSlaveRels != null) { for (ProvidedResourceRelationEntity prov : originProvidedSlaveRels) { copyUnit.getResult().addSkippedProvidedRelation(copyUnit.getOriginResource().getId(), prov.getMasterResource().getName(), prov.getSlaveResource().getName(), prov.getIdentifier(), prov.getMasterResource().getResourceType().getName(), prov.getSlaveResource().getResourceType().getName()); } } } protected SoftlinkRelationEntity copySoftlinkRelation(CopyUnit copyUnit) throws ForeignableOwnerViolationException { SoftlinkRelationEntity originSoftlink = copyUnit.getOriginResource().getSoftlinkRelation(); SoftlinkRelationEntity targetSoftlink = copyUnit.getTargetResource().getSoftlinkRelation(); if (originSoftlink != null) { int softlinkRelationForeignableHashCodeBeforeChange = targetSoftlink != null ? targetSoftlink .foreignableFieldHashCode() : 0; targetSoftlink = originSoftlink.getCopy(targetSoftlink, copyUnit); foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), softlinkRelationForeignableHashCodeBeforeChange, targetSoftlink); softlinkService.setSoftlinkRelation(copyUnit.getTargetResource(), targetSoftlink); } return targetSoftlink; } /** * iterate and copy * * @param origins * @param targets * @param copyUnit */ protected void copyProvidedResourceRelationEntities(Set<ProvidedResourceRelationEntity> origins, Set<ProvidedResourceRelationEntity> targets, CopyUnit copyUnit) throws ForeignableOwnerViolationException { Map<String, ProvidedResourceRelationEntity> targetMap = new HashMap<>(); // prepare map with identifier as key for (ProvidedResourceRelationEntity target : targets) { targetMap.put(target.buildIdentifer(), target); } if (origins != null) { for (ProvidedResourceRelationEntity origin : origins) { String key = origin.buildIdentifer(); ProvidedResourceRelationEntity target = targetMap.get(origin.buildIdentifer()); int providedResourceRelationForeignableHashCodeBeforeChange = target != null ? 
target.foreignableFieldHashCode() : 0;
                target = origin.getCopy(target, copyUnit);

                if (target != null) {
                    copyResourceRelationContexts(origin.getContexts(), target, copyUnit);
                    foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), providedResourceRelationForeignableHashCodeBeforeChange, target);

                    if (!targetMap.containsKey(key)) {
                        targets.add(target);
                    }
                }
            }
        }
    }

    /**
     * Iterates over the origin resource's contexts and copies their property descriptors,
     * properties and templates onto the matching target contexts.
     *
     * @param copyUnit the current copy unit
     */
    protected void copyResourceContexts(CopyUnit copyUnit) throws ForeignableOwnerViolationException {
        Set<ResourceContextEntity> targets = copyUnit.getTargetResource().getContexts();
        Set<ResourceContextEntity> origins = copyUnit.getOriginResource().getContexts();

        // prepare map with contextId as key
        Map<Integer, ResourceContextEntity> targetsMap = new HashMap<>();
        if (targets != null) {
            for (ResourceContextEntity target : targets) {
                targetsMap.put(target.getContext().getId(), target);
            }
        }
        if (origins != null) {
            // 1. copy descriptors for all contexts
            Map<String, PropertyDescriptorEntity> allPropertyDescriptorsMap = new HashMap<>();
            for (ResourceContextEntity origin : origins) {
                Integer key = origin.getContext().getId();
                ResourceContextEntity target = targetsMap.containsKey(key) ? targetsMap.get(key)
                        : new ResourceContextEntity();
                allPropertyDescriptorsMap.putAll(copyPropertyDescriptors(origin.getPropertyDescriptors(),
                        target.getPropertyDescriptors(), target, copyUnit));
                if (!targetsMap.containsKey(key)) {
                    copyUnit.getTargetResource().addContext(target);
                }
                targetsMap.put(key, target);
            }

            // 2. copy context with properties
            for (ResourceContextEntity origin : origins) {
                Integer key = origin.getContext().getId();
                ResourceContextEntity target = targetsMap.get(key);
                copyContextDependency(origin, target, copyUnit, allPropertyDescriptorsMap);
                target.setContextualizedObject(copyUnit.getTargetResource());
            }
        }
    }

    /**
     * Iterates over the origin relation contexts and copies their property descriptors,
     * properties and templates onto the matching contexts of the target relation.
     *
     * @param origins the relation contexts of the origin relation
     * @param targetResRel the relation whose contexts are the copy target
     */
    protected void copyResourceRelationContexts(Set<ResourceRelationContextEntity> origins,
            AbstractResourceRelationEntity targetResRel, CopyUnit copyUnit) throws ForeignableOwnerViolationException {
        // prepare map with contextId as key
        Set<ResourceRelationContextEntity> targets = targetResRel.getContexts();
        Map<Integer, ResourceRelationContextEntity> targetsMap = new HashMap<>();
        if (targets != null) {
            for (ResourceRelationContextEntity target : targets) {
                targetsMap.put(target.getContext().getId(), target);
            }
        }
        if (origins != null) {
            // 1. copy descriptors for all contexts
            Map<String, PropertyDescriptorEntity> allPropertyDescriptorsMap = new HashMap<>();
            for (ResourceRelationContextEntity origin : origins) {
                Integer key = origin.getContext().getId();
                ResourceRelationContextEntity target = targetsMap.containsKey(key) ?
targetsMap.get(key) : new ResourceRelationContextEntity(); allPropertyDescriptorsMap.putAll(copyPropertyDescriptors(origin.getPropertyDescriptors(), target.getPropertyDescriptors(), target, copyUnit)); if (!targetsMap.containsKey(key)) { targetResRel.addContext(target); } targetsMap.put(key, target); } if(copyUnit.getMode() == CopyMode.MAIA_PREDECESSOR && targetResRel.getSlaveResource() != null && (resourceLocator.hasResourceConsumableSoftlinkType(targetResRel.getSlaveResource()) || resourceLocator .hasResourceProvidableSoftlinkType(targetResRel.getSlaveResource()))){ // propertyValue from relations has to be copied if PropertyDescriptor exists on target (successor) for (ResourceContextEntity resourceContextEntity : copyUnit.getTargetResource().getContexts()) { for (PropertyDescriptorEntity propertyDescriptorEntity : resourceContextEntity.getPropertyDescriptors()) { String key = createDescriptorKey(propertyDescriptorEntity); allPropertyDescriptorsMap.put(key, propertyDescriptorEntity); } } // add PropertyDescriptor from ProvidedMasterRelations for (ProvidedResourceRelationEntity providedResourceRelationEntity : copyUnit.getTargetResource().getProvidedMasterRelations()) { addRelationPropertyDescriptors(allPropertyDescriptorsMap, providedResourceRelationEntity); } // add PropertyDescriptor from ConsumedMasterRelations for (ConsumedResourceRelationEntity consumedResourceRelationEntity : copyUnit.getTargetResource().getConsumedMasterRelations()) { addRelationPropertyDescriptors(allPropertyDescriptorsMap, consumedResourceRelationEntity); } } // do copy for all contexts for (ResourceRelationContextEntity origin : origins) { Integer key = origin.getContext().getId(); ResourceRelationContextEntity target = targetsMap.containsKey(key) ? targetsMap.get(key) : new ResourceRelationContextEntity(); copyContextDependency(origin, target, copyUnit, allPropertyDescriptorsMap); target.setContextualizedObject(targetResRel); } } } private <T extends AbstractResourceRelationEntity> void addRelationPropertyDescriptors(Map<String, PropertyDescriptorEntity> allPropertyDescriptorsMap, T relationEntity) { for (ResourceContextEntity resourceContextEntity : relationEntity.getSlaveResource().getContexts()) { if (resourceContextEntity.getPropertyDescriptors() != null) { for (PropertyDescriptorEntity propertyDescriptorEntity : resourceContextEntity.getPropertyDescriptors()) { String key = createDescriptorKey(propertyDescriptorEntity); allPropertyDescriptorsMap.put(key, propertyDescriptorEntity); } } } } /** * iterate and copy * * @param origins * @param targets * @return */ protected Map<String, PropertyDescriptorEntity> copyPropertyDescriptors( Set<PropertyDescriptorEntity> origins, Set<PropertyDescriptorEntity> targets, ContextDependency<?> targetContextDependency, CopyUnit copyUnit) throws ForeignableOwnerViolationException { // prepare map with propertyName and isTesting as key Map<String, PropertyDescriptorEntity> targetsMap = new HashMap<>(); if (targets != null) { for (PropertyDescriptorEntity target : targets) { String key = createDescriptorKey(target); targetsMap.put(key, target); } } if (origins != null) { for (PropertyDescriptorEntity origin : origins) { String key = createDescriptorKey(origin); PropertyDescriptorEntity targetDescriptor = targetsMap.get(key); // Predecessor Mode only copy AMW Owned Elements if(CopyMode.MAIA_PREDECESSOR.equals(copyUnit.getMode())){ if(ForeignableOwner.AMW.equals(origin.getOwner())){ copyPropertyDescriptor(targetContextDependency, copyUnit, targetsMap, origin, key, 
targetDescriptor);
                    }
                } else {
                    copyPropertyDescriptor(targetContextDependency, copyUnit, targetsMap, origin, key, targetDescriptor);
                }
            }
        }
        return targetsMap;
    }

    private void copyPropertyDescriptor(ContextDependency<?> targetContextDependency, CopyUnit copyUnit, Map<String, PropertyDescriptorEntity> targetsMap, PropertyDescriptorEntity origin, String key, PropertyDescriptorEntity targetDescriptor) throws ForeignableOwnerViolationException {
        int propertyDescriptorForeignableHashCodeBeforeChange = targetDescriptor != null ? targetDescriptor.foreignableFieldHashCode() : 0;

        PropertyDescriptorEntity target = origin.getCopy(targetDescriptor, copyUnit);

        copyTags(origin, target);

        foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), propertyDescriptorForeignableHashCodeBeforeChange, target);

        if (!targetsMap.containsKey(key)) {
            targetContextDependency.addPropertyDescriptor(target);
        }
        targetsMap.put(key, target);
    }

    protected String createDescriptorKey(PropertyDescriptorEntity desc) {
        return desc.getPropertyName() + "_" + String.valueOf(desc.isTesting());
    }

    protected void copyTags(PropertyDescriptorEntity origin, PropertyDescriptorEntity target) {
        Set<String> tagNames = new HashSet<>();
        for (PropertyTagEntity targetTag : target.getPropertyTags()) {
            tagNames.add(targetTag.getName());
        }

        for (PropertyTagEntity originTag : origin.getPropertyTags()) {
            if (!tagNames.contains(originTag.getName())) {
                PropertyTagEntity copy = new PropertyTagEntity();
                copy.setName(originTag.getName());
                copy.setTagType(PropertyTagType.LOCAL);
                target.addPropertyTag(copy);
            }
        }
    }

    /**
     * Copies values from the origin to the target contextDependency (with properties and templates).
     *
     * @param origin the context dependency to copy from
     * @param target the context dependency to copy onto
     * @param copyUnit the current copy unit
     * @param allTargetDescriptors all property descriptors of the target, keyed by descriptor key
     * @return the populated target context dependency
     */
    protected ContextDependency<?> copyContextDependency(ContextDependency<?> origin,
            ContextDependency<?> target, CopyUnit copyUnit,
            Map<String, PropertyDescriptorEntity> allTargetDescriptors) {
        // context
        target.setContext(origin.getContext());

        // properties
        Set<PropertyEntity> properties = copyProperties(origin.getProperties(), allTargetDescriptors,
                target.getProperties(), copyUnit);
        for (PropertyEntity property : properties) {
            target.addProperty(property);
        }

        // templates
        Set<TemplateDescriptorEntity> templates = copyTemplates(origin.getTemplates(),
                target.getTemplates(), copyUnit);
        for (TemplateDescriptorEntity template : templates) {
            target.addTemplate(template);
        }

        return target;
    }

    /**
     * <ul>
     * <li>Target and origin property descriptors are matched by their propertyName (= technicalKey).</li>
     * <li>If a propertyDescriptor of the target resource already has a propertyValue, this value will not be overwritten.</li>
 * </ul>
     * (A standalone sketch of this descriptor-key matching follows the class body below.)
     *
     * @param origins all properties of the origin resource for one context
     * @param targetPropDescriptorMap map with all propertyDescriptors of the target resource (after copy), with the propertyName (= technicalKey) as key
     * @param targetProperties all properties of the target resource for one context
     * @param copyUnit the current copy unit
     * @return the set of properties to attach to the target context
     */
    protected Set<PropertyEntity> copyProperties(Set<PropertyEntity> origins,
            Map<String, PropertyDescriptorEntity> targetPropDescriptorMap,
            Set<PropertyEntity> targetProperties, CopyUnit copyUnit) {
        Map<Integer, PropertyEntity> existingPropertiesByDescriptorId = new HashMap<>();
        if (targetProperties != null) {
            for (PropertyEntity existingProperty : targetProperties) {
                if (existingProperty.getDescriptor() != null
                        && existingProperty.getDescriptor().getId() != null) {
                    existingPropertiesByDescriptorId.put(existingProperty.getDescriptor().getId(),
                            existingProperty);
                }
            }
        }

        Set<PropertyEntity> targets = new HashSet<>();
        if (origins != null) {
            for (PropertyEntity origin : origins) {
                // If a property exists on this context for the same descriptor, we define it as the
                // target property...
                PropertyEntity targetProperty = existingPropertiesByDescriptorId.get(origin
                        .getDescriptor().getId());
                PropertyDescriptorEntity targetDescriptor = null;
                if (targetProperty == null) {
                    // If it can't be found, it's possible that we have copied the target descriptor.
                    // Let's look for it.
                    String key = createDescriptorKey(origin.getDescriptor());
                    targetDescriptor = targetPropDescriptorMap.get(key);
                    if (targetDescriptor != null) {
                        // If a property is already defined for the existing descriptor, we update this
                        // value...
                        targetProperty = existingPropertiesByDescriptorId.get(targetDescriptor.getId());
                    }
                }

                if (CopyMode.MAIA_PREDECESSOR == copyUnit.getMode() && targetDescriptor == null) {
                    // in predecessor mode, do not add a property for a null descriptor
                } else {
                    // If no property exists for the found property descriptor, we create a new one...
                    if (targetProperty == null) {
                        PropertyEntity target = origin.getCopy(null, copyUnit);
                        // a null targetDescriptor occurs for properties on resource types or relations
                        if (targetDescriptor != null) {
                            target.setDescriptor(targetDescriptor);
                        }
                        targets.add(target);
                    }
                    // otherwise, we merge the new value with the old property entity
                    else {
                        targets.add(mergePropertyEntity(origin, targetProperty));
                    }
                }
            }
        }
        return targets;
    }

    /**
     * Merges the value of the origin property into the target property.
     *
     * @param origin the property to take the value from
     * @param target the property to update
     * @return the updated target property
     */
    protected PropertyEntity mergePropertyEntity(PropertyEntity origin, PropertyEntity target) {
        target.setValue(origin.getValue());
        return target;
    }

    /**
     * Existing templates in the target will be overwritten. <br/>
     * Unlike property descriptors, templates are copied unconditionally, i.e. also in {@link CopyMode#MAIA_PREDECESSOR} mode.
 *
     * @param origins the templates of the origin context
     * @param targets the templates already present on the target context
     * @param copyUnit the current copy unit
     * @return the merged set of templates for the target context
     */
    protected Set<TemplateDescriptorEntity> copyTemplates(Set<TemplateDescriptorEntity> origins,
            Set<TemplateDescriptorEntity> targets, CopyUnit copyUnit) {
        Map<String, TemplateDescriptorEntity> targetTemplatesMap = new HashMap<>();
        if (targets != null) {
            for (TemplateDescriptorEntity t : targets) {
                String key = t.getName() + String.valueOf(t.isTesting());
                targetTemplatesMap.put(key, t);
            }
        }
        if (origins != null) {
            for (TemplateDescriptorEntity origin : origins) {
                String key = origin.getName() + String.valueOf(origin.isTesting());
                targetTemplatesMap.put(key, origin.getCopy(targetTemplatesMap.get(key), copyUnit));
            }
        }
        return new HashSet<>(targetTemplatesMap.values());
    }

    /**
     * Existing functions in the target won't be overwritten. <br/>
     * Functions are always owned by {@link ForeignableOwner#AMW}; therefore all functions are also copied in {@link CopyMode#MAIA_PREDECESSOR} mode.
     *
     * @param copyUnit the current copy unit
     */
    protected void copyFunctions(CopyUnit copyUnit) {
        Set<String> targetFunctions = new HashSet<>();
        for (AmwFunctionEntity targetFct : copyUnit.getTargetResource().getFunctions()) {
            targetFunctions.add(targetFct.getName());
        }

        for (AmwFunctionEntity origFct : copyUnit.getOriginResource().getFunctions()) {
            if (!targetFunctions.contains(origFct.getName())) {
                copyUnit.getTargetResource().addFunction(origFct.getCopy(null, copyUnit));
            }
        }
    }
}
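To make the Copy/Release semantics from the class Javadoc concrete, here is a minimal usage sketch. It is illustrative only: the wrapper class, the injected field name, the choice of ForeignableOwner.AMW as acting owner, and the assumption that the caller has already loaded the entities are all additions for this example, not part of the original file. Entity imports are omitted because their packages are not shown in full here.

import javax.inject.Inject;

import ch.puzzle.itc.mobiliar.business.foreignable.entity.ForeignableOwner;

public class CopyResourceExample {

    @Inject
    CopyResourceDomainService copyService;

    // origin, target and nextRelease are assumed to be loaded by the caller,
    // e.g. through the AMW repositories.
    public void copyAndRelease(ResourceEntity origin, ResourceEntity target, ReleaseEntity nextRelease)
            throws ForeignableOwnerViolationException, AMWException {
        // COPY mode: overwrites matching values on the target, keeps data that only
        // exists on the target and skips provided relations (see the table above).
        CopyResourceResult copyResult =
                copyService.copyFromOriginToTargetResource(origin, target, ForeignableOwner.AMW);

        // RELEASE mode: looks up (or creates) the resource of the given release in
        // the same resource group and also copies provided relations.
        CopyResourceResult releaseResult =
                copyService.createReleaseFromOriginResource(origin, nextRelease, ForeignableOwner.AMW);

        if (!copyResult.isSuccess() || !releaseResult.isSuccess()) {
            // Both results collect error messages and skipped relations for the caller.
        }
    }
}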
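copyConsumedResourceRelationEntities above replaces an already related runtime when the copied relation points at a different runtime resource group (see shallReplaceRuntime). The following is a simplified, self-contained model of that decision; the demo class and the use of plain group ids instead of relation entities are illustrative assumptions.

import java.util.Set;

public class RuntimeReplacementDemo {

    // Simplified mirror of shallReplaceRuntime(..): the set holds the resource
    // group ids of already related runtimes instead of full relation entities.
    static boolean shallReplaceRuntime(Set<Integer> existingRuntimeGroupIds,
            Integer incomingRuntimeGroupId, boolean incomingIsRuntime) {
        if (existingRuntimeGroupIds.isEmpty() || !incomingIsRuntime) {
            return false;
        }
        Integer originalRuntime = existingRuntimeGroupIds.iterator().next();
        return !originalRuntime.equals(incomingRuntimeGroupId);
    }

    public static void main(String[] args) {
        // A different runtime replaces the existing one; the same runtime does not.
        System.out.println(shallReplaceRuntime(Set.of(1), 2, true));  // true
        System.out.println(shallReplaceRuntime(Set.of(1), 1, true));  // false
    }
}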
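The property-descriptor matching in copyPropertyDescriptors and copyProperties hinges on createDescriptorKey: descriptors are matched across resources by property name plus the isTesting flag, not by database id. Here is a standalone sketch of that key scheme; the class name and the sample values are invented for illustration.

public class DescriptorKeyDemo {

    // Mirrors createDescriptorKey(..): propertyName + "_" + isTesting.
    static String descriptorKey(String propertyName, boolean testing) {
        return propertyName + "_" + String.valueOf(testing);
    }

    public static void main(String[] args) {
        // The same property name with different isTesting flags yields two distinct
        // keys, so a testing descriptor never overwrites its non-testing counterpart:
        System.out.println(descriptorKey("jdbcUrl", false)); // -> jdbcUrl_false
        System.out.println(descriptorKey("jdbcUrl", true));  // -> jdbcUrl_true
    }
}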
Added audit service call, formatting and cleanup
AMW_business/src/main/java/ch/puzzle/itc/mobiliar/business/resourcegroup/control/CopyResourceDomainService.java
Added audit service call, formatting and cleanup
<ide><path>MW_business/src/main/java/ch/puzzle/itc/mobiliar/business/resourcegroup/control/CopyResourceDomainService.java <ide> <ide> package ch.puzzle.itc.mobiliar.business.resourcegroup.control; <ide> <add>import ch.puzzle.itc.mobiliar.business.auditview.control.AuditService; <ide> import ch.puzzle.itc.mobiliar.business.domain.commons.CommonDomainService; <ide> import ch.puzzle.itc.mobiliar.business.environment.entity.ContextDependency; <ide> import ch.puzzle.itc.mobiliar.business.foreignable.control.ForeignableService; <ide> import ch.puzzle.itc.mobiliar.business.foreignable.entity.ForeignableOwner; <ide> import ch.puzzle.itc.mobiliar.business.foreignable.entity.ForeignableOwnerViolationException; <ide> import ch.puzzle.itc.mobiliar.business.function.entity.AmwFunctionEntity; <del>import ch.puzzle.itc.mobiliar.business.property.control.PropertyTagEditingService; <ide> import ch.puzzle.itc.mobiliar.business.property.entity.PropertyDescriptorEntity; <ide> import ch.puzzle.itc.mobiliar.business.property.entity.PropertyEntity; <ide> import ch.puzzle.itc.mobiliar.business.property.entity.PropertyTagEntity; <ide> * <td>nein</td> <ide> * </tr> <ide> * </table> <del> * <add> * <ide> * @author cweber <ide> */ <ide> public class CopyResourceDomainService { <ide> <del> @Inject <del> private EntityManager entityManager; <del> <del> @Inject <del> PropertyTagEditingService propertyTagEditingService; <del> <del> @Inject <del> CommonDomainService commonDomainService; <add> @Inject <add> private EntityManager entityManager; <add> <add> @Inject <add> CommonDomainService commonDomainService; <ide> <ide> @Inject <ide> ForeignableService foreignableService; <ide> <del> @Inject <del> SoftlinkRelationService softlinkService; <del> <del> @Inject <del> ResourceLocator resourceLocator; <del> <del> public enum CopyMode { <del> COPY, RELEASE, MAIA_PREDECESSOR; <del> } <del> <del> /** <del> * @param origin <del> * @param target <del> * @param actingOwner <del> * @return <del> * @throws ForeignableOwnerViolationException <del> * @throws AMWException <del> */ <del> public CopyResourceResult copyFromOriginToTargetResource(ResourceEntity origin, ResourceEntity target, <del> ForeignableOwner actingOwner) throws ForeignableOwnerViolationException, AMWException { <del> if (target == null) { <del> throw new RuntimeException("Target resource should not be null for copy action"); <del> } <del> else { <del> return doCopyResourceAndSave(new CopyUnit(origin, target, CopyMode.COPY, actingOwner)); <del> } <del> } <del> <del> /** <del> * @param origin <del> * - the resource to create a new release from <del> * @param release <del> * - the release to create <del> * @return <del> */ <del> public CopyResourceResult createReleaseFromOriginResource(ResourceEntity origin, ReleaseEntity release, ForeignableOwner actingOwner) throws <del> ForeignableOwnerViolationException, AMWException { <del> ResourceEntity target = commonDomainService.getResourceEntityByGroupAndRelease(origin <del> .getResourceGroup().getId(), release.getId()); <del> if (target == null) { <del> target = ResourceFactory.createNewResourceForOwner(origin.getResourceGroup(), actingOwner); <del> target.setRelease(release); <del> } <del> return doCopyResourceAndSave(new CopyUnit(origin, target, CopyMode.RELEASE, actingOwner)); <del> } <del> <del> public CopyResourceResult copyFromPredecessorToSuccessorResource(ResourceEntity predecessor, ResourceEntity successor, ForeignableOwner actingOwner) throws ForeignableOwnerViolationException, AMWException { <del> if (successor == 
null) { <del> throw new RuntimeException("Successor resource should not be null for copy predecessor action"); <del> } <del> else { <del> return doCopyResourceAndSave(new CopyUnit(predecessor, successor, CopyMode.MAIA_PREDECESSOR, actingOwner)); <del> } <del> } <del> <del> /** <del> * @param copyUnit <del> * @return result if copy was successful, contains a list with error messages if copy fails <del> */ <add> @Inject <add> SoftlinkRelationService softlinkService; <add> <add> @Inject <add> ResourceLocator resourceLocator; <add> <add> @Inject <add> AuditService auditService; <add> <add> public enum CopyMode { <add> COPY, RELEASE, MAIA_PREDECESSOR <add> } <add> <add> public CopyResourceResult copyFromOriginToTargetResource(ResourceEntity origin, ResourceEntity target, ForeignableOwner actingOwner) <add> throws ForeignableOwnerViolationException, AMWException { <add> if (target == null) { <add> throw new RuntimeException("Target resource should not be null for copy action"); <add> } else { <add> return doCopyResourceAndSave(new CopyUnit(origin, target, CopyMode.COPY, actingOwner)); <add> } <add> } <add> <add> /** <add> * @param origin - the resource to create a new release from <add> * @param release - the release to create <add> */ <add> public CopyResourceResult createReleaseFromOriginResource(ResourceEntity origin, ReleaseEntity release, ForeignableOwner actingOwner) <add> throws ForeignableOwnerViolationException, AMWException { <add> ResourceEntity target = commonDomainService.getResourceEntityByGroupAndRelease(origin.getResourceGroup().getId(), release.getId()); <add> if (target == null) { <add> target = ResourceFactory.createNewResourceForOwner(origin.getResourceGroup(), actingOwner); <add> target.setRelease(release); <add> } <add> return doCopyResourceAndSave(new CopyUnit(origin, target, CopyMode.RELEASE, actingOwner)); <add> } <add> <add> public CopyResourceResult copyFromPredecessorToSuccessorResource(ResourceEntity predecessor, ResourceEntity successor, ForeignableOwner actingOwner) <add> throws ForeignableOwnerViolationException, AMWException { <add> if (successor == null) { <add> throw new RuntimeException("Successor resource should not be null for copy predecessor action"); <add> } else { <add> return doCopyResourceAndSave(new CopyUnit(predecessor, successor, CopyMode.MAIA_PREDECESSOR, actingOwner)); <add> } <add> } <add> <add> /** <add> * Persists the target <add> * <add> * @return result if copy was successful, contains a list with error messages if copy fails <add> */ <ide> protected CopyResourceResult doCopyResourceAndSave(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <ide> int targetHashCodeBeforeChange = copyUnit.getTargetResource() != null ? 
copyUnit.getTargetResource().foreignableFieldHashCode() : 0; <ide> <ide> // do copy <del> copyUnit.getOriginResource().getCopy(copyUnit.getTargetResource(), copyUnit); <del> copyConsumedMasterRelations(copyUnit); <del> if(copyUnit.getMode() != CopyMode.MAIA_PREDECESSOR){ <del> copyConsumedSlaveRelations(copyUnit); <del> } <del> copyResourceContexts(copyUnit); <del> copyProvidedMasterRelations(copyUnit); <del> if(copyUnit.getMode() != CopyMode.MAIA_PREDECESSOR) { <del> copyProvidedSlaveRelations(copyUnit); <del> } <del> copyFunctions(copyUnit); <del> copySoftlinkRelation(copyUnit); <del> <del> // do save <del> if (copyUnit.getResult().isSuccess()) { <add> copyUnit.getOriginResource().getCopy(copyUnit.getTargetResource(), copyUnit); <add> copyConsumedMasterRelations(copyUnit); <add> if (copyUnit.getMode() != CopyMode.MAIA_PREDECESSOR) { <add> copyConsumedSlaveRelations(copyUnit); <add> } <add> copyResourceContexts(copyUnit); <add> copyProvidedMasterRelations(copyUnit); <add> if (copyUnit.getMode() != CopyMode.MAIA_PREDECESSOR) { <add> copyProvidedSlaveRelations(copyUnit); <add> } <add> copyFunctions(copyUnit); <add> copySoftlinkRelation(copyUnit); <add> <add> // do save <add> if (copyUnit.getResult().isSuccess()) { <ide> // check if only decorable fields on resource changed when changing owner is different from resource owner <ide> foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), targetHashCodeBeforeChange, copyUnit.getTargetResource()); <del> <del> entityManager.persist(copyUnit.getTargetResource()); <del> } <del> copyUnit.getResult().setTargetResource(copyUnit.getTargetResource()); <del> return copyUnit.getResult(); <del> } <del> <del> protected void copyConsumedMasterRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <del> Set<ConsumedResourceRelationEntity> targetConsumedMasterRel = copyUnit.getTargetResource() <del> .getConsumedMasterRelations() != null ? copyUnit.getTargetResource() <del> .getConsumedMasterRelations() : new HashSet<ConsumedResourceRelationEntity>(); <del> Set<ConsumedResourceRelationEntity> originConsumedMasterRel = copyUnit.getOriginResource() <del> .getConsumedMasterRelations(); <del> copyConsumedResourceRelationEntities(originConsumedMasterRel, targetConsumedMasterRel, copyUnit); <del> copyUnit.getTargetResource().setConsumedMasterRelations(targetConsumedMasterRel); <del> } <del> <del> protected void copyConsumedSlaveRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <del> Set<ConsumedResourceRelationEntity> targetConsumedSlaveRel = copyUnit.getTargetResource() <del> .getConsumedSlaveRelations() != null ? 
copyUnit.getTargetResource() <del> .getConsumedSlaveRelations() : new HashSet<ConsumedResourceRelationEntity>(); <del> Set<ConsumedResourceRelationEntity> originConsumedSlaveRel = copyUnit.getOriginResource() <del> .getConsumedSlaveRelations(); <del> if (copyUnit.getMode() == CopyMode.RELEASE) { <del> copyConsumedResourceRelationEntities(originConsumedSlaveRel, targetConsumedSlaveRel, copyUnit); <del> copyUnit.getTargetResource().setConsumedSlaveRelations(targetConsumedSlaveRel); <del> } else if (originConsumedSlaveRel != null) { <del> for (ConsumedResourceRelationEntity consumed : originConsumedSlaveRel) { <del> copyUnit.getResult().addSkippedConsumedRelation(copyUnit.getOriginResource().getId(), <del> consumed.getMasterResource().getName(), consumed.getSlaveResource().getName(), <del> consumed.getIdentifier(), <del> consumed.getMasterResource().getResourceType().getName(), <del> consumed.getSlaveResource().getResourceType().getName()); <del> } <del> } <del> } <del> <del> /** <del> * iterate and copy <del> * <del> * @param copyUnit <del> */ <del> protected void copyConsumedResourceRelationEntities(Set<ConsumedResourceRelationEntity> origins, <del> Set<ConsumedResourceRelationEntity> targets, CopyUnit copyUnit) throws ForeignableOwnerViolationException { <del> Map<String, ConsumedResourceRelationEntity> targetMap = new HashMap<>(); <del> <del> // prepare map with identifier as key and list of runtime relations <del> Set<ConsumedResourceRelationEntity> runtimeRelations = new HashSet<>(); <del> for (ConsumedResourceRelationEntity target : targets) { <del> if (target.getSlaveResource() != null) { <del> if (target.getSlaveResource().getResourceType().isRuntimeType()) { <del> runtimeRelations.add(target); <del> } <del> targetMap.put(target.buildIdentifer(), target); <del> } <del> } <del> <del> if (origins != null) { <del> for (ConsumedResourceRelationEntity origin : origins) { <del> // If a runtime already exists and another runtime is copied, we overwrite the previous <del> // TODO: runtime - write test <del> if (shallReplaceRuntime(runtimeRelations, origin)) { <del> for (ConsumedResourceRelationEntity rel : runtimeRelations) { <del> targets.remove(rel); <del> entityManager.remove(rel); <del> } <del> runtimeRelations.clear(); <del> } <del> String key = origin.buildIdentifer(); <del> ConsumedResourceRelationEntity target = targetMap.get(key); <del> int consumedResourceRelationForeignableHashCodeBeforeChange = target != null ? 
target.foreignableFieldHashCode() : 0; <del> target = origin.getCopy(target, copyUnit); <del> <del> <del> if(target != null) { <del> copyResourceRelationContexts(origin.getContexts(), target, copyUnit); <del> <del> foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), consumedResourceRelationForeignableHashCodeBeforeChange, target); <del> targets.add(target); <del> } <del> } <del> } <del> } <del> <del> private boolean shallReplaceRuntime(Set<ConsumedResourceRelationEntity> originalRelations, <del> ConsumedResourceRelationEntity relationToCheck) { <del> if (originalRelations.isEmpty() <del> || !relationToCheck.getSlaveResource().getResourceType().isRuntimeType()) { <del> return false; <del> } <del> ResourceGroupEntity originalRuntime = originalRelations.iterator().next().getSlaveResource() <del> .getResourceGroup(); <del> ResourceGroupEntity newRuntime = relationToCheck.getSlaveResource().getResourceGroup(); <del> return !originalRuntime.getId().equals(newRuntime.getId()); <del> } <del> <del> <del> protected void copyProvidedMasterRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <del> Set<ProvidedResourceRelationEntity> targetProvidedResRels = copyUnit.getTargetResource() <del> .getProvidedMasterRelations() != null ? copyUnit.getTargetResource() <del> .getProvidedMasterRelations() : new HashSet<ProvidedResourceRelationEntity>(); <del> Set<ProvidedResourceRelationEntity> originProvidedResRels = copyUnit.getOriginResource() <del> .getProvidedMasterRelations(); <del> if (copyUnit.getMode() == CopyMode.RELEASE || copyUnit.getMode() == CopyMode.MAIA_PREDECESSOR) { <del> copyProvidedResourceRelationEntities(originProvidedResRels, targetProvidedResRels, copyUnit); <del> copyUnit.getTargetResource().setProvidedMasterRelations(targetProvidedResRels); <del> } <del> else if (originProvidedResRels != null) { <del> for (ProvidedResourceRelationEntity prov : originProvidedResRels) { <del> copyUnit.getResult().addSkippedProvidedRelation(copyUnit.getOriginResource().getId(), <del> prov.getMasterResource().getName(), prov.getSlaveResource().getName(), <del> prov.getIdentifier(), prov.getMasterResource().getResourceType().getName(), <del> prov.getSlaveResource().getResourceType().getName()); <del> } <del> } <del> } <del> <del> protected void copyProvidedSlaveRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <del> Set<ProvidedResourceRelationEntity> targetProvidedSlaveRels = copyUnit.getTargetResource() <del> .getProvidedSlaveRelations() != null ? 
copyUnit.getTargetResource() <del> .getProvidedSlaveRelations() : new HashSet<ProvidedResourceRelationEntity>(); <del> Set<ProvidedResourceRelationEntity> originProvidedSlaveRels = copyUnit.getOriginResource() <del> .getProvidedSlaveRelations(); <del> if (copyUnit.getMode() == CopyMode.RELEASE) { <del> copyProvidedResourceRelationEntities(originProvidedSlaveRels, targetProvidedSlaveRels, <del> copyUnit); <del> copyUnit.getTargetResource().setProvidedSlaveRelations(targetProvidedSlaveRels); <del> } <del> else if (originProvidedSlaveRels != null) { <del> for (ProvidedResourceRelationEntity prov : originProvidedSlaveRels) { <del> copyUnit.getResult().addSkippedProvidedRelation(copyUnit.getOriginResource().getId(), <del> prov.getMasterResource().getName(), prov.getSlaveResource().getName(), <del> prov.getIdentifier(), prov.getMasterResource().getResourceType().getName(), <del> prov.getSlaveResource().getResourceType().getName()); <del> } <del> } <del> } <del> <del> protected SoftlinkRelationEntity copySoftlinkRelation(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <del> SoftlinkRelationEntity originSoftlink = copyUnit.getOriginResource().getSoftlinkRelation(); <del> SoftlinkRelationEntity targetSoftlink = copyUnit.getTargetResource().getSoftlinkRelation(); <del> if (originSoftlink != null) { <del> int softlinkRelationForeignableHashCodeBeforeChange = targetSoftlink != null ? targetSoftlink <del> .foreignableFieldHashCode() : 0; <del> <del> targetSoftlink = originSoftlink.getCopy(targetSoftlink, copyUnit); <del> <del> foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), <del> softlinkRelationForeignableHashCodeBeforeChange, targetSoftlink); <del> softlinkService.setSoftlinkRelation(copyUnit.getTargetResource(), targetSoftlink); <del> } <del> return targetSoftlink; <del> } <del> <del> /** <del> * iterate and copy <del> * <del> * @param origins <del> * @param targets <del> * @param copyUnit <del> */ <del> protected void copyProvidedResourceRelationEntities(Set<ProvidedResourceRelationEntity> origins, <del> Set<ProvidedResourceRelationEntity> targets, CopyUnit copyUnit) throws ForeignableOwnerViolationException { <del> Map<String, ProvidedResourceRelationEntity> targetMap = new HashMap<>(); <del> // prepare map with identifier as key <del> for (ProvidedResourceRelationEntity target : targets) { <del> targetMap.put(target.buildIdentifer(), target); <del> } <del> <del> if (origins != null) { <del> for (ProvidedResourceRelationEntity origin : origins) { <del> String key = origin.buildIdentifer(); <del> ProvidedResourceRelationEntity target = targetMap.get(origin.buildIdentifer()); <del> <del> int providedResourceRelationForeignableHashCodeBeforeChange = target != null ? 
target.foreignableFieldHashCode() : 0; <del> target = origin.getCopy(target, copyUnit); <del> <del> if(target != null){ <del> copyResourceRelationContexts(origin.getContexts(), target, copyUnit); <del> foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), providedResourceRelationForeignableHashCodeBeforeChange, target); <del> <del> if (!targetMap.containsKey(key)) { <del> targets.add(target); <del> } <del> } <del> } <del> } <del> } <del> <del> /** <del> * iterate and copy <del> * <del> * @param copyUnit <del> */ <del> protected void copyResourceContexts(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <del> Set<ResourceContextEntity> targets = copyUnit.getTargetResource().getContexts(); <del> Set<ResourceContextEntity> origins = copyUnit.getOriginResource().getContexts(); <del> <del> // prepare map with contextId as key <del> Map<Integer, ResourceContextEntity> targetsMap = new HashMap<>(); <del> if (targets != null) { <del> for (ResourceContextEntity target : targets) { <del> targetsMap.put(target.getContext().getId(), target); <del> } <del> } <del> if (origins != null) { <del> // 1. copy descriptors for all contexts <del> Map<String, PropertyDescriptorEntity> allPropertyDescriptorsMap = new HashMap<>(); <del> for (ResourceContextEntity origin : origins) { <del> Integer key = origin.getContext().getId(); <del> ResourceContextEntity target = targetsMap.containsKey(key) ? targetsMap.get(key) <del> : new ResourceContextEntity(); <del> allPropertyDescriptorsMap.putAll(copyPropertyDescriptors(origin.getPropertyDescriptors(), <del> target.getPropertyDescriptors(), target, copyUnit)); <del> if (!targetsMap.containsKey(key)) { <del> copyUnit.getTargetResource().addContext(target); <del> } <del> targetsMap.put(key, target); <del> } <del> <del> // 2. copy context with properties <del> for (ResourceContextEntity origin : origins) { <del> Integer key = origin.getContext().getId(); <del> ResourceContextEntity target = targetsMap.get(key); <del> copyContextDependency(origin, target, copyUnit, allPropertyDescriptorsMap); <del> target.setContextualizedObject(copyUnit.getTargetResource()); <del> } <del> } <del> } <del> <del> /** <del> * iterate and copy <del> * <del> * @param origins <del> * @param targetResRel <del> * @return <del> */ <del> protected void copyResourceRelationContexts(Set<ResourceRelationContextEntity> origins, <del> AbstractResourceRelationEntity targetResRel, CopyUnit copyUnit) throws ForeignableOwnerViolationException { <del> // prepare map with contextId as key <del> Set<ResourceRelationContextEntity> targets = targetResRel.getContexts(); <del> Map<Integer, ResourceRelationContextEntity> targetsMap = new HashMap<>(); <del> if (targets != null) { <del> for (ResourceRelationContextEntity target : targets) { <del> targetsMap.put(target.getContext().getId(), target); <del> } <del> } <del> if (origins != null) { <del> // 1. copy descriptors for all contexts <del> Map<String, PropertyDescriptorEntity> allPropertyDescriptorsMap = new HashMap<>(); <del> for (ResourceRelationContextEntity origin : origins) { <del> Integer key = origin.getContext().getId(); <del> ResourceRelationContextEntity target = targetsMap.containsKey(key) ? 
targetsMap.get(key) <del> : new ResourceRelationContextEntity(); <del> allPropertyDescriptorsMap.putAll(copyPropertyDescriptors(origin.getPropertyDescriptors(), <del> target.getPropertyDescriptors(), target, copyUnit)); <del> if (!targetsMap.containsKey(key)) { <del> targetResRel.addContext(target); <del> } <del> targetsMap.put(key, target); <del> } <del> <del> if(copyUnit.getMode() == CopyMode.MAIA_PREDECESSOR && targetResRel.getSlaveResource() != null && (resourceLocator.hasResourceConsumableSoftlinkType(targetResRel.getSlaveResource()) || resourceLocator <del> .hasResourceProvidableSoftlinkType(targetResRel.getSlaveResource()))){ <del> <del> // propertyValue from relations has to be copied if PropertyDescriptor exists on target (successor) <del> for (ResourceContextEntity resourceContextEntity : copyUnit.getTargetResource().getContexts()) { <del> for (PropertyDescriptorEntity propertyDescriptorEntity : resourceContextEntity.getPropertyDescriptors()) { <del> String key = createDescriptorKey(propertyDescriptorEntity); <del> allPropertyDescriptorsMap.put(key, propertyDescriptorEntity); <del> } <del> } <add> auditService.storeIdInThreadLocalForAuditLog(copyUnit.getTargetResource()); <add> entityManager.persist(copyUnit.getTargetResource()); <add> } <add> copyUnit.getResult().setTargetResource(copyUnit.getTargetResource()); <add> return copyUnit.getResult(); <add> } <add> <add> protected void copyConsumedMasterRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <add> Set<ConsumedResourceRelationEntity> targetConsumedMasterRel = copyUnit.getTargetResource() <add> .getConsumedMasterRelations() != null ? copyUnit.getTargetResource() <add> .getConsumedMasterRelations() : new HashSet<ConsumedResourceRelationEntity>(); <add> Set<ConsumedResourceRelationEntity> originConsumedMasterRel = copyUnit.getOriginResource() <add> .getConsumedMasterRelations(); <add> copyConsumedResourceRelationEntities(originConsumedMasterRel, targetConsumedMasterRel, copyUnit); <add> copyUnit.getTargetResource().setConsumedMasterRelations(targetConsumedMasterRel); <add> } <add> <add> protected void copyConsumedSlaveRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <add> Set<ConsumedResourceRelationEntity> targetConsumedSlaveRel = copyUnit.getTargetResource() <add> .getConsumedSlaveRelations() != null ? 
copyUnit.getTargetResource() <add> .getConsumedSlaveRelations() : new HashSet<ConsumedResourceRelationEntity>(); <add> Set<ConsumedResourceRelationEntity> originConsumedSlaveRel = copyUnit.getOriginResource() <add> .getConsumedSlaveRelations(); <add> if (copyUnit.getMode() == CopyMode.RELEASE) { <add> copyConsumedResourceRelationEntities(originConsumedSlaveRel, targetConsumedSlaveRel, copyUnit); <add> copyUnit.getTargetResource().setConsumedSlaveRelations(targetConsumedSlaveRel); <add> } else if (originConsumedSlaveRel != null) { <add> for (ConsumedResourceRelationEntity consumed : originConsumedSlaveRel) { <add> copyUnit.getResult().addSkippedConsumedRelation(copyUnit.getOriginResource().getId(), <add> consumed.getMasterResource().getName(), consumed.getSlaveResource().getName(), <add> consumed.getIdentifier(), <add> consumed.getMasterResource().getResourceType().getName(), <add> consumed.getSlaveResource().getResourceType().getName()); <add> } <add> } <add> } <add> <add> /** <add> * iterate and copy <add> */ <add> protected void copyConsumedResourceRelationEntities(Set<ConsumedResourceRelationEntity> origins, <add> Set<ConsumedResourceRelationEntity> targets, <add> CopyUnit copyUnit) throws ForeignableOwnerViolationException { <add> Map<String, ConsumedResourceRelationEntity> targetMap = new HashMap<>(); <add> <add> // prepare map with identifier as key and list of runtime relations <add> Set<ConsumedResourceRelationEntity> runtimeRelations = new HashSet<>(); <add> for (ConsumedResourceRelationEntity target : targets) { <add> if (target.getSlaveResource() != null) { <add> if (target.getSlaveResource().getResourceType().isRuntimeType()) { <add> runtimeRelations.add(target); <add> } <add> targetMap.put(target.buildIdentifer(), target); <add> } <add> } <add> <add> if (origins != null) { <add> for (ConsumedResourceRelationEntity origin : origins) { <add> // If a runtime already exists and another runtime is copied, we overwrite the previous <add> // TODO: runtime - write test <add> if (shallReplaceRuntime(runtimeRelations, origin)) { <add> for (ConsumedResourceRelationEntity rel : runtimeRelations) { <add> targets.remove(rel); <add> entityManager.remove(rel); <add> } <add> runtimeRelations.clear(); <add> } <add> String key = origin.buildIdentifer(); <add> ConsumedResourceRelationEntity target = targetMap.get(key); <add> int consumedResourceRelationForeignableHashCodeBeforeChange = target != null ? 
target.foreignableFieldHashCode() : 0; <add> target = origin.getCopy(target, copyUnit); <add> <add> if (target != null) { <add> copyResourceRelationContexts(origin.getContexts(), target, copyUnit); <add> foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), consumedResourceRelationForeignableHashCodeBeforeChange, target); <add> targets.add(target); <add> } <add> } <add> } <add> } <add> <add> private boolean shallReplaceRuntime(Set<ConsumedResourceRelationEntity> originalRelations, <add> ConsumedResourceRelationEntity relationToCheck) { <add> if (originalRelations.isEmpty() || !relationToCheck.getSlaveResource().getResourceType().isRuntimeType()) { <add> return false; <add> } <add> ResourceGroupEntity originalRuntime = originalRelations.iterator().next().getSlaveResource() <add> .getResourceGroup(); <add> ResourceGroupEntity newRuntime = relationToCheck.getSlaveResource().getResourceGroup(); <add> return !originalRuntime.getId().equals(newRuntime.getId()); <add> } <add> <add> protected void copyProvidedMasterRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <add> Set<ProvidedResourceRelationEntity> targetProvidedResRels = copyUnit.getTargetResource() <add> .getProvidedMasterRelations() != null ? copyUnit.getTargetResource() <add> .getProvidedMasterRelations() : new HashSet<ProvidedResourceRelationEntity>(); <add> Set<ProvidedResourceRelationEntity> originProvidedResRels = copyUnit.getOriginResource() <add> .getProvidedMasterRelations(); <add> if (copyUnit.getMode() == CopyMode.RELEASE || copyUnit.getMode() == CopyMode.MAIA_PREDECESSOR) { <add> copyProvidedResourceRelationEntities(originProvidedResRels, targetProvidedResRels, copyUnit); <add> copyUnit.getTargetResource().setProvidedMasterRelations(targetProvidedResRels); <add> } else if (originProvidedResRels != null) { <add> for (ProvidedResourceRelationEntity prov : originProvidedResRels) { <add> copyUnit.getResult().addSkippedProvidedRelation(copyUnit.getOriginResource().getId(), <add> prov.getMasterResource().getName(), prov.getSlaveResource().getName(), <add> prov.getIdentifier(), prov.getMasterResource().getResourceType().getName(), <add> prov.getSlaveResource().getResourceType().getName()); <add> } <add> } <add> } <add> <add> protected void copyProvidedSlaveRelations(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <add> Set<ProvidedResourceRelationEntity> targetProvidedSlaveRels = copyUnit.getTargetResource() <add> .getProvidedSlaveRelations() != null ? 
copyUnit.getTargetResource() <add> .getProvidedSlaveRelations() : new HashSet<ProvidedResourceRelationEntity>(); <add> Set<ProvidedResourceRelationEntity> originProvidedSlaveRels = copyUnit.getOriginResource() <add> .getProvidedSlaveRelations(); <add> if (copyUnit.getMode() == CopyMode.RELEASE) { <add> copyProvidedResourceRelationEntities(originProvidedSlaveRels, targetProvidedSlaveRels, copyUnit); <add> copyUnit.getTargetResource().setProvidedSlaveRelations(targetProvidedSlaveRels); <add> } else if (originProvidedSlaveRels != null) { <add> for (ProvidedResourceRelationEntity prov : originProvidedSlaveRels) { <add> copyUnit.getResult().addSkippedProvidedRelation(copyUnit.getOriginResource().getId(), <add> prov.getMasterResource().getName(), prov.getSlaveResource().getName(), <add> prov.getIdentifier(), prov.getMasterResource().getResourceType().getName(), <add> prov.getSlaveResource().getResourceType().getName()); <add> } <add> } <add> } <add> <add> protected SoftlinkRelationEntity copySoftlinkRelation(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <add> SoftlinkRelationEntity originSoftlink = copyUnit.getOriginResource().getSoftlinkRelation(); <add> SoftlinkRelationEntity targetSoftlink = copyUnit.getTargetResource().getSoftlinkRelation(); <add> if (originSoftlink != null) { <add> int softlinkRelationForeignableHashCodeBeforeChange = targetSoftlink != null ? targetSoftlink.foreignableFieldHashCode() : 0; <add> <add> targetSoftlink = originSoftlink.getCopy(targetSoftlink, copyUnit); <add> foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), softlinkRelationForeignableHashCodeBeforeChange, targetSoftlink); <add> softlinkService.setSoftlinkRelation(copyUnit.getTargetResource(), targetSoftlink); <add> } <add> return targetSoftlink; <add> } <add> <add> /** <add> * iterate and copy <add> */ <add> protected void copyProvidedResourceRelationEntities(Set<ProvidedResourceRelationEntity> origins, <add> Set<ProvidedResourceRelationEntity> targets, CopyUnit copyUnit) <add> throws ForeignableOwnerViolationException { <add> Map<String, ProvidedResourceRelationEntity> targetMap = new HashMap<>(); <add> // prepare map with identifier as key <add> for (ProvidedResourceRelationEntity target : targets) { <add> targetMap.put(target.buildIdentifer(), target); <add> } <add> <add> if (origins != null) { <add> for (ProvidedResourceRelationEntity origin : origins) { <add> String key = origin.buildIdentifer(); <add> ProvidedResourceRelationEntity target = targetMap.get(origin.buildIdentifer()); <add> <add> int providedResourceRelationForeignableHashCodeBeforeChange = target != null ? 
target.foreignableFieldHashCode() : 0; <add> target = origin.getCopy(target, copyUnit); <add> <add> if (target != null) { <add> copyResourceRelationContexts(origin.getContexts(), target, copyUnit); <add> foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), providedResourceRelationForeignableHashCodeBeforeChange, target); <add> <add> if (!targetMap.containsKey(key)) { <add> targets.add(target); <add> } <add> } <add> } <add> } <add> } <add> <add> /** <add> * iterate and copy <add> */ <add> protected void copyResourceContexts(CopyUnit copyUnit) throws ForeignableOwnerViolationException { <add> Set<ResourceContextEntity> targets = copyUnit.getTargetResource().getContexts(); <add> Set<ResourceContextEntity> origins = copyUnit.getOriginResource().getContexts(); <add> <add> // prepare map with contextId as key <add> Map<Integer, ResourceContextEntity> targetsMap = new HashMap<>(); <add> if (targets != null) { <add> for (ResourceContextEntity target : targets) { <add> targetsMap.put(target.getContext().getId(), target); <add> } <add> } <add> if (origins != null) { <add> // 1. copy descriptors for all contexts <add> Map<String, PropertyDescriptorEntity> allPropertyDescriptorsMap = new HashMap<>(); <add> for (ResourceContextEntity origin : origins) { <add> Integer key = origin.getContext().getId(); <add> ResourceContextEntity target = targetsMap.containsKey(key) ? targetsMap.get(key) : new ResourceContextEntity(); <add> allPropertyDescriptorsMap.putAll(copyPropertyDescriptors(origin.getPropertyDescriptors(), target.getPropertyDescriptors(), target, copyUnit)); <add> if (!targetsMap.containsKey(key)) { <add> copyUnit.getTargetResource().addContext(target); <add> } <add> targetsMap.put(key, target); <add> } <add> <add> // 2. copy context with properties <add> for (ResourceContextEntity origin : origins) { <add> Integer key = origin.getContext().getId(); <add> ResourceContextEntity target = targetsMap.get(key); <add> copyContextDependency(origin, target, copyUnit, allPropertyDescriptorsMap); <add> target.setContextualizedObject(copyUnit.getTargetResource()); <add> } <add> } <add> } <add> <add> /** <add> * iterate and copy <add> */ <add> protected void copyResourceRelationContexts(Set<ResourceRelationContextEntity> origins, <add> AbstractResourceRelationEntity targetResRel, <add> CopyUnit copyUnit) throws ForeignableOwnerViolationException { <add> // prepare map with contextId as key <add> Set<ResourceRelationContextEntity> targets = targetResRel.getContexts(); <add> Map<Integer, ResourceRelationContextEntity> targetsMap = new HashMap<>(); <add> if (targets != null) { <add> for (ResourceRelationContextEntity target : targets) { <add> targetsMap.put(target.getContext().getId(), target); <add> } <add> } <add> if (origins != null) { <add> // 1. copy descriptors for all contexts <add> Map<String, PropertyDescriptorEntity> allPropertyDescriptorsMap = new HashMap<>(); <add> for (ResourceRelationContextEntity origin : origins) { <add> Integer key = origin.getContext().getId(); <add> ResourceRelationContextEntity target = targetsMap.containsKey(key) ? 
targetsMap.get(key) : new ResourceRelationContextEntity(); <add> allPropertyDescriptorsMap.putAll(copyPropertyDescriptors(origin.getPropertyDescriptors(), <add> target.getPropertyDescriptors(), target, copyUnit)); <add> if (!targetsMap.containsKey(key)) { <add> targetResRel.addContext(target); <add> } <add> targetsMap.put(key, target); <add> } <add> <add> if (copyUnit.getMode() == CopyMode.MAIA_PREDECESSOR && targetResRel.getSlaveResource() != null && (resourceLocator.hasResourceConsumableSoftlinkType(targetResRel.getSlaveResource()) || resourceLocator <add> .hasResourceProvidableSoftlinkType(targetResRel.getSlaveResource()))) { <add> <add> // propertyValue from relations has to be copied if PropertyDescriptor exists on target (successor) <add> for (ResourceContextEntity resourceContextEntity : copyUnit.getTargetResource().getContexts()) { <add> for (PropertyDescriptorEntity propertyDescriptorEntity : resourceContextEntity.getPropertyDescriptors()) { <add> String key = createDescriptorKey(propertyDescriptorEntity); <add> allPropertyDescriptorsMap.put(key, propertyDescriptorEntity); <add> } <add> } <ide> // add PropertyDescriptor from ProvidedMasterRelations <ide> for (ProvidedResourceRelationEntity providedResourceRelationEntity : copyUnit.getTargetResource().getProvidedMasterRelations()) { <ide> addRelationPropertyDescriptors(allPropertyDescriptorsMap, providedResourceRelationEntity); <ide> } <ide> } <ide> <del> // do copy for all contexts <del> for (ResourceRelationContextEntity origin : origins) { <del> Integer key = origin.getContext().getId(); <del> ResourceRelationContextEntity target = targetsMap.containsKey(key) ? targetsMap.get(key) <del> : new ResourceRelationContextEntity(); <add> // do copy for all contexts <add> for (ResourceRelationContextEntity origin : origins) { <add> Integer key = origin.getContext().getId(); <add> ResourceRelationContextEntity target = targetsMap.containsKey(key) ? 
targetsMap.get(key) : new ResourceRelationContextEntity(); <ide> copyContextDependency(origin, target, copyUnit, allPropertyDescriptorsMap); <del> target.setContextualizedObject(targetResRel); <del> } <del> } <del> } <add> target.setContextualizedObject(targetResRel); <add> } <add> } <add> } <ide> <ide> private <T extends AbstractResourceRelationEntity> void addRelationPropertyDescriptors(Map<String, PropertyDescriptorEntity> allPropertyDescriptorsMap, T relationEntity) { <ide> for (ResourceContextEntity resourceContextEntity : relationEntity.getSlaveResource().getContexts()) { <ide> } <ide> <ide> /** <del> * iterate and copy <del> * <del> * @param origins <del> * @param targets <del> * @return <del> */ <del> protected Map<String, PropertyDescriptorEntity> copyPropertyDescriptors( <del> Set<PropertyDescriptorEntity> origins, Set<PropertyDescriptorEntity> targets, <add> * iterate and copy <add> */ <add> protected Map<String, PropertyDescriptorEntity> copyPropertyDescriptors( <add> Set<PropertyDescriptorEntity> origins, Set<PropertyDescriptorEntity> targets, <ide> ContextDependency<?> targetContextDependency, CopyUnit copyUnit) throws ForeignableOwnerViolationException { <del> // prepare map with propertyName and isTesting as key <del> Map<String, PropertyDescriptorEntity> targetsMap = new HashMap<>(); <del> if (targets != null) { <del> for (PropertyDescriptorEntity target : targets) { <del> String key = createDescriptorKey(target); <del> targetsMap.put(key, target); <del> } <del> } <del> <del> if (origins != null) { <del> for (PropertyDescriptorEntity origin : origins) { <del> String key = createDescriptorKey(origin); <del> <add> // prepare map with propertyName and isTesting as key <add> Map<String, PropertyDescriptorEntity> targetsMap = new HashMap<>(); <add> if (targets != null) { <add> for (PropertyDescriptorEntity target : targets) { <add> String key = createDescriptorKey(target); <add> targetsMap.put(key, target); <add> } <add> } <add> <add> if (origins != null) { <add> for (PropertyDescriptorEntity origin : origins) { <add> String key = createDescriptorKey(origin); <ide> PropertyDescriptorEntity targetDescriptor = targetsMap.get(key); <ide> <del> // Predecessor Mode only copy AMW Owned Elements <del> if(CopyMode.MAIA_PREDECESSOR.equals(copyUnit.getMode())){ <del> if(ForeignableOwner.AMW.equals(origin.getOwner())){ <del> copyPropertyDescriptor(targetContextDependency, copyUnit, targetsMap, origin, key, targetDescriptor); <del> } <del> }else{ <del> copyPropertyDescriptor(targetContextDependency, copyUnit, targetsMap, origin, key, targetDescriptor); <del> } <del> } <del> } <del> return targetsMap; <del> } <del> <del> private void copyPropertyDescriptor(ContextDependency<?> targetContextDependency, CopyUnit copyUnit, Map<String, PropertyDescriptorEntity> targetsMap, PropertyDescriptorEntity origin, String key, PropertyDescriptorEntity targetDescriptor) throws ForeignableOwnerViolationException { <del> int propertyDescriptorForeignableHashCodeBeforeChange = targetDescriptor != null ? 
targetDescriptor.foreignableFieldHashCode() : 0; <del> <del> PropertyDescriptorEntity target = origin.getCopy(targetDescriptor, copyUnit); <del> <del> copyTags(origin, target); <del> <del> foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), propertyDescriptorForeignableHashCodeBeforeChange, target); <del> <del> if (!targetsMap.containsKey(key)) { <add> // Predecessor Mode only copy AMW Owned Elements <add> if (CopyMode.MAIA_PREDECESSOR.equals(copyUnit.getMode())) { <add> if (ForeignableOwner.AMW.equals(origin.getOwner())) { <add> copyPropertyDescriptor(targetContextDependency, copyUnit, targetsMap, origin, key, targetDescriptor); <add> } <add> } else { <add> copyPropertyDescriptor(targetContextDependency, copyUnit, targetsMap, origin, key, targetDescriptor); <add> } <add> } <add> } <add> return targetsMap; <add> } <add> <add> private void copyPropertyDescriptor(ContextDependency<?> targetContextDependency, CopyUnit copyUnit, Map<String, PropertyDescriptorEntity> targetsMap, PropertyDescriptorEntity origin, String key, PropertyDescriptorEntity targetDescriptor) throws ForeignableOwnerViolationException { <add> int propertyDescriptorForeignableHashCodeBeforeChange = targetDescriptor != null ? targetDescriptor.foreignableFieldHashCode() : 0; <add> <add> PropertyDescriptorEntity target = origin.getCopy(targetDescriptor, copyUnit); <add> <add> copyTags(origin, target); <add> <add> foreignableService.verifyEditableByOwner(copyUnit.getActingOwner(), propertyDescriptorForeignableHashCodeBeforeChange, target); <add> <add> if (!targetsMap.containsKey(key)) { <ide> targetContextDependency.addPropertyDescriptor(target); <ide> } <del> targetsMap.put(key, target); <del> } <del> <del> protected String createDescriptorKey(PropertyDescriptorEntity desc) { <del> return desc.getPropertyName() + "_" + String.valueOf(desc.isTesting()); <del> } <del> <del> protected void copyTags(PropertyDescriptorEntity origin, PropertyDescriptorEntity target) { <del> Set<String> tagNames = new HashSet<>(); <del> for (PropertyTagEntity targetTag : target.getPropertyTags()) { <del> tagNames.add(targetTag.getName()); <del> } <del> <del> for (PropertyTagEntity originTag : origin.getPropertyTags()) { <del> if (!tagNames.contains(originTag.getName())) { <del> PropertyTagEntity copy = new PropertyTagEntity(); <del> copy.setName(originTag.getName()); <del> copy.setTagType(PropertyTagType.LOCAL); <del> target.addPropertyTag(copy); <del> } <del> } <del> } <del> <del> /** <del> * Copies values from origin to target contextDependency (with properties and templates). 
<del> * <del> * @param origin <del> * @param target <del> * @param copyUnit <del> * @return <del> */ <del> protected ContextDependency<?> copyContextDependency(ContextDependency<?> origin, <del> ContextDependency<?> target, CopyUnit copyUnit, <del> Map<String, PropertyDescriptorEntity> allTargetDescriptors) { <del> // context <del> target.setContext(origin.getContext()); <del> <del> // properties <del> Set<PropertyEntity> properties = copyProperties(origin.getProperties(), allTargetDescriptors, <del> target.getProperties(), copyUnit); <del> for (PropertyEntity property : properties) { <del> target.addProperty(property); <del> } <del> <del> // templates <del> Set<TemplateDescriptorEntity> templates = copyTemplates(origin.getTemplates(), <del> target.getTemplates(), copyUnit); <del> for (TemplateDescriptorEntity template : templates) { <del> target.addTemplate(template); <del> } <del> <del> return target; <del> } <del> <del> /** <del> * <ul> <del> * <li>The identifier between target propertyDescriptor and origin TargetDescriptor is the propertyName (= technicalKey)</li> <del> * <li>If a propertyDescript of the targetResources has already a properyValue, this value will not be overwritten. <del> * </ul> <del> * <del> * @param origins, all properties of the origin resource for one context <del> * @param targetPropDescriptorMap, map with all propertyDescriptors of the targetResource (after copy), with the propertyName (= technicalKey) as key <del> * @param targetProperties all properties of the target resource for one context <del> * @param copyUnit <del> * @return <del> */ <del> protected Set<PropertyEntity> copyProperties(Set<PropertyEntity> origins, <del> Map<String, PropertyDescriptorEntity> targetPropDescriptorMap, <del> Set<PropertyEntity> targetProperties, CopyUnit copyUnit) { <del> Map<Integer, PropertyEntity> existingPropertiesByDescriptorId = new HashMap<>(); <del> if (targetProperties != null) { <del> for (PropertyEntity existingProperty : targetProperties) { <del> if (existingProperty.getDescriptor() != null <del> && existingProperty.getDescriptor().getId() != null) { <del> existingPropertiesByDescriptorId.put(existingProperty.getDescriptor().getId(), <del> existingProperty); <del> } <del> } <del> } <del> <del> Set<PropertyEntity> targets = new HashSet<>(); <del> if (origins != null) { <del> for (PropertyEntity origin : origins) { <del> // If a property exists on this context for the same descriptor, we define it as the <del> // target property... <del> PropertyEntity targetProperty = existingPropertiesByDescriptorId.get(origin <del> .getDescriptor().getId()); <del> PropertyDescriptorEntity targetDescriptor = null; <del> if (targetProperty == null) { <del> // If it can't be found, it's possible that we have copied the target descriptor. <del> // Let's look for it. <del> String key = createDescriptorKey(origin.getDescriptor()); <del> targetDescriptor = targetPropDescriptorMap.get(key); <del> if (targetDescriptor != null) { <del> // If a property is already defined for the existing descriptor, we update this <del> // value... <del> targetProperty = existingPropertiesByDescriptorId.get(targetDescriptor.getId()); <del> } <del> } <del> <del> <del> if(CopyMode.MAIA_PREDECESSOR == copyUnit.getMode() && targetDescriptor == null){ <del> // do not add property for null descriptor when Predecessor mode <del> <del> }else{ <del> // If no property for the found property descriptor exists, we create a new one... 
<del> if (targetProperty == null) { <del> PropertyEntity target = origin.getCopy(null, copyUnit); <del> // targetDescriptor null come for properties on ResourceTypes or relations <del> if(targetDescriptor != null) { <del> target.setDescriptor(targetDescriptor); <del> } <del> targets.add(target); <del> } <del> // otherwise, we merge the new value with the old property entity <del> else { <del> targets.add(mergePropertyEntity(origin, targetProperty)); <del> } <del> } <del> } <del> } <del> return targets; <del> } <del> <del> /** <del> * Merges the value (and comment) of the original property into the target property. <del> * <del> * @param origin <del> * @param target <del> * @return <del> */ <del> protected PropertyEntity mergePropertyEntity(PropertyEntity origin, PropertyEntity target) { <del> target.setValue(origin.getValue()); <del> return target; <del> } <del> <del> /** <del> * Existing templates in target will be overwritten! <br/> <del> * Functions are always owned by {@link ForeignableOwner#AMW}, all functions will be copied in {@link CopyMode#MAIA_PREDECESSOR}. <del> * <del> * @param origins <del> * @param targets <del> * @param copyUnit <del> * @return <del> */ <del> protected Set<TemplateDescriptorEntity> copyTemplates(Set<TemplateDescriptorEntity> origins, <del> Set<TemplateDescriptorEntity> targets, CopyUnit copyUnit) { <del> Map<String, TemplateDescriptorEntity> targetTemplatesMap = new HashMap<>(); <del> if (targets != null) { <del> for (TemplateDescriptorEntity t : targets) { <del> String key = t.getName() + String.valueOf(t.isTesting()); <del> targetTemplatesMap.put(key, t); <del> } <del> } <del> <del> if (origins != null) { <del> for (TemplateDescriptorEntity origin : origins) { <del> String key = origin.getName() + String.valueOf(origin.isTesting()); <del> targetTemplatesMap.put(key, origin.getCopy(targetTemplatesMap.get(key), copyUnit)); <del> } <del> } <del> return new HashSet<>(targetTemplatesMap.values()); <del> } <del> <del> /** <del> * Existing functions in target won't be overwritten. <br/> <del> * Functions are always owned by {@link ForeignableOwner#AMW}, all functions will be copied in {@link CopyMode#MAIA_PREDECESSOR}. 
<del> * <del> * @param copyUnit <del> */ <del> protected void copyFunctions(CopyUnit copyUnit) { <del> Set<String> targetFunctions = new HashSet<>(); <del> <del> for (AmwFunctionEntity targetFct : copyUnit.getTargetResource().getFunctions()) { <del> targetFunctions.add(targetFct.getName()); <del> } <del> <del> for (AmwFunctionEntity origFct : copyUnit.getOriginResource().getFunctions()) { <del> if (!targetFunctions.contains(origFct.getName())) { <del> copyUnit.getTargetResource().addFunction(origFct.getCopy(null, copyUnit)); <del> } <del> } <del> } <add> targetsMap.put(key, target); <add> } <add> <add> protected String createDescriptorKey(PropertyDescriptorEntity desc) { <add> return desc.getPropertyName() + "_" + String.valueOf(desc.isTesting()); <add> } <add> <add> protected void copyTags(PropertyDescriptorEntity origin, PropertyDescriptorEntity target) { <add> Set<String> tagNames = new HashSet<>(); <add> for (PropertyTagEntity targetTag : target.getPropertyTags()) { <add> tagNames.add(targetTag.getName()); <add> } <add> <add> for (PropertyTagEntity originTag : origin.getPropertyTags()) { <add> if (!tagNames.contains(originTag.getName())) { <add> PropertyTagEntity copy = new PropertyTagEntity(); <add> copy.setName(originTag.getName()); <add> copy.setTagType(PropertyTagType.LOCAL); <add> target.addPropertyTag(copy); <add> } <add> } <add> } <add> <add> /** <add> * Copies values from origin to target contextDependency (with properties and templates). <add> */ <add> protected ContextDependency<?> copyContextDependency(ContextDependency<?> origin, <add> ContextDependency<?> target, <add> CopyUnit copyUnit, <add> Map<String, PropertyDescriptorEntity> allTargetDescriptors) { <add> // context <add> target.setContext(origin.getContext()); <add> <add> // properties <add> Set<PropertyEntity> properties = copyProperties(origin.getProperties(), allTargetDescriptors, target.getProperties(), copyUnit); <add> for (PropertyEntity property : properties) { <add> target.addProperty(property); <add> } <add> <add> // templates <add> Set<TemplateDescriptorEntity> templates = copyTemplates(origin.getTemplates(), target.getTemplates(), copyUnit); <add> for (TemplateDescriptorEntity template : templates) { <add> target.addTemplate(template); <add> } <add> <add> return target; <add> } <add> <add> /** <add> * <ul> <add> * <li>The identifier between target propertyDescriptor and origin TargetDescriptor is the propertyName (= technicalKey)</li> <add> * <li>If a propertyDescript of the targetResources has already a properyValue, this value will not be overwritten. 
<add> * </ul> <add> * <add> * @param origins, all properties of the origin resource for one context <add> * @param targetPropDescriptorMap, map with all propertyDescriptors of the targetResource (after copy), with the propertyName (= technicalKey) as key <add> * @param targetProperties all properties of the target resource for one context <add> */ <add> protected Set<PropertyEntity> copyProperties(Set<PropertyEntity> origins, <add> Map<String, PropertyDescriptorEntity> targetPropDescriptorMap, <add> Set<PropertyEntity> targetProperties, <add> CopyUnit copyUnit) { <add> Map<Integer, PropertyEntity> existingPropertiesByDescriptorId = new HashMap<>(); <add> if (targetProperties != null) { <add> for (PropertyEntity existingProperty : targetProperties) { <add> if (existingProperty.getDescriptor() != null && existingProperty.getDescriptor().getId() != null) { <add> existingPropertiesByDescriptorId.put(existingProperty.getDescriptor().getId(), existingProperty); <add> } <add> } <add> } <add> <add> Set<PropertyEntity> targets = new HashSet<>(); <add> if (origins != null) { <add> for (PropertyEntity origin : origins) { <add> // If a property exists on this context for the same descriptor, we define it as the <add> // target property... <add> PropertyEntity targetProperty = existingPropertiesByDescriptorId.get(origin.getDescriptor().getId()); <add> PropertyDescriptorEntity targetDescriptor = null; <add> if (targetProperty == null) { <add> // If it can't be found, it's possible that we have copied the target descriptor. <add> // Let's look for it. <add> String key = createDescriptorKey(origin.getDescriptor()); <add> targetDescriptor = targetPropDescriptorMap.get(key); <add> if (targetDescriptor != null) { <add> // If a property is already defined for the existing descriptor, we update this <add> // value... <add> targetProperty = existingPropertiesByDescriptorId.get(targetDescriptor.getId()); <add> } <add> } <add> <add> if (CopyMode.MAIA_PREDECESSOR == copyUnit.getMode() && targetDescriptor == null) { <add> // do not add property for null descriptor when Predecessor mode <add> } else { <add> if (targetProperty == null) { <add> // If no property for the found property descriptor exists, we create a new one... <add> PropertyEntity target = origin.getCopy(null, copyUnit); <add> // targetDescriptor null come for properties on ResourceTypes or relations <add> if (targetDescriptor != null) { <add> target.setDescriptor(targetDescriptor); <add> } <add> targets.add(target); <add> } else { <add> // otherwise, we merge the new value with the old property entity <add> targets.add(mergePropertyEntity(origin, targetProperty)); <add> } <add> } <add> } <add> } <add> return targets; <add> } <add> <add> /** <add> * Merges the value (and comment) of the original property into the target property. <add> */ <add> protected PropertyEntity mergePropertyEntity(PropertyEntity origin, PropertyEntity target) { <add> target.setValue(origin.getValue()); <add> return target; <add> } <add> <add> /** <add> * Existing templates in target will be overwritten! <br/> <add> * Functions are always owned by {@link ForeignableOwner#AMW}, all functions will be copied in {@link CopyMode#MAIA_PREDECESSOR}. 
<add> */ <add> protected Set<TemplateDescriptorEntity> copyTemplates(Set<TemplateDescriptorEntity> origins, <add> Set<TemplateDescriptorEntity> targets, <add> CopyUnit copyUnit) { <add> Map<String, TemplateDescriptorEntity> targetTemplatesMap = new HashMap<>(); <add> if (targets != null) { <add> for (TemplateDescriptorEntity t : targets) { <add> String key = t.getName() + String.valueOf(t.isTesting()); <add> targetTemplatesMap.put(key, t); <add> } <add> } <add> <add> if (origins != null) { <add> for (TemplateDescriptorEntity origin : origins) { <add> String key = origin.getName() + String.valueOf(origin.isTesting()); <add> targetTemplatesMap.put(key, origin.getCopy(targetTemplatesMap.get(key), copyUnit)); <add> } <add> } <add> return new HashSet<>(targetTemplatesMap.values()); <add> } <add> <add> /** <add> * Existing functions in target won't be overwritten. <br/> <add> * Functions are always owned by {@link ForeignableOwner#AMW}, all functions will be copied in {@link CopyMode#MAIA_PREDECESSOR}. <add> */ <add> protected void copyFunctions(CopyUnit copyUnit) { <add> Set<String> targetFunctions = new HashSet<>(); <add> <add> for (AmwFunctionEntity targetFct : copyUnit.getTargetResource().getFunctions()) { <add> targetFunctions.add(targetFct.getName()); <add> } <add> <add> for (AmwFunctionEntity origFct : copyUnit.getOriginResource().getFunctions()) { <add> if (!targetFunctions.contains(origFct.getName())) { <add> copyUnit.getTargetResource().addFunction(origFct.getCopy(null, copyUnit)); <add> } <add> } <add> } <ide> <ide> }
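The copyTemplates/copyFunctions pair in the diff above implements two keyed-merge policies: templates already present in the target are overwritten by the origin, while functions already present in the target are preserved. A minimal JavaScript sketch of those two policies — all names and sample data here are illustrative, not part of the repository above:

// Merge `origins` into `targets`, keyed by keyOf(); `overwrite` selects the policy.
// copyTemplates-style merge: overwrite = true; copyFunctions-style merge: overwrite = false.
function mergeByKey(origins, targets, keyOf, overwrite) {
    var byKey = {};
    targets.forEach(function(t) { byKey[keyOf(t)] = t; });
    origins.forEach(function(o) {
        var key = keyOf(o);
        if (overwrite || !(key in byKey)) byKey[key] = o;
    });
    return Object.keys(byKey).map(function(k) { return byKey[k]; });
}

// Composite key mirroring createDescriptorKey() above: name plus the isTesting flag.
var templateKey = function(t) { return t.name + '_' + String(t.isTesting); };

var targets = [{ name: 'startup', isTesting: false, body: 'old' }];
var origins = [{ name: 'startup', isTesting: false, body: 'new' }];
mergeByKey(origins, targets, templateKey, true);  // body 'new' wins — template semantics
mergeByKey(origins, targets, templateKey, false); // body 'old' kept — function semantics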
JavaScript
mit
067d54bdcc7c24c65d05fe0769471006eac8ff50
0
irynabond/word-guesser,irynabond/word-guesser
var userStats;
var mode;
var showingUserStats = false;
var verySecureGlobalToken = false;
var untriedLetters = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z'];

//calling gameID(567) sets id to 567 and returns 567. subsequent calling gameID() returns 567.
var gameID = function(set) {
    var id = set;
    return function(set) {
        if (set) return id = set;
        else return id;
    };
}();
//calling turns() increments it by one and returns it. calling turns(true) resets it to 0 and returns 0.
var turns = function(reset) {
    var turns = 0;
    return function(reset) {
        if (reset) return turns = 0;
        else return turns++;
    }
}();

$(function() { //opens rules dialog when #showRules button clicked
    $("#rulesDialog").dialog({
        width: '75%',
        autoOpen: false
    });
    $("#showRules").on("click", function() {
        $("#rulesDialog").dialog("open");
    });
});

function showSignUp() {
    $('#signIn').css('display', 'none');
    $('#signUp').css('display', 'block');
}

function showSignIn() {
    $('#signUp').css('display', 'none');
    $('#signIn').css('display', 'block');
}

function loggedIn(user) {
    $('#signUp').css('display', 'none');
    $('#signIn').css('display', 'none');
    $('#loggedIn').css('display', 'block');
    $('#loggedIn').html('logged in as ' + user);
}

$(document).ready(function() { //prevents default behavior of hitting enter from a form field, runs guess() instead
    $(document).on('keypress', 'form input[type="text"]', function(e) {
        if(e.keyCode == 13) {
            e.preventDefault();
            guess();
            return false;
        }
    });
});

$('#loggedIn').click(function() {
    $('#userStats').html('average time: ' + userStats.avgTime);
    $('#userStats').append('<br>average guesses: ' + userStats.avgGuesses);
    $('#userStats').append('<br>games played: ' + userStats.totalGames);
    if (!showingUserStats) {
        $('#userStatsContainer').css('display', 'block');
        showingUserStats = true;
    } else {
        $('#userStatsContainer').css('display', 'none');
        showingUserStats = false;
    }
});

$('#hideUntried').click(function() { //hides/shows the list of yet to be guessed letters
    if ($('#hideUntried').html() == 'hide untried letters') {
        $('#untried').css('display', 'none');
        $('#hideUntried').html('show untried letters');
    } else {
        $('#untried').css('display', 'table');
        $('#hideUntried').html('hide untried letters');
    }
});

function makeGame() {
    if ($('#newgame').html() == 'give me a word') {
        if ($("input[id='hard']:checked").val()) mode = 'hard';
        if ($("input[id='normal']:checked").val()) mode = 'normal';
        var options = {
            category: ($("select[id='category']").val()),
            letters: ($("select[id='letters']").val()),
            mode: mode,
            token: verySecureGlobalToken
        };

        $.ajax({url: "new", method: "POST", data: options, success: function(result) {
            if(result.err) {
                alert('No word found.');
            } else {
                gameID(result.id);
                turns(true);
                newGameDisplay(result.length);
            }
        }});
    }
}

function signIn() {
    var signedIn = false;
    var name = $('input[id="user"]').val().toLowerCase();
    var pass = $('input[id="pass"]').val();
    if (name == '') $('#signInError').html('please enter a username');
    else {
        if(pass == '') $('#signInError').html('please enter a password');
        else {
            $.ajax({url: "signin", method: "GET", headers: {"Authorization": "Basic " + btoa(name + ":" + pass)},
                success: function(result) {
                    console.log('done');
                    if (result.info) console.log(result.info);
                    if (result.msg) console.log(result.msg);
                    if (result.token) {
                        userStats = result.stats;
                        verySecureGlobalToken = result.token;
                        signedIn = true;
                        loggedIn(name);
                    }
                }});
            if (!signedIn) $('#signInError').html('invalid login info');
        }
    }
}

function signUp() {
    var name = $('input[id="newUser"]').val().toLowerCase();
    var pass = $('input[id="newPass"]').val();
    var repeat = $('input[id="repeatPass"]').val();
    var confirmedPass = '';
    if (name == '') $('#signUpError').html('please choose a username');
    else {
        if (pass != repeat) $('#signUpError').html('password != repeated');
        else {
            confirmedPass = pass;
            if (confirmedPass == '') $('#signUpError').html('please choose a password');
            else {
                var newUserData = {
                    username: name,
                    password: confirmedPass
                };
                $.ajax({url: "signup", method: "POST", data: newUserData, success: function(result) {
                    if(result.info) console.log(result.info);
                    if(result.msg) console.log(result.msg);
                    if (result.token) {
                        verySecureGlobalToken = result.token;
                        loggedIn(name);
                    }
                }});
            }
        }
    }
}

function guess() {
    $('#hideUntried').css('display', 'block');
    var input = $('input[id="guessInput"]').val();
    for (i=0; i<input.length; i++) {
        removeLetter(input[i]);
    }
    $('input[id="guessInput"]').val('');
    $('#guessInput').select();
    $.ajax({url: gameID() +'/' + input, success: function(result) {
        if (result.gameOver) winner(result.arr, result.stats);
        else if (mode == 'hard') displayHard(result.arr);
        else displayFeedback(result.arr);
        displayUntried();
    }});
}

function displayFeedback(resArray) {
    displayHistory();
    $('#feedback').html('');
    for (i=0; i<resArray.length; i++) {
        if (resArray[i][1] == '2') $('#feedback').append('<td class="full">' + resArray[i][0] + '</td>');
        if (resArray[i][1] == '1') $('#feedback').append('<td class="half">' + resArray[i][0] + '</td>');
        if (resArray[i][1] == '0') $('#feedback').append('<td class="none">' + resArray[i][0] + '</td>');
    }
}

function displayHard(resArray) {
    displayHistory();
    var hardArray = [];
    var lastGuess = '';
    for (i=0; i<resArray.length; i++) {
        lastGuess += resArray[i][0];
        hardArray.push(resArray[i][1]);
    }
    hardArray.sort(function(a, b){return b-a});
    $('#feedback').html('');
    $('#feedback').append('<td class="lastGuess">' + lastGuess + '</td>')
    for (i=0; i<hardArray.length; i++) {
        if (hardArray[i] == '2') $('#feedback').append('<td class="full"> </td>');
        if (hardArray[i] == '1') $('#feedback').append('<td class="half"> </td>');
        if (hardArray[i] == '0') $('#feedback').append('<td class="none"> </td>');
    }
}

function winner(resArray, stats) {
    displayHistory();
    $('#feedback').html('');
    for (i=0; i<resArray.length; i++) {
        $('#feedback').append('<td class="full">' + resArray[i][0] + '</td>');
    }
    displayStats(stats);
    endGameDisplay();
}

function displayStats(stats) {
    var yourTurns = turns();
    if (stats.yourTime < stats.avgTime) $('#stats').html('avg time: ' + stats.avgTime + ', your time: <span style="color:#248f24">' + stats.yourTime);
    else if (stats.yourTime > stats.avgTime) $('#stats').html('avg time: ' + stats.avgTime + ', your time: <span style="color:#cc0000">' + stats.yourTime);
    else $('#stats').html('avg time: ' + stats.avgTime + ', your time: ' + stats.yourTime);
    if (yourTurns < stats.avgGuesses) $('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: <span style="color:#248f24">' + yourTurns);
    else if (yourTurns > stats.avgGuesses) $('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: <span style="color:#cc0000">' + yourTurns);
    else $('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: ' + yourTurns);
}

function displayHistory() {
    var turnCount = '<td class="turns">' + turns() + '</td>';
    var feedback = $('#feedback').html();
    var history = turnCount + feedback;
    if (feedback !== '') $('#history').append('<tr>' + history + '</tr>');
}

function displayUntried() {
    $('#untried').html('<tr>');
    untriedLetters.forEach(function (val, index) {
        if ((index % 4) == 0) $('#untried').append('</tr> <tr>');
        $('#untried').append('<td class="untried">' + val + '</td>');
    });
    $('#untried').append('</tr>');
}

function removeLetter(letter) {
    var index = $.inArray(letter, untriedLetters);
    if (index !== -1) untriedLetters.splice(index, 1);
}

function resetUntried() {
    untriedLetters = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z'];
}

function newGameDisplay(letters) {
    resetUntried();
    $('#guessForm').css('display', 'block');
    $('#stats').css('display', 'none');
    $('#newgame').css('box-shadow', 'none');
    $('#guess').css('box-shadow', '5px 5px 3px #888888');
    $('#hideUntried').css('display', 'none');
    $('input[id="guessInput"]').val('');
    $('#guessInput').select();
    $('#untried').html('');
    $('#title').html('wor<span style="color:#cc0000">db</span>reaker');
    $('#guess').html('guess');
    $('#feedback').html('');
    $('#history').html('');
    $('#newgame').html(letters + ' letter word');
    $('input[id="guessInput"]').attr("maxlength", letters);
}

function endGameDisplay() {
    $('#newgame').html('give me a word');
    $('#newgame').css('box-shadow', '5px 5px 3px #888888');
    $('#guess').css('box-shadow', 'none');
    $('#guessForm').css('display', 'none');
    $('#stats').css('display', 'block');
    $('#guess').html('you won!');
    $('#title').html('wor<span style="color:#248f24">db</span>reaker');
}
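The gameID and turns helpers at the top of script.js use the immediately-invoked-closure idiom their comments describe: private state plus a returned accessor. Note that turns() post-increments, so it actually returns the count before adding one. A stripped-down, self-contained sketch of the idiom (makeCounter is an illustrative name, not part of the file above):

// Private counter behind a closure; counter(true) resets, counter() returns then increments.
var makeCounter = function() {
    var count = 0; // not reachable from outside the closure
    return function(reset) {
        if (reset) return count = 0;
        return count++; // post-increment: yields the value before the bump
    };
};

var counter = makeCounter();
counter();     // 0
counter();     // 1
counter(true); // 0 (reset)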
public/script.js
var userStats;
var mode;
var showingUserStats = false;
var verySecureGlobalToken = false;
var untriedLetters = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z'];

//calling gameID(567) sets id to 567 and returns 567. subsequent calling gameID() returns 567.
var gameID = function(set) {
	var id = set;
	return function(set) {
		if (set) return id = set;
		else return id;
	};
}();
//calling turns() increments it by one and returns it. calling turns(true) resets it to 0 and returns 0.
var turns = function(reset) {
	var turns = 0;
	return function(reset) {
		if (reset) return turns = 0;
		else return turns++;
	}
}();

$(function() { //opens rules dialog when #showRules button clicked
	$("#rulesDialog").dialog({
		width: '75%',
		autoOpen: false
	});
	$("#showRules").on("click", function() {
		$("#rulesDialog").dialog("open");
	});
});

function showSignUp() {
	$('#signIn').css('display', 'none');
	$('#signUp').css('display', 'block');
}

function showSignIn() {
	$('#signUp').css('display', 'none');
	$('#signIn').css('display', 'block');
}

function loggedIn(user) {
	$('#signUp').css('display', 'none');
	$('#signIn').css('display', 'none');
	$('#loggedIn').css('display', 'block');
	$('#loggedIn').html('logged in as ' + user);
}

$(document).ready(function() { //prevents default behavior of hitting enter from a form field, runs guess() instead
	$(document).on('keypress', 'form input[type="text"]', function(e) {
		if(e.keyCode == 13) {
			e.preventDefault();
			guess();
			return false;
		}
	});
});

$('#loggedIn').click(function() {
	$('#userStats').html('average time: ' + userStats.avgTime);
	$('#userStats').append('<br>average guesses: ' + userStats.avgGuesses);
	$('#userStats').append('<br>games played: ' + userStats.totalGames);
	if (!showingUserStats) {
		$('#userStatsContainer').css('display', 'block');
		showingUserStats = true;
	} else {
		$('#userStatsContainer').css('display', 'none');
		showingUserStats = false;
	}
});

$('#hideUntried').click(function() { //hides/shows the list of yet to be guessed letters
	if ($('#hideUntried').html() == 'hide untried letters') {
		$('#untried').css('display', 'none');
		$('#hideUntried').html('show untried letters');
	} else {
		$('#untried').css('display', 'table');
		$('#hideUntried').html('hide untried letters');
	}
});

function makeGame() {
	if ($('#newgame').html() == 'give me a word') {
		if ($("input[id='hard']:checked").val()) mode = 'hard';
		if ($("input[id='normal']:checked").val()) mode = 'normal';
		var options = {
			category: ($("select[id='category']").val()),
			letters: ($("select[id='letters']").val()),
			mode: mode,
			token: verySecureGlobalToken
		};

		$.ajax({url: "new", method: "POST", data: options, success: function(result) {
			if(result.err) {
				alert('No word found.');
			} else {
				gameID(result.id);
				turns(true);
				newGameDisplay(result.length);
			}
		}});
	}
}

function signIn() {
	var signedIn = false;
	var name = $('input[id="user"]').val().toLowerCase();
	var pass = $('input[id="pass"]').val();
	if (name == '') $('#signInError').html('please enter a username');
	else {
		if(pass == '') $('#signInError').html('please enter a password');
		else {
			$.ajax({url: "signin", method: "GET", headers: {"Authorization": "Basic " + btoa(name + ":" + pass)},
				success: function(result) {
					console.log('done');
					if (result.info) console.log(result.info);
					if (result.msg) console.log(result.msg);
					if (result.token) {
						userStats = result.stats;
						verySecureGlobalToken = result.token;
						signedIn = true;
						loggedIn(name);
					}
				}});
			if (!signedIn) $('#signInError').html('invalid login info');
		}
	}
}

function signUp() {
	var name = $('input[id="newUser"]').val().toLowerCase();
	var pass = $('input[id="newPass"]').val();
	var repeat = $('input[id="repeatPass"]').val();
	var confirmedPass = '';
	if (name == '') $('#signUpError').html('please choose a username');
	else {
		if (pass != repeat) $('#signUpError').html('password != repeated');
		else {
			confirmedPass = pass;
			if (confirmedPass == '') $('#signUpError').html('please choose a password');
			else {
				var newUserData = {
					username: name,
					password: confirmedPass
				};
				$.ajax({url: "signup", method: "POST", data: newUserData, success: function(result) {
					if(result.info) console.log(result.info);
					if(result.msg) console.log(result.msg);
					if (result.token) {
						verySecureGlobalToken = result.token;
						loggedIn(name);
					}
				}});
			}
		}
	}
}

function guess() {
	$('#hideUntried').css('display', 'block');
	var input = $('input[id="guessInput"]').val();
	for (i=0; i<input.length; i++) {
		removeLetter(input[i]);
	}
	$('input[id="guessInput"]').val('');
	$('#guessInput').select();
	$.ajax({url: gameID() +'/' + input, success: function(result) {
		if (result.gameOver) winner(result.arr, result.stats);
		else if (mode == 'hard') displayHard(result.arr);
		else displayFeedback(result.arr);
		displayUntried();
	}});
}

function displayFeedback(resArray) {
	displayHistory();
	$('#feedback').html('');
	for (i=0; i<resArray.length; i++) {
		if (resArray[i][1] == '2') $('#feedback').append('<td class="full">' + resArray[i][0] + '</td>');
		if (resArray[i][1] == '1') $('#feedback').append('<td class="half">' + resArray[i][0] + '</td>');
		if (resArray[i][1] == '0') $('#feedback').append('<td class="none">' + resArray[i][0] + '</td>');
	}
}

function displayHard(resArray) {
	displayHistory();
	var hardArray = [];
	var lastGuess = '';
	for (i=0; i<resArray.length; i++) {
		lastGuess += resArray[i][0];
		hardArray.push(resArray[i][1]);
	}
	hardArray.sort(function(a, b){return b-a});
	$('#feedback').html('');
	$('#feedback').append('<td class="lastGuess">' + lastGuess + '</td>')
	for (i=0; i<hardArray.length; i++) {
		if (hardArray[i] == '2') $('#feedback').append('<td class="full"> </td>');
		if (hardArray[i] == '1') $('#feedback').append('<td class="half"> </td>');
		if (hardArray[i] == '0') $('#feedback').append('<td class="none"> </td>');
	}
}

function winner(resArray, stats) {
	displayHistory();
	$('#feedback').html('');
	for (i=0; i<resArray.length; i++) {
		$('#feedback').append('<td class="full">' + resArray[i][0] + '</td>');
	}
	displayStats(stats);
	endGameDisplay();
}

function displayStats(stats) {
	var yourTurns = turns();
	if (stats.yourTime < stats.avgTime) $('#stats').html('avg time: ' + stats.avgTime + ', your time: <span style="color:#248f24">' + stats.yourTime);
	else if (stats.yourTime > stats.avgTime) $('#stats').html('avg time: ' + stats.avgTime + ', your time: <span style="color:#cc0000">' + stats.yourTime);
	else $('#stats').html('avg time: ' + stats.avgTime + ', your time: ' + stats.yourTime);
	if (yourTurns < stats.avgGuesses) $('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: <span style="color:#248f24">' + yourTurns);
	else if (yourTurns > stats.avgGuesses) $('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: <span style="color:#cc0000">' + yourTurns);
	else $('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: ' + yourTurns);
}

function displayHistory() {
	var turnCount = '<td class="turns">' + turns() + '</td>';
	var feedback = $('#feedback').html();
	var history = turnCount + feedback;
	if (feedback !== '') $('#history').append('<tr>' + history + '</tr>');
}

function displayUntried() {
	$('#untried').html('<tr>');
	untriedLetters.forEach(function (val, index) {
		if ((index % 4) == 0) $('#untried').append('</tr> <tr>');
		$('#untried').append('<td class="untried">' + val + '</td>');
	});
	$('#untried').append('</tr>');
}

function removeLetter(letter) {
	var index = $.inArray(letter, untriedLetters);
	if (index !== -1) untriedLetters.splice(index, 1);
}

function resetUntried() {
	untriedLetters = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z'];
}

function newGameDisplay(letters) {
	resetUntried();
	$('#guessForm').css('display', 'block');
	$('#stats').css('display', 'none');
	$('#newgame').css('box-shadow', 'none');
	$('#guess').css('box-shadow', '5px 5px 3px #888888');
	$('#hideUntried').css('display', 'none');
	$('input[id="guessInput"]').val('');
	$('#guessInput').select();
	$('#untried').html('');
	$('#title').html('wor<span style="color:#cc0000">db</span>reaker');
	$('#guess').html('guess');
	$('#feedback').html('');
	$('#history').html('');
	$('#newgame').html(letters + ' letter word');
	$('input[id="guessInput"]').attr("maxlength", letters);
}

function endGameDisplay() {
	$('#newgame').html('give me a word');
	$('#newgame').css('box-shadow', '5px 5px 3px #888888');
	$('#guess').css('box-shadow', 'none');
	$('#guessForm').css('display', 'none');
	$('#stats').css('display', 'block');
	$('#guess').html('you won!');
	$('#title').html('wor<span style="color:#248f24">db</span>reaker');
}
Style fix
public/script.js
Style fix
<ide><path>ublic/script.js <ide> <ide> //calling gameID(567) sets id to 567 and returns 567. subsequent calling gameID() returns 567. <ide> var gameID = function(set) { <del> var id = set; <del> return function(set) { <del> if (set) return id = set; <del> else return id; <del> }; <add> var id = set; <add> return function(set) { <add> if (set) return id = set; <add> else return id; <add> }; <ide> }(); <ide> //calling turns() increments it by one and returns it. calling turns(true) resets it to 0 and returns 0. <ide> var turns = function(reset) { <del> var turns = 0; <del> return function(reset) { <del> if (reset) return turns = 0; <del> else return turns++; <del> } <add> var turns = 0; <add> return function(reset) { <add> if (reset) return turns = 0; <add> else return turns++; <add> } <ide> }(); <ide> <ide> $(function() { //opens rules dialog when #showRules button clicked <del> $("#rulesDialog").dialog({ <del> width: '75%', <del> autoOpen: false <del> }); <del> $("#showRules").on("click", function() { <del> $("#rulesDialog").dialog("open"); <del> }); <add> $("#rulesDialog").dialog({ <add> width: '75%', <add> autoOpen: false <add> }); <add> $("#showRules").on("click", function() { <add> $("#rulesDialog").dialog("open"); <add> }); <ide> }); <ide> <ide> function showSignUp() { <del> $('#signIn').css('display', 'none'); <del> $('#signUp').css('display', 'block'); <add> $('#signIn').css('display', 'none'); <add> $('#signUp').css('display', 'block'); <ide> } <ide> <ide> function showSignIn() { <del> $('#signUp').css('display', 'none'); <del> $('#signIn').css('display', 'block'); <add> $('#signUp').css('display', 'none'); <add> $('#signIn').css('display', 'block'); <ide> } <ide> <ide> function loggedIn(user) { <del> $('#signUp').css('display', 'none'); <del> $('#signIn').css('display', 'none'); <del> $('#loggedIn').css('display', 'block'); <del> $('#loggedIn').html('logged in as ' + user); <add> $('#signUp').css('display', 'none'); <add> $('#signIn').css('display', 'none'); <add> $('#loggedIn').css('display', 'block'); <add> $('#loggedIn').html('logged in as ' + user); <ide> } <ide> <ide> $(document).ready(function() { //prevents default behavior of hitting enter from a form field, runs guess() instead <del> $(document).on('keypress', 'form input[type="text"]', function(e) { <del> if(e.keyCode == 13) { <del> e.preventDefault(); <del> guess(); <del> return false; <del> } <del> }); <add> $(document).on('keypress', 'form input[type="text"]', function(e) { <add> if(e.keyCode == 13) { <add> e.preventDefault(); <add> guess(); <add> return false; <add> } <add> }); <ide> }); <ide> <ide> $('#loggedIn').click(function() { <del> $('#userStats').html('average time: ' + userStats.avgTime); <del> $('#userStats').append('<br>average guesses: ' + userStats.avgGuesses); <del> $('#userStats').append('<br>games played: ' + userStats.totalGames); <del> if (!showingUserStats) { <del> $('#userStatsContainer').css('display', 'block'); <del> showingUserStats = true; <del> } else { <del> $('#userStatsContainer').css('display', 'none'); <del> showingUserStats = false; <del> } <add> $('#userStats').html('average time: ' + userStats.avgTime); <add> $('#userStats').append('<br>average guesses: ' + userStats.avgGuesses); <add> $('#userStats').append('<br>games played: ' + userStats.totalGames); <add> if (!showingUserStats) { <add> $('#userStatsContainer').css('display', 'block'); <add> showingUserStats = true; <add> } else { <add> $('#userStatsContainer').css('display', 'none'); <add> showingUserStats = false; <add> } <ide> 
}); <ide> <ide> $('#hideUntried').click(function() { //hides/shows the list of yet to be guessed letters <del> if ($('#hideUntried').html() == 'hide untried letters') { <del> $('#untried').css('display', 'none'); <del> $('#hideUntried').html('show untried letters'); <del> } else { <del> $('#untried').css('display', 'table'); <del> $('#hideUntried').html('hide untried letters'); <del> } <add> if ($('#hideUntried').html() == 'hide untried letters') { <add> $('#untried').css('display', 'none'); <add> $('#hideUntried').html('show untried letters'); <add> } else { <add> $('#untried').css('display', 'table'); <add> $('#hideUntried').html('hide untried letters'); <add> } <ide> }); <ide> <ide> function makeGame() { <del> if ($('#newgame').html() == 'give me a word') { <del> if ($("input[id='hard']:checked").val()) mode = 'hard'; <del> if ($("input[id='normal']:checked").val()) mode = 'normal'; <del> var options = { <del> category: ($("select[id='category']").val()), <del> letters: ($("select[id='letters']").val()), <del> mode: mode, <del> token: verySecureGlobalToken <del> }; <add> if ($('#newgame').html() == 'give me a word') { <add> if ($("input[id='hard']:checked").val()) mode = 'hard'; <add> if ($("input[id='normal']:checked").val()) mode = 'normal'; <add> var options = { <add> category: ($("select[id='category']").val()), <add> letters: ($("select[id='letters']").val()), <add> mode: mode, <add> token: verySecureGlobalToken <add> }; <ide> <ide> $.ajax({url: "new", method: "POST", data: options, success: function(result) { <del> if(result.err) { <del> alert('No word found.'); <del> } else { <del> gameID(result.id); <del> turns(true); <del> newGameDisplay(result.length); <del> } <del> }}); <add> if(result.err) { <add> alert('No word found.'); <add> } else { <add> gameID(result.id); <add> turns(true); <add> newGameDisplay(result.length); <add> } <add> }}); <ide> } <ide> } <ide> <ide> function signIn() { <del> var signedIn = false; <del> var name = $('input[id="user"]').val().toLowerCase(); <del> var pass = $('input[id="pass"]').val(); <del> if (name == '') $('#signInError').html('please enter a username'); <del> else { <del> if(pass == '') $('#signInError').html('please enter a password'); <del> else { <del> $.ajax({url: "signin", method: "GET", headers: {"Authorization": "Basic " + btoa(name + ":" + pass)}, <del> success: function(result) { <del> console.log('done'); <del> if (result.info) console.log(result.info); <del> if (result.msg) console.log(result.msg); <del> if (result.token) { <del> userStats = result.stats; <del> verySecureGlobalToken = result.token; <del> signedIn = true; <del> loggedIn(name); <del> } <del> }}); <del> if (!signedIn) $('#signInError').html('invalid login info'); <del> } <del> } <add> var signedIn = false; <add> var name = $('input[id="user"]').val().toLowerCase(); <add> var pass = $('input[id="pass"]').val(); <add> if (name == '') $('#signInError').html('please enter a username'); <add> else { <add> if(pass == '') $('#signInError').html('please enter a password'); <add> else { <add> $.ajax({url: "signin", method: "GET", headers: {"Authorization": "Basic " + btoa(name + ":" + pass)}, <add> success: function(result) { <add> console.log('done'); <add> if (result.info) console.log(result.info); <add> if (result.msg) console.log(result.msg); <add> if (result.token) { <add> userStats = result.stats; <add> verySecureGlobalToken = result.token; <add> signedIn = true; <add> loggedIn(name); <add> } <add> }}); <add> if (!signedIn) $('#signInError').html('invalid login info'); 
<add> } <add> } <ide> } <ide> <ide> function signUp() { <del> var name = $('input[id="newUser"]').val().toLowerCase(); <del> var pass = $('input[id="newPass"]').val(); <del> var repeat = $('input[id="repeatPass"]').val(); <del> var confirmedPass = ''; <del> if (name == '') $('#signUpError').html('please choose a username'); <del> else { <del> if (pass != repeat) $('#signUpError').html('password != repeated'); <del> else { <del> confirmedPass = pass; <del> if (confirmedPass == '') $('#signUpError').html('please choose a password'); <del> else { <del> var newUserData = { <del> username: name, <del> password: confirmedPass <del> }; <del> $.ajax({url: "signup", method: "POST", data: newUserData, success: function(result) { <del> if(result.info) console.log(result.info); <del> if(result.msg) console.log(result.msg); <del> if (result.token) { <del> verySecureGlobalToken = result.token; <del> loggedIn(name); <del> } <del> }}); <del> } <del> } <del> } <add> var name = $('input[id="newUser"]').val().toLowerCase(); <add> var pass = $('input[id="newPass"]').val(); <add> var repeat = $('input[id="repeatPass"]').val(); <add> var confirmedPass = ''; <add> if (name == '') $('#signUpError').html('please choose a username'); <add> else { <add> if (pass != repeat) $('#signUpError').html('password != repeated'); <add> else { <add> confirmedPass = pass; <add> if (confirmedPass == '') $('#signUpError').html('please choose a password'); <add> else { <add> var newUserData = { <add> username: name, <add> password: confirmedPass <add> }; <add> $.ajax({url: "signup", method: "POST", data: newUserData, success: function(result) { <add> if(result.info) console.log(result.info); <add> if(result.msg) console.log(result.msg); <add> if (result.token) { <add> verySecureGlobalToken = result.token; <add> loggedIn(name); <add> } <add> }}); <add> } <add> } <add> } <ide> } <ide> <ide> function guess() { <del> $('#hideUntried').css('display', 'block'); <del> var input = $('input[id="guessInput"]').val(); <del> for (i=0; i<input.length; i++) { <del> removeLetter(input[i]); <del> } <del> $('input[id="guessInput"]').val(''); <del> $('#guessInput').select(); <del> $.ajax({url: gameID() +'/' + input, success: function(result) { <del> if (result.gameOver) winner(result.arr, result.stats); <del> else if (mode == 'hard') displayHard(result.arr); <del> else displayFeedback(result.arr); <del> displayUntried(); <del> }}); <add> $('#hideUntried').css('display', 'block'); <add> var input = $('input[id="guessInput"]').val(); <add> for (i=0; i<input.length; i++) { <add> removeLetter(input[i]); <add> } <add> $('input[id="guessInput"]').val(''); <add> $('#guessInput').select(); <add> $.ajax({url: gameID() +'/' + input, success: function(result) { <add> if (result.gameOver) winner(result.arr, result.stats); <add> else if (mode == 'hard') displayHard(result.arr); <add> else displayFeedback(result.arr); <add> displayUntried(); <add> }}); <ide> } <ide> <ide> function displayFeedback(resArray) { <del> displayHistory(); <del> $('#feedback').html(''); <del> for (i=0; i<resArray.length; i++) { <del> if (resArray[i][1] == '2') $('#feedback').append('<td class="full">' + resArray[i][0] + '</td>'); <del> if (resArray[i][1] == '1') $('#feedback').append('<td class="half">' + resArray[i][0] + '</td>'); <del> if (resArray[i][1] == '0') $('#feedback').append('<td class="none">' + resArray[i][0] + '</td>'); <del> } <add> displayHistory(); <add> $('#feedback').html(''); <add> for (i=0; i<resArray.length; i++) { <add> if (resArray[i][1] == '2') 
$('#feedback').append('<td class="full">' + resArray[i][0] + '</td>'); <add> if (resArray[i][1] == '1') $('#feedback').append('<td class="half">' + resArray[i][0] + '</td>'); <add> if (resArray[i][1] == '0') $('#feedback').append('<td class="none">' + resArray[i][0] + '</td>'); <add> } <ide> } <ide> <ide> function displayHard(resArray) { <del> displayHistory(); <del> var hardArray = []; <del> var lastGuess = ''; <del> for (i=0; i<resArray.length; i++) { <del> lastGuess += resArray[i][0]; <del> hardArray.push(resArray[i][1]); <del> } <del> hardArray.sort(function(a, b){return b-a}); <del> $('#feedback').html(''); <del> $('#feedback').append('<td class="lastGuess">' + lastGuess + '</td>') <del> for (i=0; i<hardArray.length; i++) { <del> if (hardArray[i] == '2') $('#feedback').append('<td class="full"> </td>'); <del> if (hardArray[i] == '1') $('#feedback').append('<td class="half"> </td>'); <del> if (hardArray[i] == '0') $('#feedback').append('<td class="none"> </td>'); <del> } <add> displayHistory(); <add> var hardArray = []; <add> var lastGuess = ''; <add> for (i=0; i<resArray.length; i++) { <add> lastGuess += resArray[i][0]; <add> hardArray.push(resArray[i][1]); <add> } <add> hardArray.sort(function(a, b){return b-a}); <add> $('#feedback').html(''); <add> $('#feedback').append('<td class="lastGuess">' + lastGuess + '</td>') <add> for (i=0; i<hardArray.length; i++) { <add> if (hardArray[i] == '2') $('#feedback').append('<td class="full"> </td>'); <add> if (hardArray[i] == '1') $('#feedback').append('<td class="half"> </td>'); <add> if (hardArray[i] == '0') $('#feedback').append('<td class="none"> </td>'); <add> } <ide> } <ide> <ide> function winner(resArray, stats) { <del> displayHistory(); <del> $('#feedback').html(''); <del> for (i=0; i<resArray.length; i++) { <del> $('#feedback').append('<td class="full">' + resArray[i][0] + '</td>'); <del> } <del> displayStats(stats); <del> endGameDisplay(); <add> displayHistory(); <add> $('#feedback').html(''); <add> for (i=0; i<resArray.length; i++) { <add> $('#feedback').append('<td class="full">' + resArray[i][0] + '</td>'); <add> } <add> displayStats(stats); <add> endGameDisplay(); <ide> } <ide> <ide> function displayStats(stats) { <del> var yourTurns = turns(); <del> if (stats.yourTime < stats.avgTime) $('#stats').html('avg time: ' + stats.avgTime + ', your time: <span style="color:#248f24">' + stats.yourTime); <del> else if (stats.yourTime > stats.avgTime) $('#stats').html('avg time: ' + stats.avgTime + ', your time: <span style="color:#cc0000">' + stats.yourTime); <del> else $('#stats').html('avg time: ' + stats.avgTime + ', your time: ' + stats.yourTime); <del> if (yourTurns < stats.avgGuesses) $('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: <span style="color:#248f24">' + yourTurns); <del> else if (yourTurns > stats.avgGuesses) $('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: <span style="color:#cc0000">' + yourTurns); <del> else $('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: ' + yourTurns); <add> var yourTurns = turns(); <add> if (stats.yourTime < stats.avgTime) $('#stats').html('avg time: ' + stats.avgTime + ', your time: <span style="color:#248f24">' + stats.yourTime); <add> else if (stats.yourTime > stats.avgTime) $('#stats').html('avg time: ' + stats.avgTime + ', your time: <span style="color:#cc0000">' + stats.yourTime); <add> else $('#stats').html('avg time: ' + stats.avgTime + ', your time: ' + stats.yourTime); <add> if (yourTurns < stats.avgGuesses) 
$('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: <span style="color:#248f24">' + yourTurns); <add> else if (yourTurns > stats.avgGuesses) $('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: <span style="color:#cc0000">' + yourTurns); <add> else $('#stats').append('<br>avg guesses: ' + stats.avgGuesses + ', your guesses: ' + yourTurns); <ide> } <ide> <ide> function displayHistory() { <del> var turnCount = '<td class="turns">' + turns() + '</td>'; <del> var feedback = $('#feedback').html(); <del> var history = turnCount + feedback; <del> if (feedback !== '') $('#history').append('<tr>' + history + '</tr>'); <add> var turnCount = '<td class="turns">' + turns() + '</td>'; <add> var feedback = $('#feedback').html(); <add> var history = turnCount + feedback; <add> if (feedback !== '') $('#history').append('<tr>' + history + '</tr>'); <ide> } <ide> <ide> function displayUntried() { <del> $('#untried').html('<tr>'); <del> untriedLetters.forEach(function (val, index) { <del> if ((index % 4) == 0) $('#untried').append('</tr> <tr>'); <del> $('#untried').append('<td class="untried">' + val + '</td>'); <del> }); <del> $('#untried').append('</tr>'); <add> $('#untried').html('<tr>'); <add> untriedLetters.forEach(function (val, index) { <add> if ((index % 4) == 0) $('#untried').append('</tr> <tr>'); <add> $('#untried').append('<td class="untried">' + val + '</td>'); <add> }); <add> $('#untried').append('</tr>'); <ide> } <ide> <ide> function removeLetter(letter) { <del> var index = $.inArray(letter, untriedLetters); <del> if (index !== -1) untriedLetters.splice(index, 1); <add> var index = $.inArray(letter, untriedLetters); <add> if (index !== -1) untriedLetters.splice(index, 1); <ide> } <ide> <ide> function resetUntried() { <del> untriedLetters = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']; <add> untriedLetters = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']; <ide> } <ide> <ide> function newGameDisplay(letters) { <del> resetUntried(); <del> $('#guessForm').css('display', 'block'); <del> $('#stats').css('display', 'none'); <del> $('#newgame').css('box-shadow', 'none'); <del> $('#guess').css('box-shadow', '5px 5px 3px #888888'); <del> $('#hideUntried').css('display', 'none'); <del> $('input[id="guessInput"]').val(''); <del> $('#guessInput').select(); <del> $('#untried').html(''); <del> $('#title').html('wor<span style="color:#cc0000">db</span>reaker'); <del> $('#guess').html('guess'); <del> $('#feedback').html(''); <del> $('#history').html(''); <del> $('#newgame').html(letters + ' letter word'); <del> $('input[id="guessInput"]').attr("maxlength", letters); <add> resetUntried(); <add> $('#guessForm').css('display', 'block'); <add> $('#stats').css('display', 'none'); <add> $('#newgame').css('box-shadow', 'none'); <add> $('#guess').css('box-shadow', '5px 5px 3px #888888'); <add> $('#hideUntried').css('display', 'none'); <add> $('input[id="guessInput"]').val(''); <add> $('#guessInput').select(); <add> $('#untried').html(''); <add> $('#title').html('wor<span style="color:#cc0000">db</span>reaker'); <add> $('#guess').html('guess'); <add> $('#feedback').html(''); <add> $('#history').html(''); <add> $('#newgame').html(letters + ' letter word'); <add> $('input[id="guessInput"]').attr("maxlength", letters); <ide> } <ide> <ide> function endGameDisplay() { <del> $('#newgame').html('give me a word'); <del> $('#newgame').css('box-shadow', '5px 5px 
3px #888888'); <del> $('#guess').css('box-shadow', 'none'); <del> $('#guessForm').css('display', 'none'); <del> $('#stats').css('display', 'block'); <del> $('#guess').html('you won!'); <del> $('#title').html('wor<span style="color:#248f24">db</span>reaker'); <del>} <add> $('#newgame').html('give me a word'); <add> $('#newgame').css('box-shadow', '5px 5px 3px #888888'); <add> $('#guess').css('box-shadow', 'none'); <add> $('#guessForm').css('display', 'none'); <add> $('#stats').css('display', 'block'); <add> $('#guess').html('you won!'); <add> $('#title').html('wor<span style="color:#248f24">db</span>reaker'); <add>}
JavaScript
mit
1088695f2fe91a4b9ca08fe40aaed131aef97d46
0
glistening-gibus/hackifieds,Hackifieds/hackifieds,glistening-gibus/hackifieds,Hackifieds/hackifieds,glistening-gibus/hackifieds
// To run seed.js
// Prereq: must clear categories table before running to avoid duplicate categories (same can be applied to users and listings table if your database already has the seed data)
// In terminal run: node seed.js

var data = require('./data');
var db = require('./db');

// iterate through data.json
data.forEach(function(table) {
  // iterate through categories and insert into categories table
  if (table.hasOwnProperty('categories')) {
    table.categories.forEach(function(entry) {
      db.Category.create(entry)
        .then(function(category) {
          console.log('Created category: ', category.dataValues.categoryName);
        })
        .catch(function(error) {
          console.error(error);
        });
    });
  // iterate through users and insert into users table
  } else if (table.hasOwnProperty('users')) {
    table.users.forEach(function(entry) {
      db.User.create(entry)
        .then(function(user) {
          console.log('Created user: ', user.dataValues.username);
        })
        .catch(function(error) {
          console.error(error);
        });
    });
  // iterate through listings and insert into listings table
  } else if (table.hasOwnProperty('listings')) {
    table.listings.forEach(function(entry) {
      db.Listing.create(entry)
        .then(function(listing) {
          console.log('Created listing: ', listing.dataValues.title);
        })
        .catch(function(error) {
          console.error(error);
        });
    });
  }
});
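seed.js decides which table to fill by checking which key each element of the data module exposes, so the expected input is an array of single-key wrapper objects. A hypothetical data.json sketching that shape — the field names inside each record are placeholders and must match the actual Sequelize models in db.js:

// Hypothetical ./data.json — one wrapper object per table, keyed the way seed.js checks:
[
  { "categories": [ { "categoryName": "electronics" } ] },
  { "users":      [ { "username": "alice" } ] },
  { "listings":   [ { "title": "Used laptop" } ] }
]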
db/seed.js
var data = require('./data');
var db = require('./db');

data.forEach(function(table) {
  if (table.hasOwnProperty('categories')) {
    table.categories.forEach(function(entry) {
      db.Category.create(entry)
        .then(function(category) {
          console.log('Created category: ', category.dataValues.categoryName);
        })
        .catch(function(error) {
          console.error(error);
        });
    });
  } else if (table.hasOwnProperty('users')) {
    table.users.forEach(function(entry) {
      db.User.create(entry)
        .then(function(user) {
          console.log('Created user: ', user.dataValues.username);
        })
        .catch(function(error) {
          console.error(error);
        });
    });
  } else if (table.hasOwnProperty('listings')) {
    table.listings.forEach(function(entry) {
      db.Listing.create(entry)
        .then(function(listing) {
          console.log('Created listing: ', listing.dataValues.title);
        })
        .catch(function(error) {
          console.error(error);
        });
    });
  }
});
Add comments to seed.js
db/seed.js
Add comments to seed.js
<ide><path>b/seed.js
<add>// To run seed.js
<add>// Prereq: must clear categories table before running to avoid duplicate categories (same can be applied to users and listings table if your database already has the seed data)
<add>// In terminal run: node seed.js
<add>
<ide> var data = require('./data');
<ide> var db = require('./db');
<ide>
<add>// iterate through data.json
<ide> data.forEach(function(table) {
<add>  // iterate through categories and insert into categories table
<ide>   if (table.hasOwnProperty('categories')) {
<ide>     table.categories.forEach(function(entry) {
<ide>       db.Category.create(entry)
<ide>           console.error(error);
<ide>         });
<ide>     });
<add>  // iterate through users and insert into users table
<ide>   } else if (table.hasOwnProperty('users')) {
<ide>     table.users.forEach(function(entry) {
<ide>       db.User.create(entry)
<ide>           console.error(error);
<ide>         });
<ide>     });
<add>  // iterate through listings and insert into listings table
<ide>   } else if (table.hasOwnProperty('listings')) {
<ide>     table.listings.forEach(function(entry) {
<ide>       db.Listing.create(entry)
JavaScript
mit
7f4dea1ea88396500c14abcdf5c4c22a8953da46
0
marcuswestin/std.js,ASAPPinc/std.js
var identity = require('std/identity')
var each = require('std/each') // assumed require path: the original called each() below without requiring it anywhere

module.exports = function sum(list, fn) {
	if (!list) { return 0 }
	if (!fn) { fn = identity } // identity is the default, so sum can add up a plain array of numbers
	var total = 0
	each(list, function(val, key) {
		total += fn(val, key)
	})
	return total
}
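With identity as the fallback, the use case named in the commit message works directly. A quick usage sketch, assuming the module is required the same way std/identity is above:

var sum = require('std/sum') // assumed require path

sum([1, 2, 3])                                                 // 6 — identity default adds the numbers as-is
sum([{ age: 30 }, { age: 40 }], function(p) { return p.age })  // 70 — explicit value extractor
sum(null)                                                      // 0 — a missing list counts as empty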
sum.js
module.exports = function sum(list, fn) {
	if (!list) { return 0 }
	var total = 0
	for (var i=0; i<list.length; i++) {
		total += fn(list[i])
	}
	return total
}
make identity the default sum function, to easily sum up an array of numbers
sum.js
make identity the default sum function, to easily sum up an array of numbers
<ide><path>um.js
<add>var identity = require('std/identity')
<add>
<ide> module.exports = function sum(list, fn) {
<ide> 	if (!list) { return 0 }
<add> 	if (!fn) { fn = identity }
<ide> 	var total = 0
<del> 	for (var i=0; i<list.length; i++) {
<del> 		total += fn(list[i])
<del> 	}
<add> 	each(list, function(val, key) {
<add> 		total += fn(val, key)
<add> 	})
<ide> 	return total
<ide> }
Java
mit
cc4814646c6643b0dac10306b00a35f7281ecfc5
0
MrMilan/DastaMessageCreator
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package dastamessageCreator;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import mzcr.cz.ns.dasta.ds4.ds_cistype.*;
import mzcr.cz.ns.dasta.ds4.ds_dasta.*;
import mzcr.cz.ns.dasta.ds4.ds_ip.*;
import mzcr.cz.ns.dasta.ds4.ds_ip.DgzType.Diag;
import mzcr.cz.ns.dasta.ds4.ds_type.*;
/* imports for XML DOM parsing */
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.DocumentBuilder;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.w3c.dom.Node;
import org.w3c.dom.Element;
import java.io.File;
import java.io.IOException;
import javax.xml.bind.Unmarshaller;
import javax.xml.parsers.ParserConfigurationException;
import org.xml.sax.SAXException;

/**
 *
 * @author User
 */
public class DastaMessageJava {

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) throws JAXBException {
        /*
         * Create factories for the individual packages
         */
        mzcr.cz.ns.dasta.ds4.ds_dasta.ObjectFactory factoryDasta = new mzcr.cz.ns.dasta.ds4.ds_dasta.ObjectFactory();
        mzcr.cz.ns.dasta.ds4.ds_type.ObjectFactory factoryDsType = new mzcr.cz.ns.dasta.ds4.ds_type.ObjectFactory();
        mzcr.cz.ns.dasta.ds4.ds_ip.ObjectFactory factoryDsIp = new mzcr.cz.ns.dasta.ds4.ds_ip.ObjectFactory();

        /* Load the XML */
        /* C:\\Documents and Settings\\Milan\\Dokumenty\\škola\\FBMI\\3rocnik\\17bieth\\Programs\\DastaMessageCreator\\zprava2.xml */
        File fXmlFile;
        JAXBContext context = JAXBContext.newInstance(Dasta.class);
        Unmarshaller un = context.createUnmarshaller();
        Dasta dastaInput = factoryDasta.createDasta();
        try {
            fXmlFile = new File(".\\zprava2.xml");
            dastaInput = (Dasta) un.unmarshal(fXmlFile);
        } catch (Exception e) {
            e.printStackTrace();
        }
        String verzeDs = dastaInput.getVerzeDs();
        System.out.println(verzeDs);

        /**
         * Basic header
         */
        Dasta dasta = factoryDasta.createDasta();
        dasta.setVerzeDs("04.06.08");
        dasta.setIdSoubor("MEDICALC_KK11115_2005-12-12T14:46:25");
        dasta.setBinPriloha("T");
        dasta.setUr("T");
        dasta.setTypOdesm("KK");

        /**
         * Information about the program that generated the message
         */
        ZdrojIsType zdrojIsType = factoryDasta.createZdrojIsType();
        zdrojIsType.setKodFirmy("MEDICALC");
        zdrojIsType.setKodProg("WMEXP");
        zdrojIsType.setVerzeProg("2.2.3.8");

        /**
         * Data provider
         */
        PmType pm = factoryDasta.createPmType();
        AsType as1 = factoryDasta.createAsType();
        as1.setTyp("I");
        as1.setVnitrni("999");
        pm.setAs(as1);

        /**
         * Information about the guarantor of the data
         */
        GarantDatType garantDat = factoryDsType.createGarantDatType();
        garantDat.setIdGarant("450124145");
        garantDat.setOdbornost("801");
        garantDat.setValue("MUDr. Jmeno Prijmeni");

        /**
         * Information about the healthcare facility
         */
        IsType is = factoryDasta.createIsType();
        is.setIco("12345678");
        is.setIcz("44101000");
        is.setIcp("44101882");
        AsType as2 = factoryDasta.createAsType();
        as2.setVnitrni("801");
        is.setAs(as2);

        /**
         * Patient section
         */
        Ip ip = factoryDsIp.createIp();
        ip.setIdPac("7601019998");
        ip.setRodcis("7601019998");
        ip.setJmeno("Jmeno");
        ip.setPrijmeni("Prijmeni");
        DatXxType datDn = new DatXxType();
        datDn.setFormat("D");
        datDn.setValue("1976-01-01");
        ip.setDatDn(datDn);
        ip.setSex(SexType.M);

        KuZType kuZType = factoryDsIp.createKuZType();
        kuZType.setTypku("AMBUL");
        kuZType.setFazespec("ZF");
        kuZType.setIdku("MEDICALC.FNPL.1234567890");
        DatXxType datProv = new DatXxType();
        DatXxType datVydani = new DatXxType();
        datProv.setValue("2006-12-03T11:00:00");
        datVydani.setValue("2006-11-03T11:00:00");
        kuZType.setDatProv(datProv);
        kuZType.setDatVydani(datVydani);

        /*
         * Definition of the workplace where the treatment took place
         */
        PracovisteType pPracoviste = factoryDsIp.createPracovisteType();
        pPracoviste.setIcz("44101000");
        pPracoviste.setIcp("44101882");
        pPracoviste.setOdb("501");
        pPracoviste.setNazev("Chirurgická ambulance");
        kuZType.setPPracoviste(pPracoviste);

        TextType textType = factoryDsIp.createTextType();
        Ptext pText = factoryDsIp.createPtext();
        pText.setSpace("preserve");
        pText.setValue("Při fotbale si podvrkl pravé koleno. O koleno s náplní, "
                + "čéška nebolestivá, bolestivá flexe kolena koleno špatně "
                + "vyšetřitelné, zásuvka negativní RTG bez traumatu Dg.Distorsio geni l.dx"
                + "Th provedena punkce kolenního kloubu.vypunktováno 60 ml krve "
                + "bez tukových kapének, naložena ortesa Dop klidový režim kontrola zde za 2 dny dop");
        textType.setPtext(pText);
        kuZType.setText(textType);

        // Diagnosis definition
        DgzType dgzType = factoryDsIp.createDgzType();
        Diag diag = new Diag();
        diag.setValue("I158");
        diag.setPoradi(1);
        dgzType.setDiag(diag);
        dgzType.setTypDg("P");
        dgzType.setIndOpravSd("N");
        List<DgzType> dgzTypes = kuZType.getDgVys();
        dgzTypes.add(dgzType);

        AuzkuType auzkuType = factoryDsIp.createAuzkuType();
        auzkuType.setIndikace("I");
        auzkuType.setTyppolVz("V");
        auzkuType.setDiag("I158");
        DatDuType datDu = factoryDsType.createDatDuType();
        datDu.setValue("2006-12-03T11:00:00");
        datDu.setTyp(LVTZDCUN.A);
        auzkuType.setDatDu(datDu);
        List<AuzkuType> auzkuTypes = kuZType.getAuzku();
        auzkuTypes.add(auzkuType);

        /**
         * Now the individual blocks have to be attached to the main dasta tag
         */
        dasta.setZdrojIs(zdrojIsType); // add the source
        dasta.setPm(pm); // add the receiving site
        dasta.setGarantDat(garantDat); // add the guarantor

        /**
         * The ku block may contain a number of clinical events (represented by
         * kuZType), so this block needs to be defined as an array (or rather
         * an ArrayList)
         */
        ArrayList<IsType> isTypes = new ArrayList<IsType>();
        Ip.Ku ipKu = factoryDsIp.createIpKu(); // the Ku class is part of the Ip class
        List<KuZType> kuZTypes = ipKu.getKuZ(); // a list of clinical events is created as part of Ip.Ku
        kuZTypes.add(kuZType); // the created clinical event is added
        ip.setKu(ipKu); // the clinical events are added to the patient block as one unit
        is.setIp(ip); // the patient block is added to the sender block
        isTypes.add(is); // in case the message contained more events, the main is block is placed in a list
        dasta.setIs(isTypes); // assign the whole message to the enclosing dasta block

        /*
         * Create the XML from the main dasta object. This part was left
         * commented out in the original source; it reuses the JAXBContext
         * created above and marshals the root object directly:
         *
         * Marshaller marshaller = context.createMarshaller();
         * marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
         *
         * Map the elements of the individual namespaces to the prefixes we want:
         *
         * marshaller.setProperty("com.sun.xml.internal.bind.namespacePrefixMapper",
         *         new NamespacePrefixMapper() {
         *     @Override
         *     public String[] getPreDeclaredNamespaceUris() {
         *         return null; //new String[]{WellKnownNamespace.XML_SCHEMA_INSTANCE};
         *     }
         *     @Override
         *     public String getPreferredPrefix(String namespaceUri, String suggestion, boolean requirePrefix) {
         *         if (namespaceUri.equals("urn:cz-mzcr:ns:dasta:ds4:ds_dasta")) { return "ds"; }
         *         if (namespaceUri.equals("urn:cz-mzcr:ns:dasta:ds4:ds_ip")) { return "dsip"; }
         *         return suggestion;
         *     }
         * });
         *
         * Print the XML message:
         *
         * marshaller.marshal(dasta, System.out);
         */
    }
}
src/dastamessageCreator/DastaMessageJava.java
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package dastamessageCreator; import java.util.ArrayList; import java.util.List; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBElement; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import mzcr.cz.ns.dasta.ds4.ds_cistype.*; import mzcr.cz.ns.dasta.ds4.ds_dasta.*; import mzcr.cz.ns.dasta.ds4.ds_ip.*; import mzcr.cz.ns.dasta.ds4.ds_ip.DgzType.Diag; import mzcr.cz.ns.dasta.ds4.ds_type.*; /*import pro Xdom*/ import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.DocumentBuilder; import org.w3c.dom.Document; import org.w3c.dom.NodeList; import org.w3c.dom.Node; import org.w3c.dom.Element; import java.io.File; import java.io.IOException; import javax.xml.parsers.ParserConfigurationException; import org.xml.sax.SAXException; /** * * @author User */ public class DastaMessageJava { /** * @param args the command line arguments */ public static void main(String[] args) throws JAXBException, ParserConfigurationException, SAXException, IOException { /*Nacteni java DOM by Mkyong tutorial*/ /*C:/Documents and Settings/Milan/Dokumenty/škola/FBMI/3rocnik/17bieth/Programs/DastaMessageCreator/zprava2.xml*/ DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder dBuilder; File fXmlFile; Document doc; try { fXmlFile = new File("C:/Documents and Settings/Milan/Dokumenty/škola/FBMI/3rocnik/17bieth/Programs/DastaMessageCreator/zprava2.xml"); dBuilder = dbFactory.newDocumentBuilder(); doc = dBuilder.parse(fXmlFile); } catch (Exception e) { e.printStackTrace(); } /* * Vytvoreni fantory pro jednotlive balicky */ mzcr.cz.ns.dasta.ds4.ds_dasta.ObjectFactory factoryDasta = new mzcr.cz.ns.dasta.ds4.ds_dasta.ObjectFactory(); mzcr.cz.ns.dasta.ds4.ds_type.ObjectFactory factoryDsType = new mzcr.cz.ns.dasta.ds4.ds_type.ObjectFactory(); mzcr.cz.ns.dasta.ds4.ds_ip.ObjectFactory factoryDsIp = new mzcr.cz.ns.dasta.ds4.ds_ip.ObjectFactory(); /** * Zakladni hlavicka */ Dasta dasta = factoryDasta.createDasta(); dasta.setVerzeDs( "04.06.08"); dasta.setIdSoubor( "MEDICALC_KK11115_2005-12-12T14:46:25"); dasta.setBinPriloha( "T"); dasta.setUr( "T"); dasta.setTypOdesm( "KK"); /** * Informace o programu, ktery generoval zpravu */ ZdrojIsType zdrojIsType = factoryDasta.createZdrojIsType(); zdrojIsType.setKodFirmy( "MEDICALC"); zdrojIsType.setKodProg( "WMEXP"); zdrojIsType.setVerzeProg( "2.2.3.8"); /** * Poskytovatel dat */ PmType pm = factoryDasta.createPmType(); AsType as1 = factoryDasta.createAsType(); as1.setTyp( "I"); as1.setVnitrni( "999"); pm.setAs(as1); /** * Informace o garantovi dat */ GarantDatType garantDat = factoryDsType.createGarantDatType(); garantDat.setIdGarant( "450124145"); garantDat.setOdbornost( "801"); garantDat.setValue( "MUDr. 
Jmeno Prijmeni"); /** * Informace o zdravotnickem zarizeni */ IsType is = factoryDasta.createIsType(); is.setIco( "12345678"); is.setIcz( "44101000"); is.setIcp( "44101882"); AsType as2 = factoryDasta.createAsType(); as2.setVnitrni( "801"); is.setAs(as2); /** * Pacientska cast */ Ip ip = factoryDsIp.createIp(); ip.setIdPac( "7601019998"); ip.setRodcis( "7601019998"); ip.setJmeno( "Jmeno"); ip.setPrijmeni( "Prijmeni"); DatXxType datDn = new DatXxType(); datDn.setFormat( "D"); datDn.setValue( "1976-01-01"); ip.setDatDn(datDn); ip.setSex(SexType.M); KuZType kuZType = factoryDsIp.createKuZType(); kuZType.setTypku( "AMBUL"); kuZType.setFazespec( "ZF"); kuZType.setIdku( "MEDICALC.FNPL.1234567890"); DatXxType datProv = new DatXxType(); DatXxType datVydani = new DatXxType(); datProv.setValue( "2006-12-03T11:00:00"); datVydani.setValue( "2006-11-03T11:00:00"); kuZType.setDatProv(datProv); kuZType.setDatVydani(datVydani); /* * Definice pracoviste, kde probehlo osetreni */ PracovisteType pPracoviste = factoryDsIp.createPracovisteType(); pPracoviste.setIcz( "44101000"); pPracoviste.setIcp( "44101882"); pPracoviste.setOdb( "501"); pPracoviste.setNazev( "Chirurgická ambulance"); kuZType.setPPracoviste(pPracoviste); TextType textType = factoryDsIp.createTextType(); Ptext pText = factoryDsIp.createPtext(); pText.setSpace( "preserve"); pText.setValue( "Při fotbale si podvrkl pravé koleno. O koleno s náplní, " + "čéška nebolestivá, bolestivá flexe kolena koleno špatně " + "vyšetřitelné, zásuvka negativní RTG bez traumatu Dg.Distorsio geni l.dx" + "Th provedena punkce kolenního kloubu.vypunktováno 60 ml krve " + "bez tukových kapének, naložena ortesa Dop klidový režim kontrola zde za 2 dny dop"); textType.setPtext(pText); kuZType.setText(textType); //Definice diagnozy DgzType dgzType = factoryDsIp.createDgzType(); Diag diag = new Diag(); diag.setValue( "I158"); diag.setPoradi( 1); dgzType.setDiag(diag); dgzType.setTypDg( "P"); dgzType.setIndOpravSd( "N"); List<DgzType> dgzTypes = kuZType.getDgVys(); dgzTypes.add(dgzType); AuzkuType auzkuType = factoryDsIp.createAuzkuType(); auzkuType.setIndikace( "I"); auzkuType.setTyppolVz( "V"); auzkuType.setDiag( "I158"); DatDuType datDu = factoryDsType.createDatDuType(); datDu.setValue( "2006-12-03T11:00:00"); datDu.setTyp(LVTZDCUN.A); auzkuType.setDatDu(datDu); List<AuzkuType> auzkuTypes = kuZType.getAuzku(); auzkuTypes.add(auzkuType); /** * Nyni je potreba navazat jednotlive bloky na hlavni tag dasta */ dasta.setZdrojIs(zdrojIsType); // pridani zdroje dasta.setPm(pm); // pridani prijmoveho mista dasta.setGarantDat(garantDat); // pridani garanta /** * Blok ku muze obsahovat vetsi pocet klinickych udalosti * (reprezentovnych kuZType), proto je potreba tento blok definovat v * poli (rep. 
ArrayListu) */ ArrayList<IsType> isTypes = new ArrayList<IsType>(); Ip.Ku ipKu = factoryDsIp.createIpKu(); // trida Ku je soucasti tridy Ip List<KuZType> kuZTypes = ipKu.getKuZ(); // Je vyvoren seznam klinickych udalosti, ktery je soucasti Ip.Ku kuZTypes.add(kuZType); // prida se vytvorena klinicka udalost ip.setKu(ipKu); // klinicke udalosti se pridaji jako blok k bloku pacienta is.setIp(ip); // krok pridani bloku pacienta do bloku odesilatele isTypes.add(is); // Pro pripad, ze by zprava obsahovale vice udalosti je halvni blok is zarazen do seznamu dasta.setIs(isTypes); // prirazeni cele zpravy do obalu bloku datsa /** * Vytvori xml z hlavniho objektu dasta * * JAXBContext context = JAXBContext.newInstance(Dasta.class); * JAXBElement<Dasta> element = factoryDasta.createExpenseReport(dasta); * Marshaller marshaller = context.createMarshaller(); * marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true); /** * Namapuje elementy jednotlivych namespace na nami pozadovane nazvy * * marshaller.setProperty("com.sun.xml.internal.bind.namespacePrefixMapper", * new NamespacePrefixMapper() { * * @Override public String[] getPreDeclaredNamespaceUris() { return * null; //new String[]{WellKnownNamespace.XML_SCHEMA_INSTANCE}; } * * @Override public String getPreferredPrefix(String namespaceUri, * String suggestion, boolean requirePrefix) { if * (namespaceUri.equals("urn:cz-mzcr:ns:dasta:ds4:ds_dasta")) { return * "ds"; } if (namespaceUri.equals("urn:cz-mzcr:ns:dasta:ds4:ds_ip")) { * return "dsip"; } return suggestion; * * } * }); * * /** * Vypis xml zpavy * * marshaller.marshal(element, System.out); */ } }
Added Unmarshaller
src/dastamessageCreator/DastaMessageJava.java
Added Unmarshaller
<ide><path>rc/dastamessageCreator/DastaMessageJava.java <ide> import org.w3c.dom.Element; <ide> import java.io.File; <ide> import java.io.IOException; <add>import javax.xml.bind.Unmarshaller; <ide> import javax.xml.parsers.ParserConfigurationException; <ide> import org.xml.sax.SAXException; <ide> <ide> /** <ide> * @param args the command line arguments <ide> */ <del> public static void main(String[] args) throws JAXBException, ParserConfigurationException, SAXException, IOException { <del> <del> /*Nacteni java DOM by Mkyong tutorial*/ <del> /*C:/Documents and Settings/Milan/Dokumenty/škola/FBMI/3rocnik/17bieth/Programs/DastaMessageCreator/zprava2.xml*/ <del> DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); <del> DocumentBuilder dBuilder; <add> public static void main(String[] args) throws JAXBException { <add> <add> /* <add> * Vytvoreni fantory pro jednotlive balicky <add> */ <add> mzcr.cz.ns.dasta.ds4.ds_dasta.ObjectFactory factoryDasta = new mzcr.cz.ns.dasta.ds4.ds_dasta.ObjectFactory(); <add> mzcr.cz.ns.dasta.ds4.ds_type.ObjectFactory factoryDsType = new mzcr.cz.ns.dasta.ds4.ds_type.ObjectFactory(); <add> mzcr.cz.ns.dasta.ds4.ds_ip.ObjectFactory factoryDsIp = new mzcr.cz.ns.dasta.ds4.ds_ip.ObjectFactory(); <add> <add> /*Nacteni xml*/ <add> /*C:\\Documents and Settings\\Milan\\Dokumenty\\škola\\FBMI\\3rocnik\\17bieth\\Programs\\DastaMessageCreator\\zprava2.xml*/ <ide> File fXmlFile; <del> Document doc; <add> JAXBContext context = JAXBContext.newInstance(Dasta.class); <add> Unmarshaller un = context.createUnmarshaller(); <add> Dasta dastaInput = factoryDasta.createDasta(); <ide> try { <del> fXmlFile = new File("C:/Documents and Settings/Milan/Dokumenty/škola/FBMI/3rocnik/17bieth/Programs/DastaMessageCreator/zprava2.xml"); <del> dBuilder = dbFactory.newDocumentBuilder(); <del> doc = dBuilder.parse(fXmlFile); <add> fXmlFile = new File(".\\zprava2.xml"); <add> dastaInput = (Dasta) un.unmarshal(fXmlFile); <ide> } catch (Exception e) { <ide> e.printStackTrace(); <ide> } <del> /* <del> * Vytvoreni fantory pro jednotlive balicky <del> */ <del> mzcr.cz.ns.dasta.ds4.ds_dasta.ObjectFactory factoryDasta = new mzcr.cz.ns.dasta.ds4.ds_dasta.ObjectFactory(); <del> mzcr.cz.ns.dasta.ds4.ds_type.ObjectFactory factoryDsType = new mzcr.cz.ns.dasta.ds4.ds_type.ObjectFactory(); <del> mzcr.cz.ns.dasta.ds4.ds_ip.ObjectFactory factoryDsIp = new mzcr.cz.ns.dasta.ds4.ds_ip.ObjectFactory(); <add> String verzeDs = dastaInput.getVerzeDs(); <add> System.out.println(verzeDs); <add> <add> <ide> <ide> /** <ide> * Zakladni hlavicka
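The commit replaces the hand-rolled DOM parse with JAXB unmarshalling straight into the generated object tree. Below is a minimal, self-contained sketch of that step; it assumes the JAXB-generated mzcr.cz.ns.dasta.ds4.ds_dasta.Dasta root class from the record above, and the DastaReader wrapper itself is illustrative.

import java.io.File;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import mzcr.cz.ns.dasta.ds4.ds_dasta.Dasta;

public class DastaReader {
    // Builds a JAXBContext for the generated Dasta root class and
    // unmarshals the XML document straight into the object tree.
    public static Dasta read(File xmlFile) throws JAXBException {
        JAXBContext context = JAXBContext.newInstance(Dasta.class);
        Unmarshaller un = context.createUnmarshaller();
        return (Dasta) un.unmarshal(xmlFile);
    }

    public static void main(String[] args) throws JAXBException {
        // The commit reads the relative path .\zprava2.xml, which
        // resolves against the JVM's working directory.
        Dasta dasta = read(new File("zprava2.xml"));
        System.out.println(dasta.getVerzeDs());
    }
}

A side effect of the commit worth noting: the input moved from an absolute path to the relative .\zprava2.xml, so the file now resolves against the JVM's working directory rather than a fixed location.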
Java
lgpl-2.1
902c8c38081babe9574d4a199c68abc77aee7b9c
0
sbonoc/opencms-core,serrapos/opencms-core,alkacon/opencms-core,gallardo/opencms-core,victos/opencms-core,MenZil/opencms-core,ggiudetti/opencms-core,mediaworx/opencms-core,it-tavis/opencms-core
package com.opencms.file; import java.util.*; import javax.servlet.http.*; import com.opencms.core.*; /** * This class describes a resource broker for user and groups in the Cms.<BR/> * <B>All</B> Methods get a first parameter: A_CmsUser. It is the current user. This * is for security-reasons, to check if this current user has the rights to call the * method.<BR/> * * This class has package visibility for security reasons. * * @author Michael Emmerich * @version $Revision: 1.11 $ $Date: 2000/01/04 17:06:05 $ */ class CmsRbUserGroup implements I_CmsRbUserGroup, I_CmsConstants { /** * The user/group access object which is required to access the * user and group databases. */ private I_CmsAccessUserGroup m_accessUserGroup; /** * Constructor, creates a new Cms User & Group Resource Broker. * * @param accessUserGroup The user/group access object. */ public CmsRbUserGroup(I_CmsAccessUserGroup accessUserGroup) { m_accessUserGroup=accessUserGroup; } /** * Returns a user object.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param username The name of the user that is to be read. * @return User * @exception CmsException Throws CmsException if operation was not succesful */ public A_CmsUser readUser(String username) throws CmsException { A_CmsUser user=null; user=m_accessUserGroup.readUser(username); return user; } /** * Returns a user object if the password for the user is correct.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param username The username of the user that is to be read. * @param password The password of the user that is to be read. * @return User * * @exception CmsException Throws CmsException if operation was not succesful */ public A_CmsUser readUser(String username, String password) throws CmsException { A_CmsUser user=null; user=m_accessUserGroup.readUser(username,password); return user; } /** * Returns a list of groups of a user.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param username The name of the user. * @return Vector of groups * @exception CmsException Throws CmsException if operation was not succesful */ public Vector getGroupsOfUser(String username) throws CmsException { Vector allGroups; Vector subGroups; A_CmsGroup group; // get all groups of the user Vector groups=m_accessUserGroup.getGroupsOfUser(username); allGroups=groups; // now get all childs of the groups Enumeration enu = groups.elements(); while (enu.hasMoreElements()) { group=(A_CmsGroup)enu.nextElement(); subGroups=getChilds(group.getName()); //add the subchilds to the already existing groups Enumeration enusub=subGroups.elements(); while (enusub.hasMoreElements()) { group=(A_CmsGroup)enusub.nextElement(); allGroups.addElement(group); } } return allGroups; } /** * Returns a group object.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param groupname The name of the group that is to be read. * @return Group. * * @exception CmsException Throws CmsException if operation was not succesful */ public A_CmsGroup readGroup(String groupname) throws CmsException { A_CmsGroup group = null; group=m_accessUserGroup.readGroup(groupname); return group; } /** * Returns a list of users in a group.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param callingUser The user who wants to use this method. * @param groupname The name of the group to list users from. * @return Vector of users. 
* @exception CmsException Throws CmsException if operation was not succesful. */ public Vector getUsersOfGroup(String groupname) throws CmsException { return m_accessUserGroup.getUsersOfGroup(groupname); } /** * Checks if a user is member of a group.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param nameuser The name of the user to check. * @param groupname The name of the group to check. * @return True or False * * @exception CmsException Throws CmsException if operation was not succesful */ public boolean userInGroup(String username, String groupname) throws CmsException { boolean userInGroup; A_CmsGroup parent; // check if the user is in the given group userInGroup=m_accessUserGroup.userInGroup(username,groupname); //if not, check if the user is in the parent groups if (!userInGroup) { parent=getParent(groupname); while ((parent!= null) && (userInGroup==false)) { if (m_accessUserGroup.userInGroup(username,parent.getName())) { userInGroup=true; } parent=getParent(parent.getName()); } } return userInGroup; } /** * Adds a user to the Cms. * * Only a adminstrator can add users to the cms.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param name The new name for the user. * @param password The new password for the user. * @param group The default groupname for the user. * @param description The description for the user. * @param additionalInfos A Hashtable with additional infos for the user. These * Infos may be stored into the Usertables (depending on the implementation). * @param flags The flags for a user (e.g. C_FLAG_ENABLED) * * @return user The added user will be returned. * * @exception CmsException Throws CmsException if operation was not succesfull. */ public A_CmsUser addUser(String name, String password, String group, String description, Hashtable additionalInfos, int flags) throws CmsException { A_CmsUser user=null; A_CmsGroup grp = null; //check if the group is exiting grp=m_accessUserGroup.readGroup(group); if (grp != null) { //create new user. user=m_accessUserGroup.createUser(name,password,group,description,additionalInfos,flags); //add user to user group. m_accessUserGroup.addUserToGroup(name,group); } else { throw new CmsException(CmsException.C_NO_GROUP); } return user; } /** * Deletes a user from the Cms. * * Only a adminstrator can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param name The name of the user to be deleted. * * @exception CmsException Throws CmsException if operation was not succesfull. */ public void deleteUser(String username) throws CmsException { m_accessUserGroup.deleteUser(username); } /** * Updated the userinformation.<BR/> * * Only the administrator can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param username The user to be updated. * @param additionalInfos A Hashtable with additional infos for the user. These * * @exception CmsException Throws CmsException if operation was not succesful */ public void writeUser(A_CmsUser user) throws CmsException { m_accessUserGroup.writeUser(user); } /** * Add a new group to the Cms.<BR/> * * Only the admin can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param name The name of the new group. * @param description The description for the new group. * @int flags The flags for the new group. * @param name The name of the parent group (or null). 
* * @return Group * * @exception CmsException Throws CmsException if operation was not succesfull. */ public A_CmsGroup addGroup(String name, String description, int flags, String parent) throws CmsException { A_CmsGroup group=null; group=m_accessUserGroup.createGroup(name,description,flags,parent); return group; } /** * Writes an already existing group in the Cms.<BR/> * * Only the admin can do this.<P/> * * @param group The group that should be written to the Cms. * @exception CmsException Throws CmsException if operation was not succesfull. */ public void writeGroup(A_CmsGroup group) throws CmsException{ m_accessUserGroup.writeGroup(group); } /** * Delete a group from the Cms.<BR/> * Only groups that contain no subgroups can be deleted. * * Only the admin can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param delgroup The name of the group that is to be deleted. * @exception CmsException Throws CmsException if operation was not succesfull. */ public void deleteGroup(String delgroup) throws CmsException { A_CmsGroup group = null; Vector childs=null; // get all child groups of the group childs=getChild(delgroup); // delete group only if it has no childs if (childs == null) { m_accessUserGroup.deleteGroup(delgroup); } else { throw new CmsException(CmsException.C_GROUP_NOT_EMPTY); } } /** * Adds a user to a group.<BR/> * * Only the admin can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param username The name of the user that is to be added to the group. * @param groupname The name of the group. * @exception CmsException Throws CmsException if operation was not succesfull. */ public void addUserToGroup(String username, String groupname) throws CmsException { m_accessUserGroup.addUserToGroup(username,groupname); } /** * Removes a user from a group. * * Only the admin can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param username The name of the user that is to be removed from the group. * @param groupname The name of the group. * @exception CmsException Throws CmsException if operation was not succesful. */ public void removeUserFromGroup(String username, String groupname) throws CmsException { m_accessUserGroup.removeUserFromGroup(username,groupname); } /** * Returns all users<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @return users A Vector of all existing users. * @exception CmsException Throws CmsException if operation was not succesful. */ public Vector getUsers() throws CmsException{ Vector users=null; users=m_accessUserGroup.getUsers(); return users; } /** * Returns all groups<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @return users A Vector of all existing groups. * @exception CmsException Throws CmsException if operation was not succesful. */ public Vector getGroups() throws CmsException { Vector groups=null; groups=m_accessUserGroup.getGroups(); return groups; } /** * Returns all child groups of a groups<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param groupname The name of the group. * @return users A Vector of all child groups or null. * @exception CmsException Throws CmsException if operation was not succesful. 
*/ public Vector getChild(String groupname) throws CmsException { Vector childs=null; childs=m_accessUserGroup.getChild(groupname); return childs; } /** * Returns all child groups of a groups<P/> * This method also returns all sub-child groups of the current group. * * * @param groupname The name of the group. * @return users A Vector of all child groups or null. * @exception CmsException Throws CmsException if operation was not succesful. */ public Vector getChilds(String groupname) throws CmsException { Vector childs=null; Vector allChilds=null; Vector subchilds=null; A_CmsGroup group=null; // get all child groups if the user group childs=m_accessUserGroup.getChild(groupname); allChilds=childs; // now get all subchilds for each group Enumeration enu=childs.elements(); while (enu.hasMoreElements()) { group=(A_CmsGroup)enu.nextElement(); subchilds=getChilds(group.getName()); //add the subchilds to the already existing groups Enumeration enusub=subchilds.elements(); while (enusub.hasMoreElements()) { group=(A_CmsGroup)enusub.nextElement(); allChilds.addElement(group); } } return allChilds; } /** * Returns the patent group of a group<P/> * * * @param groupname The name of the group. * @return The parent group of the actual group or null; * @exception CmsException Throws CmsException if operation was not succesful. */ public A_CmsGroup getParent(String groupname) throws CmsException { A_CmsGroup parent= null; parent = m_accessUserGroup.getParent(groupname); return parent; } /** * Sets the password for a user. * * Only a adminstrator or the curretuser can do this.<P/> * * <B>Security:</B> * Users, which are in the group "administrators" are granted.<BR/> * Current users can change their own password. * * @param username The name of the user. * @param newPassword The new password. * * @exception CmsException Throws CmsException if operation was not succesfull. */ public void setPassword(String username, String newPassword) throws CmsException{ m_accessUserGroup.setPassword(username,newPassword); } }
src/com/opencms/file/CmsRbUserGroup.java
package com.opencms.file; import java.util.*; import javax.servlet.http.*; import com.opencms.core.*; /** * This class describes a resource broker for user and groups in the Cms.<BR/> * <B>All</B> Methods get a first parameter: A_CmsUser. It is the current user. This * is for security-reasons, to check if this current user has the rights to call the * method.<BR/> * * This class has package visibility for security reasons. * * @author Michael Emmerich * @version $Revision: 1.10 $ $Date: 2000/01/04 16:23:26 $ */ class CmsRbUserGroup implements I_CmsRbUserGroup, I_CmsConstants { /** * The user/group access object which is required to access the * user and group databases. */ private I_CmsAccessUserGroup m_accessUserGroup; /** * Constructor, creates a new Cms User & Group Resource Broker. * * @param accessUserGroup The user/group access object. */ public CmsRbUserGroup(I_CmsAccessUserGroup accessUserGroup) { m_accessUserGroup=accessUserGroup; } /** * Returns a user object.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param username The name of the user that is to be read. * @return User * @exception CmsException Throws CmsException if operation was not succesful */ public A_CmsUser readUser(String username) throws CmsException { A_CmsUser user=null; user=m_accessUserGroup.readUser(username); return user; } /** * Returns a user object if the password for the user is correct.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param username The username of the user that is to be read. * @param password The password of the user that is to be read. * @return User * * @exception CmsException Throws CmsException if operation was not succesful */ public A_CmsUser readUser(String username, String password) throws CmsException { A_CmsUser user=null; user=m_accessUserGroup.readUser(username,password); return user; } /** * Returns a list of groups of a user.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param username The name of the user. * @return Vector of groups * @exception CmsException Throws CmsException if operation was not succesful */ public Vector getGroupsOfUser(String username) throws CmsException { Vector allGroups; Vector subGroups; A_CmsGroup group; // get all groups of the user Vector groups=m_accessUserGroup.getGroupsOfUser(username); allGroups=groups; // now get all childs of the groups Enumeration enu = groups.elements(); while (enu.hasMoreElements()) { group=(A_CmsGroup)enu.nextElement(); subGroups=getChilds(group.getName()); //add the subchilds to the already existing groups Enumeration enusub=subGroups.elements(); while (enusub.hasMoreElements()) { group=(A_CmsGroup)enusub.nextElement(); allGroups.addElement(group); } } return allGroups; } /** * Returns a group object.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param groupname The name of the group that is to be read. * @return Group. * * @exception CmsException Throws CmsException if operation was not succesful */ public A_CmsGroup readGroup(String groupname) throws CmsException { A_CmsGroup group = null; group=m_accessUserGroup.readGroup(groupname); return group; } /** * Returns a list of users in a group.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param callingUser The user who wants to use this method. * @param groupname The name of the group to list users from. * @return Vector of users. 
* @exception CmsException Throws CmsException if operation was not succesful. */ public Vector getUsersOfGroup(String groupname) throws CmsException { return m_accessUserGroup.getUsersOfGroup(groupname); } /** * Checks if a user is member of a group.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param nameuser The name of the user to check. * @param groupname The name of the group to check. * @return True or False * * @exception CmsException Throws CmsException if operation was not succesful */ public boolean userInGroup(String username, String groupname) throws CmsException { return m_accessUserGroup.userInGroup(username,groupname); } /** * Adds a user to the Cms. * * Only a adminstrator can add users to the cms.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param name The new name for the user. * @param password The new password for the user. * @param group The default groupname for the user. * @param description The description for the user. * @param additionalInfos A Hashtable with additional infos for the user. These * Infos may be stored into the Usertables (depending on the implementation). * @param flags The flags for a user (e.g. C_FLAG_ENABLED) * * @return user The added user will be returned. * * @exception CmsException Throws CmsException if operation was not succesfull. */ public A_CmsUser addUser(String name, String password, String group, String description, Hashtable additionalInfos, int flags) throws CmsException { A_CmsUser user=null; A_CmsGroup grp = null; //check if the group is exiting grp=m_accessUserGroup.readGroup(group); if (grp != null) { //create new user. user=m_accessUserGroup.createUser(name,password,group,description,additionalInfos,flags); //add user to user group. m_accessUserGroup.addUserToGroup(name,group); } else { throw new CmsException(CmsException.C_NO_GROUP); } return user; } /** * Deletes a user from the Cms. * * Only a adminstrator can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param name The name of the user to be deleted. * * @exception CmsException Throws CmsException if operation was not succesfull. */ public void deleteUser(String username) throws CmsException { m_accessUserGroup.deleteUser(username); } /** * Updated the userinformation.<BR/> * * Only the administrator can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param username The user to be updated. * @param additionalInfos A Hashtable with additional infos for the user. These * * @exception CmsException Throws CmsException if operation was not succesful */ public void writeUser(A_CmsUser user) throws CmsException { m_accessUserGroup.writeUser(user); } /** * Add a new group to the Cms.<BR/> * * Only the admin can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param name The name of the new group. * @param description The description for the new group. * @int flags The flags for the new group. * @param name The name of the parent group (or null). * * @return Group * * @exception CmsException Throws CmsException if operation was not succesfull. 
*/ public A_CmsGroup addGroup(String name, String description, int flags, String parent) throws CmsException { A_CmsGroup group=null; group=m_accessUserGroup.createGroup(name,description,flags,parent); return group; } /** * Writes an already existing group in the Cms.<BR/> * * Only the admin can do this.<P/> * * @param group The group that should be written to the Cms. * @exception CmsException Throws CmsException if operation was not succesfull. */ public void writeGroup(A_CmsGroup group) throws CmsException{ m_accessUserGroup.writeGroup(group); } /** * Delete a group from the Cms.<BR/> * Only groups that contain no subgroups can be deleted. * * Only the admin can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param delgroup The name of the group that is to be deleted. * @exception CmsException Throws CmsException if operation was not succesfull. */ public void deleteGroup(String delgroup) throws CmsException { A_CmsGroup group = null; Vector childs=null; // get all child groups of the group childs=getChild(delgroup); // delete group only if it has no childs if (childs == null) { m_accessUserGroup.deleteGroup(delgroup); } else { throw new CmsException(CmsException.C_GROUP_NOT_EMPTY); } } /** * Adds a user to a group.<BR/> * * Only the admin can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param username The name of the user that is to be added to the group. * @param groupname The name of the group. * @exception CmsException Throws CmsException if operation was not succesfull. */ public void addUserToGroup(String username, String groupname) throws CmsException { m_accessUserGroup.addUserToGroup(username,groupname); } /** * Removes a user from a group. * * Only the admin can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param username The name of the user that is to be removed from the group. * @param groupname The name of the group. * @exception CmsException Throws CmsException if operation was not succesful. */ public void removeUserFromGroup(String username, String groupname) throws CmsException { m_accessUserGroup.removeUserFromGroup(username,groupname); } /** * Returns all users<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @return users A Vector of all existing users. * @exception CmsException Throws CmsException if operation was not succesful. */ public Vector getUsers() throws CmsException{ Vector users=null; users=m_accessUserGroup.getUsers(); return users; } /** * Returns all groups<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @return users A Vector of all existing groups. * @exception CmsException Throws CmsException if operation was not succesful. */ public Vector getGroups() throws CmsException { Vector groups=null; groups=m_accessUserGroup.getGroups(); return groups; } /** * Returns all child groups of a groups<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param groupname The name of the group. * @return users A Vector of all child groups or null. * @exception CmsException Throws CmsException if operation was not succesful. */ public Vector getChild(String groupname) throws CmsException { Vector childs=null; childs=m_accessUserGroup.getChild(groupname); return childs; } /** * Returns all child groups of a groups<P/> * This method also returns all sub-child groups of the current group. 
* * * @param groupname The name of the group. * @return users A Vector of all child groups or null. * @exception CmsException Throws CmsException if operation was not succesful. */ public Vector getChilds(String groupname) throws CmsException { Vector childs=null; Vector allChilds=null; Vector subchilds=null; A_CmsGroup group=null; // get all child groups if the user group childs=m_accessUserGroup.getChild(groupname); allChilds=childs; // now get all subchilds for each group Enumeration enu=childs.elements(); while (enu.hasMoreElements()) { group=(A_CmsGroup)enu.nextElement(); subchilds=getChilds(group.getName()); //add the subchilds to the already existing groups Enumeration enusub=subchilds.elements(); while (enusub.hasMoreElements()) { group=(A_CmsGroup)enusub.nextElement(); allChilds.addElement(group); } } return allChilds; } /** * Returns the patent group of a group<P/> * * * @param groupname The name of the group. * @return The parent group of the actual group or null; * @exception CmsException Throws CmsException if operation was not succesful. */ public A_CmsGroup getParent(String groupname) throws CmsException { A_CmsGroup parent= null; parent = m_accessUserGroup.getParent(groupname); return parent; } /** * Sets the password for a user. * * Only a adminstrator or the curretuser can do this.<P/> * * <B>Security:</B> * Users, which are in the group "administrators" are granted.<BR/> * Current users can change their own password. * * @param username The name of the user. * @param newPassword The new password. * * @exception CmsException Throws CmsException if operation was not succesfull. */ public void setPassword(String username, String newPassword) throws CmsException{ m_accessUserGroup.setPassword(username,newPassword); } }
Update: updated userInGroup Method
src/com/opencms/file/CmsRbUserGroup.java
Update: updated userInGroup Method
<ide><path>rc/com/opencms/file/CmsRbUserGroup.java <ide> * This class has package visibility for security reasons. <ide> * <ide> * @author Michael Emmerich <del> * @version $Revision: 1.10 $ $Date: 2000/01/04 16:23:26 $ <add> * @version $Revision: 1.11 $ $Date: 2000/01/04 17:06:05 $ <ide> */ <ide> class CmsRbUserGroup implements I_CmsRbUserGroup, I_CmsConstants { <ide> <ide> */ <ide> public boolean userInGroup(String username, String groupname) <ide> throws CmsException { <del> return m_accessUserGroup.userInGroup(username,groupname); <add> boolean userInGroup; <add> A_CmsGroup parent; <add> // check if the user is in the given group <add> userInGroup=m_accessUserGroup.userInGroup(username,groupname); <add> //if not, check if the user is in the parent groups <add> if (!userInGroup) { <add> parent=getParent(groupname); <add> while ((parent!= null) && (userInGroup==false)) { <add> if (m_accessUserGroup.userInGroup(username,parent.getName())) { <add> userInGroup=true; <add> } <add> parent=getParent(parent.getName()); <add> } <add> } <add> return userInGroup; <add> <ide> } <ide> <ide> /**
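The updated userInGroup treats membership in any ancestor group as membership in the group itself. The following standalone sketch shows that parent-chain walk; MembershipStore is a hypothetical stand-in for the record's I_CmsAccessUserGroup access object, reduced to the two calls the walk needs.

// MembershipStore is a hypothetical stand-in for the record's
// I_CmsAccessUserGroup access object.
interface MembershipStore {
    boolean userInGroup(String username, String groupname);
    String getParent(String groupname); // null for a root group
}

final class GroupChecker {
    // Direct membership first; failing that, membership in any
    // ancestor group along the parent chain also counts.
    static boolean userInGroup(MembershipStore store, String username, String groupname) {
        if (store.userInGroup(username, groupname)) {
            return true;
        }
        for (String parent = store.getParent(groupname);
                parent != null;
                parent = store.getParent(parent)) {
            if (store.userInGroup(username, parent)) {
                return true;
            }
        }
        return false;
    }
}

As in the commit, this assumes the group hierarchy is acyclic; a cycle in the parent chain would make the loop run forever.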
Java
apache-2.0
7052621f39aa127406e977e8beb95c501596d186
0
jimma/xerces,RackerWilliams/xercesj,ronsigal/xerces
/* * The Apache Software License, Version 1.1 * * * Copyright (c) 2000-2002 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "Xerces" and "Apache Software Foundation" must * not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact [email protected]. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation and was * originally based on software copyright (c) 1999, International * Business Machines, Inc., http://www.apache.org. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
*/ package org.apache.xerces.parsers; import org.apache.xerces.dom3.ls.DOMInputSource; import org.apache.xerces.dom3.as.ASModel; import org.apache.xerces.dom3.as.DOMASBuilder; import org.apache.xerces.dom3.as.DOMASException; import org.apache.xerces.xni.parser.XMLInputSource; import org.apache.xerces.xni.parser.XMLEntityResolver; import org.apache.xerces.xni.parser.XMLParserConfiguration; import org.apache.xerces.xni.XNIException; import org.apache.xerces.xni.parser.XMLConfigurationException; import org.apache.xerces.xni.grammars.XMLGrammarPool; import org.apache.xerces.xni.grammars.Grammar; import org.apache.xerces.dom.ASModelImpl; import org.apache.xerces.impl.Constants; import org.apache.xerces.impl.XMLErrorReporter; import org.apache.xerces.impl.validation.XMLGrammarPoolImpl; import org.apache.xerces.impl.xs.traversers.XSDHandler; import org.apache.xerces.impl.xs.XSDDescription; import org.apache.xerces.impl.xs.XSGrammarBucket; import org.apache.xerces.impl.xs.SubstitutionGroupHandler; import org.apache.xerces.impl.xs.models.CMBuilder; import org.apache.xerces.impl.xs.SchemaGrammar; import org.apache.xerces.impl.xs.XSConstraints; import org.apache.xerces.impl.xs.XSDeclarationPool; import java.util.Vector; import org.apache.xerces.util.SymbolTable; /** * This is Abstract Schema DOM Builder class. It extends the DOMBuilderImpl * class. Provides support for preparsing schemas. * * @author Pavani Mukthipudi, Sun Microsystems Inc. * @author Neil Graham, IBM * @version $Id$ * */ public class DOMASBuilderImpl extends DOMBuilderImpl implements DOMASBuilder { // // Constants // // Feature ids protected static final String SCHEMA_FULL_CHECKING = Constants.XERCES_FEATURE_PREFIX + Constants.SCHEMA_FULL_CHECKING; // Property ids protected static final String ERROR_REPORTER = Constants.XERCES_PROPERTY_PREFIX + Constants.ERROR_REPORTER_PROPERTY; protected static final String SYMBOL_TABLE = Constants.XERCES_PROPERTY_PREFIX + Constants.SYMBOL_TABLE_PROPERTY; protected static final String ENTITY_MANAGER = Constants.XERCES_PROPERTY_PREFIX + Constants.ENTITY_MANAGER_PROPERTY; // // Data // protected XSGrammarBucket fGrammarBucket; protected SubstitutionGroupHandler fSubGroupHandler; protected XSDHandler fSchemaHandler; protected ASModelImpl fAbstractSchema; // // Constructors // /** * Constructs a DOM Builder using the dtd/xml schema parser configuration. */ public DOMASBuilderImpl() { super(new XMLGrammarCachingConfiguration()); } // <init> /** * Constructs a DOM Builder using the specified parser configuration. * We must demand that the configuration extend XMLGrammarCachingConfiguration to make * sure all relevant methods/features are available. */ public DOMASBuilderImpl(XMLGrammarCachingConfiguration config) { super(config); } // <init>(XMLParserConfiguration) /** * Constructs a DOM Builder using the specified symbol table. */ public DOMASBuilderImpl(SymbolTable symbolTable) { super(new XMLGrammarCachingConfiguration(symbolTable)); } // <init>(SymbolTable) /** * Constructs a DOM Builder using the specified symbol table and * grammar pool. * The grammarPool implementation should extent the default * implementation; otherwise, correct functioning of this class may * not occur. */ public DOMASBuilderImpl(SymbolTable symbolTable, XMLGrammarPool grammarPool) { super(new XMLGrammarCachingConfiguration(symbolTable, grammarPool)); } // // DOMASBuilder methods // /** * Associate an <code>ASModel</code> with a document instance. 
This * <code>ASModel</code> will be used by the " * <code>validate-if-schema</code>" and " * <code>datatype-normalization</code>" options during the load of a new * <code>Document</code>. */ public ASModel getAbstractSchema() { return fAbstractSchema; } /** * Associate an <code>ASModel</code> with a document instance. This * <code>ASModel</code> will be used by the " * <code>validate-if-schema</code>" and " * <code>datatype-normalization</code>" options during the load of a new * <code>Document</code>. */ public void setAbstractSchema(ASModel abstractSchema) { // since the ASModel associated with this object is an attribute // according to the DOM IDL, we must obliterate anything // that was set before, rather than adding to it. // REVISIT: so shouldn't we attempt to clear the // grammarPool before adding stuff to it? - NG fAbstractSchema = (ASModelImpl)abstractSchema; // make sure the GrammarPool is properly initialized. XMLGrammarPool grammarPool = (XMLGrammarPool)fConfiguration.getProperty(StandardParserConfiguration.XMLGRAMMAR_POOL); // if there is no grammar pool, create one // REVISIT: ASBuilder should always create one. if (grammarPool == null) { // something's not right in this situation... grammarPool = new XMLGrammarPoolImpl(); fConfiguration.setProperty(StandardParserConfiguration.XMLGRAMMAR_POOL, grammarPool); } if (fAbstractSchema != null) { initGrammarPool(fAbstractSchema, grammarPool); } } /** * Parse a Abstract Schema from a location identified by an URI. * * @param uri The location of the Abstract Schema to be read. * @return The newly created <code>Abstract Schema</code>. * @exception DOMASException * Exceptions raised by <code>parseASURI()</code> originate with the * installed ErrorHandler, and thus depend on the implementation of * the <code>DOMErrorHandler</code> interfaces. The default error * handlers will raise a <code>DOMASException</code> if any form of * Abstract Schema inconsistencies or warning occurs during the parse, * but application defined errorHandlers are not required to do so. * <br> WRONG_MIME_TYPE_ERR: Raised when <code>mimeTypeCheck</code> is * <code>true</code> and the inputsource has an incorrect MIME Type. * See attribute <code>mimeTypeCheck</code>. * @exception DOMSystemException * Exceptions raised by <code>parseURI()</code> originate with the * installed ErrorHandler, and thus depend on the implementation of * the <code>DOMErrorHandler</code> interfaces. The default error * handlers will raise a DOMSystemException if any form I/O or other * system error occurs during the parse, but application defined error * handlers are not required to do so. */ public ASModel parseASURI(String uri) throws DOMASException, Exception { XMLInputSource source = new XMLInputSource(null, uri, null); return parseASInputSource(source); } /** * Parse a Abstract Schema from a location identified by an * <code>DOMInputSource</code>. * * @param is The <code>DOMInputSource</code> from which the source * Abstract Schema is to be read. * @return The newly created <code>ASModel</code>. * @exception DOMASException * Exceptions raised by <code>parseASURI()</code> originate with the * installed ErrorHandler, and thus depend on the implementation of * the <code>DOMErrorHandler</code> interfaces. The default error * handlers will raise a <code>DOMASException</code> if any form of * Abstract Schema inconsistencies or warning occurs during the parse, * but application defined errorHandlers are not required to do so. 
* <br> WRONG_MIME_TYPE_ERR: Raised when <code>mimeTypeCheck</code> is * true and the inputsource has an incorrect MIME Type. See attribute * <code>mimeTypeCheck</code>. * @exception DOMSystemException * Exceptions raised by <code>parseURI()</code> originate with the * installed ErrorHandler, and thus depend on the implementation of * the <code>DOMErrorHandler</code> interfaces. The default error * handlers will raise a DOMSystemException if any form I/O or other * system error occurs during the parse, but application defined error * handlers are not required to do so. */ public ASModel parseASInputSource(DOMInputSource is) throws DOMASException, Exception { // need to wrap the DOMInputSource with an XMLInputSource XMLInputSource xis = this.dom2xmlInputSource(is); try { return parseASInputSource(xis); } catch (XNIException e) { Exception ex = e.getException(); throw ex; } } ASModel parseASInputSource(XMLInputSource is) throws Exception { if (fSchemaHandler == null) { fGrammarBucket = new XSGrammarBucket(); fSubGroupHandler = new SubstitutionGroupHandler(fGrammarBucket); fSchemaHandler = new XSDHandler(fGrammarBucket); } initGrammarBucket(); // actually do the parse: // save some casting XMLGrammarCachingConfiguration gramConfig = (XMLGrammarCachingConfiguration)fConfiguration; // ensure grammarPool doesn't absorb grammars while it's parsing gramConfig.lockGrammarPool(); SchemaGrammar grammar = gramConfig.parseXMLSchema(is, fGrammarBucket, fSchemaHandler, fSubGroupHandler); gramConfig.unlockGrammarPool(); ASModelImpl newAsModel = new ASModelImpl(); addGrammars(newAsModel, fGrammarBucket); return newAsModel; } // put all the grammars we have access to in the GrammarBucket private void initGrammarBucket() { fGrammarBucket.reset(); if (fAbstractSchema != null) initGrammarBucketRecurse(fAbstractSchema); } private void initGrammarBucketRecurse(ASModelImpl currModel) { if(currModel.getGrammar() != null) { fGrammarBucket.putGrammar(currModel.getGrammar()); } for(int i = 0; i < currModel.getInternalASModels().size(); i++) { ASModelImpl nextModel = (ASModelImpl)(currModel.getInternalASModels().elementAt(i)); initGrammarBucketRecurse(nextModel); } } private void addGrammars(ASModelImpl model, XSGrammarBucket grammarBucket) { SchemaGrammar [] grammarList = grammarBucket.getGrammars(); for(int i=0; i<grammarList.length; i++) { ASModelImpl newModel = new ASModelImpl(); newModel.setGrammar(grammarList[i]); model.addASModel(newModel); } } // addGrammars private void initGrammarPool(ASModelImpl currModel, XMLGrammarPool grammarPool) { // put all the grammars in fAbstractSchema into the grammar pool. // grammarPool must never be null! Grammar[] grammars = new Grammar[1]; if ((grammars[0] = (Grammar)currModel.getGrammar()) != null) { grammarPool.cacheGrammars(grammars[0].getGrammarDescription().getGrammarType(), grammars); } Vector modelStore = currModel.getInternalASModels(); for (int i = 0; i < modelStore.size(); i++) { initGrammarPool((ASModelImpl)modelStore.elementAt(i), grammarPool); } } } // class DOMASBuilderImpl
src/org/apache/xerces/parsers/DOMASBuilderImpl.java
/* * The Apache Software License, Version 1.1 * * * Copyright (c) 2000-2002 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "Xerces" and "Apache Software Foundation" must * not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact [email protected]. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation and was * originally based on software copyright (c) 1999, International * Business Machines, Inc., http://www.apache.org. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
*/ package org.apache.xerces.parsers; import org.apache.xerces.dom3.ls.DOMInputSource; import org.apache.xerces.dom3.as.ASModel; import org.apache.xerces.dom3.as.DOMASBuilder; import org.apache.xerces.dom3.as.DOMASException; import org.apache.xerces.xni.parser.XMLInputSource; import org.apache.xerces.xni.parser.XMLEntityResolver; import org.apache.xerces.xni.parser.XMLParserConfiguration; import org.apache.xerces.xni.XNIException; import org.apache.xerces.xni.parser.XMLConfigurationException; import org.apache.xerces.xni.grammars.XMLGrammarPool; import org.apache.xerces.xni.grammars.Grammar; import org.apache.xerces.dom.ASModelImpl; import org.apache.xerces.impl.Constants; import org.apache.xerces.impl.XMLErrorReporter; import org.apache.xerces.impl.validation.XMLGrammarPoolImpl; import org.apache.xerces.impl.xs.traversers.XSDHandler; import org.apache.xerces.impl.xs.XSDDescription; import org.apache.xerces.impl.xs.XSGrammarBucket; import org.apache.xerces.impl.xs.SubstitutionGroupHandler; import org.apache.xerces.impl.xs.models.CMBuilder; import org.apache.xerces.impl.xs.SchemaGrammar; import org.apache.xerces.impl.xs.XSConstraints; import org.apache.xerces.impl.xs.XSDeclarationPool; import java.util.Vector; import org.apache.xerces.util.SymbolTable; /** * This is Abstract Schema DOM Builder class. It extends the DOMBuilderImpl * class. Provides support for preparsing schemas. * * @author Pavani Mukthipudi, Sun Microsystems Inc. * @author Neil Graham, IBM * @version $Id$ * */ public class DOMASBuilderImpl extends DOMBuilderImpl implements DOMASBuilder { // // Constants // // Feature ids protected static final String SCHEMA_FULL_CHECKING = Constants.XERCES_FEATURE_PREFIX + Constants.SCHEMA_FULL_CHECKING; // Property ids protected static final String ERROR_REPORTER = Constants.XERCES_PROPERTY_PREFIX + Constants.ERROR_REPORTER_PROPERTY; protected static final String SYMBOL_TABLE = Constants.XERCES_PROPERTY_PREFIX + Constants.SYMBOL_TABLE_PROPERTY; protected static final String ENTITY_MANAGER = Constants.XERCES_PROPERTY_PREFIX + Constants.ENTITY_MANAGER_PROPERTY; // // Data // protected XSGrammarBucket fGrammarBucket; protected SubstitutionGroupHandler fSubGroupHandler; protected XSDHandler fSchemaHandler; protected ASModelImpl fAbstractSchema; // // Constructors // /** * Constructs a DOM Builder using the dtd/xml schema parser configuration. */ public DOMASBuilderImpl() { super(new XMLGrammarCachingConfiguration()); } // <init> /** * Constructs a DOM Builder using the specified parser configuration. * We must demand that the configuration extend XMLGrammarCachingConfiguration to make * sure all relevant methods/features are available. */ public DOMASBuilderImpl(XMLGrammarCachingConfiguration config) { super(config); } // <init>(XMLParserConfiguration) /** * Constructs a DOM Builder using the specified symbol table. */ public DOMASBuilderImpl(SymbolTable symbolTable) { super(new XMLGrammarCachingConfiguration(symbolTable)); } // <init>(SymbolTable) /** * Constructs a DOM Builder using the specified symbol table and * grammar pool. * The grammarPool implementation should extent the default * implementation; otherwise, correct functioning of this class may * not occur. */ public DOMASBuilderImpl(SymbolTable symbolTable, XMLGrammarPool grammarPool) { super(new XMLGrammarCachingConfiguration(symbolTable, grammarPool)); } // // DOMASBuilder methods // /** * Associate an <code>ASModel</code> with a document instance. 
This * <code>ASModel</code> will be used by the " * <code>validate-if-schema</code>" and " * <code>datatype-normalization</code>" options during the load of a new * <code>Document</code>. */ public ASModel getAbstractSchema() { return fAbstractSchema; } /** * Associate an <code>ASModel</code> with a document instance. This * <code>ASModel</code> will be used by the " * <code>validate-if-schema</code>" and " * <code>datatype-normalization</code>" options during the load of a new * <code>Document</code>. */ public void setAbstractSchema(ASModel abstractSchema) { // since the ASModel associated with this object is an attribute // according to the DOM IDL, we must obliterate anything // that was set before, rather than adding to it. // REVISIT: so shouldn't we attempt to clear the // grammarPool before adding stuff to it? - NG fAbstractSchema = (ASModelImpl)abstractSchema; // make sure the GrammarPool is properly initialized. XMLGrammarPool grammarPool = (XMLGrammarPool)fConfiguration.getProperty(StandardParserConfiguration.XMLGRAMMAR_POOL); // if there is no grammar pool, create one // REVISIT: ASBuilder should always create one. if (grammarPool == null) { // something's not right in this situation... grammarPool = new XMLGrammarPoolImpl(); fConfiguration.setProperty(StandardParserConfiguration.XMLGRAMMAR_POOL, grammarPool); } if (fAbstractSchema != null) { initGrammarPool(fAbstractSchema, grammarPool); } } /** * Parse a Abstract Schema from a location identified by an URI. * * @param uri The location of the Abstract Schema to be read. * @return The newly created <code>Abstract Schema</code>. * @exception DOMASException * Exceptions raised by <code>parseASURI()</code> originate with the * installed ErrorHandler, and thus depend on the implementation of * the <code>DOMErrorHandler</code> interfaces. The default error * handlers will raise a <code>DOMASException</code> if any form of * Abstract Schema inconsistencies or warning occurs during the parse, * but application defined errorHandlers are not required to do so. * <br> WRONG_MIME_TYPE_ERR: Raised when <code>mimeTypeCheck</code> is * <code>true</code> and the inputsource has an incorrect MIME Type. * See attribute <code>mimeTypeCheck</code>. * @exception DOMSystemException * Exceptions raised by <code>parseURI()</code> originate with the * installed ErrorHandler, and thus depend on the implementation of * the <code>DOMErrorHandler</code> interfaces. The default error * handlers will raise a DOMSystemException if any form I/O or other * system error occurs during the parse, but application defined error * handlers are not required to do so. */ public ASModel parseASURI(String uri) throws DOMASException, Exception { XMLInputSource source = new XMLInputSource(null, uri, null); try { return parseASInputSource(source); } catch (XNIException e) { Exception ex = e.getException(); throw ex; } } /** * Parse a Abstract Schema from a location identified by an * <code>DOMInputSource</code>. * * @param is The <code>DOMInputSource</code> from which the source * Abstract Schema is to be read. * @return The newly created <code>ASModel</code>. * @exception DOMASException * Exceptions raised by <code>parseASURI()</code> originate with the * installed ErrorHandler, and thus depend on the implementation of * the <code>DOMErrorHandler</code> interfaces. 
The default error * handlers will raise a <code>DOMASException</code> if any form of * Abstract Schema inconsistencies or warning occurs during the parse, * but application defined errorHandlers are not required to do so. * <br> WRONG_MIME_TYPE_ERR: Raised when <code>mimeTypeCheck</code> is * true and the inputsource has an incorrect MIME Type. See attribute * <code>mimeTypeCheck</code>. * @exception DOMSystemException * Exceptions raised by <code>parseURI()</code> originate with the * installed ErrorHandler, and thus depend on the implementation of * the <code>DOMErrorHandler</code> interfaces. The default error * handlers will raise a DOMSystemException if any form I/O or other * system error occurs during the parse, but application defined error * handlers are not required to do so. */ public ASModel parseASInputSource(DOMInputSource is) throws DOMASException, Exception { // need to wrap the DOMInputSource with an XMLInputSource XMLInputSource xis = this.dom2xmlInputSource(is); try { return parseASInputSource(xis); } catch (XNIException e) { Exception ex = e.getException(); throw ex; } } ASModel parseASInputSource(XMLInputSource is) throws Exception { if (fSchemaHandler == null) { fGrammarBucket = new XSGrammarBucket(); fSubGroupHandler = new SubstitutionGroupHandler(fGrammarBucket); fSchemaHandler = new XSDHandler(fGrammarBucket); } initGrammarBucket(); // actually do the parse: // save some casting XMLGrammarCachingConfiguration gramConfig = (XMLGrammarCachingConfiguration)fConfiguration; // ensure grammarPool doesn't absorb grammars while it's parsing gramConfig.lockGrammarPool(); SchemaGrammar grammar = gramConfig.parseXMLSchema(is, fGrammarBucket, fSchemaHandler, fSubGroupHandler); gramConfig.unlockGrammarPool(); ASModelImpl newAsModel = new ASModelImpl(); addGrammars(newAsModel, fGrammarBucket); return newAsModel; } // put all the grammars we have access to in the GrammarBucket private void initGrammarBucket() { fGrammarBucket.reset(); if (fAbstractSchema != null) initGrammarBucketRecurse(fAbstractSchema); } private void initGrammarBucketRecurse(ASModelImpl currModel) { if(currModel.getGrammar() != null) { fGrammarBucket.putGrammar(currModel.getGrammar()); } for(int i = 0; i < currModel.getInternalASModels().size(); i++) { ASModelImpl nextModel = (ASModelImpl)(currModel.getInternalASModels().elementAt(i)); initGrammarBucketRecurse(nextModel); } } private void addGrammars(ASModelImpl model, XSGrammarBucket grammarBucket) { SchemaGrammar [] grammarList = grammarBucket.getGrammars(); for(int i=0; i<grammarList.length; i++) { ASModelImpl newModel = new ASModelImpl(); newModel.setGrammar(grammarList[i]); model.addASModel(newModel); } } // addGrammars private void initGrammarPool(ASModelImpl currModel, XMLGrammarPool grammarPool) { // put all the grammars in fAbstractSchema into the grammar pool. // grammarPool must never be null! Grammar[] grammars = new Grammar[1]; if ((grammars[0] = (Grammar)currModel.getGrammar()) != null) { grammarPool.cacheGrammars(grammars[0].getGrammarDescription().getGrammarType(), grammars); } Vector modelStore = currModel.getInternalASModels(); for (int i = 0; i < modelStore.size(); i++) { initGrammarPool((ASModelImpl)modelStore.elementAt(i), grammarPool); } } } // class DOMASBuilderImpl
The work of "try/catch XNIException" is already done in parseASInputSource, so we shouldn't do it again in parseASURI. git-svn-id: 21df804813e9d3638e43477f308dd0be51e5f30f@318322 13f79535-47bb-0310-9956-ffa450edef68
src/org/apache/xerces/parsers/DOMASBuilderImpl.java
The work of "try/catch XNIException" is already done in parseASInputSource, so we shouldn't do it again in parseASURI.
<ide><path>rc/org/apache/xerces/parsers/DOMASBuilderImpl.java <ide> public ASModel parseASURI(String uri) <ide> throws DOMASException, Exception { <ide> XMLInputSource source = new XMLInputSource(null, uri, null); <del> try { <del> return parseASInputSource(source); <del> } <del> <del> catch (XNIException e) { <del> Exception ex = e.getException(); <del> throw ex; <del> } <del> <add> return parseASInputSource(source); <ide> } <ide> <ide> /**
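A usage note on the class above, since its Javadoc only hints at the workflow: DOMASBuilderImpl exists to preparse schemas into an ASModel and then associate that model with subsequent document loads. A minimal, hypothetical sketch using only the constructor and methods shown in the quoted source; the schema URI is a placeholder:

import org.apache.xerces.dom3.as.ASModel;
import org.apache.xerces.parsers.DOMASBuilderImpl;

public class PreparseSketch {
    public static void main(String[] args) throws Exception {
        // Preparse a schema into an ASModel; parseASURI declares
        // "throws DOMASException, Exception" in the source above.
        DOMASBuilderImpl builder = new DOMASBuilderImpl();
        ASModel model = builder.parseASURI("file:mySchema.xsd");
        // Register the model so "validate-if-schema" /
        // "datatype-normalization" loads can reuse its grammars.
        builder.setAbstractSchema(model);
    }
}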
Java
apache-2.0
1ee74217dcc5c48adda791517e4f38be4b2e78ba
0
ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE
package uk.ac.ebi.quickgo.rest.search.results.transformer; import uk.ac.ebi.quickgo.rest.search.request.converter.RESTFilterConverterFactory; import uk.ac.ebi.quickgo.rest.search.results.QueryResult; import java.util.List; import static com.google.common.base.Preconditions.checkArgument; /** * For every model held by a {@link QueryResult}, update the model using each injector passed to the mutate method. * @author Tony Wardell * Date: 09/10/2017 * Time: 10:33 * Created with IntelliJ IDEA. */ public class ValueInjectionToQueryResults<R> implements ValueMutator<QueryResult<R>, R> { private final RESTFilterConverterFactory restFilterConverterFactory; public ValueInjectionToQueryResults(RESTFilterConverterFactory restFilterConverterFactory) { checkArgument(restFilterConverterFactory != null, "RESTFilterConverterFactory cannot be null"); this.restFilterConverterFactory = restFilterConverterFactory; } @Override public void mutate(QueryResult<R> results, List<ResponseValueInjector<R>> requiredInjectors) { results.getResults() .forEach(result -> requiredInjectors.forEach(valueInjector -> valueInjector.inject( restFilterConverterFactory, result))); } }
rest-common/src/main/java/uk/ac/ebi/quickgo/rest/search/results/transformer/ValueInjectionToQueryResults.java
package uk.ac.ebi.quickgo.rest.search.results.transformer; import uk.ac.ebi.quickgo.rest.search.request.converter.RESTFilterConverterFactory; import uk.ac.ebi.quickgo.rest.search.results.QueryResult; import java.util.List; import static com.google.common.base.Preconditions.checkArgument; /** * For every model held by a {@link QueryResult}, update the model using each injector passed to the mutate method. * @author Tony Wardell * Date: 09/10/2017 * Time: 10:33 * Created with IntelliJ IDEA. */ public class ValueInjectionToQueryResults<R> implements ValueMutator<QueryResult<R>, R> { private final RESTFilterConverterFactory restFilterConverterFactory; public ValueInjectionToQueryResults(RESTFilterConverterFactory restFilterConverterFactory) { checkArgument(restFilterConverterFactory != null, "RESTFilterConverterFactory cannot be null"); this.restFilterConverterFactory = restFilterConverterFactory; } @Override public void mutate(QueryResult<R> results, List<ResponseValueInjector<R>> requiredInjectors) { results.getResults() .forEach(result -> requiredInjectors.forEach(valueInjector -> valueInjector.inject( restFilterConverterFactory, result))); } }
Reformat.
rest-common/src/main/java/uk/ac/ebi/quickgo/rest/search/results/transformer/ValueInjectionToQueryResults.java
Reformat.
<ide><path>est-common/src/main/java/uk/ac/ebi/quickgo/rest/search/results/transformer/ValueInjectionToQueryResults.java <ide> this.restFilterConverterFactory = restFilterConverterFactory; <ide> } <ide> <del> @Override public void mutate(QueryResult<R> results, List<ResponseValueInjector<R>> requiredInjectors) { <add> @Override <add> public void mutate(QueryResult<R> results, List<ResponseValueInjector<R>> requiredInjectors) { <ide> results.getResults() <del> .forEach(result -> requiredInjectors.forEach(valueInjector -> valueInjector.inject( <del> restFilterConverterFactory, <del> result))); <del> } <add> .forEach(result -> requiredInjectors.forEach(valueInjector -> valueInjector.inject( <add> restFilterConverterFactory, <add> result))); <add> } <ide> }
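The commit above is whitespace-only, but the class itself is a compact pattern worth spelling out: mutate runs every injector against every result, a nested forEach. A self-contained Java sketch of that pattern; the Injector interface and all names here are illustrative stand-ins, not the QuickGO API:

import java.util.Arrays;
import java.util.List;

public class InjectionSketch {
    // Stand-in for ResponseValueInjector: mutates one result in place.
    interface Injector<R> {
        void inject(R result);
    }

    // Same shape as ValueInjectionToQueryResults.mutate: every injector
    // is applied to every result.
    static <R> void mutate(List<R> results, List<Injector<R>> injectors) {
        results.forEach(result -> injectors.forEach(i -> i.inject(result)));
    }

    public static void main(String[] args) {
        List<StringBuilder> results =
                Arrays.asList(new StringBuilder("a"), new StringBuilder("b"));
        List<Injector<StringBuilder>> injectors =
                Arrays.asList(sb -> sb.append("-x"), sb -> sb.append("-y"));
        mutate(results, injectors);
        System.out.println(results); // prints [a-x-y, b-x-y]
    }
}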
JavaScript
apache-2.0
a25da4ebf4c72da01ea8785179b9582a81372e7f
0
krombel/matrix-js-sdk,matrix-org/matrix-js-sdk,matrix-org/matrix-js-sdk,matrix-org/matrix-js-sdk,krombel/matrix-js-sdk,krombel/matrix-js-sdk,matrix-org/matrix-js-sdk
/* Copyright 2017 Vector Creations Ltd Copyright 2018 New Vector Ltd Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import Promise from 'bluebird'; import utils from '../../utils'; export const VERSION = 6; /** * Implementation of a CryptoStore which is backed by an existing * IndexedDB connection. Generally you want IndexedDBCryptoStore * which connects to the database and defers to one of these. * * @implements {module:crypto/store/base~CryptoStore} */ export class Backend { /** * @param {IDBDatabase} db */ constructor(db) { this._db = db; // make sure we close the db on `onversionchange` - otherwise // attempts to delete the database will block (and subsequent // attempts to re-create it will also block). db.onversionchange = (ev) => { console.log(`versionchange for indexeddb ${this._dbName}: closing`); db.close(); }; } /** * Look for an existing outgoing room key request, and if none is found, * add a new one * * @param {module:crypto/store/base~OutgoingRoomKeyRequest} request * * @returns {Promise} resolves to * {@link module:crypto/store/base~OutgoingRoomKeyRequest}: either the * same instance as passed in, or the existing one. */ getOrAddOutgoingRoomKeyRequest(request) { const requestBody = request.requestBody; const deferred = Promise.defer(); const txn = this._db.transaction("outgoingRoomKeyRequests", "readwrite"); txn.onerror = deferred.reject; // first see if we already have an entry for this request. this._getOutgoingRoomKeyRequest(txn, requestBody, (existing) => { if (existing) { // this entry matches the request - return it. console.log( `already have key request outstanding for ` + `${requestBody.room_id} / ${requestBody.session_id}: ` + `not sending another`, ); deferred.resolve(existing); return; } // we got to the end of the list without finding a match // - add the new request. console.log( `enqueueing key request for ${requestBody.room_id} / ` + requestBody.session_id, ); txn.oncomplete = () => { deferred.resolve(request); }; const store = txn.objectStore("outgoingRoomKeyRequests"); store.add(request); }); return deferred.promise; } /** * Look for an existing room key request * * @param {module:crypto~RoomKeyRequestBody} requestBody * existing request to look for * * @return {Promise} resolves to the matching * {@link module:crypto/store/base~OutgoingRoomKeyRequest}, or null if * not found */ getOutgoingRoomKeyRequest(requestBody) { const deferred = Promise.defer(); const txn = this._db.transaction("outgoingRoomKeyRequests", "readonly"); txn.onerror = deferred.reject; this._getOutgoingRoomKeyRequest(txn, requestBody, (existing) => { deferred.resolve(existing); }); return deferred.promise; } /** * look for an existing room key request in the db * * @private * @param {IDBTransaction} txn database transaction * @param {module:crypto~RoomKeyRequestBody} requestBody * existing request to look for * @param {Function} callback function to call with the results of the * search. Either passed a matching * {@link module:crypto/store/base~OutgoingRoomKeyRequest}, or null if * not found. 
*/ _getOutgoingRoomKeyRequest(txn, requestBody, callback) { const store = txn.objectStore("outgoingRoomKeyRequests"); const idx = store.index("session"); const cursorReq = idx.openCursor([ requestBody.room_id, requestBody.session_id, ]); cursorReq.onsuccess = (ev) => { const cursor = ev.target.result; if(!cursor) { // no match found callback(null); return; } const existing = cursor.value; if (utils.deepCompare(existing.requestBody, requestBody)) { // got a match callback(existing); return; } // look at the next entry in the index cursor.continue(); }; } /** * Look for room key requests by state * * @param {Array<Number>} wantedStates list of acceptable states * * @return {Promise} resolves to the a * {@link module:crypto/store/base~OutgoingRoomKeyRequest}, or null if * there are no pending requests in those states. If there are multiple * requests in those states, an arbitrary one is chosen. */ getOutgoingRoomKeyRequestByState(wantedStates) { if (wantedStates.length === 0) { return Promise.resolve(null); } // this is a bit tortuous because we need to make sure we do the lookup // in a single transaction, to avoid having a race with the insertion // code. // index into the wantedStates array let stateIndex = 0; let result; function onsuccess(ev) { const cursor = ev.target.result; if (cursor) { // got a match result = cursor.value; return; } // try the next state in the list stateIndex++; if (stateIndex >= wantedStates.length) { // no matches return; } const wantedState = wantedStates[stateIndex]; const cursorReq = ev.target.source.openCursor(wantedState); cursorReq.onsuccess = onsuccess; } const txn = this._db.transaction("outgoingRoomKeyRequests", "readonly"); const store = txn.objectStore("outgoingRoomKeyRequests"); const wantedState = wantedStates[stateIndex]; const cursorReq = store.index("state").openCursor(wantedState); cursorReq.onsuccess = onsuccess; return promiseifyTxn(txn).then(() => result); } /** * Look for an existing room key request by id and state, and update it if * found * * @param {string} requestId ID of request to update * @param {number} expectedState state we expect to find the request in * @param {Object} updates name/value map of updates to apply * * @returns {Promise} resolves to * {@link module:crypto/store/base~OutgoingRoomKeyRequest} * updated request, or null if no matching row was found */ updateOutgoingRoomKeyRequest(requestId, expectedState, updates) { let result = null; function onsuccess(ev) { const cursor = ev.target.result; if (!cursor) { return; } const data = cursor.value; if (data.state != expectedState) { console.warn( `Cannot update room key request from ${expectedState} ` + `as it was already updated to ${data.state}`, ); return; } Object.assign(data, updates); cursor.update(data); result = data; } const txn = this._db.transaction("outgoingRoomKeyRequests", "readwrite"); const cursorReq = txn.objectStore("outgoingRoomKeyRequests") .openCursor(requestId); cursorReq.onsuccess = onsuccess; return promiseifyTxn(txn).then(() => result); } /** * Look for an existing room key request by id and state, and delete it if * found * * @param {string} requestId ID of request to update * @param {number} expectedState state we expect to find the request in * * @returns {Promise} resolves once the operation is completed */ deleteOutgoingRoomKeyRequest(requestId, expectedState) { const txn = this._db.transaction("outgoingRoomKeyRequests", "readwrite"); const cursorReq = txn.objectStore("outgoingRoomKeyRequests") .openCursor(requestId); cursorReq.onsuccess = (ev) => 
{ const cursor = ev.target.result; if (!cursor) { return; } const data = cursor.value; if (data.state != expectedState) { console.warn( `Cannot delete room key request in state ${data.state} ` + `(expected ${expectedState})`, ); return; } cursor.delete(); }; return promiseifyTxn(txn); } // Olm Account getAccount(txn, func) { const objectStore = txn.objectStore("account"); const getReq = objectStore.get("-"); getReq.onsuccess = function() { try { func(getReq.result || null); } catch (e) { abortWithException(txn, e); } }; } storeAccount(txn, newData) { const objectStore = txn.objectStore("account"); objectStore.put(newData, "-"); } // Olm Sessions countEndToEndSessions(txn, func) { const objectStore = txn.objectStore("sessions"); const countReq = objectStore.count(); countReq.onsuccess = function() { func(countReq.result); }; } getEndToEndSessions(deviceKey, txn, func) { const objectStore = txn.objectStore("sessions"); const idx = objectStore.index("deviceKey"); const getReq = idx.openCursor(deviceKey); const results = {}; getReq.onsuccess = function() { const cursor = getReq.result; if (cursor) { results[cursor.value.sessionId] = cursor.value.session; cursor.continue(); } else { try { func(results); } catch (e) { abortWithException(txn, e); } } }; } getEndToEndSession(deviceKey, sessionId, txn, func) { const objectStore = txn.objectStore("sessions"); const getReq = objectStore.get([deviceKey, sessionId]); getReq.onsuccess = function() { try { if (getReq.result) { func(getReq.result.session); } else { func(null); } } catch (e) { abortWithException(txn, e); } }; } storeEndToEndSession(deviceKey, sessionId, session, txn) { const objectStore = txn.objectStore("sessions"); objectStore.put({deviceKey, sessionId, session}); } // Inbound group sessions getEndToEndInboundGroupSession(senderCurve25519Key, sessionId, txn, func) { const objectStore = txn.objectStore("inbound_group_sessions"); const getReq = objectStore.get([senderCurve25519Key, sessionId]); getReq.onsuccess = function() { try { if (getReq.result) { func(getReq.result.session); } else { func(null); } } catch (e) { abortWithException(txn, e); } }; } getAllEndToEndInboundGroupSessions(txn, func) { const objectStore = txn.objectStore("inbound_group_sessions"); const getReq = objectStore.openCursor(); getReq.onsuccess = function() { const cursor = getReq.result; if (cursor) { try { func({ senderKey: cursor.value.senderCurve25519Key, sessionId: cursor.value.sessionId, sessionData: cursor.value.session, }); } catch (e) { abortWithException(txn, e); } cursor.continue(); } else { try { func(null); } catch (e) { abortWithException(txn, e); } } }; } addEndToEndInboundGroupSession(senderCurve25519Key, sessionId, sessionData, txn) { const objectStore = txn.objectStore("inbound_group_sessions"); const addReq = objectStore.add({ senderCurve25519Key, sessionId, session: sessionData, }); addReq.onerror = () => { if (addReq.name === 'ConstraintError') { console.log( "Ignoring duplicate inbound group session: " + senderCurve25519Key + " / " + sessionId, ); } else { abortWithException(txn, new Error( "Failed to add inbound group session" + addReq.error, )); } }; } storeEndToEndInboundGroupSession(senderCurve25519Key, sessionId, sessionData, txn) { const objectStore = txn.objectStore("inbound_group_sessions"); objectStore.put({ senderCurve25519Key, sessionId, session: sessionData, }); } getEndToEndDeviceData(txn, func) { const objectStore = txn.objectStore("device_data"); const getReq = objectStore.get("-"); getReq.onsuccess = function() { try { 
func(getReq.result || null); } catch (e) { abortWithException(txn, e); } }; } storeEndToEndDeviceData(deviceData, txn) { const objectStore = txn.objectStore("device_data"); objectStore.put(deviceData, "-"); } storeEndToEndRoom(roomId, roomInfo, txn) { const objectStore = txn.objectStore("rooms"); objectStore.put(roomInfo, roomId); } getEndToEndRooms(txn, func) { const rooms = {}; const objectStore = txn.objectStore("rooms"); const getReq = objectStore.openCursor(); getReq.onsuccess = function() { const cursor = getReq.result; if (cursor) { rooms[cursor.key] = cursor.value; cursor.continue(); } else { try { func(rooms); } catch (e) { abortWithException(txn, e); } } }; } doTxn(mode, stores, func) { const txn = this._db.transaction(stores, mode); const promise = promiseifyTxn(txn); const result = func(txn); return promise.then(() => { return result; }); } } export function upgradeDatabase(db, oldVersion) { console.log( `Upgrading IndexedDBCryptoStore from version ${oldVersion}` + ` to ${VERSION}`, ); if (oldVersion < 1) { // The database did not previously exist. createDatabase(db); } if (oldVersion < 2) { db.createObjectStore("account"); } if (oldVersion < 3) { const sessionsStore = db.createObjectStore("sessions", { keyPath: ["deviceKey", "sessionId"], }); sessionsStore.createIndex("deviceKey", "deviceKey"); } if (oldVersion < 4) { db.createObjectStore("inbound_group_sessions", { keyPath: ["senderCurve25519Key", "sessionId"], }); } if (oldVersion < 5) { db.createObjectStore("device_data"); } if (oldVersion < 6) { db.createObjectStore("rooms"); } // Expand as needed. } function createDatabase(db) { const outgoingRoomKeyRequestsStore = db.createObjectStore("outgoingRoomKeyRequests", { keyPath: "requestId" }); // we assume that the RoomKeyRequestBody will have room_id and session_id // properties, to make the index efficient. outgoingRoomKeyRequestsStore.createIndex("session", ["requestBody.room_id", "requestBody.session_id"], ); outgoingRoomKeyRequestsStore.createIndex("state", "state"); } /* * Aborts a transaction with a given exception * The transaction promise will be rejected with this exception. */ function abortWithException(txn, e) { // We cheekily stick our exception onto the transaction object here // We could alternatively make the thing we pass back to the app // an object containing the transaction and exception. txn._mx_abortexception = e; try { txn.abort(); } catch (e) { // sometimes we won't be able to abort the transaction // (ie. if it's aborted or completed) } } function promiseifyTxn(txn) { return new Promise((resolve, reject) => { txn.oncomplete = () => { if (txn._mx_abortexception !== undefined) { reject(txn._mx_abortexception); } resolve(); }; txn.onerror = () => { if (txn._mx_abortexception !== undefined) { reject(txn._mx_abortexception); } reject(); }; txn.onabort = () => reject(txn._mx_abortexception); }); }
src/crypto/store/indexeddb-crypto-store-backend.js
/* Copyright 2017 Vector Creations Ltd Copyright 2018 New Vector Ltd Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import Promise from 'bluebird'; import utils from '../../utils'; export const VERSION = 6; /** * Implementation of a CryptoStore which is backed by an existing * IndexedDB connection. Generally you want IndexedDBCryptoStore * which connects to the database and defers to one of these. * * @implements {module:crypto/store/base~CryptoStore} */ export class Backend { /** * @param {IDBDatabase} db */ constructor(db) { this._db = db; // make sure we close the db on `onversionchange` - otherwise // attempts to delete the database will block (and subsequent // attempts to re-create it will also block). db.onversionchange = (ev) => { console.log(`versionchange for indexeddb ${this._dbName}: closing`); db.close(); }; } /** * Look for an existing outgoing room key request, and if none is found, * add a new one * * @param {module:crypto/store/base~OutgoingRoomKeyRequest} request * * @returns {Promise} resolves to * {@link module:crypto/store/base~OutgoingRoomKeyRequest}: either the * same instance as passed in, or the existing one. */ getOrAddOutgoingRoomKeyRequest(request) { const requestBody = request.requestBody; const deferred = Promise.defer(); const txn = this._db.transaction("outgoingRoomKeyRequests", "readwrite"); txn.onerror = deferred.reject; // first see if we already have an entry for this request. this._getOutgoingRoomKeyRequest(txn, requestBody, (existing) => { if (existing) { // this entry matches the request - return it. console.log( `already have key request outstanding for ` + `${requestBody.room_id} / ${requestBody.session_id}: ` + `not sending another`, ); deferred.resolve(existing); return; } // we got to the end of the list without finding a match // - add the new request. console.log( `enqueueing key request for ${requestBody.room_id} / ` + requestBody.session_id, ); txn.oncomplete = () => { deferred.resolve(request); }; const store = txn.objectStore("outgoingRoomKeyRequests"); store.add(request); }); return deferred.promise; } /** * Look for an existing room key request * * @param {module:crypto~RoomKeyRequestBody} requestBody * existing request to look for * * @return {Promise} resolves to the matching * {@link module:crypto/store/base~OutgoingRoomKeyRequest}, or null if * not found */ getOutgoingRoomKeyRequest(requestBody) { const deferred = Promise.defer(); const txn = this._db.transaction("outgoingRoomKeyRequests", "readonly"); txn.onerror = deferred.reject; this._getOutgoingRoomKeyRequest(txn, requestBody, (existing) => { deferred.resolve(existing); }); return deferred.promise; } /** * look for an existing room key request in the db * * @private * @param {IDBTransaction} txn database transaction * @param {module:crypto~RoomKeyRequestBody} requestBody * existing request to look for * @param {Function} callback function to call with the results of the * search. Either passed a matching * {@link module:crypto/store/base~OutgoingRoomKeyRequest}, or null if * not found. 
*/ _getOutgoingRoomKeyRequest(txn, requestBody, callback) { const store = txn.objectStore("outgoingRoomKeyRequests"); const idx = store.index("session"); const cursorReq = idx.openCursor([ requestBody.room_id, requestBody.session_id, ]); cursorReq.onsuccess = (ev) => { const cursor = ev.target.result; if(!cursor) { // no match found callback(null); return; } const existing = cursor.value; if (utils.deepCompare(existing.requestBody, requestBody)) { // got a match callback(existing); return; } // look at the next entry in the index cursor.continue(); }; } /** * Look for room key requests by state * * @param {Array<Number>} wantedStates list of acceptable states * * @return {Promise} resolves to the a * {@link module:crypto/store/base~OutgoingRoomKeyRequest}, or null if * there are no pending requests in those states. If there are multiple * requests in those states, an arbitrary one is chosen. */ getOutgoingRoomKeyRequestByState(wantedStates) { if (wantedStates.length === 0) { return Promise.resolve(null); } // this is a bit tortuous because we need to make sure we do the lookup // in a single transaction, to avoid having a race with the insertion // code. // index into the wantedStates array let stateIndex = 0; let result; function onsuccess(ev) { const cursor = ev.target.result; if (cursor) { // got a match result = cursor.value; return; } // try the next state in the list stateIndex++; if (stateIndex >= wantedStates.length) { // no matches return; } const wantedState = wantedStates[stateIndex]; const cursorReq = ev.target.source.openCursor(wantedState); cursorReq.onsuccess = onsuccess; } const txn = this._db.transaction("outgoingRoomKeyRequests", "readonly"); const store = txn.objectStore("outgoingRoomKeyRequests"); const wantedState = wantedStates[stateIndex]; const cursorReq = store.index("state").openCursor(wantedState); cursorReq.onsuccess = onsuccess; return promiseifyTxn(txn).then(() => result); } /** * Look for an existing room key request by id and state, and update it if * found * * @param {string} requestId ID of request to update * @param {number} expectedState state we expect to find the request in * @param {Object} updates name/value map of updates to apply * * @returns {Promise} resolves to * {@link module:crypto/store/base~OutgoingRoomKeyRequest} * updated request, or null if no matching row was found */ updateOutgoingRoomKeyRequest(requestId, expectedState, updates) { let result = null; function onsuccess(ev) { const cursor = ev.target.result; if (!cursor) { return; } const data = cursor.value; if (data.state != expectedState) { console.warn( `Cannot update room key request from ${expectedState} ` + `as it was already updated to ${data.state}`, ); return; } Object.assign(data, updates); cursor.update(data); result = data; } const txn = this._db.transaction("outgoingRoomKeyRequests", "readwrite"); const cursorReq = txn.objectStore("outgoingRoomKeyRequests") .openCursor(requestId); cursorReq.onsuccess = onsuccess; return promiseifyTxn(txn).then(() => result); } /** * Look for an existing room key request by id and state, and delete it if * found * * @param {string} requestId ID of request to update * @param {number} expectedState state we expect to find the request in * * @returns {Promise} resolves once the operation is completed */ deleteOutgoingRoomKeyRequest(requestId, expectedState) { const txn = this._db.transaction("outgoingRoomKeyRequests", "readwrite"); const cursorReq = txn.objectStore("outgoingRoomKeyRequests") .openCursor(requestId); cursorReq.onsuccess = (ev) => 
{ const cursor = ev.target.result; if (!cursor) { return; } const data = cursor.value; if (data.state != expectedState) { console.warn( `Cannot delete room key request in state ${data.state} ` + `(expected ${expectedState})`, ); return; } cursor.delete(); }; return promiseifyTxn(txn); } // Olm Account getAccount(txn, func) { const objectStore = txn.objectStore("account"); const getReq = objectStore.get("-"); getReq.onsuccess = function() { try { func(getReq.result || null); } catch (e) { abortWithException(txn, e); } }; } storeAccount(txn, newData) { const objectStore = txn.objectStore("account"); objectStore.put(newData, "-"); } // Olm Sessions countEndToEndSessions(txn, func) { const objectStore = txn.objectStore("sessions"); const countReq = objectStore.count(); countReq.onsuccess = function() { func(countReq.result); }; } getEndToEndSessions(deviceKey, txn, func) { const objectStore = txn.objectStore("sessions"); const idx = objectStore.index("deviceKey"); const getReq = idx.openCursor(deviceKey); const results = {}; getReq.onsuccess = function() { const cursor = getReq.result; if (cursor) { results[cursor.value.sessionId] = cursor.value.session; cursor.continue(); } else { try { func(results); } catch (e) { abortWithException(txn, e); } } }; } getEndToEndSession(deviceKey, sessionId, txn, func) { const objectStore = txn.objectStore("sessions"); const getReq = objectStore.get([deviceKey, sessionId]); getReq.onsuccess = function() { try { if (getReq.result) { func(getReq.result.session); } else { func(null); } } catch (e) { abortWithException(txn, e); } }; } storeEndToEndSession(deviceKey, sessionId, session, txn) { const objectStore = txn.objectStore("sessions"); objectStore.put({deviceKey, sessionId, session}); } // Inbound group sessions getEndToEndInboundGroupSession(senderCurve25519Key, sessionId, txn, func) { const objectStore = txn.objectStore("inbound_group_sessions"); const getReq = objectStore.get([senderCurve25519Key, sessionId]); getReq.onsuccess = function() { try { if (getReq.result) { func(getReq.result.session); } else { func(null); } } catch (e) { abortWithException(txn, e); } }; } getAllEndToEndInboundGroupSessions(txn, func) { const objectStore = txn.objectStore("inbound_group_sessions"); const getReq = objectStore.openCursor(); getReq.onsuccess = function() { const cursor = getReq.result; if (cursor) { try { func({ senderKey: cursor.value.senderCurve25519Key, sessionId: cursor.value.sessionId, sessionData: cursor.value.session, }); } catch (e) { abortWithException(txn, e); } cursor.continue(); } else { try { func(null); } catch (e) { abortWithException(txn, e); } } }; } addEndToEndInboundGroupSession(senderCurve25519Key, sessionId, sessionData, txn) { const objectStore = txn.objectStore("inbound_group_sessions"); const addReq = objectStore.add({ senderCurve25519Key, sessionId, session: sessionData, }); addReq.onerror = () => { abortWithException(txn, new Error( "Failed to add inbound group session - session may already exist: " + addReq.error, )); }; } storeEndToEndInboundGroupSession(senderCurve25519Key, sessionId, sessionData, txn) { const objectStore = txn.objectStore("inbound_group_sessions"); objectStore.put({ senderCurve25519Key, sessionId, session: sessionData, }); } getEndToEndDeviceData(txn, func) { const objectStore = txn.objectStore("device_data"); const getReq = objectStore.get("-"); getReq.onsuccess = function() { try { func(getReq.result || null); } catch (e) { abortWithException(txn, e); } }; } storeEndToEndDeviceData(deviceData, txn) { const 
objectStore = txn.objectStore("device_data"); objectStore.put(deviceData, "-"); } storeEndToEndRoom(roomId, roomInfo, txn) { const objectStore = txn.objectStore("rooms"); objectStore.put(roomInfo, roomId); } getEndToEndRooms(txn, func) { const rooms = {}; const objectStore = txn.objectStore("rooms"); const getReq = objectStore.openCursor(); getReq.onsuccess = function() { const cursor = getReq.result; if (cursor) { rooms[cursor.key] = cursor.value; cursor.continue(); } else { try { func(rooms); } catch (e) { abortWithException(txn, e); } } }; } doTxn(mode, stores, func) { const txn = this._db.transaction(stores, mode); const promise = promiseifyTxn(txn); const result = func(txn); return promise.then(() => { return result; }); } } export function upgradeDatabase(db, oldVersion) { console.log( `Upgrading IndexedDBCryptoStore from version ${oldVersion}` + ` to ${VERSION}`, ); if (oldVersion < 1) { // The database did not previously exist. createDatabase(db); } if (oldVersion < 2) { db.createObjectStore("account"); } if (oldVersion < 3) { const sessionsStore = db.createObjectStore("sessions", { keyPath: ["deviceKey", "sessionId"], }); sessionsStore.createIndex("deviceKey", "deviceKey"); } if (oldVersion < 4) { db.createObjectStore("inbound_group_sessions", { keyPath: ["senderCurve25519Key", "sessionId"], }); } if (oldVersion < 5) { db.createObjectStore("device_data"); } if (oldVersion < 6) { db.createObjectStore("rooms"); } // Expand as needed. } function createDatabase(db) { const outgoingRoomKeyRequestsStore = db.createObjectStore("outgoingRoomKeyRequests", { keyPath: "requestId" }); // we assume that the RoomKeyRequestBody will have room_id and session_id // properties, to make the index efficient. outgoingRoomKeyRequestsStore.createIndex("session", ["requestBody.room_id", "requestBody.session_id"], ); outgoingRoomKeyRequestsStore.createIndex("state", "state"); } /* * Aborts a transaction with a given exception * The transaction promise will be rejected with this exception. */ function abortWithException(txn, e) { // We cheekily stick our exception onto the transaction object here // We could alternatively make the thing we pass back to the app // an object containing the transaction and exception. txn._mx_abortexception = e; try { txn.abort(); } catch (e) { // sometimes we won't be able to abort the transaction // (ie. if it's aborted or completed) } } function promiseifyTxn(txn) { return new Promise((resolve, reject) => { txn.oncomplete = () => { if (txn._mx_abortexception !== undefined) { reject(txn._mx_abortexception); } resolve(); }; txn.onerror = () => { if (txn._mx_abortexception !== undefined) { reject(txn._mx_abortexception); } reject(); }; txn.onabort = () => reject(txn._mx_abortexception); }); }
Ignore inserts of duplicate inbound group sessions, rather than aborting the whole txn. This is causing e2e migration failures in the wild.
src/crypto/store/indexeddb-crypto-store-backend.js
Ignore inserts of duplicate inbound group sessions
<ide><path>rc/crypto/store/indexeddb-crypto-store-backend.js <ide> senderCurve25519Key, sessionId, session: sessionData, <ide> }); <ide> addReq.onerror = () => { <del> abortWithException(txn, new Error( <del> "Failed to add inbound group session - session may already exist: " + <del> addReq.error, <del> )); <add> if (addReq.name === 'ConstraintError') { <add> console.log( <add> "Ignoring duplicate inbound group session: " + <add> senderCurve25519Key + " / " + sessionId, <add> ); <add> } else { <add> abortWithException(txn, new Error( <add> "Failed to add inbound group session" + addReq.error, <add> )); <add> } <ide> }; <ide> } <ide>
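Two notes on the change above. First, a hedged caveat that depends on the IndexedDB API: an IDBRequest surfaces its failure on addReq.error, so the duplicate check would normally be written addReq.error.name === 'ConstraintError'; as committed, addReq.name is undefined and duplicates still fall into the abort branch. Second, the intent itself, treating a duplicate-key insert as a logged no-op instead of failing the whole transaction, is a general pattern; here is a self-contained Java analogue, with a map standing in for the object store and all names illustrative:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class DuplicateTolerantStore {
    private final Map<String, String> sessions = new ConcurrentHashMap<>();

    void addSession(String key, String session) {
        // Analogue of the ConstraintError branch: if the key already
        // exists, log and ignore rather than aborting the operation.
        if (sessions.putIfAbsent(key, session) != null) {
            System.out.println("Ignoring duplicate inbound group session: " + key);
        }
    }

    public static void main(String[] args) {
        DuplicateTolerantStore store = new DuplicateTolerantStore();
        store.addSession("senderKey/sessionId", "payload");
        store.addSession("senderKey/sessionId", "payload"); // ignored, no failure
    }
}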
JavaScript
mit
bb549b24d9d5e12dffb9a7a27573dd5d95446a6d
0
Themacprod/sshcmd
/* global module:true */ "use strict"; var React = require("react"), _ = require("lodash"), request = require("superagent"); module.exports = React.createClass({ getConsecutiveOffsetChunk: function() { var difference = -1, temp = [], result = []; _.forEach(this.props.data, function(data, index) { if (difference !== (data.offset - index)) { if (difference !== -1) { result.push(temp); temp = []; } difference = data.offset - index; } temp.push(data.offset); }); if (temp.length) { result.push(temp); } // From [0,1,2] [4,5] [7] get [0,3] [4,2] [7,1] return _.map(result, function(chunk) { return { startoffset: chunk[0], count: chunk.length }; }); }, toHexadecimal: function(integer) { if (integer < 16) { return "0x0" + integer.toString(16).toUpperCase(); } return "0x" + integer.toString(16).toUpperCase(); }, createLine: function(data, index) { return React.DOM.tbody( { key: data.offset }, React.DOM.tr( null, React.DOM.th( { className: "offset", scope: "row" }, this.toHexadecimal(data.offset) ), React.DOM.td( null, data.name ), React.DOM.td( { className: "value" }, this.state.readData[index] ) ) ); }, readData: function(data) { request .post("/api/i2cread/") .send({ ip: this.props.ip, cmd: "I2cRead", bus: this.props.bus, address: this.props.address, offsetsize: this.props.offsetsize, startoffset: data.startoffset, datacount: data.count }) .end(function(err, res) { if (err) { console.log("Read failed!"); } else { var startIndex = _.findIndex(this.props.data, function(o) { return o.offset === data.startoffset; }); for (var i = 0; i < data.count; i += 1) { this.state.readData[startIndex + i] = res.body.data[i]; } this.forceUpdate(); } }.bind(this)); }, componentDidMount: function() { _.forEach(this.getConsecutiveOffsetChunk(), function(data) { this.readData(data); }.bind(this)); }, getInitialState: function() { return { readData: _.fill(Array(this.props.data.length), "-") }; }, render: function() { return React.DOM.div( null, React.DOM.div( { className: "registerlist" }, React.DOM.table( { className: "table table-sm table-bordered" }, React.DOM.thead( null, React.DOM.tr( { className: "table-dark" }, React.DOM.th( null, "Offset" ), React.DOM.th( null, "Name" ), React.DOM.th( null, "Value" ) ) ), _.map(this.props.data, function(data, index) { return this.createLine(data, index); }.bind(this)) ) ) ); } });
react_components/register.js
/* global module:true */ "use strict"; var React = require("react"), _ = require("lodash"), request = require("superagent"); module.exports = React.createClass({ getConsecutiveOffsetChunk: function() { var difference = -1, temp = [], result = []; _.forEach(this.props.data, function(data, index) { if (difference !== (data.offset - index)) { if (difference !== -1) { result.push(temp); temp = []; } difference = data.offset - index; } temp.push(data.offset); }); if (temp.length) { result.push(temp); } // From [0,1,2] [4,5] [7] get [0,3] [4,2] [7,1] return _.map(result, function(chunk) { return { startoffset: chunk[0], count: chunk.length }; }); }, toHexadecimal: function(integer) { if (integer < 16) { return "0x0" + integer.toString(16).toUpperCase(); } return "0x" + integer.toString(16).toUpperCase(); }, createLine: function(data, index) { return React.DOM.tbody( { key: data.offset }, React.DOM.tr( null, React.DOM.th( { className: "offset", scope: "row" }, this.toHexadecimal(data.offset) ), React.DOM.td( null, data.name ), React.DOM.td( { className: "value" }, this.state.readData[index] ) ) ); }, readData: function(data) { request .post("/api/i2cread/") .send({ ip: this.props.ip, cmd: "I2cRead", bus: this.props.bus, address: this.props.address, offsetsize: this.props.offsetsize, startoffset: data.startoffset, datacount: data.count }) .end(function(err, res) { if (err) { console.log("Read failed!"); } else { var startIndex = _.findIndex(this.props.data, function(o) { return o.offset === data.startoffset; }); for (var i = 0; i < data.count; i += 1) { this.state.readData[startIndex + i] = res.body.data[i]; } this.forceUpdate(); } }.bind(this)); }, componentDidMount: function() { _.forEach(this.getConsecutiveOffsetChunk(), function(data) { this.readData(data); }.bind(this)); }, getInitialState: function() { return { readData: _.fill(Array(this.props.data.length), "-") }; }, render: function() { return React.DOM.div( null, React.DOM.div( { className: "registerlist" }, React.DOM.table( { className: "table table-sm table-bordered" }, React.DOM.thead( null, React.DOM.tr( { className: "table-dark" }, React.DOM.th( null, "Offset" ), React.DOM.th( null, "Name" ), React.DOM.th( null, "Value" ) ) ), _.map(this.props.data, function(data, index) { return this.createLine(data, index); }.bind(this)) ) ) ); } });
Beautify code
react_components/register.js
Beautify code
<ide><path>eact_components/register.js <ide> .post("/api/i2cread/") <ide> .send({ <ide> ip: this.props.ip, <del> cmd: "I2cRead", <add> cmd: "I2cRead", <ide> bus: this.props.bus, <ide> address: this.props.address, <ide> offsetsize: this.props.offsetsize, <ide> startoffset: data.startoffset, <del> datacount: data.count <del> }) <add> datacount: data.count <add> }) <ide> .end(function(err, res) { <ide> if (err) { <ide> console.log("Read failed!");
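The non-obvious piece of register.js is getConsecutiveOffsetChunk, which compresses a sorted list of register offsets into (start, count) runs so consecutive registers can be fetched in one I2C read; the inline comment gives the example [0,1,2] [4,5] [7] becoming [0,3] [4,2] [7,1]. A hedged Java port of the same run-length grouping; class and method names are mine, not the project's:

import java.util.ArrayList;
import java.util.List;

public class OffsetChunker {
    // Group sorted offsets into consecutive runs, reported as
    // {startOffset, count} pairs, mirroring getConsecutiveOffsetChunk.
    static List<int[]> chunk(int[] offsets) {
        List<int[]> result = new ArrayList<>();
        int i = 0;
        while (i < offsets.length) {
            int start = offsets[i];
            int count = 1;
            while (i + count < offsets.length
                    && offsets[i + count] == start + count) {
                count++;
            }
            result.add(new int[] {start, count});
            i += count;
        }
        return result;
    }

    public static void main(String[] args) {
        // [0,1,2,4,5,7] -> (0,3) (4,2) (7,1), matching the JS comment.
        for (int[] c : chunk(new int[] {0, 1, 2, 4, 5, 7})) {
            System.out.println("start=" + c[0] + " count=" + c[1]);
        }
    }
}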
Java
apache-2.0
f3d53d77ed242ab03bdfb44cbd41fe66cfab9f11
0
UAK-35/wro4j,dacofr/wro4j,dacofr/wro4j,wro4j/wro4j,UAK-35/wro4j,UAK-35/wro4j,wro4j/wro4j,dacofr/wro4j,dacofr/wro4j,wro4j/wro4j,UAK-35/wro4j
/* * Copyright (c) 2008. All rights reserved. */ package ro.isdc.wro.manager; import java.beans.PropertyChangeListener; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ro.isdc.wro.WroRuntimeException; import ro.isdc.wro.cache.CacheEntry; import ro.isdc.wro.cache.CacheStrategy; import ro.isdc.wro.cache.ContentHashEntry; import ro.isdc.wro.config.Context; import ro.isdc.wro.config.WroConfigurationChangeListener; import ro.isdc.wro.config.jmx.WroConfiguration; import ro.isdc.wro.http.HttpHeader; import ro.isdc.wro.http.UnauthorizedRequestException; import ro.isdc.wro.manager.callback.LifecycleCallbackRegistry; import ro.isdc.wro.model.WroModel; import ro.isdc.wro.model.factory.WroModelFactory; import ro.isdc.wro.model.factory.WroModelFactoryDecorator; import ro.isdc.wro.model.group.Group; import ro.isdc.wro.model.group.GroupExtractor; import ro.isdc.wro.model.group.Inject; import ro.isdc.wro.model.group.processor.GroupsProcessor; import ro.isdc.wro.model.resource.ResourceType; import ro.isdc.wro.model.resource.locator.factory.UriLocatorFactory; import ro.isdc.wro.model.resource.processor.ProcessorsUtils; import ro.isdc.wro.model.resource.processor.factory.ProcessorsFactory; import ro.isdc.wro.model.resource.processor.impl.css.CssUrlRewritingProcessor; import ro.isdc.wro.model.resource.util.HashBuilder; import ro.isdc.wro.model.resource.util.NamingStrategy; import ro.isdc.wro.util.DestroyableLazyInitializer; import ro.isdc.wro.util.SchedulerHelper; import ro.isdc.wro.util.StopWatch; import ro.isdc.wro.util.WroUtil; /** * Contains all the factories used by optimizer in order to perform the logic. * * @author Alex Objelean * @created Created on Oct 30, 2008 */ public class WroManager implements WroConfigurationChangeListener, CacheChangeCallbackAware { private static final Logger LOG = LoggerFactory.getLogger(WroManager.class); /** * ResourcesModel factory. */ WroModelFactory modelFactory; /** * GroupExtractor. */ private GroupExtractor groupExtractor; /** * HashBuilder for creating a hash based on the processed content. */ private HashBuilder hashBuilder; /** * A cacheStrategy used for caching processed results. <GroupName, processed result>. */ CacheStrategy<CacheEntry, ContentHashEntry> cacheStrategy; /** * A callback to be notified about the cache change. */ PropertyChangeListener cacheChangeListener; /** * Schedules the cache update. */ private final SchedulerHelper cacheSchedulerHelper; /** * Schedules the model update. */ private final SchedulerHelper modelSchedulerHelper; private ProcessorsFactory processorsFactory; private UriLocatorFactory uriLocatorFactory; /** * Rename the file name based on its original name and content. 
*/ private NamingStrategy namingStrategy; @Inject private LifecycleCallbackRegistry callbackRegistry; @Inject private GroupsProcessor groupsProcessor; public WroManager() { cacheSchedulerHelper = SchedulerHelper.create(new DestroyableLazyInitializer<Runnable>() { @Override protected Runnable initialize() { return new ReloadCacheRunnable(WroManager.this); } }, ReloadCacheRunnable.class.getSimpleName()); modelSchedulerHelper = SchedulerHelper.create(new DestroyableLazyInitializer<Runnable>() { @Override protected Runnable initialize() { return new ReloadModelRunnable(WroManager.this); } }, ReloadModelRunnable.class.getSimpleName()); } /** * Perform processing of the uri. * * @throws IOException when any IO related problem occurs or if the request cannot be processed. */ public final void process() throws IOException { validate(); if (isProxyResourceRequest()) { serveProxyResourceRequest(); } else { serveProcessedBundle(); } } /** * Check if this is a request for a proxy resource - a resource which url is overwritten by wro4j. */ private boolean isProxyResourceRequest() { final HttpServletRequest request = Context.get().getRequest(); return request != null && StringUtils.contains(request.getRequestURI(), CssUrlRewritingProcessor.PATH_RESOURCES); } private boolean isGzipAllowed() { return Context.get().getConfig().isGzipEnabled() && isGzipSupported(); } /** * Write to stream the content of the processed resource bundle. * * @param model the model used to build stream. */ private void serveProcessedBundle() throws IOException { final HttpServletRequest request = Context.get().getRequest(); final HttpServletResponse response = Context.get().getResponse(); OutputStream os = null; try { // find names & type final ResourceType type = groupExtractor.getResourceType(request); final String groupName = groupExtractor.getGroupName(request); final boolean minimize = groupExtractor.isMinimized(request); if (groupName == null || type == null) { throw new WroRuntimeException("No groups found for request: " + request.getRequestURI()); } initAggregatedFolderPath(request, type); // reschedule cache & model updates final WroConfiguration config = Context.get().getConfig(); cacheSchedulerHelper.scheduleWithPeriod(config.getCacheUpdatePeriod()); modelSchedulerHelper.scheduleWithPeriod(config.getModelUpdatePeriod()); final ContentHashEntry contentHashEntry = getContentHashEntry(groupName, type, minimize); // TODO move ETag check in wroManagerFactory final String ifNoneMatch = request.getHeader(HttpHeader.IF_NONE_MATCH.toString()); // enclose etag value in quotes to be compliant with the RFC final String etagValue = String.format("\"%s\"", contentHashEntry.getHash()); if (etagValue != null && etagValue.equals(ifNoneMatch)) { LOG.debug("ETag hash detected: {}. Sending {} status code", etagValue, HttpServletResponse.SC_NOT_MODIFIED); response.setStatus(HttpServletResponse.SC_NOT_MODIFIED); // because we cannot return null, return a stream containing nothing. // TODO close output stream? return; } /** * Set contentType before actual content is written, solves <br/> * <a href="http://code.google.com/p/wro4j/issues/detail?id=341">issue341</a> */ if (type != null) { response.setContentType(type.getContentType() + "; charset=" + Context.get().getConfig().getEncoding()); } // set ETag header response.setHeader(HttpHeader.ETAG.toString(), etagValue); os = response.getOutputStream(); if (contentHashEntry.getRawContent() != null) { // Do not set content length because we don't know the length in case it is gzipped. 
This could cause an // unnecessary overhead caused by some browsers which wait for the rest of the content-length until timeout. // make the input stream encoding aware. // use gziped response if supported if (isGzipAllowed()) { // add gzip header and gzip response response.setHeader(HttpHeader.CONTENT_ENCODING.toString(), "gzip"); response.setHeader("Vary", "Accept-Encoding"); IOUtils.write(contentHashEntry.getGzippedContent(), os); } else { IOUtils.write(contentHashEntry.getRawContent(), os); } } } finally { if(os != null) IOUtils.closeQuietly(os); } } /** * Set the aggregatedFolderPath if required. */ private void initAggregatedFolderPath(final HttpServletRequest request, final ResourceType type) { if (ResourceType.CSS == type && Context.get().getAggregatedFolderPath() == null) { final String requestUri = request.getRequestURI(); final String cssFolder = StringUtils.removeEnd(requestUri, FilenameUtils.getName(requestUri)); final String aggregatedFolder = StringUtils.removeStart(cssFolder, request.getContextPath()); Context.get().setAggregatedFolderPath(aggregatedFolder); } } /** * Encodes a fingerprint of the resource into the path. The result may look like this: ${fingerprint}/myGroup.js * * @return a path to the resource with the fingerprint encoded as a folder name. */ public final String encodeVersionIntoGroupPath(final String groupName, final ResourceType resourceType, final boolean minimize) { try { final ContentHashEntry contentHashEntry = getContentHashEntry(groupName, resourceType, minimize); final String groupUrl = groupExtractor.encodeGroupUrl(groupName, resourceType, minimize); // encode the fingerprint of the resource into the resource path return formatVersionedResource(contentHashEntry.getHash(), groupUrl); } catch (final IOException e) { return ""; } } /** * Format the version of the resource in the path. Default implementation use hash as a folder: <hash>/groupName.js. * The implementation can be changed to follow a different versioning style, like version parameter: * /groupName.js?version=<hash> * * @param hash Hash of the resource. * @param resourcePath Path of the resource. * @return formatted versioned path of the resource. */ protected String formatVersionedResource(final String hash, final String resourcePath) { return String.format("%s/%s", hash, resourcePath); } /** * @return {@link ContentHashEntry} object. */ private ContentHashEntry getContentHashEntry(final String groupName, final ResourceType type, final boolean minimize) throws IOException { final CacheEntry cacheEntry = new CacheEntry(groupName, type, minimize); LOG.debug("Searching cache entry: {}", cacheEntry); // Cache based on uri ContentHashEntry contentHashEntry = cacheStrategy.get(cacheEntry); if (contentHashEntry == null) { LOG.debug("Cache is empty. Perform processing..."); // process groups & put result in the cache // find processed result for a group final WroModel model = modelFactory.create(); if (model == null) { throw new WroRuntimeException("Cannot build a valid wro model"); } final Group group = model.getGroupByName(groupName); final String content = groupsProcessor.process(group, type, minimize); contentHashEntry = getContentHashEntryByContent(content); if (!Context.get().getConfig().isDisableCache()) { cacheStrategy.put(cacheEntry, contentHashEntry); } } return contentHashEntry; } /** * Creates a {@link ContentHashEntry} based on provided content. 
*/ ContentHashEntry getContentHashEntryByContent(final String content) throws IOException { String hash = null; if (content != null) { LOG.debug("Content to fingerprint: [{}]", StringUtils.abbreviate(content, 40)); hash = hashBuilder.getHash(new ByteArrayInputStream(content.getBytes())); } final ContentHashEntry entry = ContentHashEntry.valueOf(content, hash); LOG.debug("computed entry: {}", entry); return entry; } /** * Serve images and other external resources referred by bundled resources. * * @param request {@link HttpServletRequest} object. * @param outputStream where the stream will be written. * @throws IOException if no stream could be resolved. */ private void serveProxyResourceRequest() throws IOException { final HttpServletRequest request = Context.get().getRequest(); final OutputStream outputStream = Context.get().getResponse().getOutputStream(); final String resourceId = request.getParameter(CssUrlRewritingProcessor.PARAM_RESOURCE_ID); LOG.debug("locating stream for resourceId: {}", resourceId); final CssUrlRewritingProcessor processor = ProcessorsUtils.findPreProcessorByClass(CssUrlRewritingProcessor.class, processorsFactory.getPreProcessors()); if (processor != null && !processor.isUriAllowed(resourceId)) { throw new UnauthorizedRequestException("Unauthorized resource request detected! " + request.getRequestURI()); } final InputStream is = uriLocatorFactory.locate(resourceId); if (is == null) { throw new WroRuntimeException("Cannot process request with uri: " + request.getRequestURI()); } IOUtils.copy(is, outputStream); IOUtils.closeQuietly(is); IOUtils.closeQuietly(outputStream); } /** * {@inheritDoc} */ public final void onCachePeriodChanged(final long period) { LOG.info("onCachePeriodChanged with value {} has been triggered!", period); cacheSchedulerHelper.scheduleWithPeriod(period); // flush the cache by destroying it. cacheStrategy.clear(); } /** * {@inheritDoc} */ public final void onModelPeriodChanged(final long period) { LOG.info("onModelPeriodChanged with value {} has been triggered!", period); //trigger model destroy getModelFactory().destroy(); modelSchedulerHelper.scheduleWithPeriod(period); } /** * Called when {@link WroManager} is being taken out of service. */ public final void destroy() { try { cacheSchedulerHelper.destroy(); modelSchedulerHelper.destroy(); cacheStrategy.destroy(); modelFactory.destroy(); } catch (final Exception e) { LOG.error("Exception occured during manager destroy!!!"); } finally { LOG.info("WroManager destroyed"); } } /** * Check if all dependencies are set. 
*/ private void validate() { Validate.notNull(cacheStrategy, "cacheStrategy was not set!"); Validate.notNull(groupsProcessor, "groupsProcessor was not set!"); Validate.notNull(uriLocatorFactory, "uriLocatorFactory was not set!"); Validate.notNull(processorsFactory, "processorsFactory was not set!"); Validate.notNull(groupExtractor, "GroupExtractor was not set!"); Validate.notNull(modelFactory, "ModelFactory was not set!"); Validate.notNull(cacheStrategy, "cacheStrategy was not set!"); Validate.notNull(hashBuilder, "HashBuilder was not set!"); } /** * {@inheritDoc} */ public final void registerCacheChangeListener(final PropertyChangeListener cacheChangeListener) { this.cacheChangeListener = cacheChangeListener; } /** * @return true if Gzip is Supported */ private boolean isGzipSupported() { return WroUtil.isGzipSupported(Context.get().getRequest()); } /** * @param groupExtractor the uriProcessor to set */ public final WroManager setGroupExtractor(final GroupExtractor groupExtractor) { Validate.notNull(groupExtractor); this.groupExtractor = groupExtractor; return this; } /** * @param modelFactory the modelFactory to set */ public final WroManager setModelFactory(final WroModelFactory modelFactory) { Validate.notNull(modelFactory); // decorate with callback registry call this.modelFactory = new WroModelFactoryDecorator(modelFactory) { @Override public WroModel create() { callbackRegistry.onBeforeModelCreated(); try { return super.create(); } finally { callbackRegistry.onAfterModelCreated(); } } }; return this; } /** * @param cacheStrategy the cache to set */ public final WroManager setCacheStrategy(final CacheStrategy<CacheEntry, ContentHashEntry> cacheStrategy) { Validate.notNull(cacheStrategy); this.cacheStrategy = cacheStrategy; return this; } /** * @param contentDigester the contentDigester to set */ public WroManager setHashBuilder(final HashBuilder contentDigester) { Validate.notNull(contentDigester); this.hashBuilder = contentDigester; return this; } /** * @return the modelFactory */ public WroModelFactory getModelFactory() { return modelFactory; } /** * @return the processorsFactory used by this WroManager. */ public ProcessorsFactory getProcessorsFactory() { return processorsFactory; } /** * @param processorsFactory the processorsFactory to set */ public WroManager setProcessorsFactory(final ProcessorsFactory processorsFactory) { this.processorsFactory = processorsFactory; return this; } /** * @param uriLocatorFactory the uriLocatorFactory to set */ public WroManager setUriLocatorFactory(final UriLocatorFactory uriLocatorFactory) { this.uriLocatorFactory = uriLocatorFactory; return this; } /** * @return the cacheStrategy */ public CacheStrategy<CacheEntry, ContentHashEntry> getCacheStrategy() { return cacheStrategy; } /** * @return the uriLocatorFactory */ public UriLocatorFactory getUriLocatorFactory() { return uriLocatorFactory; } /** * * @return The strategy used to rename bundled resources. */ public final NamingStrategy getNamingStrategy() { return this.namingStrategy; } GroupsProcessor getGroupsProcessor() { return this.groupsProcessor; } /** * @return the holder of registered callbacks. Use it to register custom callbacks. 
*/ public LifecycleCallbackRegistry getCallbackRegistry() { return callbackRegistry; } /** * @param namingStrategy the namingStrategy to set */ public final WroManager setNamingStrategy(final NamingStrategy namingStrategy) { Validate.notNull(namingStrategy); this.namingStrategy = namingStrategy; return this; } /** * {@inheritDoc} */ @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE); } }
wro4j-core/src/main/java/ro/isdc/wro/manager/WroManager.java
/* * Copyright (c) 2008. All rights reserved. */ package ro.isdc.wro.manager; import java.beans.PropertyChangeListener; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ro.isdc.wro.WroRuntimeException; import ro.isdc.wro.cache.CacheEntry; import ro.isdc.wro.cache.CacheStrategy; import ro.isdc.wro.cache.ContentHashEntry; import ro.isdc.wro.config.Context; import ro.isdc.wro.config.WroConfigurationChangeListener; import ro.isdc.wro.config.jmx.WroConfiguration; import ro.isdc.wro.http.HttpHeader; import ro.isdc.wro.http.UnauthorizedRequestException; import ro.isdc.wro.manager.callback.LifecycleCallbackRegistry; import ro.isdc.wro.model.WroModel; import ro.isdc.wro.model.factory.WroModelFactory; import ro.isdc.wro.model.factory.WroModelFactoryDecorator; import ro.isdc.wro.model.group.Group; import ro.isdc.wro.model.group.GroupExtractor; import ro.isdc.wro.model.group.Inject; import ro.isdc.wro.model.group.processor.GroupsProcessor; import ro.isdc.wro.model.resource.ResourceType; import ro.isdc.wro.model.resource.locator.factory.UriLocatorFactory; import ro.isdc.wro.model.resource.processor.ProcessorsUtils; import ro.isdc.wro.model.resource.processor.factory.ProcessorsFactory; import ro.isdc.wro.model.resource.processor.impl.css.CssUrlRewritingProcessor; import ro.isdc.wro.model.resource.util.HashBuilder; import ro.isdc.wro.model.resource.util.NamingStrategy; import ro.isdc.wro.util.DestroyableLazyInitializer; import ro.isdc.wro.util.SchedulerHelper; import ro.isdc.wro.util.StopWatch; import ro.isdc.wro.util.WroUtil; /** * Contains all the factories used by optimizer in order to perform the logic. * * @author Alex Objelean * @created Created on Oct 30, 2008 */ public class WroManager implements WroConfigurationChangeListener, CacheChangeCallbackAware { private static final Logger LOG = LoggerFactory.getLogger(WroManager.class); /** * ResourcesModel factory. */ WroModelFactory modelFactory; /** * GroupExtractor. */ private GroupExtractor groupExtractor; /** * HashBuilder for creating a hash based on the processed content. */ private HashBuilder hashBuilder; /** * A cacheStrategy used for caching processed results. <GroupName, processed result>. */ CacheStrategy<CacheEntry, ContentHashEntry> cacheStrategy; /** * A callback to be notified about the cache change. */ PropertyChangeListener cacheChangeListener; /** * Schedules the cache update. */ private final SchedulerHelper cacheSchedulerHelper; /** * Schedules the model update. */ private final SchedulerHelper modelSchedulerHelper; private ProcessorsFactory processorsFactory; private UriLocatorFactory uriLocatorFactory; /** * Rename the file name based on its original name and content. 
*/ private NamingStrategy namingStrategy; @Inject private LifecycleCallbackRegistry callbackRegistry; @Inject private GroupsProcessor groupsProcessor; public WroManager() { cacheSchedulerHelper = SchedulerHelper.create(new DestroyableLazyInitializer<Runnable>() { @Override protected Runnable initialize() { return new ReloadCacheRunnable(WroManager.this); } }, ReloadCacheRunnable.class.getSimpleName()); modelSchedulerHelper = SchedulerHelper.create(new DestroyableLazyInitializer<Runnable>() { @Override protected Runnable initialize() { return new ReloadModelRunnable(WroManager.this); } }, ReloadModelRunnable.class.getSimpleName()); } /** * Perform processing of the uri. * * @throws IOException when any IO related problem occurs or if the request cannot be processed. */ public final void process() throws IOException { validate(); if (isProxyResourceRequest()) { serveProxyResourceRequest(); } else { serveProcessedBundle(); } } /** * Check if this is a request for a proxy resource - a resource which url is overwritten by wro4j. */ private boolean isProxyResourceRequest() { final HttpServletRequest request = Context.get().getRequest(); return request != null && StringUtils.contains(request.getRequestURI(), CssUrlRewritingProcessor.PATH_RESOURCES); } private boolean isGzipAllowed() { return Context.get().getConfig().isGzipEnabled() && isGzipSupported(); } /** * Write to stream the content of the processed resource bundle. * * @param model the model used to build stream. */ private void serveProcessedBundle() throws IOException { final HttpServletRequest request = Context.get().getRequest(); final HttpServletResponse response = Context.get().getResponse(); final OutputStream os = response.getOutputStream(); try { // find names & type final ResourceType type = groupExtractor.getResourceType(request); final String groupName = groupExtractor.getGroupName(request); final boolean minimize = groupExtractor.isMinimized(request); if (groupName == null || type == null) { throw new WroRuntimeException("No groups found for request: " + request.getRequestURI()); } initAggregatedFolderPath(request, type); // reschedule cache & model updates final WroConfiguration config = Context.get().getConfig(); cacheSchedulerHelper.scheduleWithPeriod(config.getCacheUpdatePeriod()); modelSchedulerHelper.scheduleWithPeriod(config.getModelUpdatePeriod()); final ContentHashEntry contentHashEntry = getContentHashEntry(groupName, type, minimize); // TODO move ETag check in wroManagerFactory final String ifNoneMatch = request.getHeader(HttpHeader.IF_NONE_MATCH.toString()); // enclose etag value in quotes to be compliant with the RFC final String etagValue = String.format("\"%s\"", contentHashEntry.getHash()); if (etagValue != null && etagValue.equals(ifNoneMatch)) { LOG.debug("ETag hash detected: {}. Sending {} status code", etagValue, HttpServletResponse.SC_NOT_MODIFIED); response.setStatus(HttpServletResponse.SC_NOT_MODIFIED); // because we cannot return null, return a stream containing nothing. // TODO close output stream? return; } /** * Set contentType before actual content is written, solves <br/> * <a href="http://code.google.com/p/wro4j/issues/detail?id=341">issue341</a> */ if (type != null) { response.setContentType(type.getContentType() + "; charset=" + Context.get().getConfig().getEncoding()); } if (contentHashEntry.getRawContent() != null) { // Do not set content length because we don't know the length in case it is gzipped. 
This could cause an // unnecessary overhead caused by some browsers which wait for the rest of the content-length until timeout. // make the input stream encoding aware. // use gziped response if supported if (isGzipAllowed()) { // add gzip header and gzip response response.setHeader(HttpHeader.CONTENT_ENCODING.toString(), "gzip"); response.setHeader("Vary", "Accept-Encoding"); IOUtils.write(contentHashEntry.getGzippedContent(), os); } else { IOUtils.write(contentHashEntry.getRawContent(), os); } } // set ETag header response.setHeader(HttpHeader.ETAG.toString(), etagValue); } finally { IOUtils.closeQuietly(os); } } /** * Set the aggregatedFolderPath if required. */ private void initAggregatedFolderPath(final HttpServletRequest request, final ResourceType type) { if (ResourceType.CSS == type && Context.get().getAggregatedFolderPath() == null) { final String requestUri = request.getRequestURI(); final String cssFolder = StringUtils.removeEnd(requestUri, FilenameUtils.getName(requestUri)); final String aggregatedFolder = StringUtils.removeStart(cssFolder, request.getContextPath()); Context.get().setAggregatedFolderPath(aggregatedFolder); } } /** * Encodes a fingerprint of the resource into the path. The result may look like this: ${fingerprint}/myGroup.js * * @return a path to the resource with the fingerprint encoded as a folder name. */ public final String encodeVersionIntoGroupPath(final String groupName, final ResourceType resourceType, final boolean minimize) { try { final ContentHashEntry contentHashEntry = getContentHashEntry(groupName, resourceType, minimize); final String groupUrl = groupExtractor.encodeGroupUrl(groupName, resourceType, minimize); // encode the fingerprint of the resource into the resource path return formatVersionedResource(contentHashEntry.getHash(), groupUrl); } catch (final IOException e) { return ""; } } /** * Format the version of the resource in the path. Default implementation use hash as a folder: <hash>/groupName.js. * The implementation can be changed to follow a different versioning style, like version parameter: * /groupName.js?version=<hash> * * @param hash Hash of the resource. * @param resourcePath Path of the resource. * @return formatted versioned path of the resource. */ protected String formatVersionedResource(final String hash, final String resourcePath) { return String.format("%s/%s", hash, resourcePath); } /** * @return {@link ContentHashEntry} object. */ private ContentHashEntry getContentHashEntry(final String groupName, final ResourceType type, final boolean minimize) throws IOException { final CacheEntry cacheEntry = new CacheEntry(groupName, type, minimize); LOG.debug("Searching cache entry: {}", cacheEntry); // Cache based on uri ContentHashEntry contentHashEntry = cacheStrategy.get(cacheEntry); if (contentHashEntry == null) { LOG.debug("Cache is empty. Perform processing..."); // process groups & put result in the cache // find processed result for a group final WroModel model = modelFactory.create(); if (model == null) { throw new WroRuntimeException("Cannot build a valid wro model"); } final Group group = model.getGroupByName(groupName); final String content = groupsProcessor.process(group, type, minimize); contentHashEntry = getContentHashEntryByContent(content); if (!Context.get().getConfig().isDisableCache()) { cacheStrategy.put(cacheEntry, contentHashEntry); } } return contentHashEntry; } /** * Creates a {@link ContentHashEntry} based on provided content. 
*/ ContentHashEntry getContentHashEntryByContent(final String content) throws IOException { String hash = null; if (content != null) { LOG.debug("Content to fingerprint: [{}]", StringUtils.abbreviate(content, 40)); hash = hashBuilder.getHash(new ByteArrayInputStream(content.getBytes())); } final ContentHashEntry entry = ContentHashEntry.valueOf(content, hash); LOG.debug("computed entry: {}", entry); return entry; } /** * Serve images and other external resources referred by bundled resources. * * @param request {@link HttpServletRequest} object. * @param outputStream where the stream will be written. * @throws IOException if no stream could be resolved. */ private void serveProxyResourceRequest() throws IOException { final HttpServletRequest request = Context.get().getRequest(); final OutputStream outputStream = Context.get().getResponse().getOutputStream(); final String resourceId = request.getParameter(CssUrlRewritingProcessor.PARAM_RESOURCE_ID); LOG.debug("locating stream for resourceId: {}", resourceId); final CssUrlRewritingProcessor processor = ProcessorsUtils.findPreProcessorByClass(CssUrlRewritingProcessor.class, processorsFactory.getPreProcessors()); if (processor != null && !processor.isUriAllowed(resourceId)) { throw new UnauthorizedRequestException("Unauthorized resource request detected! " + request.getRequestURI()); } final InputStream is = uriLocatorFactory.locate(resourceId); if (is == null) { throw new WroRuntimeException("Cannot process request with uri: " + request.getRequestURI()); } IOUtils.copy(is, outputStream); IOUtils.closeQuietly(is); IOUtils.closeQuietly(outputStream); } /** * {@inheritDoc} */ public final void onCachePeriodChanged(final long period) { LOG.info("onCachePeriodChanged with value {} has been triggered!", period); cacheSchedulerHelper.scheduleWithPeriod(period); // flush the cache by destroying it. cacheStrategy.clear(); } /** * {@inheritDoc} */ public final void onModelPeriodChanged(final long period) { LOG.info("onModelPeriodChanged with value {} has been triggered!", period); //trigger model destroy getModelFactory().destroy(); modelSchedulerHelper.scheduleWithPeriod(period); } /** * Called when {@link WroManager} is being taken out of service. */ public final void destroy() { try { cacheSchedulerHelper.destroy(); modelSchedulerHelper.destroy(); cacheStrategy.destroy(); modelFactory.destroy(); } catch (final Exception e) { LOG.error("Exception occured during manager destroy!!!"); } finally { LOG.info("WroManager destroyed"); } } /** * Check if all dependencies are set. 
*/ private void validate() { Validate.notNull(cacheStrategy, "cacheStrategy was not set!"); Validate.notNull(groupsProcessor, "groupsProcessor was not set!"); Validate.notNull(uriLocatorFactory, "uriLocatorFactory was not set!"); Validate.notNull(processorsFactory, "processorsFactory was not set!"); Validate.notNull(groupExtractor, "GroupExtractor was not set!"); Validate.notNull(modelFactory, "ModelFactory was not set!"); Validate.notNull(cacheStrategy, "cacheStrategy was not set!"); Validate.notNull(hashBuilder, "HashBuilder was not set!"); } /** * {@inheritDoc} */ public final void registerCacheChangeListener(final PropertyChangeListener cacheChangeListener) { this.cacheChangeListener = cacheChangeListener; } /** * @return true if Gzip is Supported */ private boolean isGzipSupported() { return WroUtil.isGzipSupported(Context.get().getRequest()); } /** * @param groupExtractor the uriProcessor to set */ public final WroManager setGroupExtractor(final GroupExtractor groupExtractor) { Validate.notNull(groupExtractor); this.groupExtractor = groupExtractor; return this; } /** * @param modelFactory the modelFactory to set */ public final WroManager setModelFactory(final WroModelFactory modelFactory) { Validate.notNull(modelFactory); // decorate with callback registry call this.modelFactory = new WroModelFactoryDecorator(modelFactory) { @Override public WroModel create() { callbackRegistry.onBeforeModelCreated(); try { return super.create(); } finally { callbackRegistry.onAfterModelCreated(); } } }; return this; } /** * @param cacheStrategy the cache to set */ public final WroManager setCacheStrategy(final CacheStrategy<CacheEntry, ContentHashEntry> cacheStrategy) { Validate.notNull(cacheStrategy); this.cacheStrategy = cacheStrategy; return this; } /** * @param contentDigester the contentDigester to set */ public WroManager setHashBuilder(final HashBuilder contentDigester) { Validate.notNull(contentDigester); this.hashBuilder = contentDigester; return this; } /** * @return the modelFactory */ public WroModelFactory getModelFactory() { return modelFactory; } /** * @return the processorsFactory used by this WroManager. */ public ProcessorsFactory getProcessorsFactory() { return processorsFactory; } /** * @param processorsFactory the processorsFactory to set */ public WroManager setProcessorsFactory(final ProcessorsFactory processorsFactory) { this.processorsFactory = processorsFactory; return this; } /** * @param uriLocatorFactory the uriLocatorFactory to set */ public WroManager setUriLocatorFactory(final UriLocatorFactory uriLocatorFactory) { this.uriLocatorFactory = uriLocatorFactory; return this; } /** * @return the cacheStrategy */ public CacheStrategy<CacheEntry, ContentHashEntry> getCacheStrategy() { return cacheStrategy; } /** * @return the uriLocatorFactory */ public UriLocatorFactory getUriLocatorFactory() { return uriLocatorFactory; } /** * * @return The strategy used to rename bundled resources. */ public final NamingStrategy getNamingStrategy() { return this.namingStrategy; } GroupsProcessor getGroupsProcessor() { return this.groupsProcessor; } /** * @return the holder of registered callbacks. Use it to register custom callbacks. 
*/ public LifecycleCallbackRegistry getCallbackRegistry() { return callbackRegistry; } /** * @param namingStrategy the namingStrategy to set */ public final WroManager setNamingStrategy(final NamingStrategy namingStrategy) { Validate.notNull(namingStrategy); this.namingStrategy = namingStrategy; return this; } /** * {@inheritDoc} */ @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE); } }
ETag must also be set before any content; get the output stream as late as possible
wro4j-core/src/main/java/ro/isdc/wro/manager/WroManager.java
ETag must also be set before any content; get the output stream as late as possible
<ide><path>wro4j-core/src/main/java/ro/isdc/wro/manager/WroManager.java
<ide> final HttpServletRequest request = Context.get().getRequest();
<ide> final HttpServletResponse response = Context.get().getResponse();
<ide>
<del> final OutputStream os = response.getOutputStream();
<add> OutputStream os = null;
<ide> try {
<ide> // find names & type
<ide> final ResourceType type = groupExtractor.getResourceType(request);
<ide> if (type != null) {
<ide> response.setContentType(type.getContentType() + "; charset=" + Context.get().getConfig().getEncoding());
<ide> }
<add> // set ETag header
<add> response.setHeader(HttpHeader.ETAG.toString(), etagValue);
<add>
<add> os = response.getOutputStream();
<ide> if (contentHashEntry.getRawContent() != null) {
<ide> // Do not set content length because we don't know the length in case it is gzipped. This could cause an
<ide> // unnecessary overhead caused by some browsers which wait for the rest of the content-length until timeout.
<ide> IOUtils.write(contentHashEntry.getRawContent(), os);
<ide> }
<ide> }
<del>
<del> // set ETag header
<del> response.setHeader(HttpHeader.ETAG.toString(), etagValue);
<ide> } finally {
<del> IOUtils.closeQuietly(os);
<add> if(os != null)
<add> IOUtils.closeQuietly(os);
<ide> }
<ide> }
<ide>
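The reasoning behind the diff above: a servlet response is committed once body bytes are flushed, and any header set after commit (including ETag) is silently ignored, so all headers must be written before the output stream is obtained and used. A minimal sketch of the header-before-body pattern, assuming a plain Servlet API handler; the class name, method name, and content type below are invented for illustration and are not wro4j's API:

import java.io.IOException;
import java.io.OutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public class EtagFirstHandler {
    /**
     * Serves a cached bundle. All headers are written before the body,
     * and the output stream is acquired as late as possible so that a
     * 304 short-circuit never opens or touches the body stream at all.
     */
    public void serve(HttpServletRequest request, HttpServletResponse response,
                      byte[] content, String hash) throws IOException {
        // Quote the hash to form an RFC-compliant entity tag.
        String etag = "\"" + hash + "\"";

        // 1. Conditional request check first: no body is needed on a match.
        if (etag.equals(request.getHeader("If-None-Match"))) {
            response.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
            return;
        }

        // 2. All headers before any body bytes are written.
        response.setContentType("application/javascript; charset=UTF-8");
        response.setHeader("ETag", etag);

        // 3. Only now obtain the stream and write the body.
        OutputStream os = response.getOutputStream();
        try {
            os.write(content);
        } finally {
            os.close();
        }
    }
}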
Java
mit
c44d2c710f82246228ee54c58362ffa95cfd7abf
0
paveyry/LyreLand,paveyry/LyreLand
package generation; import analysis.harmonic.ChordDegree; import training.probability.MarkovMatrix; import java.util.*; /** * Fixme : This class will later be use to generate real and complete bars * using the probabilities learnt from a given category ... * It is added now to highlight the future architecture of the generation of a Score. */ public class Rhythm { private ArrayList<ChordDegree> degrees_; private ArrayList<GeneratedNote> generatedNotes_; private HashMap<ArrayList<ChordDegree>, MarkovMatrix<Double>> rhythmMatrices_; private double barRhythmValue_; private Random generator_; public Rhythm(ArrayList<ChordDegree> degrees, HashMap<ArrayList<ChordDegree>, MarkovMatrix<Double>> rhythmMatrices, double barRhythmValue, Random generator) { this.degrees_ = degrees; this.generatedNotes_ = new ArrayList<>(); this.rhythmMatrices_ = rhythmMatrices; this.barRhythmValue_ = barRhythmValue; this.generator_ = generator; } public ArrayList<GeneratedNote> generateRhythms() { rhythmMatrices_.forEach((a, b) -> {if (b.getTransitionMatrix().size() == 0) System.out.println("BITE: " + a);}); ArrayList<ChordDegree> context = new ArrayList<>(Collections.nCopies(2, null)); ChordDegree degree = degrees_.get(0); context.add(degree); for (int i = 1; i <= degrees_.size(); i++) { double rhythmslength = barRhythmValue_ / degree.getBarFractionDen(); if (i < degrees_.size()) degree = degrees_.get(i); else degree = null; context.remove(0); context.add(degree); //System.out.println("Matrices:"); //System.out.println(context.get(0) + ", " + context.get(1) + ", " + context.get(2) + " : " + rhythmMatrices_.get(context)); ArrayList<Double> rhythms = getRhythms(rhythmMatrices_.get(context), rhythmslength); fillGeneratedNotes(generatedNotes_, context.get(1), rhythms); } return generatedNotes_; } private ArrayList<Double> getRhythms(MarkovMatrix<Double> rhythmMatrix, double rhythmslength) { double currentRhythmlength = 0; ArrayList<Double> rhythms = new ArrayList<>(); ArrayList<Double> context = new ArrayList<>(); context.add(null); int iter = 0; while (currentRhythmlength != rhythmslength) { Double newRhythm = 0.0; newRhythm = rhythmMatrix.getRandomValue(context, generator_); if (newRhythm == null) { rhythms.add(rhythmslength - currentRhythmlength); break; } context.remove(0); context.add(newRhythm); rhythms.add(newRhythm); currentRhythmlength += newRhythm; if (currentRhythmlength > rhythmslength) { if (iter > 1000) { System.out.println("Bad fix"); newRhythm = rhythms.get(rhythms.size() - 1); rhythms.remove(rhythms.size() - 1); newRhythm = newRhythm - (currentRhythmlength - rhythmslength); rhythms.add(newRhythm); break; } else { currentRhythmlength = 0; rhythms.clear(); context.clear(); context.add(null); iter++; } } } return rhythms; } private static void fillGeneratedNotes(ArrayList<GeneratedNote> generatedNotes, ChordDegree degree, ArrayList<Double> rhythms) { rhythms.forEach(d -> generatedNotes.add(new GeneratedNote(degree, 1, d))); } }
src/generation/Rhythm.java
package generation; import analysis.harmonic.ChordDegree; import training.probability.MarkovMatrix; import java.util.*; /** * Fixme : This class will later be use to generate real and complete bars * using the probabilities learnt from a given category ... * It is added now to highlight the future architecture of the generation of a Score. */ public class Rhythm { private ArrayList<ChordDegree> degrees_; private ArrayList<GeneratedNote> generatedNotes_; private HashMap<ArrayList<ChordDegree>, MarkovMatrix<Double>> rhythmMatrices_; private double barRhythmValue_; private Random generator_; public Rhythm(ArrayList<ChordDegree> degrees, HashMap<ArrayList<ChordDegree>, MarkovMatrix<Double>> rhythmMatrices, double barRhythmValue, Random generator) { this.degrees_ = degrees; this.generatedNotes_ = new ArrayList<>(); this.rhythmMatrices_ = rhythmMatrices; this.barRhythmValue_ = barRhythmValue; this.generator_ = generator; } public ArrayList<GeneratedNote> generateRhythms() { rhythmMatrices_.forEach((a, b) -> {if (b.getTransitionMatrix().size() == 0) System.out.println("BITE: " + a);}); ArrayList<ChordDegree> context = new ArrayList<>(Collections.nCopies(2, null)); ChordDegree degree = degrees_.get(0); context.add(degree); for (int i = 1; i <= degrees_.size(); i++) { double rhythmslength = barRhythmValue_ / degree.getBarFractionDen(); if (i < degrees_.size()) degree = degrees_.get(i); else degree = null; context.remove(0); context.add(degree); //System.out.println("Matrices:"); //System.out.println(context.get(0) + ", " + context.get(1) + ", " + context.get(2) + " : " + rhythmMatrices_.get(context)); ArrayList<Double> rhythms = getRhythms(rhythmMatrices_.get(context), rhythmslength); fillGeneratedNotes(generatedNotes_, context.get(1), rhythms); } return generatedNotes_; } private ArrayList<Double> getRhythms(MarkovMatrix<Double> rhythmMatrix, double rhythmslength) { double currentRhythmlength = 0; ArrayList<Double> rhythms = new ArrayList<>(); ArrayList<Double> context = new ArrayList<>(); context.add(null); while (currentRhythmlength != rhythmslength) { Double newRhythm = 0.0; newRhythm = rhythmMatrix.getRandomValue(context, generator_); if (newRhythm == null) { rhythms.add(rhythmslength - currentRhythmlength); break; } context.remove(0); context.add(newRhythm); rhythms.add(newRhythm); currentRhythmlength += newRhythm; if (currentRhythmlength > rhythmslength) { currentRhythmlength = 0; rhythms.clear(); context.clear(); context.add(null); } } return rhythms; } private static void fillGeneratedNotes(ArrayList<GeneratedNote> generatedNotes, ChordDegree degree, ArrayList<Double> rhythms) { rhythms.forEach(d -> generatedNotes.add(new GeneratedNote(degree, 1, d))); } }
generation: rhythm: fall back to a forced fix when there is no solution
src/generation/Rhythm.java
generation: rhythm: fall back to a forced fix when there is no solution
<ide><path>src/generation/Rhythm.java
<ide> ArrayList<Double> rhythms = new ArrayList<>();
<ide> ArrayList<Double> context = new ArrayList<>();
<ide> context.add(null);
<add> int iter = 0;
<ide> while (currentRhythmlength != rhythmslength) {
<ide> Double newRhythm = 0.0;
<ide> newRhythm = rhythmMatrix.getRandomValue(context, generator_);
<ide> rhythms.add(newRhythm);
<ide> currentRhythmlength += newRhythm;
<ide> if (currentRhythmlength > rhythmslength) {
<del> currentRhythmlength = 0;
<del> rhythms.clear();
<del> context.clear();
<del> context.add(null);
<add> if (iter > 1000) {
<add> System.out.println("Bad fix");
<add> newRhythm = rhythms.get(rhythms.size() - 1);
<add> rhythms.remove(rhythms.size() - 1);
<add> newRhythm = newRhythm - (currentRhythmlength - rhythmslength);
<add> rhythms.add(newRhythm);
<add> break;
<add> }
<add> else {
<add> currentRhythmlength = 0;
<add> rhythms.clear();
<add> context.clear();
<add> context.add(null);
<add> iter++;
<add> }
<ide> }
<ide> }
<ide> return rhythms;
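The diff above bounds an otherwise potentially endless retry loop: the generator keeps redrawing random rhythm values and restarting the bar on overshoot, and after 1000 failed attempts it forcibly trims the last value so the durations sum exactly to the target length. A standalone sketch of this bounded-retry-with-fallback pattern; the attempt cap and the quarter-note duration distribution are invented here, not the project's values:

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class BoundedRetryFill {
    private static final int MAX_ATTEMPTS = 1000; // illustrative cap

    /**
     * Draws random durations until they sum exactly to target, restarting
     * on overshoot. After MAX_ATTEMPTS restarts, the last duration is
     * shortened so the sum lands exactly on target and the loop terminates.
     * The exact floating-point comparison mirrors the original code and is
     * safe here because all values are binary-exact multiples of 0.25.
     */
    static List<Double> fill(double target, Random rng) {
        List<Double> out = new ArrayList<>();
        double sum = 0.0;
        int attempts = 0;
        while (sum != target) {
            double d = 0.25 * (1 + rng.nextInt(4)); // 0.25, 0.5, 0.75 or 1.0
            out.add(d);
            sum += d;
            if (sum > target) {
                if (attempts++ >= MAX_ATTEMPTS) {
                    // Fallback: trim the last duration to hit the target exactly.
                    double last = out.remove(out.size() - 1);
                    out.add(last - (sum - target));
                    break;
                }
                // Overshoot within budget: discard and start the bar over.
                out.clear();
                sum = 0.0;
            }
        }
        return out;
    }
}

The cap trades musical fidelity for guaranteed termination, which is presumably why the original logs it as a "Bad fix": the trimmed duration may not be one the Markov matrix would ever emit.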
JavaScript
mit
d14d732938e1ffcc6afe6857a3b09d32a428be46
0
kellym/angular-collection,justinwinslow/angular-collection
(function(angular, _){ 'use strict'; // Create local references to array methods we'll want to use later. var array = []; var push = array.push; var slice = array.slice; var splice = array.splice; var Model, Collection; angular.module('ngCollection', []) .directive('ngCollectionRepeat', ['$parse', '$animate', function($parse, $animate) { return { restrict: 'A', transclude: 'element', multiElement: true, priority: 1000, terminal: true, $$tlb: true, link: function($scope, $element, $attr, ctrl, $transclude){ var expression = $attr.ngCollectionRepeat; var match = expression.match(/^\s*([\s\S]+?)\s+in\s+([\s\S]+?)?\s*$/); var modelAlias, collectionName; modelAlias = match[1]; // Expose model in child scope as this collectionName = match[2]; // Name of the collection in the scope // Store elements from previous run so we can destroy them var previousElements = []; $scope.$watchCollection(collectionName, function ngRepeatAction(collection){ var previousNode = $element[0]; // Dump existing DOM nodes if (previousElements.length) { _.each(previousElements, function(element){ $animate.leave(element, function(){ element = null; }); }); // Clear array previousElements = []; } if (collection) { for (var index = 0, length = collection.length; index < length; index++) { var model = collection.models[index]; var childScope = $scope.$new(); // Add model to the scope childScope[modelAlias] = model.attributes; // Add a reference to the model so you can use it in your controllers childScope.$this = model; // Add logic helpers to scope childScope.$index = index; childScope.$first = (index === 0); childScope.$last = (index === (collection.length - 1)); childScope.$middle = !(childScope.$first || childScope.$last); // jshint bitwise: false childScope.$odd = !(childScope.$even = (index&1) === 0); // jshint bitwise: true // Build the DOM element $transclude(childScope, function(clone) { $animate.enter(clone, null, angular.element(previousNode)); previousNode = clone; previousElements.push(clone); }); } } }); } }; }]) .factory('$model', ['$http', '$q', function($http, $q){ Model = function(url, model){ this.url = url || '/'; // Instantiate resource var defaultParams = (model && model.id) ? {id: model.id} : {}; // Store the model this.attributes = model || {}; // Expose resource promise and resolved this.$resolved = true; // Immediately resolve a promise to use as this.$promise var defer = $q.defer(); defer.resolve(this.attributes); this.$promise = defer.promise; this.get = function(id){ id = id || this.attributes.id; var get = $http.get(this.url + '/' + id); var that = this; // Update exposed promise and resolution indication this.$resolved = false; this.$promise = get; get.then(function(response){ that.update(response.data); }); get.finally(function(){ that.$resolved = true; }); return this; }; this.save = function(){ var save = (this.attributes.id) ? 
$http.put(this.url + '/' + this.attributes.id, this.attributes) : $http.post(this.url, this.attributes); var that = this; // Update exposed promise and resolution indication this.$resolved = false; this.$promise = save; save.then(function(response){ that.update(response.data); }); save.finally(function(){ that.$resolved = true; }); return this; }; // NOTE - it's possible we'll want to save the original attributes object // but I can't think of good reason at the moment and this works fine this.update = function(attributes) { var keys = _.keys(attributes); // Remove any keys not present in the new data for (var key in this.attributes) { if (keys.indexOf(key) < 0) { delete this.attributes[key]; } } // Merge the new data into the model _.extend(this.attributes, attributes); }; this.remove = this.del = function(){ var remove; var that = this; // Remove model from collection if it's in one if (this.$collection) { this.$collection.remove(this); } if (this.attributes.id) { remove = $http.delete(url + '/' + this.attributes.id); } else { var defer = $q.defer(); remove = defer.promise; defer.resolve(); } // Update exposed promise and resolution indication this.$resolved = false; this.$promise = remove; remove.finally(function(){ that.$resolved = true; }); return this; }; }; // Return the constructor return function(url, model){ return new Model(url, model); }; }]) .factory('$collection', ['$http', '$q', '$model', function($http, $q, $model){ // Collection constructor Collection = function(url, defaultParams, collection){ this.url = url || '/'; defaultParams = defaultParams || {}; // Store models for manipulation and display this.models = []; // Store length so we can look it up faster/more easily this.length = 0; // Expose resource promise and resolved this.$resolved = true; // Immediately resolve a promise to use as this.$promise var defer = $q.defer(); defer.resolve(this.models); this.$promise = defer.promise; var updateLength = function(){ this.length = this.models.length; }; // determines how old this data is since it returned from the server this.getAge = function() { return this.$resolved && this.resolvedAt ? 
new Date() - this.resolvedAt : -1; }; // sets the resolvedAt time to determine the age of the data var setAge = function(promise) { var that = this; this.requestedAt = +new Date(); promise.then(function () { that.resolvedAt = +new Date(); }); return this; }; // Expose method for querying collection of models this.query = function(params){ params = $.extend({}, defaultParams, params); var that = this; var query = $http.get(this.url, {params: params}); // Update data age info setAge.call(this, query); // Update exposed promise and resolution indication this.$resolved = false; this.$promise = query; // Update models query.then(function(response){ // Clear out models that.models.length = 0; that.length = 0; var models = response.data; // Loop through models _.each(models, function(model){ // Push new model that.push(model); }); }); query.finally(function(){ that.$resolved = true; }); return this; }; this.sync = function(options) { options = options || {}; // If the consumer set a minimum age, let's just return // if the data isn't old enough if (options.minAge && options.minAge > this.getAge()) { return this; } var that = this; var sync = $http.get(this.url, {params: defaultParams}); // Update data age info setAge.call(this, sync); // Update exposed promise and resolution indication this.$resolved = false; this.$promise = sync; // Update models sync.then(function(response){ var ids = []; _.each(response.data, function(attributes){ var id = attributes.id; var model = that.find({id: id}); if (id) ids.push(id); if (model) { model.update(attributes); } else { that.add(attributes); } }); // Remove any models that aren't present in the lastest data _.each(_.clone(that.models), function(model){ try { if (ids.indexOf(model.attributes.id) < 0) { that.remove(model); } } catch(e) { throw 'Issue with model: ' + JSON.stringify(model); } }); }); sync.finally(function(){ that.$resolved = true; }); return this; }; this.push = this.add = function(model){ if (model instanceof Model) { // Add the model if it doesn't exist if (this.models.indexOf(model) < 0) { // Add collection reference model.$collection = this; // Push it to the models this.models.push(model); } } else if (model) { // Instantiate new model model = $model(this.url, model); // Add this collection reference to it model.$collection = this; // Push it to the models this.models.push(model); } // Update length property updateLength.apply(this); return model; }; // Remove a specific model from the collection this.remove = function(model){ this.models.splice(this.models.indexOf(model), 1); updateLength.apply(this); return this; }; // Save all models this.save = function(){ var that = this; var defer = $q.defer(); var counter = 0; // Update promise and resolved indicator this.$resolved = false; this.$promise = defer.promise; if (this.length) { // Save each model individually this.each(function(model){ model.save().then(function(){ // Increment counter counter++; // If all saves have finished, resolve the promise if (counter === that.length) { defer.resolve(that.models); that.$resolved = true; } }); }); } else { // Resolve immediately if there are no models defer.resolve(); } defer.promise.finally(function(){ that.$resolved = true; }); return this; }; this.find = this.findWhere = function(attrs) { if (_.isFunction(attrs)) { return _.find(this.models, attrs); } return _.find(this.models, function(model){ for (var key in attrs) { if (attrs[key] !== model.attributes[key]) return false; } return true; }); }; this.pluck = function(property){ var values = []; 
this.each(function(model){ if (model.attributes[property]){ values.push(model.attributes[property]); } }); return values; }; this.at = function(index){ return this.models[index]; }; // If a collection has been supplied, let's use that if (collection && collection.length) { // Loop through models _.each(collection, function(model){ // Push new model this.push(model); }, this); } return this; }; // Stolen straight from Backbone var methods = ['forEach', 'each', 'first', 'last', 'indexOf']; _.each(methods, function(method) { Collection.prototype[method] = function() { // Slice returns arguments as an array var args = slice.call(arguments); // Add the models as the first value in args args.unshift(this.models); // Return the _ method with appropriate context and arguments return _[method].apply(_, args); }; }); // Return the constructor return function(url, defaultParams, collection){ return new Collection(url, defaultParams, collection); }; }]); })(window.angular, window._);
angular-collection.js
(function(angular, _){ 'use strict'; // Create local references to array methods we'll want to use later. var array = []; var push = array.push; var slice = array.slice; var splice = array.splice; var Model, Collection; angular.module('ngCollection', []) .directive('ngCollectionRepeat', ['$parse', '$animate', function($parse, $animate) { return { restrict: 'A', transclude: 'element', multiElement: true, priority: 1000, terminal: true, $$tlb: true, link: function($scope, $element, $attr, ctrl, $transclude){ var expression = $attr.ngCollectionRepeat; var match = expression.match(/^\s*([\s\S]+?)\s+in\s+([\s\S]+?)?\s*$/); var modelAlias, collectionName; modelAlias = match[1]; // Expose model in child scope as this collectionName = match[2]; // Name of the collection in the scope // Store elements from previous run so we can destroy them var previousElements = []; $scope.$watchCollection(collectionName, function ngRepeatAction(collection){ var previousNode = $element[0]; // Dump existing DOM nodes if (previousElements.length) { _.each(previousElements, function(element){ $animate.leave(element, function(){ element = null; }); }); // Clear array previousElements = []; } if (collection) { for (var index = 0, length = collection.length; index < length; index++) { var model = collection.models[index]; var childScope = $scope.$new(); // Add model to the scope childScope[modelAlias] = model.attributes; // Add a reference to the model so you can use it in your controllers childScope.$this = model; // Add logic helpers to scope childScope.$index = index; childScope.$first = (index === 0); childScope.$last = (index === (collection.length - 1)); childScope.$middle = !(childScope.$first || childScope.$last); // jshint bitwise: false childScope.$odd = !(childScope.$even = (index&1) === 0); // jshint bitwise: true // Build the DOM element $transclude(childScope, function(clone) { $animate.enter(clone, null, angular.element(previousNode)); previousNode = clone; previousElements.push(clone); }); } } }); } }; }]) .factory('$model', ['$http', '$q', function($http, $q){ Model = function(url, model){ this.url = url || '/'; // Instantiate resource var defaultParams = (model && model.id) ? {id: model.id} : {}; // Store the model this.attributes = model || {}; // Expose resource promise and resolved this.$resolved = true; // Immediately resolve a promise to use as this.$promise var defer = $q.defer(); defer.resolve(this.attributes); this.$promise = defer.promise; this.get = function(id){ id = id || this.attributes.id; var get = $http.get(this.url + '/' + id); var that = this; // Update exposed promise and resolution indication this.$resolved = false; this.$promise = get; get.then(function(response){ that.update(response.data); }); get.finally(function(){ that.$resolved = true; }); return this; }; this.save = function(){ var save = (this.attributes.id) ? 
$http.put(this.url + '/' + this.attributes.id, this.attributes) : $http.post(this.url, this.attributes); var that = this; // Update exposed promise and resolution indication this.$resolved = false; this.$promise = save; save.then(function(response){ that.update(response.data); }); save.finally(function(){ that.$resolved = true; }); return this; }; // NOTE - it's possible we'll want to save the original attributes object // but I can't think of good reason at the moment and this works fine this.update = function(attributes) { var keys = _.keys(attributes); // Remove any keys not present in the new data for (var key in this.attributes) { if (keys.indexOf(key) < 0) { delete this.attributes[key]; } } // Merge the new data into the model _.extend(this.attributes, attributes); }; this.remove = this.del = function(){ var remove; var that = this; // Remove model from collection if it's in one if (this.$collection) { this.$collection.remove(this); } if (this.attributes.id) { remove = $http.delete(url + '/' + this.attributes.id); } else { var defer = $q.defer(); remove = defer.promise; defer.resolve(); } // Update exposed promise and resolution indication this.$resolved = false; this.$promise = remove; remove.finally(function(){ that.$resolved = true; }); return this; }; }; // Return the constructor return function(url, model){ return new Model(url, model); }; }]) .factory('$collection', ['$http', '$q', '$model', function($http, $q, $model){ // Collection constructor Collection = function(url, defaultParams, collection){ this.url = url || '/'; defaultParams = defaultParams || {}; // Store models for manipulation and display this.models = []; // Store length so we can look it up faster/more easily this.length = 0; // Expose resource promise and resolved this.$resolved = true; // Immediately resolve a promise to use as this.$promise var defer = $q.defer(); defer.resolve(this.models); this.$promise = defer.promise; var updateLength = function(){ this.length = this.models.length; }; // determines how old this data is since it returned from the server this.getAge = function() { return this.$resolved && this.resolvedAt ? 
new Date() - this.resolvedAt : -1; }; // sets the resolvedAt time to determine the age of the data var setAge = function(promise) { var that = this; this.requestedAt = +new Date(); promise.then(function () { that.resolvedAt = +new Date(); }); return this; }; // Expose method for querying collection of models this.query = function(params){ params = $.extend({}, defaultParams, params); var that = this; var query = $http.get(this.url, {params: params}); // Update data age info setAge.call(this, query); // Update exposed promise and resolution indication this.$resolved = false; this.$promise = query; // Update models query.then(function(response){ // Clear out models that.models.length = 0; that.length = 0; var models = response.data; // Loop through models _.each(models, function(model){ // Push new model that.push(model); }); }); query.finally(function(){ that.$resolved = true; }); return this; }; this.sync = function(options) { options = options || {}; // If the consumer set a minimum age, let's just return // if the data isn't old enough if (options.minAge && options.minAge > this.getAge()) { return this; } var that = this; var sync = $http.get(this.url, {params: defaultParams}); // Update data age info setAge.call(this, sync); // Update exposed promise and resolution indication this.$resolved = false; this.$promise = sync; // Update models sync.then(function(response){ var ids = []; _.each(response.data, function(attributes){ var id = attributes.id; var model = that.find({id: id}); if (id) ids.push(id); if (model) { model.update(attributes); } else { that.add(attributes); } }); // Remove any models that aren't present in the lastest data that.each(function(model){ try { if (ids.indexOf(model.attributes.id) < 0) { that.remove(model); } } catch(e) { throw 'Issue with model: ' + JSON.stringify(model); } }); }); sync.finally(function(){ that.$resolved = true; }); return this; }; this.push = this.add = function(model){ if (model instanceof Model) { // Add the model if it doesn't exist if (this.models.indexOf(model) < 0) { // Add collection reference model.$collection = this; // Push it to the models this.models.push(model); } } else if (model) { // Instantiate new model model = $model(this.url, model); // Add this collection reference to it model.$collection = this; // Push it to the models this.models.push(model); } // Update length property updateLength.apply(this); return model; }; // Remove a specific model from the collection this.remove = function(model){ this.models.splice(this.models.indexOf(model), 1); updateLength.apply(this); return this; }; // Save all models this.save = function(){ var that = this; var defer = $q.defer(); var counter = 0; // Update promise and resolved indicator this.$resolved = false; this.$promise = defer.promise; if (this.length) { // Save each model individually this.each(function(model){ model.save().then(function(){ // Increment counter counter++; // If all saves have finished, resolve the promise if (counter === that.length) { defer.resolve(that.models); that.$resolved = true; } }); }); } else { // Resolve immediately if there are no models defer.resolve(); } defer.promise.finally(function(){ that.$resolved = true; }); return this; }; this.find = this.findWhere = function(attrs) { if (_.isFunction(attrs)) { return _.find(this.models, attrs); } return _.find(this.models, function(model){ for (var key in attrs) { if (attrs[key] !== model.attributes[key]) return false; } return true; }); }; this.pluck = function(property){ var values = []; 
this.each(function(model){ if (model.attributes[property]){ values.push(model.attributes[property]); } }); return values; }; this.at = function(index){ return this.models[index]; }; // If a collection has been supplied, let's use that if (collection && collection.length) { // Loop through models _.each(collection, function(model){ // Push new model this.push(model); }, this); } return this; }; // Stolen straight from Backbone var methods = ['forEach', 'each', 'first', 'last', 'indexOf']; _.each(methods, function(method) { Collection.prototype[method] = function() { // Slice returns arguments as an array var args = slice.call(arguments); // Add the models as the first value in args args.unshift(this.models); // Return the _ method with appropriate context and arguments return _[method].apply(_, args); }; }); // Return the constructor return function(url, defaultParams, collection){ return new Collection(url, defaultParams, collection); }; }]); })(window.angular, window._);
Don't remove items from the array we're iterating over
angular-collection.js
Don't remove items from the array we're iterating over
<ide><path>angular-collection.js
<ide> });
<ide>
<ide> // Remove any models that aren't present in the lastest data
<del> that.each(function(model){
<add> _.each(_.clone(that.models), function(model){
<ide> try {
<ide> if (ids.indexOf(model.attributes.id) < 0) {
<ide> that.remove(model);
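The rule behind this one-line fix is general: removing elements from a collection while iterating over that same collection either skips elements (index-based loops, as in the Underscore each here) or fails outright, so the commit iterates over a clone instead. A Java analogue of the same pitfall and two safe alternatives; this standalone sketch is purely illustrative and unrelated to the library's code:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class SafeRemoval {
    public static void main(String[] args) {
        List<String> ids = new ArrayList<>(Arrays.asList("a", "b", "c"));

        // Broken: structural modification during iteration usually throws
        // ConcurrentModificationException here, and for some positions it
        // silently ends the loop early instead -- elements get mishandled
        // either way.
        // for (String id : ids) { if (id.equals("b")) ids.remove(id); }

        // Fix 1: iterate over a copy, mutate the original -- the same idea
        // as _.each(_.clone(that.models), ...) in the diff above.
        for (String id : new ArrayList<>(ids)) {
            if (id.equals("b")) {
                ids.remove(id);
            }
        }

        // Fix 2: use an explicit Iterator, which supports in-place removal.
        for (Iterator<String> it = ids.iterator(); it.hasNext(); ) {
            if (it.next().equals("c")) {
                it.remove();
            }
        }

        System.out.println(ids); // prints [a]
    }
}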
Java
bsd-2-clause
abc8d861b657fd91e73c06fc4cae06f56a71c3bb
0
ninchat/ninchat-java,ninchat/ninchat-java
/* * Copyright (c) 2012-2013, Somia Reality Oy * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package com.ninchat.client.transport; import com.google.gson.Gson; import com.google.gson.JsonElement; import com.google.gson.JsonSyntaxException; import com.google.gson.stream.JsonReader; import com.ninchat.client.transport.actions.CloseSession; import com.ninchat.client.transport.actions.ResumeSession; import com.ninchat.client.transport.events.MessageReceived; import com.ninchat.client.transport.payloads.MessagePayload; import java.io.IOException; import java.io.StringReader; import java.net.URI; import java.util.logging.Level; import java.util.logging.Logger; /** * @author Kari Lavikka */ public class WebSocketTransport extends AbstractTransport { private final static Logger logger = Logger.getLogger(WebSocketTransport.class.getName()); private WebSocketAdapter webSocketAdapter; private int payloadFramesLeft; private Event currentEvent; private static final long TIMEOUT_ACTION = 20 * 1000; // TODO: Configurable private static final long TIMEOUT_CHECK_LAST_EVENT = 5 * 1000; // TODO: Configurable private static final long WAIT_BEFORE_PING = 120 * 1000; // TODO: Configurable private static final long WAIT_BEFORE_EVENT_ACK = 5000; private static final long MAX_WAIT_BEFORE_EVENT_ACK = 30000; /** Timeout when connecting to primary host */ private static final int TIMEOUT_CONNECT = 30 * 1000; /** Timeout when connnecting to specific session host */ private static final int TIMEOUT_CONNECT_SESSION_HOST = 15 * 1000; private volatile QueueHog queueHog; private volatile TimeoutMonitor timeoutMonitor; private volatile EventAcknowledger eventAcknowledger; private final Gson gson = new Gson(); private String currentHost; /** * TimeoutWatcher waits in this object */ private final Object messageSentToWebsocketHook = new Object(); /** * This object is notified when network is available again. Triggers reconnect attempt. 
*/ private final Object networkAvailabilityHook = new Object(); public WebSocketTransport() { init(); } @Override public void terminate() { if (status == Status.TERMINATING) { logger.finer("terminate(): Transport status is " + status + ", no point in termination."); return; } setStatus(Status.TERMINATING); if (queueHog != null) { queueHog.interrupt(); try { queueHog.join(10000); // Timeout just for sure. Shouldn't be needed. TODO: Remove. } catch (InterruptedException e) { logger.warning("terminate(): Interrupted while waiting for thread to join."); } queueHog = null; } if (eventAcknowledger != null) { eventAcknowledger.interrupt(); try { eventAcknowledger.join(10000); // Timeout just for sure. Shouldn't be needed. TODO: Remove. } catch (InterruptedException e) { logger.warning("terminate(): Interrupted while waiting for thread to join."); } eventAcknowledger = null; } try { // TODO: Better approach would be to signal QueueHog about termination. It's just not properly implemented. // There's already a graceful shutdown handling for "close_session" and it actually calls this terminate method. webSocketAdapter.disconnect(); } catch (WebSocketAdapterException e) { logger.log(Level.FINE, "Can not terminate", e); } super.terminate(); } /** * Prepares this transport for new session. Effectively clears all queues and initializes new worker threads */ protected void init() { super.init(); payloadFramesLeft = 0; currentEvent = null; if (queueHog == null) { queueHog = new QueueHog(); } else { logger.warning("init(): QueueHog is not null!"); } if (eventAcknowledger == null) { eventAcknowledger = new EventAcknowledger(); eventAcknowledger.start(); } else { logger.warning("init(): EventAcknowledger is not null!"); } } public void setWebSocketAdapter(WebSocketAdapter webSocketAdapter) { this.webSocketAdapter = webSocketAdapter; webSocketAdapter.setWebSocketTransport(this); } @Override public Long enqueue(Action action) { QueueHog q = queueHog; if (q == null) { // TODO: I'm not quite sure when this condition occurs and how to cope with it... logger.warning("QueueHog is null. Can't enqueue."); return null; } synchronized (q) { if (!q.isAlive()) { q.start(); } } return super.enqueue(action); } /** * Connects to server. It may be synchronous or asynchronous - depending on WebSocket implementation * * @return true if no errors were encountered */ private boolean connect() { if (status != Status.CLOSED) { logger.fine("Trying to connect but status is not CLOSED. Ignoring."); return false; } if (host == null) { throw new IllegalStateException("No host has been set!"); } try { setStatus(Status.OPENING); currentHost = sessionHost != null ? sessionHost : host; int timeout = sessionHost != null ? TIMEOUT_CONNECT_SESSION_HOST : TIMEOUT_CONNECT; URI uri = new URI("wss://" + currentHost + "/socket"); logger.info("Connecting to " + uri); webSocketAdapter.setURI(uri); webSocketAdapter.connect(timeout); return true; } catch (Exception e) { logger.log(Level.WARNING, "Can not connect", e); sessionHost = null; // Don't try session host again. It may be dead. 
setStatus(Status.CLOSED); Throwable t = e; while (true) { Throwable cause = t.getCause(); if (cause == null) { for (TransportStatusListener l : transportStatusListeners) { l.onConnectionError(this, t); } break; } t = cause; } } return false; } void onOpen() { toggleTimeoutMonitor(true); setStatus(Status.OPENED); } void onClose(String reason) { toggleTimeoutMonitor(false); setStatus(Status.CLOSED); } private class DummyEvent extends Event { @Override public boolean verify() { return false; } @Override public String getEventName() { return null; } } void onMessage(Object message) { String text = (String)message; // TODO: Support binary frames if (payloadFramesLeft > 0) { logger.finest("Receiving payload: " + text); if (currentEvent instanceof PayloadEvent) { PayloadEvent pe = (PayloadEvent)currentEvent; if (currentEvent instanceof MessageReceived) { Class <? extends MessagePayload> payloadClass = MessagePayload.messageClasses.get(((MessageReceived)currentEvent).getMessageType()); if (payloadClass != null) { try { pe.payloads[pe.payloads.length - payloadFramesLeft] = gson.fromJson(text, payloadClass); } catch (JsonSyntaxException e) { logger.log(Level.WARNING, "Can not parse JSON", e); } } else { logger.warning("Encountered an unsupported message type: " + ((MessageReceived)currentEvent).getMessageType()); } } else { logger.warning("Only message_received event supports payloads atm..."); } } else { logger.warning("Receiving payloadFrame although we should not!?"); } payloadFramesLeft--; } else { if (logger.isLoggable(Level.FINEST)) logger.finest("Receiving header: " + text); if (text == null || text.length() == 0 || text.charAt(0) != '{') { logger.finest("Empty frame!"); // This is probably a keepalive frame that mitigates load balancer's tendency to disconnect idling connections too eagerly. // Let's disconnect if we are currently connected to the primary host and a specific session host is available if (currentHost.equals(host) && sessionHost != null && !sessionHost.equals(host)) { // But let's behave nicely and not disconnect if autoReconnect is not enabled. if (autoReconnect) { logger.info("Received an empty keepalive frame. I'll disconnect from " + currentHost + " and connect to a specific session host: " + sessionHost); try { webSocketAdapter.disconnect(); } catch (WebSocketAdapterException e) { // Not interested... } } } return; } try { String eventName = null; if (payloadFramesLeft != 0) { throw new RuntimeException("Mismatch in payload frame counter!"); } // First we have to view received object briefly to figure out a concrete event type and the number of expected payload frames JsonReader reader = new JsonReader(new StringReader(text)); reader.beginObject(); while (reader.hasNext()) { String name = reader.nextName(); if ("event".equals(name)) { eventName = reader.nextString(); } else if ("frames".equals(name)) { payloadFramesLeft = reader.nextInt(); } else { reader.skipValue(); } } if (eventName == null) { logger.warning("Received a header but it does not contain an event type: " + text + " ... ignoring it."); return; } Class<? extends Event> eventClass = EventClassRegistry.eventClasses.get(eventName); if (eventClass == null) { logger.warning("Can not find a concrete class for event: " + eventName + " ... 
ignoring it."); return; } currentEvent = gson.fromJson(text, eventClass); currentEvent.setReceived(elapsedTime()); if (currentEvent instanceof PayloadEvent) { ((PayloadEvent)currentEvent).payloads = new Payload[payloadFramesLeft]; } if (!(currentEvent instanceof com.ninchat.client.transport.events.Error)) { // Reset reconnect delay only when a normal event (non-error) is received // TODO: Should do this only for the initial event of each transport connection QueueHog q = queueHog; if (q != null) { q.resetReconnectDelay(); } } // We really should not get into these exception handlers. There's a risk that we mess up payload // counters and transport state gets corrupted. TODO: Session should probably get terminated now... } catch (Exception e) { //logger.log(Level.SEVERE, "Error while parsing websocket message: " + message, e); throw new RuntimeException("Error while parsing websocket message: " + message, e); } } // Debugging. This should never happen if (currentEvent == null) { throw new RuntimeException("currentEvent is null. Should not be!"); } if (payloadFramesLeft <= 0) { // First remove action from queue Action action = removeActionFromQueue(currentEvent); // Then call generic listeners that are bound to transport and model onCompleteEvent(currentEvent); // Finally call specific listener that is bound to individual event. Now model is already updated when // listener gets a notification. if (action != null && action.isExpectActionId()) { acknowledge(action, currentEvent); } } } @Override protected void onCompleteEvent(Event event) { super.onCompleteEvent(event); eventAcknowledger.wakeup(); } private void timeout() { logger.info("Timeout! Closing connection..."); try { webSocketAdapter.disconnect(); } catch (WebSocketAdapterException e) { logger.log(Level.WARNING, "Can not disconnect", e); } } private void toggleTimeoutMonitor(boolean run) { if (run) { if (timeoutMonitor == null || !timeoutMonitor.isAlive()) { timeoutMonitor = new TimeoutMonitor(); timeoutMonitor.start(); } } else { if (timeoutMonitor != null) { timeoutMonitor.interrupt(); timeoutMonitor = null; } } } @Override public void setNetworkAvailability(boolean available) { super.setNetworkAvailability(available); if (!available && isConnected()) { // Because Android doesn't allow IO on main thread new Thread() { @Override public void run() { try { webSocketAdapter.disconnect(); } catch (WebSocketAdapterException e) { logger.log(Level.WARNING, "Can not disconnect", e); } } }.start(); } synchronized (networkAvailabilityHook) { networkAvailabilityHook.notifyAll(); } synchronized (queue) { queue.notifyAll(); // Just to be sure... } } private class TimeoutMonitor extends Thread { @Override public void run() { try { setName("TimeoutMonitor"); } catch (SecurityException e) { logger.log(Level.WARNING, "Can not set thread name", e); } logger.info("TimeoutMonitor: started!"); try { while (!isInterrupted()) { Action action = null; // Pick tail of the queue synchronized (queue) { if (!queue.isEmpty()) { action = queue.last(); } } if (action != null && action.getSent() > Long.MIN_VALUE) { // Found an unacknowledged action. This logic is somewhat complicated because actions are // acknowledged when they are picked from queue for processing. Timeout may get triggered // if processing is too slow. This can be worked around by adding yet another queue, but // that would be overly complicated. Currently the problem is mitigated by checking the timestamp // of the previous acknowledged action. 
If it was just a while ago, we are probably busy // processing the event. An "inEventListener" variable could also be introduced. logger.finer("TimeoutMonitor: Found an unacknowledged action #" + action.getId() + " from queue."); long currentTime = elapsedTime(); long timeLeft = action.getSent() + TIMEOUT_ACTION - currentTime; if (timeLeft < 0) { // Already beyond timeout logger.fine("TimeoutMonitor: Found a timed out action " + action + " which was sent " + (currentTime - action.getSent()) + " ms ago"); long lastAck = currentTime - lastAcknowledgedActionTimestamp.get(); if (lastAck < TIMEOUT_CHECK_LAST_EVENT) { long nap = TIMEOUT_CHECK_LAST_EVENT - lastAck; logger.fine("TimeoutMonitor: However, previous event was acknowledged just " + lastAck + " ms ago. Let's wait " + nap + " ms. Maybe we are just so busy handling response events."); sleep(nap); } else { timeout(); } } else { logger.finer("TimeoutMonitor: Waiting " + timeLeft + "ms for timeout."); // Wait until timeout sleep(timeLeft); // And check if action is still unacknowledged boolean acknowledged; synchronized (queue) { acknowledged = !queue.contains(action); } if (!acknowledged) { // Still in queue. Re-read the clock and check the time left again; currentTime was captured before the sleep. timeLeft = action.getSent() + TIMEOUT_ACTION - elapsedTime(); if (timeLeft < 0) { logger.fine("TimeoutMonitor: Action #" + action.getId() + " timed out while waiting for acknowledgement! " + action.toString()); timeout(); } } else { logger.finer("TimeoutMonitor: Action #" + action.getId() + " was acknowledged during wait."); } } } else { // There are no unacknowledged actions synchronized (messageSentToWebsocketHook) { messageSentToWebsocketHook.wait(); } } } } catch (InterruptedException e) { logger.fine("TimeoutMonitor: Thread interrupted"); } logger.info("TimeoutMonitor: Thread terminates"); } } private class EventAcknowledger extends Thread { private long ackAt = Long.MIN_VALUE; private long ackAtTheLatest = Long.MIN_VALUE; public synchronized void wakeup() { Event e = lastReceivedEvent; if (e == null) return; if (ackAt == Long.MIN_VALUE) { ackAtTheLatest = e.getReceived() + MAX_WAIT_BEFORE_EVENT_ACK; } ackAt = e.getReceived() + WAIT_BEFORE_EVENT_ACK; this.notify(); } @Override public void run() { try { setName("EventAcknowledger"); } catch (SecurityException e) { logger.log(Level.WARNING, "Can not set thread name", e); } logger.info("EventAcknowledger: Thread started!"); try { while (!isInterrupted()) { if (shouldAcknowledgeEventId()) { long waitBefore; synchronized (this) { long e = elapsedTime(); if (e > ackAtTheLatest) { waitBefore = 0; } else { waitBefore = ackAt - e; } } if (logger.isLoggable(Level.FINER)) logger.finer("EventAcknowledger: WaitBefore = " + waitBefore); if (waitBefore <= 0) { enqueue(new ResumeSession()); ackAt = Long.MIN_VALUE; ackAtTheLatest = Long.MIN_VALUE; } else { if (logger.isLoggable(Level.FINER)) logger.finer("EventAcknowledger: Waiting " + waitBefore + "ms"); synchronized (this) { this.wait(waitBefore); } } } else { logger.finer("EventAcknowledger: Waiting indefinitely"); synchronized (this) { this.wait(); } } } } catch (InterruptedException e) { logger.fine("EventAcknowledger: Thread interrupted"); } finally { logger.fine("EventAcknowledger: Thread terminates"); } } } private class QueueHog extends Thread { final long initialReconnectDelay = 3000; long reconnectDelay = initialReconnectDelay; @Override public void run() { try { setName("QueueHog"); } catch (SecurityException e) {
logger.log(Level.WARNING, "Can not set thread name", e); } logger.info("QueueHog: Thread started!"); try { pickFromQueue: while (!isInterrupted()) { Action action; // Wait for something to send synchronized (queue) { do { if (lastSentAction == null) { // Pick first if nothing has been sent before action = queue.isEmpty() ? null : queue.first(); } else { // Or the next one action = queue.higher(lastSentAction); } if (action == null) { logger.fine("QueueHog: Got nothing from queue. Waiting for action."); queue.wait(); } } while (action == null); } // Open connection if it is closed while (status != Status.OPENED) { if (status == Status.CLOSED) { synchronized (networkAvailabilityHook) { while (!networkAvailability) { logger.fine("QueueHog: Network is unavailable. Waiting until it is available again."); networkAvailabilityHook.wait(); // TODO: Timeout? } } reconnectDelay = initialReconnectDelay; // TODO: Hmm not sure if this is a proper place to set this logger.fine("QueueHog: calling connect()"); connect(); } synchronized (statusHook) { if (status == Status.OPENING) { logger.info("QueueHog: Waiting for opened connection"); statusHook.wait(); // WebSocket onOpen callback wakes me up } } if (status == Status.CLOSED) { logger.fine("QueueHog: Connection attempt failed"); // If connect failed ... if (!autoReconnect) { logger.fine("QueueHog: Autoreconnect is disabled. Bailing out."); terminate(); return; } logger.fine("QueueHog: Sleeping " + reconnectDelay + "ms before trying again"); synchronized (networkAvailabilityHook) { networkAvailabilityHook.wait(reconnectDelay); } reconnectDelay *= 1.5; } else if (status == Status.OPENED) { if (sessionId != null && lastReceivedEvent != null) { logger.fine("QueueHog: Resuming session"); // If connection was opened and session is is present try { Action r = new ResumeSession(); r.setSessionId(sessionId); r.setEventId(lastReceivedEvent.getId()); JsonElement element = gson.toJsonTree(r); element.getAsJsonObject().addProperty("action", r.getActionName()); String json = gson.toJson(element); logger.finer("QueueHog: sending resume_session to WebSocket: " + json); webSocketAdapter.send(json); // If resume_session fails, we get an error event with error type "session_not_found" rewindQueue(); continue pickFromQueue; } catch (Exception e) { logger.log(Level.WARNING, "Can't send resume_session", e); // TODO: Terminate session gracefully } } else { logger.fine("QueueHog: Got a connection"); } } } // Include action name and payload count JsonElement element = gson.toJsonTree(action); element.getAsJsonObject().addProperty("action", action.getActionName()); if (action instanceof PayloadAction) { element.getAsJsonObject().addProperty("frames", ((PayloadAction)action).getPayloadCount()); } String header = gson.toJson(element); boolean closingRequest = action instanceof CloseSession; if (closingRequest) { logger.fine("QueueHog: Sending close_session action. 
I'll quit after this action!"); } try { if (logger.isLoggable(Level.FINER)) logger.finer("QueueHog: sending header to WebSocket: " + header); webSocketAdapter.send(header); if (action instanceof PayloadAction) { Payload [] payloads = ((PayloadAction)action).getPayloads(); if (payloads != null && payloads.length >= 1) { for (Payload payload : payloads) { String json = "{}"; if (payload != null) { json = gson.toJson(payload); } if (logger.isLoggable(Level.FINER)) logger.finer("QueueHog: sending payload to WebSocket: " + json); webSocketAdapter.send(json); } } } lastSentActionTimestamp.set(elapsedTime()); if (action.isExpectActionId()) { action.flagSent(); synchronized (messageSentToWebsocketHook) { messageSentToWebsocketHook.notifyAll(); } synchronized (queue) { lastSentAction = action; } } else { // Actions without actionId must not be retransmitted or tracked by TimeoutMonitor synchronized (queue) { queue.remove(action); } } } catch (WebSocketAdapterException e) { logger.log(Level.WARNING, "Problem with WebSocket.", e); setStatus(Status.CLOSED); } if (closingRequest) { logger.fine("QueueHog: Terminating transport and stopping QueueHog."); terminate(); return; } } } catch (InterruptedException e) { logger.fine("QueueHog: Thread interrupted"); } finally { logger.fine("QueueHog: Thread terminates"); } } public void resetReconnectDelay() { reconnectDelay = initialReconnectDelay; } } }
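A minimal usage sketch for the transport above. Everything here is hypothetical glue except the calls themselves: setWebSocketAdapter(), enqueue() and terminate() come from the class, while MyWebSocketAdapter stands in for a concrete, platform-specific WebSocketAdapter implementation, and host configuration is assumed to happen through whatever setter AbstractTransport provides.

// Hypothetical wiring sketch; MyWebSocketAdapter is an assumed concrete adapter, not part of this file.
WebSocketTransport transport = new WebSocketTransport();
transport.setWebSocketAdapter(new MyWebSocketAdapter());
// host is assumed to be configured on AbstractTransport before the first enqueue()
Long actionId = transport.enqueue(new ResumeSession()); // lazily starts QueueHog, which connects and sends
// ... later ...
transport.terminate(); // interrupts QueueHog/EventAcknowledger and closes the socket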
src/com/ninchat/client/transport/WebSocketTransport.java
/* * Copyright (c) 2012-2013, Somia Reality Oy * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package com.ninchat.client.transport; import com.google.gson.Gson; import com.google.gson.JsonElement; import com.google.gson.JsonSyntaxException; import com.google.gson.stream.JsonReader; import com.ninchat.client.transport.actions.CloseSession; import com.ninchat.client.transport.actions.ResumeSession; import com.ninchat.client.transport.events.MessageReceived; import com.ninchat.client.transport.payloads.MessagePayload; import java.io.IOException; import java.io.StringReader; import java.net.URI; import java.util.logging.Level; import java.util.logging.Logger; /** * @author Kari Lavikka */ public class WebSocketTransport extends AbstractTransport { private final static Logger logger = Logger.getLogger(WebSocketTransport.class.getName()); private WebSocketAdapter webSocketAdapter; private int payloadFramesLeft; private Event currentEvent; private static final long TIMEOUT_ACTION = 20 * 1000; // TODO: Configurable private static final long TIMEOUT_CHECK_LAST_EVENT = 5 * 1000; // TODO: Configurable private static final long WAIT_BEFORE_PING = 120 * 1000; // TODO: Configurable private static final long WAIT_BEFORE_EVENT_ACK = 5000; private static final long MAX_WAIT_BEFORE_EVENT_ACK = 30000; /** Timeout when connecting to primary host */ private static final int TIMEOUT_CONNECT = 30 * 1000; /** Timeout when connecting to specific session host */ private static final int TIMEOUT_CONNECT_SESSION_HOST = 15 * 1000; private volatile QueueHog queueHog; private volatile TimeoutMonitor timeoutMonitor; private volatile EventAcknowledger eventAcknowledger; private final Gson gson = new Gson(); private String currentHost; /** * TimeoutMonitor waits in this object */ private final Object messageSentToWebsocketHook = new Object(); /** * This object is notified when network is available again. Triggers reconnect attempt.
*/ private final Object networkAvailabilityHook = new Object(); public WebSocketTransport() { init(); } @Override public void terminate() { if (status == Status.TERMINATING) { logger.finer("terminate(): Transport status is " + status + ", no point in termination."); return; } setStatus(Status.TERMINATING); if (queueHog != null) { queueHog.interrupt(); try { queueHog.join(10000); // Timeout just for sure. Shouldn't be needed. TODO: Remove. } catch (InterruptedException e) { logger.warning("terminate(): Interrupted while waiting for thread to join."); } queueHog = null; } if (eventAcknowledger != null) { eventAcknowledger.interrupt(); try { eventAcknowledger.join(10000); // Timeout just for sure. Shouldn't be needed. TODO: Remove. } catch (InterruptedException e) { logger.warning("terminate(): Interrupted while waiting for thread to join."); } eventAcknowledger = null; } try { // TODO: Better approach would be to signal QueueHog about termination. It's just not properly implemented. // There's already a graceful shutdown handling for "close_session" and it actually calls this terminate method. webSocketAdapter.disconnect(); } catch (WebSocketAdapterException e) { logger.log(Level.FINE, "Can not terminate", e); } super.terminate(); } /** * Prepares this transport for new session. Effectively clears all queues and initializes new worker threads */ protected void init() { super.init(); payloadFramesLeft = 0; currentEvent = null; if (queueHog == null) { queueHog = new QueueHog(); } else { logger.warning("init(): QueueHog is not null!"); } if (eventAcknowledger == null) { eventAcknowledger = new EventAcknowledger(); eventAcknowledger.start(); } else { logger.warning("init(): EventAcknowledger is not null!"); } } public void setWebSocketAdapter(WebSocketAdapter webSocketAdapter) { this.webSocketAdapter = webSocketAdapter; webSocketAdapter.setWebSocketTransport(this); } @Override public Long enqueue(Action action) { QueueHog q = queueHog; if (q == null) { // TODO: I'm not quite sure when this condition occurs and how to cope with it... logger.warning("QueueHog is null. Can't enqueue."); return null; } synchronized (q) { if (!q.isAlive()) { q.start(); } } return super.enqueue(action); } /** * Connects to server. It may be synchronous or asynchronous - depending on WebSocket implementation * * @return true if no errors were encountered */ private boolean connect() { if (status != Status.CLOSED) { logger.fine("Trying to connect but status is not CLOSED. Ignoring."); return false; } if (host == null) { throw new IllegalStateException("No host has been set!"); } try { setStatus(Status.OPENING); currentHost = sessionHost != null ? sessionHost : host; int timeout = sessionHost != null ? TIMEOUT_CONNECT_SESSION_HOST : TIMEOUT_CONNECT; URI uri = new URI("wss://" + currentHost + "/socket"); logger.info("Connecting to " + uri); webSocketAdapter.setURI(uri); webSocketAdapter.connect(timeout); return true; } catch (Exception e) { logger.log(Level.WARNING, "Can not connect", e); sessionHost = null; // Don't try session host again. It may be dead. 
setStatus(Status.CLOSED); Throwable t = e; while (true) { Throwable cause = t.getCause(); if (cause == null) { for (TransportStatusListener l : transportStatusListeners) { l.onConnectionError(this, t); } break; } t = cause; } } return false; } void onOpen() { toggleTimeoutMonitor(true); setStatus(Status.OPENED); } void onClose(String reason) { toggleTimeoutMonitor(false); setStatus(Status.CLOSED); } private class DummyEvent extends Event { @Override public boolean verify() { return false; } @Override public String getEventName() { return null; } } void onMessage(Object message) { String text = (String)message; // TODO: Support binary frames if (payloadFramesLeft > 0) { logger.finest("Receiving payload: " + text); if (currentEvent instanceof PayloadEvent) { PayloadEvent pe = (PayloadEvent)currentEvent; if (currentEvent instanceof MessageReceived) { Class <? extends MessagePayload> payloadClass = MessagePayload.messageClasses.get(((MessageReceived)currentEvent).getMessageType()); if (payloadClass != null) { try { pe.payloads[pe.payloads.length - payloadFramesLeft] = gson.fromJson(text, payloadClass); } catch (JsonSyntaxException e) { logger.log(Level.WARNING, "Can not parse JSON", e); } } else { logger.warning("Encountered an unsupported message type: " + ((MessageReceived)currentEvent).getMessageType()); } } else { logger.warning("Only message_received event supports payloads atm..."); } } else { logger.warning("Receiving payloadFrame although we should not!?"); } payloadFramesLeft--; } else { if (logger.isLoggable(Level.FINEST)) logger.finest("Receiving header: " + text); if (text == null || text.length() == 0 || text.charAt(0) != '{') { logger.finest("Empty frame!"); // This is probably a keepalive frame that mitigates load balancer's tendency to disconnect idling connections too eagerly. // Let's disconnect if we are currently connected to the primary host and a specific session host is available if (currentHost.equals(host) && sessionHost != null && !sessionHost.equals(host)) { // But let's behave nicely and not disconnect if autoReconnect is not enabled. if (autoReconnect) { logger.info("Received an empty keepalive frame. I'll disconnect from " + currentHost + " and connect to a specific session host: " + sessionHost); try { webSocketAdapter.disconnect(); } catch (WebSocketAdapterException e) { // Not interested... } } } return; } try { String eventName = null; assert payloadFramesLeft == 0; // First we have to view received object briefly to figure out a concrete event type and the number of expected payload frames JsonReader reader = new JsonReader(new StringReader(text)); reader.beginObject(); while (reader.hasNext()) { String name = reader.nextName(); if ("event".equals(name)) { eventName = reader.nextString(); } else if ("frames".equals(name)) { payloadFramesLeft = reader.nextInt(); } else { reader.skipValue(); } } if (eventName == null) { logger.warning("Received a header but it does not contain an event type: " + text + " ... ignoring it."); return; } Class<? extends Event> eventClass = EventClassRegistry.eventClasses.get(eventName); if (eventClass == null) { logger.warning("Can not find a concrete class for event: " + eventName + " ... 
ignoring it."); return; } currentEvent = gson.fromJson(text, eventClass); currentEvent.setReceived(elapsedTime()); if (currentEvent instanceof PayloadEvent) { ((PayloadEvent)currentEvent).payloads = new Payload[payloadFramesLeft]; } if (!(currentEvent instanceof com.ninchat.client.transport.events.Error)) { // Reset reconnect delay only when a normal event (non-error) is received // TODO: Should do this only for the initial event of each transport connection QueueHog q = queueHog; if (q != null) { q.resetReconnectDelay(); } } // We really should not get into these exception handlers. There's a risk that we mess up payload // counters and transport state gets corrupted. TODO: Session should probably get terminated now... } catch (JsonSyntaxException e) { logger.log(Level.SEVERE, "Error while parsing websocket message: " + message, e); } catch (IOException e) { logger.log(Level.SEVERE, "Error while parsing websocket message: " + message, e); } } if (payloadFramesLeft <= 0) { // First remove action from queue Action action = removeActionFromQueue(currentEvent); // Then call generic listeners that are bound to transport and model onCompleteEvent(currentEvent); // Finally call specific listener that is bound to individual event. Now model is already updated when // listener gets a notification. if (action != null && action.isExpectActionId()) { acknowledge(action, currentEvent); } } } @Override protected void onCompleteEvent(Event event) { super.onCompleteEvent(event); eventAcknowledger.wakeup(); } private void timeout() { logger.info("Timeout! Closing connection..."); try { webSocketAdapter.disconnect(); } catch (WebSocketAdapterException e) { logger.log(Level.WARNING, "Can not disconnect", e); } } private void toggleTimeoutMonitor(boolean run) { if (run) { if (timeoutMonitor == null || !timeoutMonitor.isAlive()) { timeoutMonitor = new TimeoutMonitor(); timeoutMonitor.start(); } } else { if (timeoutMonitor != null) { timeoutMonitor.interrupt(); timeoutMonitor = null; } } } @Override public void setNetworkAvailability(boolean available) { super.setNetworkAvailability(available); if (!available && isConnected()) { // Because Android doesn't allow IO on main thread new Thread() { @Override public void run() { try { webSocketAdapter.disconnect(); } catch (WebSocketAdapterException e) { logger.log(Level.WARNING, "Can not disconnect", e); } } }.start(); } synchronized (networkAvailabilityHook) { networkAvailabilityHook.notifyAll(); } synchronized (queue) { queue.notifyAll(); // Just to be sure... } } private class TimeoutMonitor extends Thread { @Override public void run() { try { setName("TimeoutMonitor"); } catch (SecurityException e) { logger.log(Level.WARNING, "Can not set thread name", e); } logger.info("TimeoutMonitor: started!"); try { while (!isInterrupted()) { Action action = null; // Pick tail of the queue synchronized (queue) { if (!queue.isEmpty()) { action = queue.last(); } } if (action != null && action.getSent() > Long.MIN_VALUE) { // Found an unacknowledged action. This logic is somewhat complicated because actions are // acknowledged when they are picked from queue for processing. Timeout may get triggered // if processing is too slow. This can be worked around by adding yet another queue, but // that would be overly complicated. Currently the problem is mitigated by checking the timestamp // of the previous acknowledged action. If it was just a while ago, we are probably busy // processing the event. An "inEventListener" variable could also be introduced. 
logger.finer("TimeoutMonitor: Found an unacknowledged action #" + action.getId() + " from queue."); long currentTime = elapsedTime(); long timeLeft = action.getSent() + TIMEOUT_ACTION - currentTime; if (timeLeft < 0) { // Already beyond timeout logger.fine("TimeoutMonitor: Found a timed out action " + action + " which was sent " + (currentTime - action.getSent()) + " ms ago"); long lastAck = currentTime - lastAcknowledgedActionTimestamp.get(); if (currentTime - lastAcknowledgedActionTimestamp.get() < TIMEOUT_CHECK_LAST_EVENT) { long nap = TIMEOUT_CHECK_LAST_EVENT - lastAck; logger.fine("TimeoutMonitor: However, previous event was acknowledged just " + lastAck + " ms ago. Let's wait " + nap + " ms. Maybe we are just so busy handling response events."); sleep(nap); } else { timeout(); } } else { logger.finer("TimeoutMonitor: Waiting " + timeLeft + "ms for timeout."); // Wait until timeout sleep(timeLeft); // And check if action is still unacknowledged boolean acknowledged; synchronized (queue) { acknowledged = !queue.contains(action); } if (!acknowledged) { // Still in queue. Check time left again because it might have been modified during sleep. timeLeft = action.getSent() + TIMEOUT_ACTION - currentTime; if (timeLeft < 0) { logger.fine("TimeoutMonitor: Action #" + action.getId() + " timed out while waiting for acknowledgement! " + action.toString()); timeout(); } } else { logger.finer("TimeoutMonitor: Action #" + action.getId() + " was acknowledged during wait."); } } } else { // There are no unacknowledged actions synchronized (messageSentToWebsocketHook) { messageSentToWebsocketHook.wait(); } } } } catch (InterruptedException e) { logger.fine("TimeoutMonitor: Thread interrupted"); } logger.info("TimeoutMonitor: Thread terminates"); } } private class EventAcknowledger extends Thread { private long ackAt = Long.MIN_VALUE; private long ackAtTheLatest = Long.MIN_VALUE; public synchronized void wakeup() { Event e = lastReceivedEvent; if (e == null) return; if (ackAt == Long.MIN_VALUE) { ackAtTheLatest = e.getReceived() + MAX_WAIT_BEFORE_EVENT_ACK; } ackAt = e.getReceived() + WAIT_BEFORE_EVENT_ACK; this.notify(); } @Override public void run() { try { setName("EventAcknowledger"); } catch (SecurityException e) { logger.log(Level.WARNING, "Can not set thread name", e); } logger.info("EventAcknowledger: Thread started!"); try { while (!isInterrupted()) { if (shouldAcknowledgeEventId()) { long waitBefore; synchronized (this) { long e = elapsedTime(); if (e > ackAtTheLatest) { waitBefore = 0; } else { waitBefore = ackAt - e; } } if (logger.isLoggable(Level.FINER)) logger.finer("EventAcknowledger: WaitBefore = " + waitBefore); if (waitBefore <= 0) { enqueue(new ResumeSession()); ackAt = Long.MIN_VALUE; ackAtTheLatest = Long.MIN_VALUE; } else { if (logger.isLoggable(Level.FINER)) logger.finer("EventAcknowledger: Waiting " + waitBefore + "ms"); synchronized (this) { this.wait(waitBefore); } } } else { logger.finer("EventAcknowledger: Waiting indefinitely"); synchronized (this) { this.wait(); } } } } catch (InterruptedException e) { logger.fine("EventAcknowledger: Thread interrupted"); } finally { logger.fine("EventAcknowledger: Thread terminates"); } } } private class QueueHog extends Thread { final long initialReconnectDelay = 3000; long reconnectDelay = initialReconnectDelay; @Override public void run() { try { setName("QueueHog"); } catch (SecurityException e) { logger.log(Level.WARNING, "Can not set thread name", e); } logger.info("QueueHog: Thread started!"); try { pickFromQueue: while 
(!isInterrupted()) { Action action; // Wait for something to send synchronized (queue) { do { if (lastSentAction == null) { // Pick first if nothing has been sent before action = queue.isEmpty() ? null : queue.first(); } else { // Or the next one action = queue.higher(lastSentAction); } if (action == null) { logger.fine("QueueHog: Got nothing from queue. Waiting for action."); queue.wait(); } } while (action == null); } // Open connection if it is closed while (status != Status.OPENED) { if (status == Status.CLOSED) { synchronized (networkAvailabilityHook) { while (!networkAvailability) { logger.fine("QueueHog: Network is unavailable. Waiting until it is available again."); networkAvailabilityHook.wait(); // TODO: Timeout? } } reconnectDelay = initialReconnectDelay; // TODO: Hmm not sure if this is a proper place to set this logger.fine("QueueHog: calling connect()"); connect(); } synchronized (statusHook) { if (status == Status.OPENING) { logger.info("QueueHog: Waiting for opened connection"); statusHook.wait(); // WebSocket onOpen callback wakes me up } } if (status == Status.CLOSED) { logger.fine("QueueHog: Connection attempt failed"); // If connect failed ... if (!autoReconnect) { logger.fine("QueueHog: Autoreconnect is disabled. Bailing out."); terminate(); return; } logger.fine("QueueHog: Sleeping " + reconnectDelay + "ms before trying again"); synchronized (networkAvailabilityHook) { networkAvailabilityHook.wait(reconnectDelay); } reconnectDelay *= 1.5; } else if (status == Status.OPENED) { if (sessionId != null && lastReceivedEvent != null) { logger.fine("QueueHog: Resuming session"); // If connection was opened and a session is present try { Action r = new ResumeSession(); r.setSessionId(sessionId); r.setEventId(lastReceivedEvent.getId()); JsonElement element = gson.toJsonTree(r); element.getAsJsonObject().addProperty("action", r.getActionName()); String json = gson.toJson(element); logger.finer("QueueHog: sending resume_session to WebSocket: " + json); webSocketAdapter.send(json); // If resume_session fails, we get an error event with error type "session_not_found" rewindQueue(); continue pickFromQueue; } catch (Exception e) { logger.log(Level.WARNING, "Can't send resume_session", e); // TODO: Terminate session gracefully } } else { logger.fine("QueueHog: Got a connection"); } } } // Include action name and payload count JsonElement element = gson.toJsonTree(action); element.getAsJsonObject().addProperty("action", action.getActionName()); if (action instanceof PayloadAction) { element.getAsJsonObject().addProperty("frames", ((PayloadAction)action).getPayloadCount()); } String header = gson.toJson(element); boolean closingRequest = action instanceof CloseSession; if (closingRequest) { logger.fine("QueueHog: Sending close_session action.
I'll quit after this action!"); } try { if (logger.isLoggable(Level.FINER)) logger.finer("QueueHog: sending header to WebSocket: " + header); webSocketAdapter.send(header); if (action instanceof PayloadAction) { Payload [] payloads = ((PayloadAction)action).getPayloads(); if (payloads != null && payloads.length >= 1) { for (Payload payload : payloads) { String json = "{}"; if (payload != null) { json = gson.toJson(payload); } if (logger.isLoggable(Level.FINER)) logger.finer("QueueHog: sending payload to WebSocket: " + json); webSocketAdapter.send(json); } } } lastSentActionTimestamp.set(elapsedTime()); if (action.isExpectActionId()) { action.flagSent(); synchronized (messageSentToWebsocketHook) { messageSentToWebsocketHook.notifyAll(); } synchronized (queue) { lastSentAction = action; } } else { // Actions without actionId must not be retransmitted or tracked by TimeoutMonitor synchronized (queue) { queue.remove(action); } } } catch (WebSocketAdapterException e) { logger.log(Level.WARNING, "Problem with WebSocket.", e); setStatus(Status.CLOSED); } if (closingRequest) { logger.fine("QueueHog: Terminating transport and stopping QueueHog."); terminate(); return; } } } catch (InterruptedException e) { logger.fine("QueueHog: Thread interrupted"); } finally { logger.fine("QueueHog: Thread terminates"); } } public void resetReconnectDelay() { reconnectDelay = initialReconnectDelay; } } }
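A side note on the reconnect policy in QueueHog above: the delay starts at 3000 ms, grows by a factor of 1.5 per failed attempt, and is reset once a normal (non-error) event arrives. A stand-alone sketch of that backoff pattern follows, with an upper bound added as my own assumption (the original grows without a cap), and tryConnect() as a hypothetical stand-in for connect() plus the status wait:

static void connectWithBackoff() throws InterruptedException {
    long delay = 3000;                    // mirrors initialReconnectDelay in QueueHog
    final long maxDelay = 60 * 1000;      // cap is an assumption, not present in the original
    while (!tryConnect()) {               // tryConnect() is hypothetical
        Thread.sleep(delay);              // the original waits on networkAvailabilityHook instead of sleeping
        delay = Math.min((long) (delay * 1.5), maxDelay);
    }
    // on success the original resets via resetReconnectDelay() when a non-error event is received
}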
Added some exceptions to WebSocketTransport. Trying to catch a bug...
src/com/ninchat/client/transport/WebSocketTransport.java
Added some exceptions to WebSocketTransport. Trying to catch a bug...
<ide><path>rc/com/ninchat/client/transport/WebSocketTransport.java <ide> try { <ide> String eventName = null; <ide> <del> assert payloadFramesLeft == 0; <add> if (payloadFramesLeft != 0) { <add> throw new RuntimeException("Mismatch in payload frame counter!"); <add> } <ide> <ide> // First we have to view received object briefly to figure out a concrete event type and the number of expected payload frames <ide> JsonReader reader = new JsonReader(new StringReader(text)); <ide> // We really should not get into these exception handlers. There's a risk that we mess up payload <ide> // counters and transport state gets corrupted. TODO: Session should probably get terminated now... <ide> <del> } catch (JsonSyntaxException e) { <del> logger.log(Level.SEVERE, "Error while parsing websocket message: " + message, e); <del> <del> } catch (IOException e) { <del> logger.log(Level.SEVERE, "Error while parsing websocket message: " + message, e); <del> } <del> <add> } catch (Exception e) { <add> //logger.log(Level.SEVERE, "Error while parsing websocket message: " + message, e); <add> throw new RuntimeException("Error while parsing websocket message: " + message, e); <add> } <add> } <add> <add> // Debugging. This should never happen <add> if (currentEvent == null) { <add> throw new RuntimeException("currentEvent is null. Should not be!"); <ide> } <ide> <ide> if (payloadFramesLeft <= 0) {
Java
mit
5d349b9bf0475a8211c719bdb11bd1e9fe8345f4
0
TylerNakamura/Calendar-Event-File-Generator,IHoffman5214/Calendar-Event-File-Generator,TylerNakamura/Calendar-Event-File-Generator,IHoffman5214/Calendar-Event-File-Generator
import java.util.Random; import java.text.DecimalFormat; public class Vevent implements Comparable<Vevent> { /* ----TEMPLATE---- https://en.wikipedia.org/wiki/ICalendar BEGIN:VEVENT UID:[email protected] DTSTAMP:19970714T170000Z ORGANIZER;CN=John Doe:MAILTO:[email protected] DTSTART:19970714T170000Z DTEND:19970715T035959Z SUMMARY:Bastille Day Party */ private String UID; private String DTSTAMP; private String ORGANIZER; private String DTSTART; private String DTEND; private String SUMMARY; private Geo GEO; private String CLASS; private String COMMENT; //default constructor public Vevent() { } //overloaded constructor public Vevent(String inputUID, String inputDTSTAMP, String inputORGANIZER, String inputDTSTART, String inputDTEND, String inputSUMMARY, Geo inputGEO, String inputCLASS) { UID = inputUID; DTSTAMP = inputDTSTAMP; ORGANIZER = inputORGANIZER; DTSTART = inputDTSTART; DTEND = inputDTEND; SUMMARY = inputSUMMARY; GEO = inputGEO; CLASS = inputCLASS; } //sets all vevents members variables to random but valid values public void setRandomValues() { int minuidsize = 7; int maxuidsize = 20; int minsumsize = 10; int maxsumsize = 40; int minorgsize = 5; int maxorgsize = 20; String [] classOptions = {"PUBLIC", "PRIVATE"}; DecimalFormat df = new DecimalFormat("#.##"); int sizeOfUid = minuidsize + (int)(Math.random() * maxuidsize); int sizeOfSummary = minsumsize + (int)(Math.random() * maxsumsize); int sizeOfOrganizer = minorgsize + (int)(Math.random() * maxorgsize); int classChoice = 0 + (int)(Math.random() * 2 ); double latChoice = -90.0 + (Math.random() * 90.0 ); double lonChoice = -180.0 + (Math.random() * 180.0 ); RandomString myRS1 = new RandomString(sizeOfUid); RandomString myRS2 = new RandomString(sizeOfSummary); RandomString myRS3 = new RandomString(sizeOfOrganizer); UID = myRS1.nextString(); SUMMARY = myRS2.nextString(); ORGANIZER = myRS3.nextString(); CLASS = classOptions[classChoice]; GEO = new Geo(df.format(latChoice) + ";" + df.format(lonChoice)); DTSTAMP = getRandomDate(); DTSTART = getRandomDate(); DTEND = getRandomDate(); COMMENT = myRS1.nextString(); //ensure that the stamp is lesser than the start date of the event while(dateIsGreaterThanDate(DTSTAMP, DTSTART)) { DTSTART = getRandomDate(); } //ensure that the date end is lesser than the date start while(dateIsGreaterThanDate(DTSTART, DTEND)) { DTEND = getRandomDate(); } //DEBUG OUTPUT /* System.out.println("UID: " + UID); System.out.println("SUMMARY: " + SUMMARY); System.out.println("ORGANIZER: " + ORGANIZER); System.out.println("CLASS: " + CLASS); System.out.println("GEO: " + GEO.toString()); System.out.println("DTSTAMP: " + DTSTAMP); System.out.println("DTSTART: " + DTSTART); System.out.println("DTEND " + DTEND); System.out.println("COMMENT " + COMMENT); */ } private String getRandomDate() { DecimalFormat dm = new DecimalFormat("##"); String result = ""; int minYear = 1990; int maxYear = 2030; int maxMonth = 12; int maxDay = 31; int maxHour = 24; int max = 60; int min = 1; int yearChoice = minYear + (int)(Math.random() * (maxYear - minYear)); int monthChoice= min + (int)(Math.random() * (maxMonth - min)); int dayChoice = min + (int)(Math.random() * (maxDay - min)); int hourChoice = min + (int)(Math.random() * (maxHour - min)); int minChoice = min + (int)(Math.random() * (max - min)); int secChoice = min + (int)(Math.random() * (max - min)); result += yearChoice + "" + String.format("%02d", monthChoice) + "" + String.format("%02d", dayChoice) + "" + "T" + String.format("%02d", hourChoice) + "" + String.format("%02d", 
minChoice) + "" + String.format("%02d", secChoice) + "" + "Z"; return result; } private boolean dateIsGreaterThanDate(String a, String b) { if(!validDateFormat(a) || !validDateFormat(b)) { System.err.println("not a valid date format"); return false; } int AYear = Integer.parseInt(a.substring(0, 4)); int AMonth = Integer.parseInt(a.substring(4, 6)); int ADay = Integer.parseInt(a.substring(6, 8)); int AHours = Integer.parseInt(a.substring(9, 11)); int AMinutes = Integer.parseInt(a.substring(11, 13)); int ASeconds = Integer.parseInt(a.substring(13,15)); int BYear = Integer.parseInt(b.substring(0, 4)); int BMonth = Integer.parseInt(b.substring(4, 6)); int BDay = Integer.parseInt(b.substring(6, 8)); int BHours = Integer.parseInt(b.substring(9, 11)); int BMinutes = Integer.parseInt(b.substring(11, 13)); int BSeconds = Integer.parseInt(b.substring(13,15)); if (AYear > BYear) return true; if(BYear > AYear) return false; if(AMonth > BMonth) return true; if(BMonth > AMonth) return false; if(ADay > BDay) return true; if(BDay > ADay) return false; if(AHours > BHours) return true; if(BHours > AHours) return false; if(AMinutes > AMinutes) return true; if(BMinutes > AMinutes) return false; if(ASeconds > BSeconds) return true; if(BSeconds > ASeconds) return false; System.err.println(a + " is the same as " + b); return false; } public String getCOMMENT() { return COMMENT; } public void setCOMMENT(String input) { if(validCOMMENT(input)) { COMMENT = input; } else { System.err.println("Cannot set COMMENT to \"" + input +"\"because COMMENT is not valid"); } } public String getUID() { return UID; } public void setUID(String input) { if (validUID(input)) { UID = input; } else { System.err.println("Cannot set UID to \"" + input + "\"\nbecause UID is not valid"); } } public String getDTSTAMP() { return DTSTAMP; } public void setDTSTAMP(String input) { if (validDTSTAMP(input)) { DTSTAMP = input; } else { System.err.println("Cannot set DTSTAMP to \"" + input + "\"\nbecause DTSTAMP is not valid"); } } public String getORGANIZER() { return ORGANIZER; } public void setORGANIZER(String input) { if (validORGANIZER(input)) { ORGANIZER = input; } else { System.err.println("Cannot set ORGANIZER to \"" + input + "\"\nbecause ORGANIZER is not valid"); } } public String getDTSTART() { return DTSTART; } public void setDTSTART(String input) { if (validDTSTART(input)) { DTSTART = input; } else { System.err.println("Cannot set DTSTART to \"" + input + "\"\nbecause DTSTART is not valid"); } } public String getDTEND() { return DTEND; } public void setDTEND(String input) { if (validDTEND(input)) { DTEND = input; } else { System.err.println("Cannot set DTEND to \"" + input + "\"\nbecause DTEND is not valid"); } } public String getSUMMARY() { return SUMMARY; } public void setSUMMARY(String input) { if (validSUMMARY(input)) { SUMMARY = input; } else { System.err.println("Cannot set SUMMARY to \"" + input + "\"\nbecause SUMMARY is not valid"); } } public Geo getGEO() { return GEO; } public void setGEO(String input) { if (validGEO(input)) { GEO = new Geo(input); } else { System.err.println("Cannot set GEO to \"" + input.toString() + "\"\nbecause SUMMARY is not valid"); } } public String getCLASS() { return CLASS; } public void setCLASS(String input) { if (validCLASS(input)) { CLASS = input; } else { System.err.println("Cannot set CLASS to \"" + input + "\"\nbecause CLASS is not valid"); } } public boolean validCOMMENT(String input) { if(input.length() > 100) { return false; } else { return true; } } public boolean validUID(String input) { 
if (input.equals("invalid") || input.length() > 70) { return false; } return true; } public boolean validDTSTAMP(String input) { return validDateFormat(input); } public boolean validORGANIZER(String input) { if (input.length() > 60) { return false; } else { return true; } } public boolean validDTSTART(String input) { return validDateFormat(input); } public boolean validDTEND(String input) { return validDateFormat(input); } private boolean validDateFormat(String input) { if (input.length() != 16) { System.err.println("Date: " + input + " is " + input.length() + " char, should be 16."); return false; } int expectYear = Integer.parseInt(input.substring(0, 4)); int expectMonth = Integer.parseInt(input.substring(4, 6)); int expectDay = Integer.parseInt(input.substring(6, 8)); char expectT = input.charAt(8); expectT = Character.toLowerCase(expectT); int expectHours = Integer.parseInt(input.substring(9, 11)); int expectMinutes = Integer.parseInt(input.substring(11, 13)); int expectSeconds = Integer.parseInt(input.substring(13,15)); char expectZ = input.charAt(15); expectZ = Character.toLowerCase(expectZ); if (expectYear < 0) { return false; } if (expectMonth < 1 || expectMonth > 12) { return false; } if (expectDay < 1 || expectDay > 31) { return false; } if(expectT != 't') { return false; } if (expectHours < 1 || expectHours > 24) { return false; } if (expectMinutes < 0 || expectMinutes > 59) { return false; } if (expectSeconds < 0 || expectSeconds > 59) { return false; } if (expectZ != 'z') { return false; } return true; } public boolean validSUMMARY(String input) { if (input.length() > 400) { return false; } else { return true; } } public boolean validGEO(Geo input) { return validGEO(input.toString()); } public boolean validGEO(String input) { Geo temp = new Geo(); boolean myReturn = false; try { if (input.contains("GEO:")) { //get rid of GEO: input = stripTitle(input); } //get coordinates String [] coords = input.split(";"); if (!temp.isValidLatitude(Float.valueOf(coords[0])) || !temp.isValidLongitude(Float.valueOf(coords[1]))) { myReturn = false; } else { myReturn = true; } } catch(Exception e) { } return myReturn; } //http://tools.ietf.org/html/rfc5545#section-3.8.1.3 public boolean validCLASS(String input) { boolean myReturn = false; //if the input contains class if (input.contains("CLASS:")) { input = stripTitle(input); } //if the input is equal to any of the three accepted values, see RFC for more information if (input.equals("PUBLIC") || input.equals("PRIVATE") || input.equals("CONFIDENTIAL")) { myReturn = true; } else { myReturn = false; } return myReturn; } /* ensures that all fields of vevent are valid according to protocol https://en.wikipedia.org/wiki/ICalendar http://tools.ietf.org/html/rfc5545 */ public boolean isValid() { boolean myReturn; if (validUID(UID) && validDTSTAMP(DTSTAMP) && validORGANIZER(ORGANIZER) && validDTSTART(DTSTART) && validDTEND(DTEND) && validSUMMARY(SUMMARY)) { myReturn = true; } else { myReturn = false; } //first checks to see if CLASS is set, then checks to see if it is valid //without checking for null value, a null pointer exception is returned if (CLASS != null) { if(validCLASS(CLASS)) { myReturn = true; } else { myReturn = false; } } //first checks to see if GEO is set, then checks to see if it is valid //without checking for null value, a null pointer exception is returned if (GEO != null) { if (validGEO(GEO)) { myReturn = true; } else { myReturn = false; } } return myReturn; } /* adds all non null or empty fields to a result string */ public String 
toString() { String result = ""; result += "BEGIN:VEVENT\n"; if (UID != null && !UID.equals("")) { result += "UID:"; result += UID; result += "\n"; } if (DTSTAMP != null && !DTSTAMP.equals("")) { result += "DTSTAMP:"; result += DTSTAMP; result += "\n"; } if (ORGANIZER != null && !ORGANIZER.equals("")) { result += "ORGANIZER:"; result += ORGANIZER; result += "\n"; } if (DTSTART != null && !DTSTART.equals("")) { result += "DTSTART:"; result += DTSTART; result += "\n"; } if (DTEND != null && !DTEND.equals("")) { result += "DTEND:"; result += DTEND; result += "\n"; } if (SUMMARY != null && !SUMMARY.equals("")) { result += "SUMMARY:"; result += SUMMARY; result += "\n"; } if (GEO != null && !GEO.toString().equals("")) { result += "GEO:"; result += GEO.toString(); result += "\n"; } if (CLASS != null && !CLASS.equals("")) { result += "CLASS:"; result += CLASS; result += "\n"; } if (COMMENT != null && !COMMENT.equals("")) { result += "COMMENT:"; result += COMMENT; result += "\n"; } result += "END:VEVENT\n"; return result; } private String stripTitle(String input) { String [] temp = input.split(":"); return temp[1]; } public int compareTo(Vevent anotherVevent) { int yearComp = Integer.parseInt(DTSTART.substring(0, 4)) - Integer.parseInt(anotherVevent.getDTSTART().substring(0, 4)); if (yearComp != 0) { return yearComp; } int monthComp = Integer.parseInt(DTSTART.substring(4, 6)) - Integer.parseInt(anotherVevent.getDTSTART().substring(4, 6)); if (monthComp != 0) { return monthComp; } int dayComp = Integer.parseInt(DTSTART.substring(6, 8)) - Integer.parseInt(anotherVevent.getDTSTART().substring(6, 8)); if (dayComp != 0) { return dayComp; } int hoursComp = Integer.parseInt(DTSTART.substring(9, 11)) - Integer.parseInt(anotherVevent.getDTSTART().substring(9, 11)); if (hoursComp != 0) { return hoursComp; } int minsComp = Integer.parseInt(DTSTART.substring(11, 13)) - Integer.parseInt(anotherVevent.getDTSTART().substring(11, 13)); if (minsComp != 0) { return minsComp; } int secsComp = Integer.parseInt(DTSTART.substring(13, 15)) - Integer.parseInt(anotherVevent.getDTSTART().substring(13, 15)); if (secsComp != 0) { return secsComp; } return 0; } }
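One observation on the comparison code above: because the timestamp format is fixed-width YYYYMMDD'T'HHMMSS'Z' with zero-padded fields, chronological order coincides with lexicographic order, so the field-by-field logic in dateIsGreaterThanDate() and compareTo() could be collapsed to a single string comparison. A sketch of that alternative (not what the class does), assuming both inputs already pass validDateFormat() and use the same letter case for 'T' and 'Z':

// Equivalent ordering for two valid 16-character stamps, e.g. "19970714T170000Z"
private static boolean isAfter(String a, String b) {
    return a.compareTo(b) > 0; // zero-padded digits make lexicographic == chronological
}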
Vevent.java
import java.util.Random; import java.text.DecimalFormat; public class Vevent implements Comparable<Vevent> { /* ----TEMPLATE---- https://en.wikipedia.org/wiki/ICalendar BEGIN:VEVENT UID:[email protected] DTSTAMP:19970714T170000Z ORGANIZER;CN=John Doe:MAILTO:[email protected] DTSTART:19970714T170000Z DTEND:19970715T035959Z SUMMARY:Bastille Day Party */ private String UID; private String DTSTAMP; private String ORGANIZER; private String DTSTART; private String DTEND; private String SUMMARY; private Geo GEO; private String CLASS; private String COMMENT; //default constructor public Vevent() { } //overloaded constructor public Vevent(String inputUID, String inputDTSTAMP, String inputORGANIZER, String inputDTSTART, String inputDTEND, String inputSUMMARY, Geo inputGEO, String inputCLASS) { UID = inputUID; DTSTAMP = inputDTSTAMP; ORGANIZER = inputORGANIZER; DTSTART = inputDTSTART; DTEND = inputDTEND; SUMMARY = inputSUMMARY; GEO = inputGEO; CLASS = inputCLASS; } //sets all vevents members variables to random but valid values public void setRandomValues() { int minuidsize = 7; int maxuidsize = 20; int minsumsize = 10; int maxsumsize = 40; int minorgsize = 5; int maxorgsize = 20; String [] classOptions = {"PUBLIC", "PRIVATE"}; DecimalFormat df = new DecimalFormat("#.##"); int sizeOfUid = minuidsize + (int)(Math.random() * maxuidsize); int sizeOfSummary = minsumsize + (int)(Math.random() * maxsumsize); int sizeOfOrganizer = minorgsize + (int)(Math.random() * maxorgsize); int classChoice = 0 + (int)(Math.random() * 2 ); double latChoice = -90.0 + (Math.random() * 90.0 ); double lonChoice = -180.0 + (Math.random() * 180.0 ); RandomString myRS1 = new RandomString(sizeOfUid); RandomString myRS2 = new RandomString(sizeOfSummary); RandomString myRS3 = new RandomString(sizeOfOrganizer); UID = myRS1.nextString(); SUMMARY = myRS2.nextString(); ORGANIZER = myRS3.nextString(); CLASS = classOptions[classChoice]; GEO = new Geo(df.format(latChoice) + ";" + df.format(lonChoice)); DTSTAMP = getRandomDate(); DTSTART = getRandomDate(); DTEND = getRandomDate(); while(dateIsGreaterThanDate(DTSTAMP, DTSTART)) { DTSTART = getRandomDate(); } while(dateIsGreaterThanDate(DTSTART, DTEND)) { DTEND = getRandomDate(); } //DEBUG OUTPUT /* System.out.println("UID: " + UID); System.out.println("SUMMARY: " + SUMMARY); System.out.println("ORGANIZER: " + ORGANIZER); System.out.println("CLASS: " + CLASS); System.out.println("GEO: " + GEO.toString()); System.out.println("DTSTAMP: " + DTSTAMP); System.out.println("DTSTART: " + DTSTART); System.out.println("DTEND " + DTEND); */ } private String getRandomDate() { DecimalFormat dm = new DecimalFormat("##"); String result = ""; int minYear = 1990; int maxYear = 2030; int maxMonth = 12; int maxDay = 31; int maxHour = 24; int max = 60; int min = 1; int yearChoice = minYear + (int)(Math.random() * (maxYear - minYear)); int monthChoice= min + (int)(Math.random() * (maxMonth - min)); int dayChoice = min + (int)(Math.random() * (maxDay - min)); int hourChoice = min + (int)(Math.random() * (maxHour - min)); int minChoice = min + (int)(Math.random() * (max - min)); int secChoice = min + (int)(Math.random() * (max - min)); result += yearChoice + "" + String.format("%02d", monthChoice) + "" + String.format("%02d", dayChoice) + "" + "T" + String.format("%02d", hourChoice) + "" + String.format("%02d", minChoice) + "" + String.format("%02d", secChoice) + "" + "Z"; return result; } private boolean dateIsGreaterThanDate(String a, String b) { if(!validDateFormat(a) || !validDateFormat(b)) { 
System.err.println("not a valid date format"); return false; } int AYear = Integer.parseInt(a.substring(0, 4)); int AMonth = Integer.parseInt(a.substring(4, 6)); int ADay = Integer.parseInt(a.substring(6, 8)); int AHours = Integer.parseInt(a.substring(9, 11)); int AMinutes = Integer.parseInt(a.substring(11, 13)); int ASeconds = Integer.parseInt(a.substring(13,15)); int BYear = Integer.parseInt(b.substring(0, 4)); int BMonth = Integer.parseInt(b.substring(4, 6)); int BDay = Integer.parseInt(b.substring(6, 8)); int BHours = Integer.parseInt(b.substring(9, 11)); int BMinutes = Integer.parseInt(b.substring(11, 13)); int BSeconds = Integer.parseInt(b.substring(13,15)); if (AYear > BYear) return true; if(BYear > AYear) return false; if(AMonth > BMonth) return true; if(BMonth > AMonth) return false; if(ADay > BDay) return true; if(BDay > ADay) return false; if(AHours > BHours) return true; if(BHours > AHours) return false; if(AMinutes > AMinutes) return true; if(BMinutes > AMinutes) return false; if(ASeconds > BSeconds) return true; if(BSeconds > ASeconds) return false; System.err.println(a + " is the same as " + b); return false; } public String getCOMMENT() { return COMMENT; } public void setCOMMENT(String input) { if(validCOMMENT(input)) { COMMENT = input; } else { System.err.println("Cannot set COMMENT to \"" + input +"\"because COMMENT is not valid"); } } public String getUID() { return UID; } public void setUID(String input) { if (validUID(input)) { UID = input; } else { System.err.println("Cannot set UID to \"" + input + "\"\nbecause UID is not valid"); } } public String getDTSTAMP() { return DTSTAMP; } public void setDTSTAMP(String input) { if (validDTSTAMP(input)) { DTSTAMP = input; } else { System.err.println("Cannot set DTSTAMP to \"" + input + "\"\nbecause DTSTAMP is not valid"); } } public String getORGANIZER() { return ORGANIZER; } public void setORGANIZER(String input) { if (validORGANIZER(input)) { ORGANIZER = input; } else { System.err.println("Cannot set ORGANIZER to \"" + input + "\"\nbecause ORGANIZER is not valid"); } } public String getDTSTART() { return DTSTART; } public void setDTSTART(String input) { if (validDTSTART(input)) { DTSTART = input; } else { System.err.println("Cannot set DTSTART to \"" + input + "\"\nbecause DTSTART is not valid"); } } public String getDTEND() { return DTEND; } public void setDTEND(String input) { if (validDTEND(input)) { DTEND = input; } else { System.err.println("Cannot set DTEND to \"" + input + "\"\nbecause DTEND is not valid"); } } public String getSUMMARY() { return SUMMARY; } public void setSUMMARY(String input) { if (validSUMMARY(input)) { SUMMARY = input; } else { System.err.println("Cannot set SUMMARY to \"" + input + "\"\nbecause SUMMARY is not valid"); } } public Geo getGEO() { return GEO; } public void setGEO(String input) { if (validGEO(input)) { GEO = new Geo(input); } else { System.err.println("Cannot set GEO to \"" + input.toString() + "\"\nbecause SUMMARY is not valid"); } } public String getCLASS() { return CLASS; } public void setCLASS(String input) { if (validCLASS(input)) { CLASS = input; } else { System.err.println("Cannot set CLASS to \"" + input + "\"\nbecause CLASS is not valid"); } } public boolean validCOMMENT(String input) { if(input.length() > 100) { return false; } else { return true; } } public boolean validUID(String input) { if (input.equals("invalid") || input.length() > 70) { return false; } return true; } public boolean validDTSTAMP(String input) { return validDateFormat(input); } public boolean 
validORGANIZER(String input) { if (input.length() > 60) { return false; } else { return true; } } public boolean validDTSTART(String input) { return validDateFormat(input); } public boolean validDTEND(String input) { return validDateFormat(input); } private boolean validDateFormat(String input) { if (input.length() != 16) { System.err.println("Date: " + input + " is " + input.length() + " char, should be 16."); return false; } int expectYear = Integer.parseInt(input.substring(0, 4)); int expectMonth = Integer.parseInt(input.substring(4, 6)); int expectDay = Integer.parseInt(input.substring(6, 8)); char expectT = input.charAt(8); expectT = Character.toLowerCase(expectT); int expectHours = Integer.parseInt(input.substring(9, 11)); int expectMinutes = Integer.parseInt(input.substring(11, 13)); int expectSeconds = Integer.parseInt(input.substring(13,15)); char expectZ = input.charAt(15); expectZ = Character.toLowerCase(expectZ); if (expectYear < 0) { return false; } if (expectMonth < 1 || expectMonth > 12) { return false; } if (expectDay < 1 || expectDay > 31) { return false; } if(expectT != 't') { return false; } if (expectHours < 1 || expectHours > 24) { return false; } if (expectMinutes < 0 || expectMinutes > 59) { return false; } if (expectSeconds < 0 || expectSeconds > 59) { return false; } if (expectZ != 'z') { return false; } return true; } public boolean validSUMMARY(String input) { if (input.length() > 400) { return false; } else { return true; } } public boolean validGEO(Geo input) { return validGEO(input.toString()); } public boolean validGEO(String input) { Geo temp = new Geo(); boolean myReturn = false; try { if (input.contains("GEO:")) { //get rid of GEO: input = stripTitle(input); } //get coordinates String [] coords = input.split(";"); if (!temp.isValidLatitude(Float.valueOf(coords[0])) || !temp.isValidLongitude(Float.valueOf(coords[1]))) { myReturn = false; } else { myReturn = true; } } catch(Exception e) { } return myReturn; } //http://tools.ietf.org/html/rfc5545#section-3.8.1.3 public boolean validCLASS(String input) { boolean myReturn = false; //if the input contains class if (input.contains("CLASS:")) { input = stripTitle(input); } //if the input is equal to any of the three accepted values, see RFC for more information if (input.equals("PUBLIC") || input.equals("PRIVATE") || input.equals("CONFIDENTIAL")) { myReturn = true; } else { myReturn = false; } return myReturn; } /* ensures that all fields of vevent are valid according to protocol https://en.wikipedia.org/wiki/ICalendar http://tools.ietf.org/html/rfc5545 */ public boolean isValid() { boolean myReturn; if (validUID(UID) && validDTSTAMP(DTSTAMP) && validORGANIZER(ORGANIZER) && validDTSTART(DTSTART) && validDTEND(DTEND) && validSUMMARY(SUMMARY)) { myReturn = true; } else { myReturn = false; } //first checks to see if CLASS is set, then checks to see if it is valid //without checking for null value, a null pointer exception is returned if (CLASS != null) { if(validCLASS(CLASS)) { myReturn = true; } else { myReturn = false; } } //first checks to see if GEO is set, then checks to see if it is valid //without checking for null value, a null pointer exception is returned if (GEO != null) { if (validGEO(GEO)) { myReturn = true; } else { myReturn = false; } } return myReturn; } /* adds all non null or empty fields to a result string */ public String toString() { String result = ""; result += "BEGIN:VEVENT\n"; if (UID != null && !UID.equals("")) { result += "UID:"; result += UID; result += "\n"; } if (DTSTAMP != null && 
!DTSTAMP.equals("")) { result += "DTSTAMP:"; result += DTSTAMP; result += "\n"; } if (ORGANIZER != null && !ORGANIZER.equals("")) { result += "ORGANIZER:"; result += ORGANIZER; result += "\n"; } if (DTSTART != null && !DTSTART.equals("")) { result += "DTSTART:"; result += DTSTART; result += "\n"; } if (DTEND != null && !DTEND.equals("")) { result += "DTEND:"; result += DTEND; result += "\n"; } if (SUMMARY != null && !SUMMARY.equals("")) { result += "SUMMARY:"; result += SUMMARY; result += "\n"; } if (GEO != null && !GEO.toString().equals("")) { result += "GEO:"; result += GEO.toString(); result += "\n"; } if (CLASS != null && !CLASS.equals("")) { result += "CLASS:"; result += CLASS; result += "\n"; } if (COMMENT != null && !COMMENT.equals("")) { result += "COMMENT:"; result += COMMENT; result += "\n"; } result += "END:VEVENT\n"; return result; } private String stripTitle(String input) { String [] temp = input.split(":"); return temp[1]; } public int compareTo(Vevent anotherVevent) { int yearComp = Integer.parseInt(DTSTART.substring(0, 4)) - Integer.parseInt(anotherVevent.getDTSTART().substring(0, 4)); if (yearComp != 0) { return yearComp; } int monthComp = Integer.parseInt(DTSTART.substring(4, 6)) - Integer.parseInt(anotherVevent.getDTSTART().substring(4, 6)); if (monthComp != 0) { return monthComp; } int dayComp = Integer.parseInt(DTSTART.substring(6, 8)) - Integer.parseInt(anotherVevent.getDTSTART().substring(6, 8)); if (dayComp != 0) { return dayComp; } int hoursComp = Integer.parseInt(DTSTART.substring(9, 11)) - Integer.parseInt(anotherVevent.getDTSTART().substring(9, 11)); if (hoursComp != 0) { return hoursComp; } int minsComp = Integer.parseInt(DTSTART.substring(11, 13)) - Integer.parseInt(anotherVevent.getDTSTART().substring(11, 13)); if (minsComp != 0) { return minsComp; } int secsComp = Integer.parseInt(DTSTART.substring(13, 15)) - Integer.parseInt(anotherVevent.getDTSTART().substring(13, 15)); if (secsComp != 0) { return secsComp; } return 0; } }
add comment randomization to random event
Vevent.java
add comment randomization to random event
<ide><path>event.java
<ide> DTSTAMP = getRandomDate();
<ide> DTSTART = getRandomDate();
<ide> DTEND = getRandomDate();
<del>
<add> COMMENT = myRS1.nextString();
<add>
<add> //ensure that the stamp is lesser than the start date of the event
<ide> while(dateIsGreaterThanDate(DTSTAMP, DTSTART))
<ide> {
<ide> DTSTART = getRandomDate();
<ide> }
<ide>
<add> //ensure that the date end is lesser than the date start
<ide> while(dateIsGreaterThanDate(DTSTART, DTEND))
<ide> {
<ide> DTEND = getRandomDate();
<ide> }
<ide> System.out.println("DTSTAMP: " + DTSTAMP);
<ide> System.out.println("DTSTART: " + DTSTART);
<ide> System.out.println("DTEND " + DTEND);
<add> System.out.println("COMMENT " + COMMENT);
<ide> */
<ide> }
<ide>
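The two while loops in this diff re-roll DTSTART and DTEND until the three stamps come out in order, which is rejection sampling and may iterate several times. A single-pass alternative, purely illustrative (only getRandomDate() and the field names are taken from the diff): because the zero-padded yyyyMMdd'T'HHmmss'Z' form sorts lexicographically in chronological order, the three stamps can simply be sorted.

// Illustrative alternative, not the committed code: order three random
// stamps in one pass instead of re-rolling until the loop conditions hold.
String[] stamps = { getRandomDate(), getRandomDate(), getRandomDate() };
java.util.Arrays.sort(stamps); // fixed-width timestamps sort chronologically
DTSTAMP = stamps[0]; // stamp <= start
DTSTART = stamps[1]; // start <= end
DTEND = stamps[2];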
Java
apache-2.0
c77005cde848b53e54f9a6d22d2c8240e8184b42
0
dldinternet/resty-gwt,ibaca/resty-gwt,paul-duffy/resty-gwt,Armageddon-/resty-gwt,BiovistaInc/resty-gwt,resty-gwt/resty-gwt,ibaca/resty-gwt,paul-duffy/resty-gwt,Armageddon-/resty-gwt,BiovistaInc/resty-gwt,cguillot/resty-gwt,BiovistaInc/resty-gwt,resty-gwt/resty-gwt,cguillot/resty-gwt,ibaca/resty-gwt,dldinternet/resty-gwt,cguillot/resty-gwt,Armageddon-/resty-gwt,resty-gwt/resty-gwt,paul-duffy/resty-gwt
/** * Copyright (C) 2009-2012 the original author or authors. * See the notice.md file distributed with this work for additional * information regarding copyright ownership. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fusesource.restygwt.rebind; import static org.fusesource.restygwt.rebind.BaseSourceCreator.DEBUG; import static org.fusesource.restygwt.rebind.BaseSourceCreator.ERROR; import static org.fusesource.restygwt.rebind.BaseSourceCreator.INFO; import static org.fusesource.restygwt.rebind.BaseSourceCreator.TRACE; import static org.fusesource.restygwt.rebind.BaseSourceCreator.WARN; import java.lang.reflect.Constructor; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import com.google.gwt.core.ext.BadPropertyValueException; import org.fusesource.restygwt.client.AbstractJsonEncoderDecoder; import org.fusesource.restygwt.client.AbstractNestedJsonEncoderDecoder; import org.fusesource.restygwt.client.Json; import org.fusesource.restygwt.client.Json.Style; import org.fusesource.restygwt.client.ObjectEncoderDecoder; import com.google.gwt.core.ext.GeneratorContext; import com.google.gwt.core.ext.TreeLogger; import com.google.gwt.core.ext.UnableToCompleteException; import com.google.gwt.core.ext.typeinfo.JClassType; import com.google.gwt.core.ext.typeinfo.JParameterizedType; import com.google.gwt.core.ext.typeinfo.JPrimitiveType; import com.google.gwt.core.ext.typeinfo.JType; import com.google.gwt.json.client.JSONValue; import com.google.gwt.xml.client.Document; /** * * @author <a href="http://hiramchirino.com">Hiram Chirino</a> */ public class JsonEncoderDecoderInstanceLocator { public static final String JSON_ENCODER_DECODER_CLASS = AbstractJsonEncoderDecoder.class.getName(); public static final String JSON_NESTED_ENCODER_DECODER_CLASS = AbstractNestedJsonEncoderDecoder.class.getName(); public static final String JSON_CLASS = Json.class.getName(); public static final String CUSTOM_SERIALIZER_GENERATORS = "org.fusesource.restygwt.restyjsonserializergenerator"; public final JClassType STRING_TYPE; public final JClassType JSON_VALUE_TYPE; public final JClassType DOCUMENT_TYPE; public final JClassType MAP_TYPE; public final JClassType SET_TYPE; public final JClassType LIST_TYPE; public final JClassType COLLECTION_TYPE; public final HashMap<JType, String> builtInEncoderDecoders = new HashMap<JType, String>(); public final JsonSerializerGenerators customGenerators = new JsonSerializerGenerators(); public final GeneratorContext context; public final TreeLogger logger; public JsonEncoderDecoderInstanceLocator(GeneratorContext context, TreeLogger logger) throws UnableToCompleteException { this.context = context; this.logger = logger; this.STRING_TYPE = find(String.class); this.JSON_VALUE_TYPE = find(JSONValue.class); this.DOCUMENT_TYPE = find(Document.class); this.MAP_TYPE = find(Map.class); this.SET_TYPE = find(Set.class); this.LIST_TYPE = 
find(List.class); this.COLLECTION_TYPE = find(Collection.class); builtInEncoderDecoders.put(JPrimitiveType.BOOLEAN, JSON_ENCODER_DECODER_CLASS + ".BOOLEAN"); builtInEncoderDecoders.put(JPrimitiveType.BYTE, JSON_ENCODER_DECODER_CLASS + ".BYTE"); builtInEncoderDecoders.put(JPrimitiveType.CHAR, JSON_ENCODER_DECODER_CLASS + ".CHAR"); builtInEncoderDecoders.put(JPrimitiveType.SHORT, JSON_ENCODER_DECODER_CLASS + ".SHORT"); builtInEncoderDecoders.put(JPrimitiveType.INT, JSON_ENCODER_DECODER_CLASS + ".INT"); builtInEncoderDecoders.put(JPrimitiveType.LONG, JSON_ENCODER_DECODER_CLASS + ".LONG"); builtInEncoderDecoders.put(JPrimitiveType.FLOAT, JSON_ENCODER_DECODER_CLASS + ".FLOAT"); builtInEncoderDecoders.put(JPrimitiveType.DOUBLE, JSON_ENCODER_DECODER_CLASS + ".DOUBLE"); builtInEncoderDecoders.put(find(Boolean.class), JSON_ENCODER_DECODER_CLASS + ".BOOLEAN"); builtInEncoderDecoders.put(find(Byte.class), JSON_ENCODER_DECODER_CLASS + ".BYTE"); builtInEncoderDecoders.put(find(Character.class), JSON_ENCODER_DECODER_CLASS + ".CHAR"); builtInEncoderDecoders.put(find(Short.class), JSON_ENCODER_DECODER_CLASS + ".SHORT"); builtInEncoderDecoders.put(find(Integer.class), JSON_ENCODER_DECODER_CLASS + ".INT"); builtInEncoderDecoders.put(find(Long.class), JSON_ENCODER_DECODER_CLASS + ".LONG"); builtInEncoderDecoders.put(find(Float.class), JSON_ENCODER_DECODER_CLASS + ".FLOAT"); builtInEncoderDecoders.put(find(Double.class), JSON_ENCODER_DECODER_CLASS + ".DOUBLE"); builtInEncoderDecoders.put(find(BigDecimal.class), JSON_ENCODER_DECODER_CLASS + ".BIG_DECIMAL"); builtInEncoderDecoders.put(find(BigInteger.class), JSON_ENCODER_DECODER_CLASS + ".BIG_INTEGER"); builtInEncoderDecoders.put(STRING_TYPE, JSON_ENCODER_DECODER_CLASS + ".STRING"); builtInEncoderDecoders.put(DOCUMENT_TYPE, JSON_ENCODER_DECODER_CLASS + ".DOCUMENT"); builtInEncoderDecoders.put(JSON_VALUE_TYPE, JSON_ENCODER_DECODER_CLASS + ".JSON_VALUE"); builtInEncoderDecoders.put(find(Date.class), JSON_ENCODER_DECODER_CLASS + ".DATE"); builtInEncoderDecoders.put(find(Object.class), ObjectEncoderDecoder.class.getName() + ".INSTANCE"); fillInCustomGenerators(context, logger); } @SuppressWarnings("unchecked") private void fillInCustomGenerators(GeneratorContext context, TreeLogger logger) { try { List<String> classNames = context.getPropertyOracle().getConfigurationProperty(CUSTOM_SERIALIZER_GENERATORS).getValues(); for (String name: classNames) { try { Class<? extends RestyJsonSerializerGenerator> clazz = (Class<? extends RestyJsonSerializerGenerator>) Class.forName(name); Constructor<? extends RestyJsonSerializerGenerator> constructor = clazz.getDeclaredConstructor(); RestyJsonSerializerGenerator generator = constructor.newInstance(); customGenerators.addGenerator(generator, context.getTypeOracle()); } catch (Exception e) { logger.log(WARN, "Could not access class: " + name, e); } } } catch (BadPropertyValueException ignore) {} } private JClassType find(Class<?> type) throws UnableToCompleteException { return find(type.getName()); } private JClassType find(String type) throws UnableToCompleteException { return RestServiceGenerator.find(logger, context, type); } private String getEncoderDecoder(JType type, TreeLogger logger) throws UnableToCompleteException { String rc = builtInEncoderDecoders.get(type); if (rc == null) { JClassType ct = type.isClass() == null? 
type.isInterface() : type.isClass(); if (ct != null && !isCollectionType(ct)) { JsonEncoderDecoderClassCreator generator = new JsonEncoderDecoderClassCreator(logger, context, ct); return generator.create() + ".INSTANCE"; } } return rc; } private String getCustomEncoderDecoder(JType type) { RestyJsonSerializerGenerator restyGenerator = customGenerators.findGenerator(type); if (restyGenerator == null) { return null; } Class<? extends JsonEncoderDecoderClassCreator> clazz = restyGenerator.getGeneratorClass(); try { Constructor<? extends JsonEncoderDecoderClassCreator> constructor = clazz.getDeclaredConstructor(TreeLogger.class, GeneratorContext.class, JClassType.class); JsonEncoderDecoderClassCreator generator = constructor.newInstance(logger, context, type); return generator.create() + ".INSTANCE"; } catch (Exception e) { logger.log(WARN, "Could not access class: " + clazz, e); return null; } } public boolean hasCustomEncoderDecoder(JType type) { return getCustomEncoderDecoder(type) != null; } public String encodeExpression(JType type, String expression, Style style) throws UnableToCompleteException { return encodeDecodeExpression(type, expression, style, "encode", JSON_ENCODER_DECODER_CLASS + ".toJSON", JSON_ENCODER_DECODER_CLASS + ".toJSON", JSON_ENCODER_DECODER_CLASS + ".toJSON", JSON_ENCODER_DECODER_CLASS + ".toJSON"); } public String decodeExpression(JType type, String expression, Style style) throws UnableToCompleteException { return encodeDecodeExpression(type, expression, style, "decode", JSON_ENCODER_DECODER_CLASS + ".toMap", JSON_ENCODER_DECODER_CLASS + ".toSet", JSON_ENCODER_DECODER_CLASS + ".toList", JSON_ENCODER_DECODER_CLASS + ".toArray"); } private String encodeDecodeExpression(JType type, String expression, Style style, String encoderMethod, String mapMethod, String setMethod, String listMethod, String arrayMethod) throws UnableToCompleteException { String customEncoderDecoder = getCustomEncoderDecoder(type); if (customEncoderDecoder != null) { return customEncoderDecoder + "." + encoderMethod + "(" + expression + ")"; } if (null != type.isEnum()) { if (encoderMethod.equals("encode")) { return encodeDecodeExpression(STRING_TYPE, expression + ".name()", style, encoderMethod, mapMethod, setMethod, listMethod, arrayMethod); } return type.getQualifiedSourceName() + ".valueOf(" + encodeDecodeExpression(STRING_TYPE, expression, style, encoderMethod, mapMethod, setMethod, listMethod, arrayMethod) + ")"; } String encoderDecoder = getEncoderDecoder(type, logger); if (encoderDecoder != null) { return encoderDecoder + "." + encoderMethod + "(" + expression + ")"; } JClassType clazz = type.isClassOrInterface(); if (isCollectionType(clazz)) { JClassType[] types = getTypes(type); String[] coders = isMapEncoderDecoder( clazz, types, style ); if ( coders != null ){ String keyEncoderDecoder = coders[ 1 ]; encoderDecoder = coders[ 0 ]; if (encoderDecoder != null && keyEncoderDecoder != null) { return mapMethod + "(" + expression + ", " + keyEncoderDecoder + ", " + encoderDecoder + ", " + JSON_CLASS + ".Style." + style.name() + ")"; } else if (encoderDecoder != null) { return mapMethod + "(" + expression + ", " + encoderDecoder + ", " + JSON_CLASS + ".Style." 
+ style.name() + ")"; } } encoderDecoder = isSetEncoderDecoder(clazz, types, style); if (encoderDecoder != null) { return setMethod + "(" + expression + ", " + encoderDecoder + ")"; } encoderDecoder = isListEncoderDecoder(clazz, types, style); if (encoderDecoder != null) { return listMethod + "(" + expression + ", " + encoderDecoder + ")"; } encoderDecoder = isCollectionEncoderDecoder(clazz, types, style); if (encoderDecoder != null) { return listMethod + "(" + expression + ", " + encoderDecoder + ")"; } } encoderDecoder = isArrayEncoderDecoder(type, style); if (encoderDecoder != null) { if (encoderMethod.equals("encode")) { return arrayMethod + "(" + expression + ", " + encoderDecoder + ")"; } return arrayMethod + "(" + expression + ", " + encoderDecoder + ", new " + type.isArray().getComponentType().getQualifiedSourceName() + "[" + JSON_ENCODER_DECODER_CLASS + ".getSize(" + expression + ")])"; } error("Do not know how to encode/decode " + type); return null; } protected String[] isMapEncoderDecoder(JClassType clazz, JClassType[] types, Style style) throws UnableToCompleteException { String encoderDecoder; if (clazz.isAssignableTo(MAP_TYPE)) { if (types.length != 2) { error("Map must define two and only two type parameters"); } if (isCollectionType(types[0])) { error("Map key can't be a collection"); } String keyEncoderDecoder = getNestedEncoderDecoder(types[0], style); encoderDecoder = getNestedEncoderDecoder(types[1], style); return new String[]{ encoderDecoder, keyEncoderDecoder }; } return null; } String getNestedEncoderDecoder( JType type, Style style ) throws UnableToCompleteException{ String result = getEncoderDecoder(type, logger); if ( result != null ){ return result; } JClassType clazz = type.isClassOrInterface(); if (isCollectionType(clazz)) { JClassType[] types = getTypes(type); String[] coders = isMapEncoderDecoder( clazz, types, style ); if ( coders != null ){ String keyEncoderDecoder = coders[ 1 ]; result = coders[ 0 ]; if (result != null && keyEncoderDecoder != null) { return JSON_NESTED_ENCODER_DECODER_CLASS + ".mapEncoderDecoder( " + keyEncoderDecoder + ", " + result + ", " + JSON_CLASS + ".Style." + style.name() + " )"; } else if (result != null) { return JSON_NESTED_ENCODER_DECODER_CLASS + ".mapEncoderDecoder( " + result + ", " + JSON_CLASS + ".Style." 
+ style.name() + " )"; } } result = isListEncoderDecoder( clazz, types, style ); if( result != null ){ return JSON_NESTED_ENCODER_DECODER_CLASS + ".listEncoderDecoder( " + result + " )"; } result = isSetEncoderDecoder( clazz, types, style ); if( result != null ){ return JSON_NESTED_ENCODER_DECODER_CLASS + ".setEncoderDecoder( " + result + " )"; } return JSON_NESTED_ENCODER_DECODER_CLASS + ".collectionEncoderDecoder( " + result + " )"; } else { result = isArrayEncoderDecoder(type, style); if( result != null ){ return JSON_NESTED_ENCODER_DECODER_CLASS + ".arrayEncoderDecoder( " + result + " )"; } } return null; } protected String isArrayEncoderDecoder( JType type, Style style ) throws UnableToCompleteException { if (type.isArray() != null){ JType componentType = type.isArray().getComponentType(); if (componentType.isArray() != null) { error("Multi-dimensional arrays are not yet supported"); } String encoderDecoder = getNestedEncoderDecoder( componentType, style ); debug("type encoder for: " + componentType + " is " + encoderDecoder); return encoderDecoder; } return null; } protected String isSetEncoderDecoder( JClassType clazz, JClassType[] types, Style style ) throws UnableToCompleteException { if (clazz.isAssignableTo(SET_TYPE)) { if (types.length != 1) { error("Set must define one and only one type parameter"); } String encoderDecoder = getNestedEncoderDecoder( types[0], style ); debug("type encoder for: " + types[0] + " is " + encoderDecoder); return encoderDecoder; } return null; } protected String isListEncoderDecoder( JClassType clazz, JClassType[] types, Style style) throws UnableToCompleteException { if (clazz.isAssignableTo(LIST_TYPE)) { if (types.length != 1) { error("List must define one and only one type parameter"); } String encoderDecoder = getNestedEncoderDecoder( types[0], style ); debug("type encoder for: " + types[0] + " is " + encoderDecoder); return encoderDecoder; } return null; } protected String isCollectionEncoderDecoder(JClassType clazz, JClassType[] types, Style style) throws UnableToCompleteException { if (clazz.isAssignableTo(COLLECTION_TYPE)) { if (types.length != 1) { error("Collection must define one and only one type parameter"); } String encoderDecoder = getNestedEncoderDecoder(types[0], style); debug("type encoder for: " + types[0] + " is " + encoderDecoder); return encoderDecoder; } return null; } protected JClassType[] getTypes(JType type) throws UnableToCompleteException { JParameterizedType parameterizedType = type.isParameterized(); if (parameterizedType == null || parameterizedType.getTypeArgs() == null) { error("Collection types must be parameterized."); } JClassType[] types = parameterizedType.getTypeArgs(); return types; } boolean isCollectionType(JClassType clazz) { return clazz != null && (clazz.isAssignableTo(SET_TYPE) || clazz.isAssignableTo(LIST_TYPE) || clazz.isAssignableTo(MAP_TYPE) || clazz.isAssignableTo(COLLECTION_TYPE)); } protected void error(String msg) throws UnableToCompleteException { logger.log(ERROR, msg); throw new UnableToCompleteException(); } protected void warn(String msg) throws UnableToCompleteException { logger.log(WARN, msg); throw new UnableToCompleteException(); } protected void info(String msg) { logger.log(INFO, msg); } protected void debug(String msg) { logger.log(DEBUG, msg); } protected void trace(String msg) { logger.log(TRACE, msg); } }
restygwt/src/main/java/org/fusesource/restygwt/rebind/JsonEncoderDecoderInstanceLocator.java
/** * Copyright (C) 2009-2012 the original author or authors. * See the notice.md file distributed with this work for additional * information regarding copyright ownership. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fusesource.restygwt.rebind; import static org.fusesource.restygwt.rebind.BaseSourceCreator.DEBUG; import static org.fusesource.restygwt.rebind.BaseSourceCreator.ERROR; import static org.fusesource.restygwt.rebind.BaseSourceCreator.INFO; import static org.fusesource.restygwt.rebind.BaseSourceCreator.TRACE; import static org.fusesource.restygwt.rebind.BaseSourceCreator.WARN; import java.lang.reflect.Constructor; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import com.google.gwt.core.ext.BadPropertyValueException; import org.fusesource.restygwt.client.AbstractJsonEncoderDecoder; import org.fusesource.restygwt.client.AbstractNestedJsonEncoderDecoder; import org.fusesource.restygwt.client.Json; import org.fusesource.restygwt.client.Json.Style; import org.fusesource.restygwt.client.ObjectEncoderDecoder; import com.google.gwt.core.ext.GeneratorContext; import com.google.gwt.core.ext.TreeLogger; import com.google.gwt.core.ext.UnableToCompleteException; import com.google.gwt.core.ext.typeinfo.JClassType; import com.google.gwt.core.ext.typeinfo.JParameterizedType; import com.google.gwt.core.ext.typeinfo.JPrimitiveType; import com.google.gwt.core.ext.typeinfo.JType; import com.google.gwt.json.client.JSONValue; import com.google.gwt.xml.client.Document; /** * * @author <a href="http://hiramchirino.com">Hiram Chirino</a> */ public class JsonEncoderDecoderInstanceLocator { public static final String JSON_ENCODER_DECODER_CLASS = AbstractJsonEncoderDecoder.class.getName(); public static final String JSON_NESTED_ENCODER_DECODER_CLASS = AbstractNestedJsonEncoderDecoder.class.getName(); public static final String JSON_CLASS = Json.class.getName(); public static final String CUSTOM_SERIALIZER_GENERATORS = "org.fusesource.restygwt.restyjsonserializergenerator"; public final JClassType STRING_TYPE; public final JClassType JSON_VALUE_TYPE; public final JClassType DOCUMENT_TYPE; public final JClassType MAP_TYPE; public final JClassType SET_TYPE; public final JClassType LIST_TYPE; public final HashMap<JType, String> builtInEncoderDecoders = new HashMap<JType, String>(); public final JsonSerializerGenerators customGenerators = new JsonSerializerGenerators(); public final GeneratorContext context; public final TreeLogger logger; public JsonEncoderDecoderInstanceLocator(GeneratorContext context, TreeLogger logger) throws UnableToCompleteException { this.context = context; this.logger = logger; this.STRING_TYPE = find(String.class); this.JSON_VALUE_TYPE = find(JSONValue.class); this.DOCUMENT_TYPE = find(Document.class); this.MAP_TYPE = find(Map.class); this.SET_TYPE = find(Set.class); this.LIST_TYPE = find(List.class); builtInEncoderDecoders.put(JPrimitiveType.BOOLEAN, 
JSON_ENCODER_DECODER_CLASS + ".BOOLEAN"); builtInEncoderDecoders.put(JPrimitiveType.BYTE, JSON_ENCODER_DECODER_CLASS + ".BYTE"); builtInEncoderDecoders.put(JPrimitiveType.CHAR, JSON_ENCODER_DECODER_CLASS + ".CHAR"); builtInEncoderDecoders.put(JPrimitiveType.SHORT, JSON_ENCODER_DECODER_CLASS + ".SHORT"); builtInEncoderDecoders.put(JPrimitiveType.INT, JSON_ENCODER_DECODER_CLASS + ".INT"); builtInEncoderDecoders.put(JPrimitiveType.LONG, JSON_ENCODER_DECODER_CLASS + ".LONG"); builtInEncoderDecoders.put(JPrimitiveType.FLOAT, JSON_ENCODER_DECODER_CLASS + ".FLOAT"); builtInEncoderDecoders.put(JPrimitiveType.DOUBLE, JSON_ENCODER_DECODER_CLASS + ".DOUBLE"); builtInEncoderDecoders.put(find(Boolean.class), JSON_ENCODER_DECODER_CLASS + ".BOOLEAN"); builtInEncoderDecoders.put(find(Byte.class), JSON_ENCODER_DECODER_CLASS + ".BYTE"); builtInEncoderDecoders.put(find(Character.class), JSON_ENCODER_DECODER_CLASS + ".CHAR"); builtInEncoderDecoders.put(find(Short.class), JSON_ENCODER_DECODER_CLASS + ".SHORT"); builtInEncoderDecoders.put(find(Integer.class), JSON_ENCODER_DECODER_CLASS + ".INT"); builtInEncoderDecoders.put(find(Long.class), JSON_ENCODER_DECODER_CLASS + ".LONG"); builtInEncoderDecoders.put(find(Float.class), JSON_ENCODER_DECODER_CLASS + ".FLOAT"); builtInEncoderDecoders.put(find(Double.class), JSON_ENCODER_DECODER_CLASS + ".DOUBLE"); builtInEncoderDecoders.put(find(BigDecimal.class), JSON_ENCODER_DECODER_CLASS + ".BIG_DECIMAL"); builtInEncoderDecoders.put(find(BigInteger.class), JSON_ENCODER_DECODER_CLASS + ".BIG_INTEGER"); builtInEncoderDecoders.put(STRING_TYPE, JSON_ENCODER_DECODER_CLASS + ".STRING"); builtInEncoderDecoders.put(DOCUMENT_TYPE, JSON_ENCODER_DECODER_CLASS + ".DOCUMENT"); builtInEncoderDecoders.put(JSON_VALUE_TYPE, JSON_ENCODER_DECODER_CLASS + ".JSON_VALUE"); builtInEncoderDecoders.put(find(Date.class), JSON_ENCODER_DECODER_CLASS + ".DATE"); builtInEncoderDecoders.put(find(Object.class), ObjectEncoderDecoder.class.getName() + ".INSTANCE"); fillInCustomGenerators(context, logger); } @SuppressWarnings("unchecked") private void fillInCustomGenerators(GeneratorContext context, TreeLogger logger) { try { List<String> classNames = context.getPropertyOracle().getConfigurationProperty(CUSTOM_SERIALIZER_GENERATORS).getValues(); for (String name: classNames) { try { Class<? extends RestyJsonSerializerGenerator> clazz = (Class<? extends RestyJsonSerializerGenerator>) Class.forName(name); Constructor<? extends RestyJsonSerializerGenerator> constructor = clazz.getDeclaredConstructor(); RestyJsonSerializerGenerator generator = constructor.newInstance(); customGenerators.addGenerator(generator, context.getTypeOracle()); } catch (Exception e) { logger.log(WARN, "Could not access class: " + name, e); } } } catch (BadPropertyValueException ignore) {} } private JClassType find(Class<?> type) throws UnableToCompleteException { return find(type.getName()); } private JClassType find(String type) throws UnableToCompleteException { return RestServiceGenerator.find(logger, context, type); } private String getEncoderDecoder(JType type, TreeLogger logger) throws UnableToCompleteException { String rc = builtInEncoderDecoders.get(type); if (rc == null) { JClassType ct = type.isClass() == null? 
type.isInterface() : type.isClass(); if (ct != null && !isCollectionType(ct)) { JsonEncoderDecoderClassCreator generator = new JsonEncoderDecoderClassCreator(logger, context, ct); return generator.create() + ".INSTANCE"; } } return rc; } private String getCustomEncoderDecoder(JType type) { RestyJsonSerializerGenerator restyGenerator = customGenerators.findGenerator(type); if (restyGenerator == null) { return null; } Class<? extends JsonEncoderDecoderClassCreator> clazz = restyGenerator.getGeneratorClass(); try { Constructor<? extends JsonEncoderDecoderClassCreator> constructor = clazz.getDeclaredConstructor(TreeLogger.class, GeneratorContext.class, JClassType.class); JsonEncoderDecoderClassCreator generator = constructor.newInstance(logger, context, type); return generator.create() + ".INSTANCE"; } catch (Exception e) { logger.log(WARN, "Could not access class: " + clazz, e); return null; } } public boolean hasCustomEncoderDecoder(JType type) { return getCustomEncoderDecoder(type) != null; } public String encodeExpression(JType type, String expression, Style style) throws UnableToCompleteException { return encodeDecodeExpression(type, expression, style, "encode", JSON_ENCODER_DECODER_CLASS + ".toJSON", JSON_ENCODER_DECODER_CLASS + ".toJSON", JSON_ENCODER_DECODER_CLASS + ".toJSON", JSON_ENCODER_DECODER_CLASS + ".toJSON"); } public String decodeExpression(JType type, String expression, Style style) throws UnableToCompleteException { return encodeDecodeExpression(type, expression, style, "decode", JSON_ENCODER_DECODER_CLASS + ".toMap", JSON_ENCODER_DECODER_CLASS + ".toSet", JSON_ENCODER_DECODER_CLASS + ".toList", JSON_ENCODER_DECODER_CLASS + ".toArray"); } private String encodeDecodeExpression(JType type, String expression, Style style, String encoderMethod, String mapMethod, String setMethod, String listMethod, String arrayMethod) throws UnableToCompleteException { String customEncoderDecoder = getCustomEncoderDecoder(type); if (customEncoderDecoder != null) { return customEncoderDecoder + "." + encoderMethod + "(" + expression + ")"; } if (null != type.isEnum()) { if (encoderMethod.equals("encode")) { return encodeDecodeExpression(STRING_TYPE, expression + ".name()", style, encoderMethod, mapMethod, setMethod, listMethod, arrayMethod); } return type.getQualifiedSourceName() + ".valueOf(" + encodeDecodeExpression(STRING_TYPE, expression, style, encoderMethod, mapMethod, setMethod, listMethod, arrayMethod) + ")"; } String encoderDecoder = getEncoderDecoder(type, logger); if (encoderDecoder != null) { return encoderDecoder + "." + encoderMethod + "(" + expression + ")"; } JClassType clazz = type.isClassOrInterface(); if (isCollectionType(clazz)) { JClassType[] types = getTypes(type); String[] coders = isMapEncoderDecoder( clazz, types, style ); if ( coders != null ){ String keyEncoderDecoder = coders[ 1 ]; encoderDecoder = coders[ 0 ]; if (encoderDecoder != null && keyEncoderDecoder != null) { return mapMethod + "(" + expression + ", " + keyEncoderDecoder + ", " + encoderDecoder + ", " + JSON_CLASS + ".Style." + style.name() + ")"; } else if (encoderDecoder != null) { return mapMethod + "(" + expression + ", " + encoderDecoder + ", " + JSON_CLASS + ".Style." 
+ style.name() + ")"; } } encoderDecoder = isSetEncoderDecoder(clazz, types, style); if (encoderDecoder != null) { return setMethod + "(" + expression + ", " + encoderDecoder + ")"; } encoderDecoder = isListEncoderDecoder(clazz, types, style); if (encoderDecoder != null) { return listMethod + "(" + expression + ", " + encoderDecoder + ")"; } } encoderDecoder = isArrayEncoderDecoder(type, style); if (encoderDecoder != null) { if (encoderMethod.equals("encode")) { return arrayMethod + "(" + expression + ", " + encoderDecoder + ")"; } return arrayMethod + "(" + expression + ", " + encoderDecoder + ", new " + type.isArray().getComponentType().getQualifiedSourceName() + "[" + JSON_ENCODER_DECODER_CLASS + ".getSize(" + expression + ")])"; } error("Do not know how to encode/decode " + type); return null; } protected String[] isMapEncoderDecoder(JClassType clazz, JClassType[] types, Style style) throws UnableToCompleteException { String encoderDecoder; if (clazz.isAssignableTo(MAP_TYPE)) { if (types.length != 2) { error("Map must define two and only two type parameters"); } if (isCollectionType(types[0])) { error("Map key can't be a collection"); } String keyEncoderDecoder = getNestedEncoderDecoder(types[0], style); encoderDecoder = getNestedEncoderDecoder(types[1], style); return new String[]{ encoderDecoder, keyEncoderDecoder }; } return null; } String getNestedEncoderDecoder( JType type, Style style ) throws UnableToCompleteException{ String result = getEncoderDecoder(type, logger); if ( result != null ){ return result; } JClassType clazz = type.isClassOrInterface(); if (isCollectionType(clazz)) { JClassType[] types = getTypes(type); String[] coders = isMapEncoderDecoder( clazz, types, style ); if ( coders != null ){ String keyEncoderDecoder = coders[ 1 ]; result = coders[ 0 ]; if (result != null && keyEncoderDecoder != null) { return JSON_NESTED_ENCODER_DECODER_CLASS + ".mapEncoderDecoder( " + keyEncoderDecoder + ", " + result + ", " + JSON_CLASS + ".Style." + style.name() + " )"; } else if (result != null) { return JSON_NESTED_ENCODER_DECODER_CLASS + ".mapEncoderDecoder( " + result + ", " + JSON_CLASS + ".Style." 
+ style.name() + " )"; } } result = isListEncoderDecoder( clazz, types, style ); if( result != null ){ return JSON_NESTED_ENCODER_DECODER_CLASS + ".listEncoderDecoder( " + result + " )"; } result = isSetEncoderDecoder( clazz, types, style ); if( result != null ){ return JSON_NESTED_ENCODER_DECODER_CLASS + ".setEncoderDecoder( " + result + " )"; } return JSON_NESTED_ENCODER_DECODER_CLASS + ".collectionEncoderDecoder( " + result + " )"; } else { result = isArrayEncoderDecoder(type, style); if( result != null ){ return JSON_NESTED_ENCODER_DECODER_CLASS + ".arrayEncoderDecoder( " + result + " )"; } } return null; } protected String isArrayEncoderDecoder( JType type, Style style ) throws UnableToCompleteException { if (type.isArray() != null){ JType componentType = type.isArray().getComponentType(); if (componentType.isArray() != null) { error("Multi-dimensional arrays are not yet supported"); } String encoderDecoder = getNestedEncoderDecoder( componentType, style ); debug("type encoder for: " + componentType + " is " + encoderDecoder); return encoderDecoder; } return null; } protected String isSetEncoderDecoder( JClassType clazz, JClassType[] types, Style style ) throws UnableToCompleteException { if (clazz.isAssignableTo(SET_TYPE)) { if (types.length != 1) { error("Set must define one and only one type parameter"); } String encoderDecoder = getNestedEncoderDecoder( types[0], style ); debug("type encoder for: " + types[0] + " is " + encoderDecoder); return encoderDecoder; } return null; } protected String isListEncoderDecoder( JClassType clazz, JClassType[] types, Style style) throws UnableToCompleteException { if (clazz.isAssignableTo(LIST_TYPE)) { if (types.length != 1) { error("List must define one and only one type parameter"); } String encoderDecoder = getNestedEncoderDecoder( types[0], style ); debug("type encoder for: " + types[0] + " is " + encoderDecoder); return encoderDecoder; } return null; } protected JClassType[] getTypes(JType type) throws UnableToCompleteException { JParameterizedType parameterizedType = type.isParameterized(); if (parameterizedType == null || parameterizedType.getTypeArgs() == null) { error("Collection types must be parameterized."); } JClassType[] types = parameterizedType.getTypeArgs(); return types; } boolean isCollectionType(JClassType clazz) { return clazz != null && (clazz.isAssignableTo(SET_TYPE) || clazz.isAssignableTo(LIST_TYPE) || clazz.isAssignableTo(MAP_TYPE)); } protected void error(String msg) throws UnableToCompleteException { logger.log(ERROR, msg); throw new UnableToCompleteException(); } protected void warn(String msg) throws UnableToCompleteException { logger.log(WARN, msg); throw new UnableToCompleteException(); } protected void info(String msg) { logger.log(INFO, msg); } protected void debug(String msg) { logger.log(DEBUG, msg); } protected void trace(String msg) { logger.log(TRACE, msg); } }
final touches to make Collections work as Lists
restygwt/src/main/java/org/fusesource/restygwt/rebind/JsonEncoderDecoderInstanceLocator.java
final touches to make Collections work as Lists
<ide><path>estygwt/src/main/java/org/fusesource/restygwt/rebind/JsonEncoderDecoderInstanceLocator.java
<ide> import java.lang.reflect.Constructor;
<ide> import java.math.BigDecimal;
<ide> import java.math.BigInteger;
<add>import java.util.Collection;
<ide> import java.util.Date;
<ide> import java.util.HashMap;
<ide> import java.util.List;
<ide> public final JClassType MAP_TYPE;
<ide> public final JClassType SET_TYPE;
<ide> public final JClassType LIST_TYPE;
<add> public final JClassType COLLECTION_TYPE;
<ide>
<ide> public final HashMap<JType, String> builtInEncoderDecoders = new HashMap<JType, String>();
<ide> public final JsonSerializerGenerators customGenerators = new JsonSerializerGenerators();
<ide> this.MAP_TYPE = find(Map.class);
<ide> this.SET_TYPE = find(Set.class);
<ide> this.LIST_TYPE = find(List.class);
<add> this.COLLECTION_TYPE = find(Collection.class);
<ide>
<ide> builtInEncoderDecoders.put(JPrimitiveType.BOOLEAN, JSON_ENCODER_DECODER_CLASS + ".BOOLEAN");
<ide> builtInEncoderDecoders.put(JPrimitiveType.BYTE, JSON_ENCODER_DECODER_CLASS + ".BYTE");
<ide> if (encoderDecoder != null) {
<ide> return listMethod + "(" + expression + ", " + encoderDecoder + ")";
<ide> }
<add>
<add> encoderDecoder = isCollectionEncoderDecoder(clazz, types, style);
<add> if (encoderDecoder != null) {
<add> return listMethod + "(" + expression + ", " + encoderDecoder + ")";
<add> }
<ide> }
<ide>
<ide> encoderDecoder = isArrayEncoderDecoder(type, style);
<ide> return null;
<ide> }
<ide>
<add> protected String isCollectionEncoderDecoder(JClassType clazz, JClassType[] types,
<add> Style style) throws UnableToCompleteException {
<add> if (clazz.isAssignableTo(COLLECTION_TYPE)) {
<add> if (types.length != 1) {
<add> error("Collection must define one and only one type parameter");
<add> }
<add> String encoderDecoder = getNestedEncoderDecoder(types[0], style);
<add> debug("type encoder for: " + types[0] + " is " + encoderDecoder);
<add> return encoderDecoder;
<add> }
<add> return null;
<add> }
<add>
<ide> protected JClassType[] getTypes(JType type) throws UnableToCompleteException {
<ide> JParameterizedType parameterizedType = type.isParameterized();
<ide> if (parameterizedType == null || parameterizedType.getTypeArgs() == null) {
<ide> }
<ide> boolean isCollectionType(JClassType clazz) {
<ide> return clazz != null
<del> && (clazz.isAssignableTo(SET_TYPE) || clazz.isAssignableTo(LIST_TYPE) || clazz.isAssignableTo(MAP_TYPE));
<add> && (clazz.isAssignableTo(SET_TYPE) || clazz.isAssignableTo(LIST_TYPE) || clazz.isAssignableTo(MAP_TYPE) || clazz.isAssignableTo(COLLECTION_TYPE));
<ide>
<ide> protected void error(String msg) throws UnableToCompleteException {
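With COLLECTION_TYPE registered as above, the generator can now encode and decode a plain java.util.Collection where previously only List, Set and Map were handled. A hypothetical service interface to illustrate what the change enables; the TagService name and /tags path are made up, while RestService, MethodCallback and the JAX-RS annotations are the types RestyGWT service interfaces normally use:

import java.util.Collection;

import javax.ws.rs.GET;
import javax.ws.rs.Path;

import org.fusesource.restygwt.client.MethodCallback;
import org.fusesource.restygwt.client.RestService;

// Hypothetical service: the generated proxy can now serialize the
// Collection<String> payload like a List, per the diff above.
public interface TagService extends RestService {
    @GET
    @Path("/tags")
    void getTags(MethodCallback<Collection<String>> callback);
}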
Java
lgpl-2.1
ebd7f324c35b2b704baa80f740f5cd1114dcbdb5
0
lopescan/languagetool,janissl/languagetool,lopescan/languagetool,meg0man/languagetool,jimregan/languagetool,languagetool-org/languagetool,meg0man/languagetool,jimregan/languagetool,languagetool-org/languagetool,jimregan/languagetool,languagetool-org/languagetool,lopescan/languagetool,janissl/languagetool,janissl/languagetool,lopescan/languagetool,jimregan/languagetool,janissl/languagetool,languagetool-org/languagetool,janissl/languagetool,meg0man/languagetool,meg0man/languagetool,meg0man/languagetool,languagetool-org/languagetool,janissl/languagetool,lopescan/languagetool,jimregan/languagetool
/* LanguageTool, a natural language style checker * Copyright (C) 2005 Daniel Naber (http://www.danielnaber.de) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA */ package org.languagetool.gui; import java.awt.Container; import java.awt.Dimension; import java.awt.Frame; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.ResourceBundle; import java.util.Set; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JComponent; import javax.swing.JDialog; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JRootPane; import javax.swing.JScrollPane; import javax.swing.JTextField; import javax.swing.KeyStroke; import org.languagetool.JLanguageTool; import org.languagetool.Language; import org.languagetool.rules.Rule; import org.languagetool.server.HTTPServer; import org.languagetool.tools.StringTools; /** * Dialog that offers the available rules so they can be turned on/off * individually. 
* * @author Daniel Naber */ public class ConfigurationDialog implements ActionListener { private static final String NO_MOTHER_TONGUE = "---"; private JButton okButton; private JButton cancelButton; private final ResourceBundle messages; private JDialog dialog; private JComboBox motherTongueBox; private JCheckBox serverCheckbox; private JTextField serverPortField; private final List<JCheckBox> checkBoxes = new ArrayList<JCheckBox>(); private final List<String> checkBoxesRuleIds = new ArrayList<String>(); private final List<String> checkBoxesCategories = new ArrayList<String>(); private final List<String> defaultOffRules = new ArrayList<String>(); private Set<String> inactiveRuleIds = new HashSet<String>(); private Set<String> enabledRuleIds = new HashSet<String>(); private Set<String> inactiveCategoryNames = new HashSet<String>(); private final List<JCheckBox> categoryCheckBoxes = new ArrayList<JCheckBox>(); private final List<String> checkBoxesCategoryNames = new ArrayList<String>(); private Language motherTongue; private boolean serverMode; private int serverPort; private boolean useGUIConfig; private final Frame owner; private final boolean insideOOo; private JCheckBox serverSettingsCheckbox; public ConfigurationDialog(Frame owner, boolean insideOOo) { this.owner = owner; this.insideOOo = insideOOo; messages = JLanguageTool.getMessageBundle(); } public void show(List<Rule> rules) { dialog = new JDialog(owner, true); dialog.setTitle(messages.getString("guiConfigWindowTitle")); checkBoxes.clear(); checkBoxesRuleIds.clear(); categoryCheckBoxes.clear(); checkBoxesCategoryNames.clear(); Collections.sort(rules, new CategoryComparator()); // close dialog when user presses Escape key: final KeyStroke stroke = KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0); final ActionListener actionListener = new ActionListener() { @Override public void actionPerformed(@SuppressWarnings("unused") ActionEvent actionEvent) { dialog.setVisible(false); } }; final JRootPane rootPane = dialog.getRootPane(); rootPane.registerKeyboardAction(actionListener, stroke, JComponent.WHEN_IN_FOCUSED_WINDOW); // JPanel final JPanel checkBoxPanel = new JPanel(); checkBoxPanel.setLayout(new GridBagLayout()); GridBagConstraints cons = new GridBagConstraints(); cons.anchor = GridBagConstraints.NORTHWEST; cons.gridx = 0; int row = 0; String prevID = null; String prevCategory = null; for (final Rule rule : rules) { // avoid displaying rules from rule groups more than once: if (prevID == null || !rule.getId().equals(prevID)) { cons.gridy = row; final JCheckBox checkBox = new JCheckBox(rule.getDescription()); if (inactiveRuleIds != null && (inactiveRuleIds.contains(rule.getId()) || inactiveCategoryNames .contains(rule.getCategory().getName()))) { checkBox.setSelected(false); } else { checkBox.setSelected(true); } if (rule.isDefaultOff() && !enabledRuleIds.contains(rule.getId())) { checkBox.setSelected(false); } if (rule.isDefaultOff()) { defaultOffRules.add(rule.getId()); if (rule.getCategory().isDefaultOff()) { inactiveCategoryNames.add(rule.getCategory().getName()); } } else { if (rule.getCategory().isDefaultOff()) { inactiveCategoryNames.remove(rule.getCategory().getName()); } } final ActionListener ruleCheckBoxListener = makeRuleCheckboxListener(); checkBox.addActionListener(ruleCheckBoxListener); checkBoxes.add(checkBox); checkBoxesRuleIds.add(rule.getId()); checkBoxesCategories.add(rule.getCategory().getName()); final boolean showHeadline = rule.getCategory() != null && !rule.getCategory().getName().equals(prevCategory); if 
((showHeadline || prevCategory == null) && rule.getCategory() != null) { // TODO: maybe use a Tree of Checkboxes here, like in: // http://www.javaworld.com/javaworld/jw-09-2007/jw-09-checkboxtree.html final JCheckBox categoryCheckBox = new JCheckBox(rule.getCategory() .getName()); if (inactiveCategoryNames != null && inactiveCategoryNames.contains(rule.getCategory().getName())) { categoryCheckBox.setSelected(false); } else { categoryCheckBox.setSelected(true); } final ActionListener categoryCheckBoxListener = makeCategoryCheckboxListener(); categoryCheckBox.addActionListener(categoryCheckBoxListener); categoryCheckBoxes.add(categoryCheckBox); checkBoxesCategoryNames.add(rule.getCategory().getName()); checkBoxPanel.add(categoryCheckBox, cons); prevCategory = rule.getCategory().getName(); cons.gridy++; row++; } checkBox.setMargin(new Insets(0, 20, 0, 0)); // indent checkBoxPanel.add(checkBox, cons); row++; } prevID = rule.getId(); } final JPanel motherTonguePanel = new JPanel(); motherTonguePanel.add(new JLabel(messages.getString("guiMotherTongue")), cons); motherTongueBox = new JComboBox(getPossibleMotherTongues()); if (motherTongue != null) { if (motherTongue == Language.DEMO) { motherTongueBox.setSelectedItem(NO_MOTHER_TONGUE); } else { motherTongueBox.setSelectedItem(motherTongue.getTranslatedName(messages)); } } motherTonguePanel.add(motherTongueBox, cons); final JPanel portPanel = new JPanel(); portPanel.setLayout(new GridBagLayout()); // TODO: why is this now left-aligned?!?! cons = new GridBagConstraints(); cons.insets = new Insets(0, 4, 0, 0); cons.gridx = 0; cons.gridy = 0; cons.anchor = GridBagConstraints.WEST; cons.fill = GridBagConstraints.NONE; cons.weightx = 0.0f; if (!insideOOo) { serverCheckbox = new JCheckBox(StringTools.getLabel(messages .getString("guiRunOnPort"))); serverCheckbox.setMnemonic(StringTools.getMnemonic(messages .getString("guiRunOnPort"))); serverCheckbox.setSelected(serverMode); portPanel.add(serverCheckbox, cons); serverPortField = new JTextField(Integer.toString(serverPort)); serverPortField.setEnabled(serverCheckbox.isSelected()); serverSettingsCheckbox = new JCheckBox(StringTools.getLabel(messages .getString("useGUIConfig"))); // TODO: without this the box is just a few pixels small, but why??: serverPortField.setMinimumSize(new Dimension(100, 25)); cons.gridx = 1; serverCheckbox.addActionListener(new ActionListener() { @Override public void actionPerformed(@SuppressWarnings("unused") ActionEvent e) { serverPortField.setEnabled(serverCheckbox.isSelected()); serverSettingsCheckbox.setEnabled(serverCheckbox.isSelected()); } }); portPanel.add(serverPortField, cons); cons.gridx = 0; cons.gridy = 10; serverSettingsCheckbox.setMnemonic(StringTools.getMnemonic(messages .getString("useGUIConfig"))); serverSettingsCheckbox.setSelected(useGUIConfig); serverSettingsCheckbox.setEnabled(serverMode); portPanel.add(serverSettingsCheckbox, cons); } final JPanel buttonPanel = new JPanel(); buttonPanel.setLayout(new GridBagLayout()); okButton = new JButton(StringTools.getLabel(messages .getString("guiOKButton"))); okButton.setMnemonic(StringTools.getMnemonic(messages .getString("guiOKButton"))); okButton.addActionListener(this); cancelButton = new JButton(StringTools.getLabel(messages .getString("guiCancelButton"))); cancelButton.setMnemonic(StringTools.getMnemonic(messages .getString("guiCancelButton"))); cancelButton.addActionListener(this); cons = new GridBagConstraints(); cons.insets = new Insets(0, 4, 0, 0); buttonPanel.add(okButton, cons); 
buttonPanel.add(cancelButton, cons); final Container contentPane = dialog.getContentPane(); contentPane.setLayout(new GridBagLayout()); cons = new GridBagConstraints(); cons.insets = new Insets(4, 4, 4, 4); cons.gridx = 0; cons.gridy = 0; cons.weightx = 10.0f; cons.weighty = 10.0f; cons.fill = GridBagConstraints.BOTH; contentPane.add(new JScrollPane(checkBoxPanel), cons); cons.gridx = 0; cons.gridy = 1; cons.weightx = 0.0f; cons.weighty = 0.0f; cons.fill = GridBagConstraints.NONE; cons.anchor = GridBagConstraints.WEST; contentPane.add(motherTonguePanel, cons); cons.gridx = 0; cons.gridy = 2; cons.weightx = 0.0f; cons.weighty = 0.0f; cons.fill = GridBagConstraints.NONE; cons.anchor = GridBagConstraints.WEST; contentPane.add(portPanel, cons); cons.gridx = 0; cons.gridy = 3; cons.weightx = 0.0f; cons.weighty = 0.0f; cons.fill = GridBagConstraints.NONE; cons.anchor = GridBagConstraints.EAST; contentPane.add(buttonPanel, cons); dialog.pack(); dialog.setSize(500, 500); // center on screen: final Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize(); final Dimension frameSize = dialog.getSize(); dialog.setLocation(screenSize.width / 2 - frameSize.width / 2, screenSize.height / 2 - frameSize.height / 2); dialog.setVisible(true); } private ActionListener makeRuleCheckboxListener() { return new ActionListener() { @Override public void actionPerformed(final ActionEvent actionEvent) { final JCheckBox cBox = (JCheckBox) actionEvent.getSource(); final boolean selected = cBox.getModel().isSelected(); int i = 0; for (final JCheckBox chBox : checkBoxes) { if (chBox.equals(cBox)) { final int catNo = checkBoxesCategoryNames .indexOf(checkBoxesCategories.get(i)); if (selected && !categoryCheckBoxes.get(catNo).isSelected()) { categoryCheckBoxes.get(catNo).setSelected(true); } } i++; } } }; } private ActionListener makeCategoryCheckboxListener() { return new ActionListener() { @Override public void actionPerformed(final ActionEvent actionEvent) { final JCheckBox cBox = (JCheckBox) actionEvent.getSource(); final boolean selected = cBox.getModel().isSelected(); int i = 0; for (final JCheckBox ruleBox : checkBoxes) { if (ruleBox.isSelected() != selected) { if (checkBoxesCategories.get(i).equals(cBox.getText())) { ruleBox.setSelected(selected); } } i++; } } }; } private Object[] getPossibleMotherTongues() { final List<Object> motherTongues = new ArrayList<Object>(); motherTongues.add(NO_MOTHER_TONGUE); for (final Language lang : Language.LANGUAGES) { if (lang != Language.DEMO) { motherTongues.add(lang.getTranslatedName(messages)); } } return motherTongues.toArray(); } @Override public void actionPerformed(ActionEvent e) { if (e.getSource() == okButton) { int i = 0; inactiveCategoryNames.clear(); for (final JCheckBox checkBox : categoryCheckBoxes) { if (!checkBox.isSelected()) { final String categoryName = checkBoxesCategoryNames.get(i); inactiveCategoryNames.add(categoryName); } i++; } i = 0; inactiveRuleIds.clear(); enabledRuleIds.clear(); for (final JCheckBox checkBox : checkBoxes) { if (!checkBox.isSelected()) { final String ruleId = checkBoxesRuleIds.get(i); if (!defaultOffRules.contains(ruleId)) { inactiveRuleIds.add(ruleId); } } if (checkBox.isSelected()) { final String ruleId = checkBoxesRuleIds.get(i); if (defaultOffRules.contains(ruleId)) { enabledRuleIds.add(ruleId); } } i++; } if (motherTongueBox.getSelectedItem() instanceof String) { motherTongue = getLanguageForLocalizedName(motherTongueBox .getSelectedItem().toString()); } else { motherTongue = (Language) 
motherTongueBox.getSelectedItem(); } if (serverCheckbox != null) { serverMode = serverCheckbox.isSelected(); serverPort = Integer.parseInt(serverPortField.getText()); } if (serverSettingsCheckbox != null) { useGUIConfig = serverSettingsCheckbox.isSelected(); } dialog.setVisible(false); } else if (e.getSource() == cancelButton) { dialog.setVisible(false); } } public void setDisabledRules(Set<String> ruleIDs) { inactiveRuleIds = ruleIDs; } public Set<String> getDisabledRuleIds() { return inactiveRuleIds; } public void setEnabledRules(Set<String> ruleIDs) { enabledRuleIds = ruleIDs; } public Set<String> getEnabledRuleIds() { return enabledRuleIds; } public void setDisabledCategories(Set<String> categoryNames) { inactiveCategoryNames = categoryNames; } public Set<String> getDisabledCategoryNames() { return inactiveCategoryNames; } public void setMotherTongue(Language motherTongue) { this.motherTongue = motherTongue; } public Language getMotherTongue() { return motherTongue; } /** * Get the Language object for the given localized language name. * * @param languageName * e.g. <code>English</code> or <code>German</code> (case is * significant) * @return a Language object or <code>null</code> */ private Language getLanguageForLocalizedName(final String languageName) { for (final Language element : Language.LANGUAGES) { if (NO_MOTHER_TONGUE.equals(languageName)) { return Language.DEMO; } if (languageName.equals(element.getTranslatedName(messages))) { return element; } } return null; } public void setRunServer(boolean serverMode) { this.serverMode = serverMode; } public void setUseGUIConfig(boolean useGUIConfig) { this.useGUIConfig = useGUIConfig; } public boolean getUseGUIConfig() { if (serverSettingsCheckbox == null) { return false; } return serverSettingsCheckbox.isSelected(); } public boolean getRunServer() { if (serverCheckbox == null) { return false; } return serverCheckbox.isSelected(); } public void setServerPort(int serverPort) { this.serverPort = serverPort; } public int getServerPort() { if (serverPortField == null) { return HTTPServer.DEFAULT_PORT; } return Integer.parseInt(serverPortField.getText()); } /** * Opens the dialog - for internal testing only. */ public static void main(String[] args) throws IOException { final ConfigurationDialog dlg = new ConfigurationDialog(null, false); final List<Rule> rules = new ArrayList<Rule>(); final JLanguageTool lt = new JLanguageTool(Language.ENGLISH); lt.activateDefaultPatternRules(); rules.addAll(lt.getAllRules()); dlg.show(rules); } } class CategoryComparator implements Comparator<Rule> { @Override public int compare(final Rule r1, final Rule r2) { final boolean hasCat = r1.getCategory() != null && r2.getCategory() != null; if (hasCat) { final int res = r1.getCategory().getName().compareTo( r2.getCategory().getName()); if (res == 0) { return r1.getDescription().compareToIgnoreCase(r2.getDescription()); } return res; } return r1.getDescription().compareToIgnoreCase(r2.getDescription()); } }
trunk/JLanguageTool/src/java/org/languagetool/gui/ConfigurationDialog.java
/* LanguageTool, a natural language style checker * Copyright (C) 2005 Daniel Naber (http://www.danielnaber.de) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA */ package org.languagetool.gui; import java.awt.Container; import java.awt.Dimension; import java.awt.Frame; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.ResourceBundle; import java.util.Set; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JComponent; import javax.swing.JDialog; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JRootPane; import javax.swing.JScrollPane; import javax.swing.JTextField; import javax.swing.KeyStroke; import org.languagetool.JLanguageTool; import org.languagetool.Language; import org.languagetool.rules.Rule; import org.languagetool.server.HTTPServer; import org.languagetool.tools.StringTools; /** * Dialog that offers the available rules so they can be turned on/off * individually. 
* * @author Daniel Naber */ public class ConfigurationDialog implements ActionListener { private static final String NO_MOTHER_TONGUE = "---"; private JButton okButton; private JButton cancelButton; private final ResourceBundle messages; private JDialog dialog; private JComboBox motherTongueBox; private JCheckBox serverCheckbox; private JTextField serverPortField; private final List<JCheckBox> checkBoxes = new ArrayList<JCheckBox>(); private final List<String> checkBoxesRuleIds = new ArrayList<String>(); private final List<String> checkBoxesCategories = new ArrayList<String>(); private final List<String> defaultOffRules = new ArrayList<String>(); private Set<String> inactiveRuleIds = new HashSet<String>(); private Set<String> enabledRuleIds = new HashSet<String>(); private Set<String> inactiveCategoryNames = new HashSet<String>(); private final List<JCheckBox> categoryCheckBoxes = new ArrayList<JCheckBox>(); private final List<String> checkBoxesCategoryNames = new ArrayList<String>(); private Language motherTongue; private boolean serverMode; private int serverPort; private boolean useGUIConfig; private final Frame owner; private final boolean insideOOo; private JCheckBox serverSettingsCheckbox; public ConfigurationDialog(Frame owner, boolean insideOOo) { this.owner = owner; this.insideOOo = insideOOo; messages = JLanguageTool.getMessageBundle(); } public void show(List<Rule> rules) { dialog = new JDialog(owner, true); dialog.setTitle(messages.getString("guiConfigWindowTitle")); checkBoxes.clear(); checkBoxesRuleIds.clear(); categoryCheckBoxes.clear(); checkBoxesCategoryNames.clear(); Collections.sort(rules, new CategoryComparator()); // close dialog when user presses Escape key: final KeyStroke stroke = KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0); final ActionListener actionListener = new ActionListener() { @Override public void actionPerformed(@SuppressWarnings("unused") ActionEvent actionEvent) { dialog.setVisible(false); } }; final JRootPane rootPane = dialog.getRootPane(); rootPane.registerKeyboardAction(actionListener, stroke, JComponent.WHEN_IN_FOCUSED_WINDOW); // JPanel final JPanel checkBoxPanel = new JPanel(); checkBoxPanel.setLayout(new GridBagLayout()); GridBagConstraints cons = new GridBagConstraints(); cons.anchor = GridBagConstraints.NORTHWEST; cons.gridx = 0; int row = 0; String prevID = null; String prevCategory = null; for (final Rule rule : rules) { // avoid displaying rules from rule groups more than once: if (prevID == null || !rule.getId().equals(prevID)) { cons.gridy = row; final JCheckBox checkBox = new JCheckBox(rule.getDescription()); if (inactiveRuleIds != null && (inactiveRuleIds.contains(rule.getId()) || inactiveCategoryNames .contains(rule.getCategory().getName()))) { checkBox.setSelected(false); } else { checkBox.setSelected(true); } if (rule.isDefaultOff() && !enabledRuleIds.contains(rule.getId())) { checkBox.setSelected(false); } if (rule.isDefaultOff()) { defaultOffRules.add(rule.getId()); if (rule.getCategory().isDefaultOff()) { inactiveCategoryNames.add(rule.getCategory().getName()); } } else { if (rule.getCategory().isDefaultOff()) { inactiveCategoryNames.remove(rule.getCategory().getName()); } } final ActionListener ruleCheckBoxListener = makeRuleCheckboxListener(); checkBox.addActionListener(ruleCheckBoxListener); checkBoxes.add(checkBox); checkBoxesRuleIds.add(rule.getId()); checkBoxesCategories.add(rule.getCategory().getName()); final boolean showHeadline = rule.getCategory() != null && !rule.getCategory().getName().equals(prevCategory); if 
((showHeadline || prevCategory == null) && rule.getCategory() != null) { // TODO: maybe use a Tree of Checkboxes here, like in: // http://www.javaworld.com/javaworld/jw-09-2007/jw-09-checkboxtree.html final JCheckBox categoryCheckBox = new JCheckBox(rule.getCategory() .getName()); if (inactiveCategoryNames != null && inactiveCategoryNames.contains(rule.getCategory().getName())) { categoryCheckBox.setSelected(false); } else { categoryCheckBox.setSelected(true); } final ActionListener categoryCheckBoxListener = makeCategoryCheckboxListener(); categoryCheckBox.addActionListener(categoryCheckBoxListener); categoryCheckBoxes.add(categoryCheckBox); checkBoxesCategoryNames.add(rule.getCategory().getName()); checkBoxPanel.add(categoryCheckBox, cons); prevCategory = rule.getCategory().getName(); cons.gridy++; row++; } checkBox.setMargin(new Insets(0, 20, 0, 0)); // indent checkBoxPanel.add(checkBox, cons); row++; } prevID = rule.getId(); } final JPanel motherTonguePanel = new JPanel(); motherTonguePanel.add(new JLabel(messages.getString("guiMotherTongue")), cons); motherTongueBox = new JComboBox(getPossibleMotherTongues()); if (motherTongue != null) { if (motherTongue == Language.DEMO) { motherTongueBox.setSelectedItem(NO_MOTHER_TONGUE); } else { motherTongueBox.setSelectedItem(messages.getString(motherTongue .getShortName())); } } motherTonguePanel.add(motherTongueBox, cons); final JPanel portPanel = new JPanel(); portPanel.setLayout(new GridBagLayout()); // TODO: why is this now left-aligned?!?! cons = new GridBagConstraints(); cons.insets = new Insets(0, 4, 0, 0); cons.gridx = 0; cons.gridy = 0; cons.anchor = GridBagConstraints.WEST; cons.fill = GridBagConstraints.NONE; cons.weightx = 0.0f; if (!insideOOo) { serverCheckbox = new JCheckBox(StringTools.getLabel(messages .getString("guiRunOnPort"))); serverCheckbox.setMnemonic(StringTools.getMnemonic(messages .getString("guiRunOnPort"))); serverCheckbox.setSelected(serverMode); portPanel.add(serverCheckbox, cons); serverPortField = new JTextField(Integer.toString(serverPort)); serverPortField.setEnabled(serverCheckbox.isSelected()); serverSettingsCheckbox = new JCheckBox(StringTools.getLabel(messages .getString("useGUIConfig"))); // TODO: without this the box is just a few pixels small, but why??: serverPortField.setMinimumSize(new Dimension(100, 25)); cons.gridx = 1; serverCheckbox.addActionListener(new ActionListener() { @Override public void actionPerformed(@SuppressWarnings("unused") ActionEvent e) { serverPortField.setEnabled(serverCheckbox.isSelected()); serverSettingsCheckbox.setEnabled(serverCheckbox.isSelected()); } }); portPanel.add(serverPortField, cons); cons.gridx = 0; cons.gridy = 10; serverSettingsCheckbox.setMnemonic(StringTools.getMnemonic(messages .getString("useGUIConfig"))); serverSettingsCheckbox.setSelected(useGUIConfig); serverSettingsCheckbox.setEnabled(serverMode); portPanel.add(serverSettingsCheckbox, cons); } final JPanel buttonPanel = new JPanel(); buttonPanel.setLayout(new GridBagLayout()); okButton = new JButton(StringTools.getLabel(messages .getString("guiOKButton"))); okButton.setMnemonic(StringTools.getMnemonic(messages .getString("guiOKButton"))); okButton.addActionListener(this); cancelButton = new JButton(StringTools.getLabel(messages .getString("guiCancelButton"))); cancelButton.setMnemonic(StringTools.getMnemonic(messages .getString("guiCancelButton"))); cancelButton.addActionListener(this); cons = new GridBagConstraints(); cons.insets = new Insets(0, 4, 0, 0); buttonPanel.add(okButton, cons); 
buttonPanel.add(cancelButton, cons); final Container contentPane = dialog.getContentPane(); contentPane.setLayout(new GridBagLayout()); cons = new GridBagConstraints(); cons.insets = new Insets(4, 4, 4, 4); cons.gridx = 0; cons.gridy = 0; cons.weightx = 10.0f; cons.weighty = 10.0f; cons.fill = GridBagConstraints.BOTH; contentPane.add(new JScrollPane(checkBoxPanel), cons); cons.gridx = 0; cons.gridy = 1; cons.weightx = 0.0f; cons.weighty = 0.0f; cons.fill = GridBagConstraints.NONE; cons.anchor = GridBagConstraints.WEST; contentPane.add(motherTonguePanel, cons); cons.gridx = 0; cons.gridy = 2; cons.weightx = 0.0f; cons.weighty = 0.0f; cons.fill = GridBagConstraints.NONE; cons.anchor = GridBagConstraints.WEST; contentPane.add(portPanel, cons); cons.gridx = 0; cons.gridy = 3; cons.weightx = 0.0f; cons.weighty = 0.0f; cons.fill = GridBagConstraints.NONE; cons.anchor = GridBagConstraints.EAST; contentPane.add(buttonPanel, cons); dialog.pack(); dialog.setSize(500, 500); // center on screen: final Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize(); final Dimension frameSize = dialog.getSize(); dialog.setLocation(screenSize.width / 2 - frameSize.width / 2, screenSize.height / 2 - frameSize.height / 2); dialog.setVisible(true); } private ActionListener makeRuleCheckboxListener() { return new ActionListener() { @Override public void actionPerformed(final ActionEvent actionEvent) { final JCheckBox cBox = (JCheckBox) actionEvent.getSource(); final boolean selected = cBox.getModel().isSelected(); int i = 0; for (final JCheckBox chBox : checkBoxes) { if (chBox.equals(cBox)) { final int catNo = checkBoxesCategoryNames .indexOf(checkBoxesCategories.get(i)); if (selected && !categoryCheckBoxes.get(catNo).isSelected()) { categoryCheckBoxes.get(catNo).setSelected(true); } } i++; } } }; } private ActionListener makeCategoryCheckboxListener() { return new ActionListener() { @Override public void actionPerformed(final ActionEvent actionEvent) { final JCheckBox cBox = (JCheckBox) actionEvent.getSource(); final boolean selected = cBox.getModel().isSelected(); int i = 0; for (final JCheckBox ruleBox : checkBoxes) { if (ruleBox.isSelected() != selected) { if (checkBoxesCategories.get(i).equals(cBox.getText())) { ruleBox.setSelected(selected); } } i++; } } }; } private Object[] getPossibleMotherTongues() { final List<Object> motherTongues = new ArrayList<Object>(); motherTongues.add(NO_MOTHER_TONGUE); for (final Language lang : Language.LANGUAGES) { if (lang != Language.DEMO) { motherTongues.add(messages.getString(lang.getShortName())); } } return motherTongues.toArray(); } @Override public void actionPerformed(ActionEvent e) { if (e.getSource() == okButton) { int i = 0; inactiveCategoryNames.clear(); for (final JCheckBox checkBox : categoryCheckBoxes) { if (!checkBox.isSelected()) { final String categoryName = checkBoxesCategoryNames.get(i); inactiveCategoryNames.add(categoryName); } i++; } i = 0; inactiveRuleIds.clear(); enabledRuleIds.clear(); for (final JCheckBox checkBox : checkBoxes) { if (!checkBox.isSelected()) { final String ruleId = checkBoxesRuleIds.get(i); if (!defaultOffRules.contains(ruleId)) { inactiveRuleIds.add(ruleId); } } if (checkBox.isSelected()) { final String ruleId = checkBoxesRuleIds.get(i); if (defaultOffRules.contains(ruleId)) { enabledRuleIds.add(ruleId); } } i++; } if (motherTongueBox.getSelectedItem() instanceof String) { motherTongue = getLanguageForLocalizedName(motherTongueBox .getSelectedItem().toString()); } else { motherTongue = (Language) 
motherTongueBox.getSelectedItem(); } if (serverCheckbox != null) { serverMode = serverCheckbox.isSelected(); serverPort = Integer.parseInt(serverPortField.getText()); } if (serverSettingsCheckbox != null) { useGUIConfig = serverSettingsCheckbox.isSelected(); } dialog.setVisible(false); } else if (e.getSource() == cancelButton) { dialog.setVisible(false); } } public void setDisabledRules(Set<String> ruleIDs) { inactiveRuleIds = ruleIDs; } public Set<String> getDisabledRuleIds() { return inactiveRuleIds; } public void setEnabledRules(Set<String> ruleIDs) { enabledRuleIds = ruleIDs; } public Set<String> getEnabledRuleIds() { return enabledRuleIds; } public void setDisabledCategories(Set<String> categoryNames) { inactiveCategoryNames = categoryNames; } public Set<String> getDisabledCategoryNames() { return inactiveCategoryNames; } public void setMotherTongue(Language motherTongue) { this.motherTongue = motherTongue; } public Language getMotherTongue() { return motherTongue; } /** * Get the Language object for the given localized language name. * * @param languageName * e.g. <code>English</code> or <code>German</code> (case is * significant) * @return a Language object or <code>null</code> */ private Language getLanguageForLocalizedName(final String languageName) { for (final Language element : Language.LANGUAGES) { if (NO_MOTHER_TONGUE.equals(languageName)) { return Language.DEMO; } if (languageName.equals(messages.getString(element.getShortName()))) { return element; } } return null; } public void setRunServer(boolean serverMode) { this.serverMode = serverMode; } public void setUseGUIConfig(boolean useGUIConfig) { this.useGUIConfig = useGUIConfig; } public boolean getUseGUIConfig() { if (serverSettingsCheckbox == null) { return false; } return serverSettingsCheckbox.isSelected(); } public boolean getRunServer() { if (serverCheckbox == null) { return false; } return serverCheckbox.isSelected(); } public void setServerPort(int serverPort) { this.serverPort = serverPort; } public int getServerPort() { if (serverPortField == null) { return HTTPServer.DEFAULT_PORT; } return Integer.parseInt(serverPortField.getText()); } /** * Opens the dialog - for internal testing only. */ public static void main(String[] args) throws IOException { final ConfigurationDialog dlg = new ConfigurationDialog(null, false); final List<Rule> rules = new ArrayList<Rule>(); final JLanguageTool lt = new JLanguageTool(Language.ENGLISH); lt.activateDefaultPatternRules(); rules.addAll(lt.getAllRules()); dlg.show(rules); } } class CategoryComparator implements Comparator<Rule> { @Override public int compare(final Rule r1, final Rule r2) { final boolean hasCat = r1.getCategory() != null && r2.getCategory() != null; if (hasCat) { final int res = r1.getCategory().getName().compareTo( r2.getCategory().getName()); if (res == 0) { return r1.getDescription().compareToIgnoreCase(r2.getDescription()); } return res; } return r1.getDescription().compareToIgnoreCase(r2.getDescription()); } }
remove unsafe call to messages.getString
trunk/JLanguageTool/src/java/org/languagetool/gui/ConfigurationDialog.java
remove unsafe call to messages.getString
<ide><path>trunk/JLanguageTool/src/java/org/languagetool/gui/ConfigurationDialog.java
<ide>       if (motherTongue == Language.DEMO) {
<ide>         motherTongueBox.setSelectedItem(NO_MOTHER_TONGUE);
<ide>       } else {
<del>        motherTongueBox.setSelectedItem(messages.getString(motherTongue
<del>            .getShortName()));
<add>        motherTongueBox.setSelectedItem(motherTongue.getTranslatedName(messages));
<ide>       }
<ide>     }
<ide>     motherTonguePanel.add(motherTongueBox, cons);
<ide>     motherTongues.add(NO_MOTHER_TONGUE);
<ide>     for (final Language lang : Language.LANGUAGES) {
<ide>       if (lang != Language.DEMO) {
<del>        motherTongues.add(messages.getString(lang.getShortName()));
<add>        motherTongues.add(lang.getTranslatedName(messages));
<ide>       }
<ide>     }
<ide>     return motherTongues.toArray();
<ide>       if (NO_MOTHER_TONGUE.equals(languageName)) {
<ide>         return Language.DEMO;
<ide>       }
<del>      if (languageName.equals(messages.getString(element.getShortName()))) {
<add>      if (languageName.equals(element.getTranslatedName(messages))) {
<ide>         return element;
<ide>       }
<ide>     }
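The diff above replaces direct messages.getString(...) lookups with Language.getTranslatedName(messages). As a minimal sketch of why the direct call is unsafe (not part of the record): ResourceBundle.getString throws MissingResourceException for any key absent from the bundle, so a language whose short name is missing from a partially translated bundle would crash the dialog. The helper below is hypothetical and only illustrates the defensive behavior a method like getTranslatedName is presumed to provide.

import java.util.MissingResourceException;
import java.util.ResourceBundle;

public class TranslatedNameSketch {
    // Hypothetical fallback mirroring the presumed intent of
    // Language.getTranslatedName(ResourceBundle): never throw on a missing key.
    static String translatedNameOrFallback(ResourceBundle messages,
                                           String shortName, String fallback) {
        try {
            // The call the commit removes from the dialog code; it throws
            // MissingResourceException when shortName has no bundle entry.
            return messages.getString(shortName);
        } catch (MissingResourceException e) {
            return fallback; // e.g. the language's untranslated name
        }
    }
}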
JavaScript
mit
3ebcb3418ac16c1a1d31f1cca9d173fb0e9519db
0
SplicedMind/roomCheckingApp,SplicedMind/roomCheckingApp
function validateLogin() {
    // Read the email field; an empty input yields "".
    var box = document.getElementById("email").value;
    if (box === null || box === "") {
        document.getElementById('m').innerHTML = "Please enter your email";
        return false;
    } else if (box.endsWith("andela.com")) {
        // Caveat: this bare suffix check also accepts e.g. "user@notandela.com";
        // endsWith("@andela.com") would be stricter.
        return true;
    } else {
        document.getElementById('m').innerHTML = "Invalid email address, please enter a valid email";
        return false;
    }
}
JScript/login.js
function validateLogin() {
    var box = document.getElementById("email").value;
    if (!box.endsWith("andela.com")) {
        alert("You have to enter a valid email!");
        return false;
    }
}
Updates added
JScript/login.js
Updates added
<ide><path>JScript/login.js
<ide> function validateLogin()
<ide> {
<ide>     var box = document.getElementById("email").value;
<del>    if (!box.endsWith("andela.com"))
<add>    if (box === null || box === "")
<ide>     {
<del>
<del>        alert("You have to enter a valid email!");
<add>        document.getElementById('m').innerHTML = "Please enter your email";
<ide>         return false;
<ide>     }
<del>
<add>    else if (box.endsWith("andela.com"))
<add>    {
<add>        return true;
<add>    }
<add>    else
<add>    {
<add>        document.getElementById('m').innerHTML = "Invalid email address, please enter a valid email";
<add>        return false;
<add>    }
<ide> }
Java
lgpl-2.1
30e7719bfc5c4cc85dd97481d26886099dd96bc3
0
andreasprlic/biojava,sbliven/biojava-sbliven,emckee2006/biojava,pwrose/biojava,lafita/biojava,pwrose/biojava,emckee2006/biojava,biojava/biojava,andreasprlic/biojava,lafita/biojava,andreasprlic/biojava,pwrose/biojava,biojava/biojava,emckee2006/biojava,andreasprlic/biojava,heuermh/biojava,heuermh/biojava,sbliven/biojava-sbliven,lafita/biojava,heuermh/biojava,sbliven/biojava-sbliven,biojava/biojava
/* * BioJava development code * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. If you do not have a copy, * see: * * http://www.gnu.org/copyleft/lesser.html * * Copyright for this code is held jointly by the individual * authors. These should be listed in @author doc comments. * * For more information on the BioJava project and its aims, * or to join the biojava-l mailing list, visit the home page * at: * * http://www.biojava.org/ * */ package org.biojava.nbio.aaproperties; import org.biojava.nbio.aaproperties.xml.AminoAcidCompositionTable; import org.biojava.nbio.core.exceptions.CompoundNotFoundException; import org.biojava.nbio.core.sequence.ProteinSequence; import org.biojava.nbio.core.sequence.compound.AminoAcidCompound; import org.biojava.nbio.core.sequence.compound.AminoAcidCompoundSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.xml.bind.JAXBException; import java.io.File; import java.io.FileNotFoundException; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; /** * This is an adaptor class which enable the ease of generating protein properties. * At least one adaptor method is written for each available properties provided in IPeptideProperties. * * @author kohchuanhock * @version 2011.08.22 * @since 3.0.2 * @see IPeptideProperties * @see PeptidePropertiesImpl */ public class PeptideProperties { private final static Logger logger = LoggerFactory.getLogger(PeptideProperties.class); /** * Enumeration of 20 standard amino acid code */ public enum SingleLetterAACode { W, C, M, H, Y, F, Q, N, I, R, D, P, T, K, E, V, S, G, A, L} /** * Contains the 20 standard AA code in a set */ public static Set<Character> standardAASet; /** * To initialize the standardAASet */ static{ standardAASet = new HashSet<Character>(); for(SingleLetterAACode c:SingleLetterAACode.values()) standardAASet.add(c.toString().charAt(0)); } /** * An adaptor method to return the molecular weight of sequence. * The sequence argument must be a protein sequence consisting of only non-ambiguous characters. * This method will sum the molecular weight of each amino acid in the * sequence. Molecular weights are based on <a href="http://web.expasy.org/findmod/findmod_masses.html">here</a>. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the total molecular weight of sequence + weight of water molecule */ public static final double getMolecularWeight(String sequence){ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getMolecularWeight(pSequence); } /** * An adaptor method to return the molecular weight of sequence. * The sequence argument must be a protein sequence consisting of only non-ambiguous characters. * This method will sum the molecular weight of each amino acid in the * sequence. Molecular weights are based on the input xml file. 
* * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param elementMassFile * xml file that details the mass of each elements and isotopes * @param aminoAcidCompositionFile * xml file that details the composition of amino acids * @return the total molecular weight of sequence + weight of water molecule * @throws FileNotFoundException * thrown if either elementMassFile or aminoAcidCompositionFile are not found * @throws JAXBException * thrown if unable to properly parse either elementMassFile or aminoAcidCompositionFile */ public static final double getMolecularWeight(String sequence, File elementMassFile, File aminoAcidCompositionFile) throws FileNotFoundException, JAXBException{ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getMolecularWeight(pSequence, elementMassFile, aminoAcidCompositionFile); } /** * An adaptor method to return the molecular weight of sequence. The sequence argument must be a protein sequence consisting of only non-ambiguous characters. * This method will sum the molecular weight of each amino acid in the * sequence. Molecular weights are based on the input files. These input files must be XML using the defined schema. * Note that it assumes that ElementMass.xml file can be found in default location. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * xml file that details the mass of each elements and isotopes * @param aminoAcidCompositionFile * xml file that details the composition of amino acids * @return the total molecular weight of sequence + weight of water molecule * @throws JAXBException * thrown if unable to properly parse either elementMassFile or aminoAcidCompositionFile * @throws FileNotFoundException * thrown if either elementMassFile or aminoAcidCompositionFile are not found */ public static final double getMolecularWeight(String sequence, File aminoAcidCompositionFile) throws FileNotFoundException, JAXBException{ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getMolecularWeight(pSequence, aminoAcidCompositionFile); } /** * An adaptor method would initialize amino acid composition table based on the input xml files and stores the table for usage in future calls to * IPeptideProperties.getMolecularWeightBasedOnXML(ProteinSequence, AminoAcidCompositionTable). * Note that ElementMass.xml is assumed to be able to be seen in default location. 
* * @param aminoAcidCompositionFile * xml file that details the composition of amino acids * @return the initialized amino acid composition table * @throws JAXBException * thrown if unable to properly parse either elementMassFile or aminoAcidCompositionFile * @throws FileNotFoundException * thrown if either elementMassFile or aminoAcidCompositionFile are not found */ public static final AminoAcidCompositionTable obtainAminoAcidCompositionTable(File aminoAcidCompositionFile) throws JAXBException, FileNotFoundException{ IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.obtainAminoAcidCompositionTable(aminoAcidCompositionFile); } /** * An adaptor method would initialize amino acid composition table based on the input xml files and stores the table for usage in future calls to * IPeptideProperties.getMolecularWeightBasedOnXML(ProteinSequence, AminoAcidCompositionTable). * * @param elementMassFile * xml file that details the mass of each elements and isotopes * @param aminoAcidCompositionFile * xml file that details the composition of amino acids * @return the initialized amino acid composition table * @throws JAXBException * thrown if unable to properly parse either elementMassFile or aminoAcidCompositionFile * @throws FileNotFoundException * thrown if either elementMassFile or aminoAcidCompositionFile are not found */ public static final AminoAcidCompositionTable obtainAminoAcidCompositionTable(File elementMassFile, File aminoAcidCompositionFile) throws JAXBException, FileNotFoundException{ IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.obtainAminoAcidCompositionTable(elementMassFile, aminoAcidCompositionFile); } /** * An adaptor method that returns the molecular weight of sequence. The sequence argument must be a protein sequence consisting of only non-ambiguous characters. * This method will sum the molecular weight of each amino acid in the * sequence. Molecular weights are based on the AminoAcidCompositionTable. * Those input files must be XML using the defined schema. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param aminoAcidCompositionTable * a amino acid composition table obtained by calling IPeptideProperties.obtainAminoAcidCompositionTable * @return the total molecular weight of sequence + weight of water molecule * thrown if the method IPeptideProperties.setMolecularWeightXML(File, File) is not successfully called before calling this method. */ public static double getMolecularWeightBasedOnXML(String sequence, AminoAcidCompositionTable aminoAcidCompositionTable){ sequence = Utils.checkSequence(sequence, aminoAcidCompositionTable.getSymbolSet()); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence, aminoAcidCompositionTable.getAminoAcidCompoundSet()); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getMolecularWeightBasedOnXML(pSequence, aminoAcidCompositionTable); } /** * An adaptor method to returns the absorbance (optical density) of sequence. The sequence argument * must be a protein sequence consisting of only non-ambiguous characters. * The computation of absorbance (optical density) follows the * documentation in <a href="http://web.expasy.org/protparam/protparam-doc.html">here</a>. 
* * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param assumeCysReduced * true if Cys are assumed to be reduced and false if Cys are assumed to form cystines * @return the absorbance (optical density) of sequence */ public static final double getAbsorbance(String sequence, boolean assumeCysReduced){ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getAbsorbance(pSequence, assumeCysReduced); } /** * An adaptor method to return the extinction coefficient of sequence. The sequence argument * must be a protein sequence consisting of only non-ambiguous characters. * The extinction coefficient indicates how much light a protein absorbs at * a certain wavelength. It is useful to have an estimation of this * coefficient for following a protein which a spectrophotometer when * purifying it. The computation of extinction coefficient follows the * documentation in <a href="http://web.expasy.org/protparam/protparam-doc.html">here</a>. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param assumeCysReduced * true if Cys are assumed to be reduced and false if Cys are * assumed to form cystines * @return the extinction coefficient of sequence */ public static final double getExtinctionCoefficient(String sequence, boolean assumeCysReduced) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getExtinctionCoefficient(pSequence, assumeCysReduced); } /** * An adaptor method to return the instability index of sequence. The sequence argument must be * a protein sequence consisting of only non-ambiguous characters. * The instability index provides an estimate of the stability of your * protein in a test tube. The computation of instability index follows the * documentation in <a href="http://web.expasy.org/protparam/protparam-doc.html">here</a>. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the instability index of sequence */ public static final double getInstabilityIndex(String sequence) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getInstabilityIndex(pSequence); } /** * An adaptor method to return the apliphatic index of sequence. The sequence argument must be a * protein sequence consisting of only non-ambiguous characters. 
* The aliphatic index of a protein is defined as the relative volume * occupied by aliphatic side chains (alanine, valine, isoleucine, and * leucine). It may be regarded as a positive factor for the increase of * thermostability of globular proteins. The computation of aliphatic index * follows the documentation in <a href="http://web.expasy.org/protparam/protparam-doc.html">here</a>. * A protein whose instability index is smaller than 40 is predicted as stable, a value above 40 predicts that the protein may be unstable. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the aliphatic index of sequence */ public static final double getApliphaticIndex(String sequence) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getApliphaticIndex(pSequence); } /** * An adaptor method to return the average hydropathy value of sequence. The sequence argument * must be a protein sequence consisting of only non-ambiguous characters. * The average value for a sequence is calculated as the sum of hydropathy * values of all the amino acids, divided by the number of residues in the * sequence. Hydropathy values are based on (Kyte, J. and Doolittle, R.F. * (1982) A simple method for displaying the hydropathic character of a * protein. J. Mol. Biol. 157, 105-132). * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the average hydropathy value of sequence */ public static final double getAvgHydropathy(String sequence) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getAvgHydropathy(pSequence); } /** * An adaptor method to return the isoelectric point of sequence. The sequence argument must be * a protein sequence consisting of only non-ambiguous characters. * The isoelectric point is the pH at which the protein carries no net * electrical charge. The isoelectric point will be computed based on * approach stated in * <a href="http://www.innovagen.se/custom-peptide-synthesis/peptide-property-calculator/peptide-property-calculator-notes.asp#PI">here</a> * * pKa values used will be either * those used by Expasy which referenced "Electrophoresis 1994, 15, 529-539" * OR * A.Lehninger, Principles of Biochemistry, 4th Edition (2005), Chapter 3, page78, Table 3-1. 
* * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param useExpasyValues * whether to use Expasy values (Default) or Innovagen values * @return the isoelectric point of sequence */ public static final double getIsoelectricPoint(String sequence, boolean useExpasyValues) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getIsoelectricPoint(pSequence, useExpasyValues); } public static final double getIsoelectricPoint(String sequence){ return getIsoelectricPoint(sequence, true); } /** * An adaptor method to return the net charge of sequence at pH 7. The sequence argument must be * a protein sequence consisting of only non-ambiguous characters. * The net charge will be computed using the approach stated in * <a href="http://www.innovagen.se/custom-peptide-synthesis/peptide-property-calculator/peptide-property-calculator-notes.asp#PI">here</a> * * pKa values used will be either * those used by Expasy which referenced "Electrophoresis 1994, 15, 529-539" * OR * A.Lehninger, Principles of Biochemistry, 4th Edition (2005), Chapter 3, page78, Table 3-1. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param useExpasyValues * whether to use Expasy values (Default) or Innovagen values * @param pHPoint * the pH value to use for computation of the net charge. Default at 7. * @return the net charge of sequence at given pHPoint */ public static final double getNetCharge(String sequence, boolean useExpasyValues, double pHPoint){ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getNetCharge(pSequence, useExpasyValues, pHPoint); } public static final double getNetCharge(String sequence, boolean useExpasyValues) { return getNetCharge(sequence, useExpasyValues, 7.0); } public static final double getNetCharge(String sequence){ return getNetCharge(sequence, true); } /** * An adaptor method to return the composition of specified amino acid in the sequence. The * sequence argument must be a protein sequence consisting of only * non-ambiguous characters. The aminoAcidCode must be a non-ambiguous * character. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param aminoAcidCode * the code of the amino acid to compute * @return the composition of specified amino acid in the sequence * @see SingleLetterAACode */ public static final double getEnrichment(String sequence, SingleLetterAACode aminoAcidCode) { return getEnrichment(sequence, aminoAcidCode.toString()); } /** * An adaptor method to return the composition of specified amino acid in the sequence. 
The * sequence argument must be a protein sequence consisting of only * non-ambiguous characters. The aminoAcidCode must be a non-ambiguous * character. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param aminoAcidCode * the code of the amino acid to compute * @return the composition of specified amino acid in the sequence */ public static final double getEnrichment(String sequence, char aminoAcidCode){ return getEnrichment(sequence, aminoAcidCode); } /** * An adaptor method to return the composition of specified amino acid in the sequence. The * sequence argument must be a protein sequence consisting of only * non-ambiguous characters. The aminoAcidCode must be a non-ambiguous * character. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param aminoAcidCode * the code of the amino acid to compute * @return the composition of specified amino acid in the sequence */ public static final double getEnrichment(String sequence, String aminoAcidCode){ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); AminoAcidCompoundSet aaSet = new AminoAcidCompoundSet(); return pp.getEnrichment(pSequence, aaSet.getCompoundForString(aminoAcidCode)); } /** * An adaptor method to return the composition of the 20 standard amino acid in the sequence. * The sequence argument must be a protein sequence consisting of only * non-ambiguous characters. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the composition of the 20 standard amino acid in the sequence * @see AminoAcidCompound */ public static final Map<AminoAcidCompound, Double> getAAComposition(String sequence) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getAAComposition(pSequence); } /** * An adaptor method to return the composition of the 20 standard amino acid in the sequence. * The sequence argument must be a protein sequence consisting of only * non-ambiguous characters. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence. 
* * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the composition of the 20 standard amino acid in the sequence */ public static final Map<String, Double> getAACompositionString(String sequence){ Map<AminoAcidCompound, Double> aa2Composition = getAAComposition(sequence); Map<String, Double> aaString2Composition = new HashMap<String, Double>(); for(AminoAcidCompound aaCompound:aa2Composition.keySet()){ aaString2Composition.put(aaCompound.getShortName(), aa2Composition.get(aaCompound)); } return aaString2Composition; } /** * An adaptor method to return the composition of the 20 standard amino acid in the sequence. * The sequence argument must be a protein sequence consisting of only * non-ambiguous characters. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the composition of the 20 standard amino acid in the sequence */ public static final Map<Character, Double> getAACompositionChar(String sequence){ Map<AminoAcidCompound, Double> aa2Composition = getAAComposition(sequence); Map<Character, Double> aaChar2Composition = new HashMap<Character, Double>(); for(AminoAcidCompound aaCompound:aa2Composition.keySet()){ aaChar2Composition.put(aaCompound.getShortName().charAt(0), aa2Composition.get(aaCompound)); } return aaChar2Composition; } /** * Returns the array of charges of each amino acid in a protein. At pH=7, two are negative charged: aspartic acid (Asp, D) and glutamic acid (Glu, E) (acidic side chains), * and three are positive charged: lysine (Lys, K), arginine (Arg, R) and histidine (His, H) (basic side chains). * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the array of charges of amino acids in the protein (1 if amino acid is positively charged, -1 if negatively charged, 0 if not charged) */ public static final int[] getChargesOfAminoAcids(String sequence) { int[] charges = new int[sequence.length()]; for ( int i=0; i < sequence.length(); i++ ) { char aa = sequence.toCharArray()[i]; charges[i] = AminoAcidProperties.getChargeOfAminoAcid(aa); } return charges; } /** * Returns the array of polarity values of each amino acid in a protein sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the array of polarity of amino acids in the protein (1 if amino acid is polar, 0 if not) */ public static final int[] getPolarityOfAminoAcids(String sequence) { int[] polarity = new int[sequence.length()]; for ( int i=0; i < sequence.length(); i++ ) { char aa = sequence.toCharArray()[i]; polarity[i] = AminoAcidProperties.getPolarityOfAminoAcid(aa); } return polarity; } }
biojava-aa-prop/src/main/java/org/biojava/nbio/aaproperties/PeptideProperties.java
/* * BioJava development code * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. If you do not have a copy, * see: * * http://www.gnu.org/copyleft/lesser.html * * Copyright for this code is held jointly by the individual * authors. These should be listed in @author doc comments. * * For more information on the BioJava project and its aims, * or to join the biojava-l mailing list, visit the home page * at: * * http://www.biojava.org/ * */ package org.biojava.nbio.aaproperties; import org.biojava.nbio.aaproperties.xml.AminoAcidCompositionTable; import org.biojava.nbio.core.exceptions.CompoundNotFoundException; import org.biojava.nbio.core.sequence.ProteinSequence; import org.biojava.nbio.core.sequence.compound.AminoAcidCompound; import org.biojava.nbio.core.sequence.compound.AminoAcidCompoundSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.xml.bind.JAXBException; import java.io.File; import java.io.FileNotFoundException; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; /** * This is an adaptor class which enable the ease of generating protein properties. * At least one adaptor method is written for each available properties provided in IPeptideProperties. * * @author kohchuanhock * @version 2011.08.22 * @since 3.0.2 * @see IPeptideProperties * @see PeptidePropertiesImpl */ public class PeptideProperties { private final static Logger logger = LoggerFactory.getLogger(PeptideProperties.class); /** * Enumeration of 20 standard amino acid code */ public enum SingleLetterAACode { W, C, M, H, Y, F, Q, N, I, R, D, P, T, K, E, V, S, G, A, L} /** * Contains the 20 standard AA code in a set */ public static Set<Character> standardAASet; /** * To initialize the standardAASet */ static{ standardAASet = new HashSet<Character>(); for(SingleLetterAACode c:SingleLetterAACode.values()) standardAASet.add(c.toString().charAt(0)); } /** * An adaptor method to return the molecular weight of sequence. * The sequence argument must be a protein sequence consisting of only non-ambiguous characters. * This method will sum the molecular weight of each amino acid in the * sequence. Molecular weights are based on <a href="http://web.expasy.org/findmod/findmod_masses.html">here</a>. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the total molecular weight of sequence + weight of water molecule */ public static final double getMolecularWeight(String sequence){ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getMolecularWeight(pSequence); } /** * An adaptor method to return the molecular weight of sequence. * The sequence argument must be a protein sequence consisting of only non-ambiguous characters. * This method will sum the molecular weight of each amino acid in the * sequence. Molecular weights are based on the input xml file. 
* * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param elementMassFile * xml file that details the mass of each elements and isotopes * @param aminoAcidCompositionFile * xml file that details the composition of amino acids * @return the total molecular weight of sequence + weight of water molecule * @throws FileNotFoundException * thrown if either elementMassFile or aminoAcidCompositionFile are not found * @throws JAXBException * thrown if unable to properly parse either elementMassFile or aminoAcidCompositionFile */ public static final double getMolecularWeight(String sequence, File elementMassFile, File aminoAcidCompositionFile) throws FileNotFoundException, JAXBException{ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getMolecularWeight(pSequence, elementMassFile, aminoAcidCompositionFile); } /** * An adaptor method to return the molecular weight of sequence. The sequence argument must be a protein sequence consisting of only non-ambiguous characters. * This method will sum the molecular weight of each amino acid in the * sequence. Molecular weights are based on the input files. These input files must be XML using the defined schema. * Note that it assumes that ElementMass.xml file can be found in default location. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * xml file that details the mass of each elements and isotopes * @param aminoAcidCompositionFile * xml file that details the composition of amino acids * @return the total molecular weight of sequence + weight of water molecule * @throws JAXBException * thrown if unable to properly parse either elementMassFile or aminoAcidCompositionFile * @throws FileNotFoundException * thrown if either elementMassFile or aminoAcidCompositionFile are not found */ public static final double getMolecularWeight(String sequence, File aminoAcidCompositionFile) throws FileNotFoundException, JAXBException{ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getMolecularWeight(pSequence, aminoAcidCompositionFile); } /** * An adaptor method would initialize amino acid composition table based on the input xml files and stores the table for usage in future calls to * IPeptideProperties.getMolecularWeightBasedOnXML(ProteinSequence, AminoAcidCompositionTable). * Note that ElementMass.xml is assumed to be able to be seen in default location. 
* * @param aminoAcidCompositionFile * xml file that details the composition of amino acids * @return the initialized amino acid composition table * @throws JAXBException * thrown if unable to properly parse either elementMassFile or aminoAcidCompositionFile * @throws FileNotFoundException * thrown if either elementMassFile or aminoAcidCompositionFile are not found */ public static final AminoAcidCompositionTable obtainAminoAcidCompositionTable(File aminoAcidCompositionFile) throws JAXBException, FileNotFoundException{ IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.obtainAminoAcidCompositionTable(aminoAcidCompositionFile); } /** * An adaptor method would initialize amino acid composition table based on the input xml files and stores the table for usage in future calls to * IPeptideProperties.getMolecularWeightBasedOnXML(ProteinSequence, AminoAcidCompositionTable). * * @param elementMassFile * xml file that details the mass of each elements and isotopes * @param aminoAcidCompositionFile * xml file that details the composition of amino acids * @return the initialized amino acid composition table * @throws JAXBException * thrown if unable to properly parse either elementMassFile or aminoAcidCompositionFile * @throws FileNotFoundException * thrown if either elementMassFile or aminoAcidCompositionFile are not found */ public static final AminoAcidCompositionTable obtainAminoAcidCompositionTable(File elementMassFile, File aminoAcidCompositionFile) throws JAXBException, FileNotFoundException{ IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.obtainAminoAcidCompositionTable(elementMassFile, aminoAcidCompositionFile); } /** * An adaptor method that returns the molecular weight of sequence. The sequence argument must be a protein sequence consisting of only non-ambiguous characters. * This method will sum the molecular weight of each amino acid in the * sequence. Molecular weights are based on the AminoAcidCompositionTable. * Those input files must be XML using the defined schema. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param aminoAcidCompositionTable * a amino acid composition table obtained by calling IPeptideProperties.obtainAminoAcidCompositionTable * @return the total molecular weight of sequence + weight of water molecule * thrown if the method IPeptideProperties.setMolecularWeightXML(File, File) is not successfully called before calling this method. */ public static double getMolecularWeightBasedOnXML(String sequence, AminoAcidCompositionTable aminoAcidCompositionTable){ sequence = Utils.checkSequence(sequence, aminoAcidCompositionTable.getSymbolSet()); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence, aminoAcidCompositionTable.getAminoAcidCompoundSet()); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getMolecularWeightBasedOnXML(pSequence, aminoAcidCompositionTable); } /** * An adaptor method to returns the absorbance (optical density) of sequence. The sequence argument * must be a protein sequence consisting of only non-ambiguous characters. * The computation of absorbance (optical density) follows the * documentation in <a href="http://web.expasy.org/protparam/protparam-doc.html">here</a>. 
* * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param assumeCysReduced * true if Cys are assumed to be reduced and false if Cys are assumed to form cystines * @return the absorbance (optical density) of sequence */ public static final double getAbsorbance(String sequence, boolean assumeCysReduced){ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getAbsorbance(pSequence, assumeCysReduced); } /** * An adaptor method to return the extinction coefficient of sequence. The sequence argument * must be a protein sequence consisting of only non-ambiguous characters. * The extinction coefficient indicates how much light a protein absorbs at * a certain wavelength. It is useful to have an estimation of this * coefficient for following a protein which a spectrophotometer when * purifying it. The computation of extinction coefficient follows the * documentation in <a href="http://web.expasy.org/protparam/protparam-doc.html">here</a>. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param assumeCysReduced * true if Cys are assumed to be reduced and false if Cys are * assumed to form cystines * @return the extinction coefficient of sequence */ public static final double getExtinctionCoefficient(String sequence, boolean assumeCysReduced) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getExtinctionCoefficient(pSequence, assumeCysReduced); } /** * An adaptor method to return the instability index of sequence. The sequence argument must be * a protein sequence consisting of only non-ambiguous characters. * The instability index provides an estimate of the stability of your * protein in a test tube. The computation of instability index follows the * documentation in <a href="http://web.expasy.org/protparam/protparam-doc.html">here</a>. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the instability index of sequence */ public static final double getInstabilityIndex(String sequence) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getInstabilityIndex(pSequence); } /** * An adaptor method to return the apliphatic index of sequence. The sequence argument must be a * protein sequence consisting of only non-ambiguous characters. 
* The aliphatic index of a protein is defined as the relative volume * occupied by aliphatic side chains (alanine, valine, isoleucine, and * leucine). It may be regarded as a positive factor for the increase of * thermostability of globular proteins. The computation of aliphatic index * follows the documentation in <a href="http://web.expasy.org/protparam/protparam-doc.html">here</a>. * A protein whose instability index is smaller than 40 is predicted as stable, a value above 40 predicts that the protein may be unstable. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the aliphatic index of sequence */ public static final double getApliphaticIndex(String sequence) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getApliphaticIndex(pSequence); } /** * An adaptor method to return the average hydropathy value of sequence. The sequence argument * must be a protein sequence consisting of only non-ambiguous characters. * The average value for a sequence is calculated as the sum of hydropathy * values of all the amino acids, divided by the number of residues in the * sequence. Hydropathy values are based on (Kyte, J. and Doolittle, R.F. * (1982) A simple method for displaying the hydropathic character of a * protein. J. Mol. Biol. 157, 105-132). * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the average hydropathy value of sequence */ public static final double getAvgHydropathy(String sequence) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getAvgHydropathy(pSequence); } /** * An adaptor method to return the isoelectric point of sequence. The sequence argument must be * a protein sequence consisting of only non-ambiguous characters. * The isoelectric point is the pH at which the protein carries no net * electrical charge. The isoelectric point will be computed based on * approach stated in * <a href="http://www.innovagen.se/custom-peptide-synthesis/peptide-property-calculator/peptide-property-calculator-notes.asp#PI">here</a> * * pKa values used will be either * those used by Expasy which referenced "Electrophoresis 1994, 15, 529-539" * OR * A.Lehninger, Principles of Biochemistry, 4th Edition (2005), Chapter 3, page78, Table 3-1. 
* * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param useExpasyValues * whether to use Expasy values (Default) or Innovagen values * @return the isoelectric point of sequence */ public static final double getIsoelectricPoint(String sequence, boolean useExpasyValues) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getIsoelectricPoint(pSequence, useExpasyValues); } public static final double getIsoelectricPoint(String sequence){ return getIsoelectricPoint(sequence, true); } /** * An adaptor method to return the net charge of sequence at pH 7. The sequence argument must be * a protein sequence consisting of only non-ambiguous characters. * The net charge will be computed using the approach stated in * <a href="http://www.innovagen.se/custom-peptide-synthesis/peptide-property-calculator/peptide-property-calculator-notes.asp#PI">here</a> * * pKa values used will be either * those used by Expasy which referenced "Electrophoresis 1994, 15, 529-539" * OR * A.Lehninger, Principles of Biochemistry, 4th Edition (2005), Chapter 3, page78, Table 3-1. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param useExpasyValues * whether to use Expasy values (Default) or Innovagen values * @param pHPoint * the pH value to use for computation of the net charge. Default at 7. * @return the net charge of sequence at given pHPoint */ public static final double getNetCharge(String sequence, boolean useExpasyValues, double pHPoint){ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getNetCharge(pSequence, useExpasyValues, pHPoint); } public static final double getNetCharge(String sequence, boolean useExpasyValues) { return getNetCharge(sequence, useExpasyValues, 7.0); } public static final double getNetCharge(String sequence){ return getNetCharge(sequence, true); } /** * An adaptor method to return the composition of specified amino acid in the sequence. The * sequence argument must be a protein sequence consisting of only * non-ambiguous characters. The aminoAcidCode must be a non-ambiguous * character. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param aminoAcidCode * the code of the amino acid to compute * @return the composition of specified amino acid in the sequence * @see SingleLetterAACode */ public static final double getEnrichment(String sequence, SingleLetterAACode aminoAcidCode) { return getEnrichment(sequence, aminoAcidCode.toString()); } /** * An adaptor method to return the composition of specified amino acid in the sequence. 
The * sequence argument must be a protein sequence consisting of only * non-ambiguous characters. The aminoAcidCode must be a non-ambiguous * character. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param aminoAcidCode * the code of the amino acid to compute * @return the composition of specified amino acid in the sequence */ public static final double getEnrichment(String sequence, char aminoAcidCode){ /* convert the char to a String so this delegates to the String overload; passing the char straight through would re-invoke this same method and recurse forever */ return getEnrichment(sequence, String.valueOf(aminoAcidCode)); } /** * An adaptor method to return the composition of specified amino acid in the sequence. The * sequence argument must be a protein sequence consisting of only * non-ambiguous characters. The aminoAcidCode must be a non-ambiguous * character. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @param aminoAcidCode * the code of the amino acid to compute * @return the composition of specified amino acid in the sequence */ public static final double getEnrichment(String sequence, String aminoAcidCode){ sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); AminoAcidCompoundSet aaSet = new AminoAcidCompoundSet(); return pp.getEnrichment(pSequence, aaSet.getCompoundForString(aminoAcidCode)); } /** * An adaptor method to return the composition of the 20 standard amino acid in the sequence. * The sequence argument must be a protein sequence consisting of only * non-ambiguous characters. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the composition of the 20 standard amino acid in the sequence * @see AminoAcidCompound */ public static final Map<AminoAcidCompound, Double> getAAComposition(String sequence) { sequence = Utils.checkSequence(sequence); ProteinSequence pSequence = null; try { pSequence = new ProteinSequence(sequence); } catch (CompoundNotFoundException e) { // the sequence was checked with Utils.checkSequence, this shouldn't happen logger.error("The protein sequence contains invalid characters ({}), this should not happen. This is most likely a bug in Utils.checkSequence()", e.getMessage()); } IPeptideProperties pp = new PeptidePropertiesImpl(); return pp.getAAComposition(pSequence); } /** * An adaptor method to return the composition of the 20 standard amino acid in the sequence. * The sequence argument must be a protein sequence consisting of only * non-ambiguous characters. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence.
* * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the composition of the 20 standard amino acid in the sequence */ public static final Map<String, Double> getAACompositionString(String sequence){ Map<AminoAcidCompound, Double> aa2Composition = getAAComposition(sequence); Map<String, Double> aaString2Composition = new HashMap<String, Double>(); for(AminoAcidCompound aaCompound:aa2Composition.keySet()){ aaString2Composition.put(aaCompound.getShortName(), aa2Composition.get(aaCompound)); } return aaString2Composition; } /** * An adaptor method to return the composition of the 20 standard amino acid in the sequence. * The sequence argument must be a protein sequence consisting of only * non-ambiguous characters. * The composition of an amino acid is the total number of its occurrence, * divided by the total length of the sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the composition of the 20 standard amino acid in the sequence */ public static final Map<Character, Double> getAACompositionChar(String sequence){ Map<AminoAcidCompound, Double> aa2Composition = getAAComposition(sequence); Map<Character, Double> aaChar2Composition = new HashMap<Character, Double>(); for(AminoAcidCompound aaCompound:aa2Composition.keySet()){ aaChar2Composition.put(aaCompound.getShortName().charAt(0), aa2Composition.get(aaCompound)); } return aaChar2Composition; } /** * Returns the array of charges of each amino acid in a protein. At pH=7, two are negative charged: aspartic acid (Asp, D) and glutamic acid (Glu, E) (acidic side chains), * and three are positive charged: lysine (Lys, K), arginine (Arg, R) and histidine (His, H) (basic side chains). * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the array of charges of amino acids in the protein (1 if amino acid is positively charged, -1 if negatively charged, 0 if not charged) */ public static final int[] getChargesOfAminoAcidsInProtein(String sequence) { int[] charges = new int[sequence.length()]; for ( int i=0; i < sequence.length(); i++ ) { char aa = sequence.toCharArray()[i]; charges[i] = AminoAcidProperties.getChargeOfAminoAcid(aa); } return charges; } /** * Returns the array of polarity values of each amino acid in a protein sequence. * * @param sequence * a protein sequence consisting of non-ambiguous characters only * @return the array of polarity of amino acids in the protein (1 if amino acid is polar, 0 if not) */ public static final int[] getPolarityOfAminoAcidsInProtein(String sequence) { int[] polarity = new int[sequence.length()]; for ( int i=0; i < sequence.length(); i++ ) { char aa = sequence.toCharArray()[i]; polarity[i] = AminoAcidProperties.getPolarityOfAminoAcid(aa); } return polarity; } }
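A minimal usage sketch of the static adaptor methods defined in the file above, not part of the commit itself: the demo class name and the example sequence are invented, and it assumes the biojava-aa-prop module and its biojava-core dependency are on the classpath. Note that the diff for this commit (below) shortens the names of the two array-returning methods.

import java.util.Map;

import org.biojava.nbio.aaproperties.PeptideProperties;
import org.biojava.nbio.core.sequence.compound.AminoAcidCompound;

public class PeptidePropertiesDemo {
	public static void main(String[] args) {
		// Invented example sequence; the adaptor methods require
		// non-ambiguous one-letter amino acid codes only.
		String sequence = "MTADGSEFWKHRPLV";

		// Isoelectric point, using the default Expasy pKa values.
		System.out.println("pI: " + PeptideProperties.getIsoelectricPoint(sequence));

		// Net charge at the default pH of 7.
		System.out.println("net charge: " + PeptideProperties.getNetCharge(sequence));

		// Fraction of the sequence contributed by each standard amino acid.
		Map<AminoAcidCompound, Double> composition = PeptideProperties.getAAComposition(sequence);
		for (Map.Entry<AminoAcidCompound, Double> entry : composition.entrySet()) {
			System.out.println(entry.getKey().getShortName() + ": " + entry.getValue());
		}

		// Per-residue charges: 1 for K/R/H, -1 for D/E, 0 otherwise, as
		// documented above. This method is renamed by the commit's diff below.
		int[] charges = PeptideProperties.getChargesOfAminoAcidsInProtein(sequence);
		System.out.println("charge of first residue: " + charges[0]);
	}
}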
Methods renaming
biojava-aa-prop/src/main/java/org/biojava/nbio/aaproperties/PeptideProperties.java
Methods renaming
<ide><path>biojava-aa-prop/src/main/java/org/biojava/nbio/aaproperties/PeptideProperties.java <ide> * a protein sequence consisting of non-ambiguous characters only <ide> * @return the array of charges of amino acids in the protein (1 if amino acid is positively charged, -1 if negatively charged, 0 if not charged) <ide> */ <del> public static final int[] getChargesOfAminoAcidsInProtein(String sequence) { <add> public static final int[] getChargesOfAminoAcids(String sequence) { <ide> int[] charges = new int[sequence.length()]; <ide> for ( int i=0; i < sequence.length(); i++ ) { <ide> char aa = sequence.toCharArray()[i]; <ide> * a protein sequence consisting of non-ambiguous characters only <ide> * @return the array of polarity of amino acids in the protein (1 if amino acid is polar, 0 if not) <ide> */ <del> public static final int[] getPolarityOfAminoAcidsInProtein(String sequence) { <add> public static final int[] getPolarityOfAminoAcids(String sequence) { <ide> int[] polarity = new int[sequence.length()]; <ide> for ( int i=0; i < sequence.length(); i++ ) { <ide> char aa = sequence.toCharArray()[i];
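After the rename shown in this diff, the same calls would read as follows; again only a sketch, with an arbitrary input string and an invented class name.

import org.biojava.nbio.aaproperties.PeptideProperties;

public class RenamedMethodsDemo {
	public static void main(String[] args) {
		// "DEKRHG" is an arbitrary input. Per the javadoc above, D and E
		// map to -1, K, R and H map to 1, and everything else maps to 0.
		int[] charges = PeptideProperties.getChargesOfAminoAcids("DEKRHG");
		int[] polarity = PeptideProperties.getPolarityOfAminoAcids("DEKRHG");
		System.out.println("charge of D: " + charges[0]); // expected -1
		System.out.println("polarity flag of D: " + polarity[0]);
	}
}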
Java
lgpl-2.1
68c02aeec3a89d9dd0567367f0796a1f6b9f2b24
0
ebner/collaborilla,ebner/collaborilla
/* $Id$ * * Copyright (c) 2006, KMR group at KTH (Royal Institute of Technology) * Licensed under the GNU GPL. For full terms see the file LICENSE. */ package se.kth.nada.kmr.collaborilla.ldap; import java.net.URI; import java.net.URISyntaxException; import java.util.Iterator; import se.kth.nada.kmr.collaborilla.client.CollaborillaDataSet; import com.novell.ldap.LDAPConnection; import com.novell.ldap.LDAPException; /** * Provides methods to directly access and manipulate entries and fields needed * to allow collaboration in the concept browser "Conzilla". * <p> * Extends the generic class LdapObject. * * @author Hannes Ebner * @version $Id$ * @see LDAPObject */ public class CollaborillaObject extends LDAPObject implements Cloneable { private int revision = 0; private String serverDN; private String uri = new String(); /* * Constructor * * */ /** * Creates an object and sets all necessary fields. If the LDAP entry * described by the URI parameter does not exist yet it will be created by * the constructor. * * @param dir * LDAP connection * @param serverDN * Server Distinctive Name (DN).&nbsp;Example: "dc=test,dc=com". * @param uri * URI * @param create * Create the LDAP entry of the given DN should be created if it * does not exist yet * @throws LDAPException */ public CollaborillaObject(LDAPAccess dir, String serverDN, String uri, boolean create) throws LDAPException { this.ldapAccess = dir; this.serverDN = serverDN; this.setAccessUri(uri); if (!this.entryExists()) { if (create) { this.createEntryWithContainer(LDAPStringHelper.dnToParentDN(this.baseDN), CollaborillaObjectConstants.OBJECTCLASS, CollaborillaObjectConstants.INFONODETYPE, CollaborillaObjectConstants.INFONODE); this.addAttribute(CollaborillaObjectConstants.URI, uri); } else { throw new LDAPException("NO SUCH OBJECT", LDAPException.NO_SUCH_OBJECT, LDAPException .resultCodeToString(LDAPException.NO_SUCH_OBJECT), this.baseDN); } } } /* * Overriding methods * * */ /** * Returns a copy of the current object. The LDAP connection is shared. * * @see LDAPObject#clone() * @see java.lang.Object#clone() */ public Object clone() throws CloneNotSupportedException { CollaborillaObject newObject = (CollaborillaObject) super.clone(); return newObject; } /** * Returns a combination of URI and Base DN as a String value. * * @see java.lang.Object#toString() */ public String toString() { return this.uri + ":" + this.baseDN; } /* * Internal * * */ /** * Updates the Base DN for accessing the right LDAP entry. Takes the Server * DN, URI and the revision into consideration for creating a Base DN. */ private void updateBaseDN() { String tmpDN = CollaborillaObjectConstants.INFONODETYPE + "=" + CollaborillaObjectConstants.INFONODE + "," + LDAPStringHelper.uriToBaseDN(CollaborillaObjectConstants.ROOT, this.serverDN, this.uri, CollaborillaObjectConstants.INFOCONTAINERTYPE); if (this.revision > 0) { tmpDN = CollaborillaObjectConstants.INFONODETYPE + "=" + this.revision + "," + tmpDN; } this.baseDN = tmpDN; } /** * Checks whether the currently selected entry (and its respective revision) * can be modified. Throws an exception if a modification is against the * policy. (E.g. a revisioned entry cannot be modified.) 
* * @throws LDAPException */ private void handleWriteAttempt() throws LDAPException { if (!this.isEditable()) { throw new LDAPException("UNWILLING TO PERFORM", LDAPException.UNWILLING_TO_PERFORM, "Policy violation: Not allowed to modify revision", this.baseDN); } } /* * Public * * */ /** * Tells whether we are allowed to edit the node to which the current object * points. It should not be allowed to edit the history of a node. * * @return true or false */ public boolean isEditable() { if (this.getRevision() == 0) { return true; } return false; } /** * Returns the current Base DN. * * @return Base DN */ public String getBaseDN() { return this.baseDN; } /** * Returns the URI of the LDAP entry. * * @return URI */ public String getAccessUri() { return this.uri; } /** * Sets the URI of the LDAP entry and rebuilds the Base DN. * * @param uri * URI */ public void setAccessUri(String uri) { this.uri = uri; this.revision = 0; this.updateBaseDN(); } /* * Revisions * * */ /** * Returns the number of the current revision. * * @return Current revision number.&nbsp;If we work with an up-to-date * object (the latest revision) the returned value is 0. */ public int getRevision() { return this.revision; } /** * Sets the number of the revision. After setting the revision the Base DN * will be rebuilt and all operations will be performed at the revision with * the number of the parameter. * * @param rev * Revision number.&nbsp;Should be 0 to return to the most recent * LDAP entry. */ public void setRevision(int rev) throws LDAPException { int oldRevision = getRevision(); /* * if the following two lines are changed, probably the similar lines * after the if have to be changed too. (this is to prevent an * eventually occurring endless loop) */ this.revision = rev; this.updateBaseDN(); String tmpDN = this.baseDN; if (!this.entryExists()) { this.revision = oldRevision; this.updateBaseDN(); throw new LDAPException("NO SUCH OBJECT", LDAPException.NO_SUCH_OBJECT, LDAPException .resultCodeToString(LDAPException.NO_SUCH_OBJECT), tmpDN); } } /** * Returns the number of revisions in the LDAP directory. * * @return Number of available revisions * @throws LDAPException */ public int getRevisionCount() throws LDAPException { if (this.revision == 0) { return this.childCount(LDAPConnection.SCOPE_ONE); } else { return this.childCount(LDAPStringHelper.dnToParentDN(this.baseDN), LDAPConnection.SCOPE_ONE); } } /** * Returns information of the current revision. * * @return Info of the current revision, currently RDF info.&nbsp;Will be * probably changed in future. * @throws LDAPException */ public String getRevisionInfo() throws LDAPException { /* * perhaps return other information than description? - object with date * of creation and last change - ...? */ return this.getDescription(); } /** * Returns information of a current revision. * * @param rev * @return Revision info * @throws LDAPException * @see #getRevision() */ public String getRevisionInfo(int rev) throws LDAPException { String revInfo; int currentRev = this.getRevision(); try { this.setRevision(rev); revInfo = this.getRevisionInfo(); } catch (LDAPException e) { this.setRevision(currentRev); throw new LDAPException("NO SUCH OBJECT", LDAPException.NO_SUCH_OBJECT, LDAPException .resultCodeToString(LDAPException.NO_SUCH_OBJECT), this.baseDN); } return revInfo; } /** * Sets the current revision to the most recent entry and copies all data * into a new revision. Performs a setRevision(0). 
* * @throws LDAPException */ public void createRevision() throws LDAPException { this.setRevision(0); int revisionNumber = this.getRevisionCount() + 1; String destDN = CollaborillaObjectConstants.INFONODETYPE + "=" + revisionNumber + "," + this.baseDN; this.copyEntry(this.baseDN, destDN); } /** * Returns the Distinctive Name (DN) of a specific revision. * * @param rev * Number of revision * @return DN of the requested revision */ public String getRevisionDN(int rev) { if (rev == 0) { return this.baseDN; } else { return CollaborillaObjectConstants.INFONODETYPE + "=" + rev + "," + this.baseDN; } } /** * Restores a revision and makes it the most recent revision. * <p> * The current entry is copied to a revision, all fields removed and the * fields of the to-be-restored revision are copied to the most recent * entry. * * @param rev * Revision which should be restored */ public void restoreRevision(int rev) throws LDAPException { if (rev > this.getRevisionCount()) { throw new LDAPException("Revision does not exist", LDAPException.NO_SUCH_OBJECT, "Revision does not exist"); } this.setRevision(0); this.createRevision(); this.removeAllAttributes(); this.copyAttributes(this.getRevisionDN(rev), this.baseDN); } /* * URL * * */ /** * Reads all URLs of the entry and returns a String array. * * @return Array of URLs * @throws LDAPException */ public String[] getLocation() throws LDAPException { return this.readAttribute(CollaborillaObjectConstants.LOCATION); } /** * Reads all URLs of the entry and returns a String array. If the Location * attribute of this entry does not exist it will try to construct Locations * by querying the entries of the parent URIs. * * @return Array of URLs * @throws LDAPException */ public String[] getAlignedLocation() throws LDAPException { String[] result = null; String parentURI = this.uri; String originalURI = this.uri; result = this.readAttribute(CollaborillaObjectConstants.LOCATION); if (result == null) { // FLOW // // 1 check if we can go one level higher, if not -> throw // NO_SUCH_ATTRIBUTE // 2 one level up, increase level counter // 3 get location // 4 if NO_SUCH_ATTRIBUTE or NO_SUCH_OBJECT -> 1 (one level up) // 5 if we get a location: // 5.1 get last part of URI depending on the level counter // 5.2 loop through the returned URL and append 5.1 // 6 return result while ((parentURI = LDAPStringHelper.getParentURI(parentURI)) != null) { try { this.setAccessUri(parentURI); result = this.readAttribute(CollaborillaObjectConstants.LOCATION); } catch (LDAPException e) { if ((e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE) || (e.getResultCode() == LDAPException.NO_SUCH_OBJECT)) { continue; } else { this.setAccessUri(originalURI); throw e; } } if (result != null) { String append = originalURI.substring(parentURI.length(), originalURI.length()); for (int i = 0; i < result.length; i++) { if (result[i].endsWith("/") && append.startsWith("/")) { result[i] = result[i].substring(0, result[i].length() - 1); } result[i] += append; } this.setAccessUri(originalURI); return result; } } this.setAccessUri(originalURI); throw new LDAPException("NO SUCH ATTRIBUTE", LDAPException.NO_SUCH_ATTRIBUTE, "Unable to construct a URL from parent entries", this.baseDN); } return result; } /** * Adds a new URL field to the LDAP entry. * * @param url * URL * @throws LDAPException */ public void addLocation(String url) throws LDAPException { this.handleWriteAttempt(); this.addAttribute(CollaborillaObjectConstants.LOCATION, url); } /** * Modifies an already existing URL in the LDAP entry. 
* * @param oldUrl * URL to be modified * @param newUrl * New URL * @throws LDAPException */ public void modifyLocation(String oldUrl, String newUrl) throws LDAPException { this.handleWriteAttempt(); this.modifyAttribute(CollaborillaObjectConstants.LOCATION, oldUrl, newUrl); } /** * Removes a URL from the LDAP entry. * * @param url * URL to be removed * @throws LDAPException */ public void removeLocation(String url) throws LDAPException { this.handleWriteAttempt(); this.removeAttribute(CollaborillaObjectConstants.LOCATION, url); } /* * Required Containers (URI) * * */ /** * Reads all URIs of the entry and returns a String array. * * @return Array of URIs * @throws LDAPException */ public String[] getRequiredContainers() throws LDAPException { return this.readAttribute(CollaborillaObjectConstants.REQUIREDCONTAINER); } /** * Adds a new URI field to the LDAP entry. * * @param uri * URI * @throws LDAPException */ public void addRequiredContainer(String uri) throws LDAPException { this.handleWriteAttempt(); this.addAttribute(CollaborillaObjectConstants.REQUIREDCONTAINER, uri); } /** * Modifies an already existing URI in the LDAP entry. * * @param oldUri * URI to be modified * @param newUri * New URI * @throws LDAPException */ public void modifyRequiredContainer(String oldUri, String newUri) throws LDAPException { this.handleWriteAttempt(); this.modifyAttribute(CollaborillaObjectConstants.REQUIREDCONTAINER, oldUri, newUri); } /** * Removes a URI from the LDAP entry. * * @param uri * URI to be removed * @throws LDAPException */ public void removeRequiredContainer(String uri) throws LDAPException { this.handleWriteAttempt(); this.removeAttribute(CollaborillaObjectConstants.REQUIREDCONTAINER, uri); } /* * Optional Containers (URI) * * */ /** * Reads all URIs of the entry and returns a String array. * * @return Array of URIs * @throws LDAPException */ public String[] getOptionalContainers() throws LDAPException { return this.readAttribute(CollaborillaObjectConstants.OPTIONALCONTAINER); } /** * Adds a new URI field to the LDAP entry. * * @param uri * URI * @throws LDAPException */ public void addOptionalContainer(String uri) throws LDAPException { this.handleWriteAttempt(); this.addAttribute(CollaborillaObjectConstants.OPTIONALCONTAINER, uri); } /** * Modifies an already existing URI in the LDAP entry. * * @param oldUri * URI to be modified * @param newUri * New URI * @throws LDAPException */ public void modifyOptionalContainer(String oldUri, String newUri) throws LDAPException { this.handleWriteAttempt(); this.modifyAttribute(CollaborillaObjectConstants.OPTIONALCONTAINER, oldUri, newUri); } /** * Removes a URI from the LDAP entry. * * @param uri * URI to be removed * @throws LDAPException */ public void removeOptionalContainer(String uri) throws LDAPException { this.handleWriteAttempt(); this.removeAttribute(CollaborillaObjectConstants.OPTIONALCONTAINER, uri); } /* * Meta Data / RDF info * * */ /** * Returns the RDF info field. * * @return RDF info field * @throws LDAPException */ public String getMetaData() throws LDAPException { if (this.attributeExists(CollaborillaObjectConstants.METADATA)) { return this.readAttribute(CollaborillaObjectConstants.METADATA)[0]; } else { return null; } } /** * Sets the RDF info field. 
* * @param rdfInfo * RDF info * @throws LDAPException */ public void setMetaData(String rdfInfo) throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.METADATA)) { this.resetAttribute(CollaborillaObjectConstants.METADATA, rdfInfo); } else { this.addAttribute(CollaborillaObjectConstants.METADATA, rdfInfo); } } /** * Removes an eventually existing RDF info field. * * @throws LDAPException */ public void removeMetaData() throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.METADATA)) { this.removeAttribute(CollaborillaObjectConstants.METADATA, this.getMetaData()); } } /* * Description * * */ /** * Returns the description field of the LDAP entry. * * @return Description * @throws LDAPException */ public String getDescription() throws LDAPException { if (this.attributeExists(CollaborillaObjectConstants.DESCRIPTION)) { return this.readAttribute(CollaborillaObjectConstants.DESCRIPTION)[0]; } else { return null; } } /** * Sets the description field of the LDAP entry. * * @param desc * Description * @throws LDAPException */ public void setDescription(String desc) throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.DESCRIPTION)) { this.resetAttribute(CollaborillaObjectConstants.DESCRIPTION, desc); } else { this.addAttribute(CollaborillaObjectConstants.DESCRIPTION, desc); } } /** * Removes the description field of the LDAP entry. * * @throws LDAPException */ public void removeDescription() throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.DESCRIPTION)) { this.removeAttribute(CollaborillaObjectConstants.DESCRIPTION, this.getDescription()); } } /* * TYPE * * */ /** * Returns the type field of the LDAP entry. * * @return Description * @throws LDAPException */ public String getType() throws LDAPException { if (this.attributeExists(CollaborillaObjectConstants.TYPE)) { return this.readAttribute(CollaborillaObjectConstants.TYPE)[0]; } else { return null; } } /** * Sets the type field of the LDAP entry. * * @param type * Description * @throws LDAPException */ public void setType(String type) throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.TYPE)) { this.resetAttribute(CollaborillaObjectConstants.TYPE, type); } else { this.addAttribute(CollaborillaObjectConstants.TYPE, type); } } /** * Removes the type field of the LDAP entry. * * @throws LDAPException */ public void removeType() throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.TYPE)) { this.removeAttribute(CollaborillaObjectConstants.TYPE, this.getType()); } } /* * LDIF * * */ /** * Returns the entry and its attributes in LDIF format. Can be used to * export an existing entry from the LDAP directory. * * @return LDIF data * @throws LDAPException */ public String getLdif() throws LDAPException { return this.exportEntryLdif(false); } /* * Misc * * */ /** * Returns the revision number of the container file in the RCS. * * @return Revision number; value -1 if the attribute does not exist * @throws LDAPException */ public String getContainerRevision() throws LDAPException { if (this.attributeExists(CollaborillaObjectConstants.CONTAINERREVISION)) { return this.readAttribute(CollaborillaObjectConstants.CONTAINERREVISION)[0]; } else { return null; } } /** * Sets the revision number of the container file in the RCS. 
* * @param containerRevision * Revision number * @throws LDAPException */ public void setContainerRevision(String containerRevision) throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.CONTAINERREVISION)) { this.resetAttribute(CollaborillaObjectConstants.CONTAINERREVISION, containerRevision); } else { this.addAttribute(CollaborillaObjectConstants.CONTAINERREVISION, containerRevision); } } /* * DATASET * * */ /** * Fetches all fields from the directory and returns a full dataset. * * @return Returns a CollaborillaDataSet * @throws LDAPException */ public CollaborillaDataSet getDataSet() throws LDAPException { CollaborillaDataSet data = new CollaborillaDataSet(); try { data.setAlignedLocations(CollaborillaDataSet.stringArrayToSet(getAlignedLocation())); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setContainerRevision(getContainerRevision()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setDescription(getDescription()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } data.setIdentifier(getAccessUri()); try { data.setLocations(CollaborillaDataSet.stringArrayToSet(getLocation())); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { //data.setMetaData(LDAPStringHelper.decode(getMetaData())); data.setMetaData(getMetaData()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setOptionalContainers(CollaborillaDataSet.stringArrayToSet(getOptionalContainers())); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setRequiredContainers(CollaborillaDataSet.stringArrayToSet(getRequiredContainers())); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setRevisionInfo(getRevisionInfo()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } data.setRevisionNumber(getRevision()); try { data.setTimestampCreated(getTimestampCreated()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setTimestampModified(getTimestampModified()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setType(getType()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } return data; } /** * Sets all relevant fields in the directory with the values of a given dataset. * * @param dataset A Collaborilla dataset. 
* @throws LDAPException */ public void setDataSet(CollaborillaDataSet dataset) throws LDAPException { if (dataset == null) { throw new IllegalArgumentException("Dataset must not be null"); } if (!getAccessUri().equals(dataset.getIdentifier())) { if (dataset.getIdentifier() != null) { setAccessUri(dataset.getIdentifier()); } else { throw new IllegalArgumentException("Identifier must not be null"); } } // save the old data System.out.println("createRevision() ->"); createRevision(); System.out.println("<- createRevision()"); // remove (almost) everything System.out.println("removeAllAttributes() ->"); removeAllAttributes(); System.out.println("<- removeAllAttributes()"); if (dataset.getContainerRevision() != null) { System.out.println("setContainerRevision() ->"); setContainerRevision(dataset.getContainerRevision()); System.out.println("<- setContainerRevision()"); } if (dataset.getDescription() != null) { setDescription(dataset.getDescription()); } if (dataset.getLocations() != null) { Iterator it = dataset.getLocations().iterator(); while (it.hasNext()) { String location = (String) it.next(); try { System.out.println("addLocation() ->"); try { URI asciiURI = new URI(location); location = asciiURI.toASCIIString(); } catch (URISyntaxException e) { } addLocation(location); System.out.println("<- addLocation()"); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.ATTRIBUTE_OR_VALUE_EXISTS)) { throw e; } } } } if (dataset.getMetaData() != null) { System.out.println("setMetadata() ->"); setMetaData(dataset.getMetaData()); System.out.println("<- setMetadata()"); } if (dataset.getRequiredContainers() != null) { Iterator it = dataset.getRequiredContainers().iterator(); while (it.hasNext()) { String container = (String) it.next(); try { System.out.println("addRequiredContainers() ->"); addRequiredContainer(container); System.out.println("<- addRequiredContainers()"); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.ATTRIBUTE_OR_VALUE_EXISTS)) { throw e; } } } } if (dataset.getOptionalContainers() != null) { Iterator it = dataset.getOptionalContainers().iterator(); while (it.hasNext()) { String container = (String) it.next(); try { System.out.println("addOptionalContainers() ->"); addOptionalContainer(container); System.out.println("<- addOptionalContainers()"); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.ATTRIBUTE_OR_VALUE_EXISTS)) { throw e; } } } } if (dataset.getType() != null) { System.out.println("setType() ->"); setType(dataset.getType()); System.out.println("<- setType()"); } } }
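A brief sketch of how the getDataSet()/setDataSet() round trip in the class above might be driven. It is illustrative only: the LDAPAccess connection is taken as given (its construction is not shown in this file), and the server DN, URI, and class name are placeholders.

package se.kth.nada.kmr.collaborilla.ldap;

import se.kth.nada.kmr.collaborilla.client.CollaborillaDataSet;

import com.novell.ldap.LDAPException;

public class CollaborillaObjectDemo {
	// The caller supplies an already-open LDAPAccess; how it is created is
	// not shown in CollaborillaObject.java, so it is left abstract here.
	static void roundTrip(LDAPAccess connection) throws LDAPException {
		// "dc=test,dc=com" and the URI are placeholder values.
		CollaborillaObject obj = new CollaborillaObject(connection, "dc=test,dc=com",
				"urn:path:/org/example/component", true);

		// getDataSet() gathers every attribute into one CollaborillaDataSet.
		CollaborillaDataSet data = obj.getDataSet();
		data.setDescription("Updated description");

		// setDataSet() snapshots the old state via createRevision(), clears
		// the entry, and rewrites it from the dataset (with the debug output
		// added by this commit).
		obj.setDataSet(data);

		// Revision 0 always denotes the editable, most recent entry.
		System.out.println("stored revisions: " + obj.getRevisionCount());
	}
}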
src/se/kth/nada/kmr/collaborilla/ldap/CollaborillaObject.java
/* $Id$ * * Copyright (c) 2006, KMR group at KTH (Royal Institute of Technology) * Licensed under the GNU GPL. For full terms see the file LICENSE. */ package se.kth.nada.kmr.collaborilla.ldap; import java.util.Iterator; import se.kth.nada.kmr.collaborilla.client.CollaborillaDataSet; import com.novell.ldap.LDAPConnection; import com.novell.ldap.LDAPException; /** * Provides methods to directly access and manipulate entries and fields needed * to allow collaboration in the concept browser "Conzilla". * <p> * Extends the generic class LdapObject. * * @author Hannes Ebner * @version $Id$ * @see LDAPObject */ public class CollaborillaObject extends LDAPObject implements Cloneable { private int revision = 0; private String serverDN; private String uri = new String(); /* * Constructor * * */ /** * Creates an object and sets all necessary fields. If the LDAP entry * described by the URI parameter does not exist yet it will be created by * the constructor. * * @param dir * LDAP connection * @param serverDN * Server Distinctive Name (DN).&nbsp;Example: "dc=test,dc=com". * @param uri * URI * @param create * Create the LDAP entry of the given DN should be created if it * does not exist yet * @throws LDAPException */ public CollaborillaObject(LDAPAccess dir, String serverDN, String uri, boolean create) throws LDAPException { this.ldapAccess = dir; this.serverDN = serverDN; this.setAccessUri(uri); if (!this.entryExists()) { if (create) { this.createEntryWithContainer(LDAPStringHelper.dnToParentDN(this.baseDN), CollaborillaObjectConstants.OBJECTCLASS, CollaborillaObjectConstants.INFONODETYPE, CollaborillaObjectConstants.INFONODE); this.addAttribute(CollaborillaObjectConstants.URI, uri); } else { throw new LDAPException("NO SUCH OBJECT", LDAPException.NO_SUCH_OBJECT, LDAPException .resultCodeToString(LDAPException.NO_SUCH_OBJECT), this.baseDN); } } } /* * Overriding methods * * */ /** * Returns a copy of the current object. The LDAP connection is shared. * * @see LDAPObject#clone() * @see java.lang.Object#clone() */ public Object clone() throws CloneNotSupportedException { CollaborillaObject newObject = (CollaborillaObject) super.clone(); return newObject; } /** * Returns a combination of URI and Base DN as a String value. * * @see java.lang.Object#toString() */ public String toString() { return this.uri + ":" + this.baseDN; } /* * Internal * * */ /** * Updates the Base DN for accessing the right LDAP entry. Takes the Server * DN, URI and the revision into consideration for creating a Base DN. */ private void updateBaseDN() { String tmpDN = CollaborillaObjectConstants.INFONODETYPE + "=" + CollaborillaObjectConstants.INFONODE + "," + LDAPStringHelper.uriToBaseDN(CollaborillaObjectConstants.ROOT, this.serverDN, this.uri, CollaborillaObjectConstants.INFOCONTAINERTYPE); if (this.revision > 0) { tmpDN = CollaborillaObjectConstants.INFONODETYPE + "=" + this.revision + "," + tmpDN; } this.baseDN = tmpDN; } /** * Checks whether the currently selected entry (and its respective revision) * can be modified. Throws an exception if a modification is against the * policy. (E.g. a revisioned entry cannot be modified.) * * @throws LDAPException */ private void handleWriteAttempt() throws LDAPException { if (!this.isEditable()) { throw new LDAPException("UNWILLING TO PERFORM", LDAPException.UNWILLING_TO_PERFORM, "Policy violation: Not allowed to modify revision", this.baseDN); } } /* * Public * * */ /** * Tells whether we are allowed to edit the node to which the current object * points. 
It should not be allowed to edit the history of a node. * * @return true or false */ public boolean isEditable() { if (this.getRevision() == 0) { return true; } return false; } /** * Returns the current Base DN. * * @return Base DN */ public String getBaseDN() { return this.baseDN; } /** * Returns the URI of the LDAP entry. * * @return URI */ public String getAccessUri() { return this.uri; } /** * Sets the URI of the LDAP entry and rebuilds the Base DN. * * @param uri * URI */ public void setAccessUri(String uri) { this.uri = uri; this.revision = 0; this.updateBaseDN(); } /* * Revisions * * */ /** * Returns the number of the current revision. * * @return Current revision number.&nbsp;If we work with an up-to-date * object (the latest revision) the returned value is 0. */ public int getRevision() { return this.revision; } /** * Sets the number of the revision. After setting the revision the Base DN * will be rebuilt and all operations will be performed at the revision with * the number of the parameter. * * @param rev * Revision number.&nbsp;Should be 0 to return to the most recent * LDAP entry. */ public void setRevision(int rev) throws LDAPException { int oldRevision = getRevision(); /* * if the following two lines are changed, probably the similar lines * after the if have to be changed too. (this is to prevent an * eventually occurring endless loop) */ this.revision = rev; this.updateBaseDN(); String tmpDN = this.baseDN; if (!this.entryExists()) { this.revision = oldRevision; this.updateBaseDN(); throw new LDAPException("NO SUCH OBJECT", LDAPException.NO_SUCH_OBJECT, LDAPException .resultCodeToString(LDAPException.NO_SUCH_OBJECT), tmpDN); } } /** * Returns the number of revisions in the LDAP directory. * * @return Number of available revisions * @throws LDAPException */ public int getRevisionCount() throws LDAPException { if (this.revision == 0) { return this.childCount(LDAPConnection.SCOPE_ONE); } else { return this.childCount(LDAPStringHelper.dnToParentDN(this.baseDN), LDAPConnection.SCOPE_ONE); } } /** * Returns information of the current revision. * * @return Info of the current revision, currently RDF info.&nbsp;Will be * probably changed in future. * @throws LDAPException */ public String getRevisionInfo() throws LDAPException { /* * perhaps return other information than description? - object with date * of creation and last change - ...? */ return this.getDescription(); } /** * Returns information of a current revision. * * @param rev * @return Revision info * @throws LDAPException * @see #getRevision() */ public String getRevisionInfo(int rev) throws LDAPException { String revInfo; int currentRev = this.getRevision(); try { this.setRevision(rev); revInfo = this.getRevisionInfo(); } catch (LDAPException e) { this.setRevision(currentRev); throw new LDAPException("NO SUCH OBJECT", LDAPException.NO_SUCH_OBJECT, LDAPException .resultCodeToString(LDAPException.NO_SUCH_OBJECT), this.baseDN); } return revInfo; } /** * Sets the current revision to the most recent entry and copies all data * into a new revision. Performs a setRevision(0). * * @throws LDAPException */ public void createRevision() throws LDAPException { this.setRevision(0); int revisionNumber = this.getRevisionCount() + 1; String destDN = CollaborillaObjectConstants.INFONODETYPE + "=" + revisionNumber + "," + this.baseDN; this.copyEntry(this.baseDN, destDN); } /** * Returns the Distinctive Name (DN) of a specific revision. 
* * @param rev * Number of revision * @return DN of the requested revision */ public String getRevisionDN(int rev) { if (rev == 0) { return this.baseDN; } else { return CollaborillaObjectConstants.INFONODETYPE + "=" + rev + "," + this.baseDN; } } /** * Restores a revision and makes it the most recent revision. * <p> * The current entry is copied to a revision, all fields removed and the * fields of the to-be-restored revision are copied to the most recent * entry. * * @param rev * Revision which should be restored */ public void restoreRevision(int rev) throws LDAPException { if (rev > this.getRevisionCount()) { throw new LDAPException("Revision does not exist", LDAPException.NO_SUCH_OBJECT, "Revision does not exist"); } this.setRevision(0); this.createRevision(); this.removeAllAttributes(); this.copyAttributes(this.getRevisionDN(rev), this.baseDN); } /* * URL * * */ /** * Reads all URLs of the entry and returns a String array. * * @return Array of URLs * @throws LDAPException */ public String[] getLocation() throws LDAPException { return this.readAttribute(CollaborillaObjectConstants.LOCATION); } /** * Reads all URLs of the entry and returns a String array. If the Location * attribute of this entry does not exist it will try to construct Locations * by querying the entries of the parent URIs. * * @return Array of URLs * @throws LDAPException */ public String[] getAlignedLocation() throws LDAPException { String[] result = null; String parentURI = this.uri; String originalURI = this.uri; result = this.readAttribute(CollaborillaObjectConstants.LOCATION); if (result == null) { // FLOW // // 1 check if we can go one level higher, if not -> throw // NO_SUCH_ATTRIBUTE // 2 one level up, increase level counter // 3 get location // 4 if NO_SUCH_ATTRIBUTE or NO_SUCH_OBJECT -> 1 (one level up) // 5 if we get a location: // 5.1 get last part of URI depending on the level counter // 5.2 loop through the returned URL and append 5.1 // 6 return result while ((parentURI = LDAPStringHelper.getParentURI(parentURI)) != null) { try { this.setAccessUri(parentURI); result = this.readAttribute(CollaborillaObjectConstants.LOCATION); } catch (LDAPException e) { if ((e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE) || (e.getResultCode() == LDAPException.NO_SUCH_OBJECT)) { continue; } else { this.setAccessUri(originalURI); throw e; } } if (result != null) { String append = originalURI.substring(parentURI.length(), originalURI.length()); for (int i = 0; i < result.length; i++) { if (result[i].endsWith("/") && append.startsWith("/")) { result[i] = result[i].substring(0, result[i].length() - 1); } result[i] += append; } this.setAccessUri(originalURI); return result; } } this.setAccessUri(originalURI); throw new LDAPException("NO SUCH ATTRIBUTE", LDAPException.NO_SUCH_ATTRIBUTE, "Unable to construct a URL from parent entries", this.baseDN); } return result; } /** * Adds a new URL field to the LDAP entry. * * @param url * URL * @throws LDAPException */ public void addLocation(String url) throws LDAPException { this.handleWriteAttempt(); this.addAttribute(CollaborillaObjectConstants.LOCATION, url); } /** * Modifies an already existing URL in the LDAP entry. * * @param oldUrl * URL to be modified * @param newUrl * New URL * @throws LDAPException */ public void modifyLocation(String oldUrl, String newUrl) throws LDAPException { this.handleWriteAttempt(); this.modifyAttribute(CollaborillaObjectConstants.LOCATION, oldUrl, newUrl); } /** * Removes a URL from the LDAP entry. 
* * @param url * URL to be removed * @throws LDAPException */ public void removeLocation(String url) throws LDAPException { this.handleWriteAttempt(); this.removeAttribute(CollaborillaObjectConstants.LOCATION, url); } /* * Required Containers (URI) * * */ /** * Reads all URIs of the entry and returns a String array. * * @return Array of URIs * @throws LDAPException */ public String[] getRequiredContainers() throws LDAPException { return this.readAttribute(CollaborillaObjectConstants.REQUIREDCONTAINER); } /** * Adds a new URI field to the LDAP entry. * * @param uri * URI * @throws LDAPException */ public void addRequiredContainer(String uri) throws LDAPException { this.handleWriteAttempt(); this.addAttribute(CollaborillaObjectConstants.REQUIREDCONTAINER, uri); } /** * Modifies an already existing URI in the LDAP entry. * * @param oldUri * URI to be modified * @param newUri * New URI * @throws LDAPException */ public void modifyRequiredContainer(String oldUri, String newUri) throws LDAPException { this.handleWriteAttempt(); this.modifyAttribute(CollaborillaObjectConstants.REQUIREDCONTAINER, oldUri, newUri); } /** * Removes a URI from the LDAP entry. * * @param uri * URI to be removed * @throws LDAPException */ public void removeRequiredContainer(String uri) throws LDAPException { this.handleWriteAttempt(); this.removeAttribute(CollaborillaObjectConstants.REQUIREDCONTAINER, uri); } /* * Optional Containers (URI) * * */ /** * Reads all URIs of the entry and returns a String array. * * @return Array of URIs * @throws LDAPException */ public String[] getOptionalContainers() throws LDAPException { return this.readAttribute(CollaborillaObjectConstants.OPTIONALCONTAINER); } /** * Adds a new URI field to the LDAP entry. * * @param uri * URI * @throws LDAPException */ public void addOptionalContainer(String uri) throws LDAPException { this.handleWriteAttempt(); this.addAttribute(CollaborillaObjectConstants.OPTIONALCONTAINER, uri); } /** * Modifies an already existing URI in the LDAP entry. * * @param oldUri * URI to be modified * @param newUri * New URI * @throws LDAPException */ public void modifyOptionalContainer(String oldUri, String newUri) throws LDAPException { this.handleWriteAttempt(); this.modifyAttribute(CollaborillaObjectConstants.OPTIONALCONTAINER, oldUri, newUri); } /** * Removes a URI from the LDAP entry. * * @param uri * URI to be removed * @throws LDAPException */ public void removeOptionalContainer(String uri) throws LDAPException { this.handleWriteAttempt(); this.removeAttribute(CollaborillaObjectConstants.OPTIONALCONTAINER, uri); } /* * Meta Data / RDF info * * */ /** * Returns the RDF info field. * * @return RDF info field * @throws LDAPException */ public String getMetaData() throws LDAPException { if (this.attributeExists(CollaborillaObjectConstants.METADATA)) { return this.readAttribute(CollaborillaObjectConstants.METADATA)[0]; } else { return null; } } /** * Sets the RDF info field. * * @param rdfInfo * RDF info * @throws LDAPException */ public void setMetaData(String rdfInfo) throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.METADATA)) { this.resetAttribute(CollaborillaObjectConstants.METADATA, rdfInfo); } else { this.addAttribute(CollaborillaObjectConstants.METADATA, rdfInfo); } } /** * Removes an eventually existing RDF info field. 
* * @throws LDAPException */ public void removeMetaData() throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.METADATA)) { this.removeAttribute(CollaborillaObjectConstants.METADATA, this.getMetaData()); } } /* * Description * * */ /** * Returns the description field of the LDAP entry. * * @return Description * @throws LDAPException */ public String getDescription() throws LDAPException { if (this.attributeExists(CollaborillaObjectConstants.DESCRIPTION)) { return this.readAttribute(CollaborillaObjectConstants.DESCRIPTION)[0]; } else { return null; } } /** * Sets the description field of the LDAP entry. * * @param desc * Description * @throws LDAPException */ public void setDescription(String desc) throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.DESCRIPTION)) { this.resetAttribute(CollaborillaObjectConstants.DESCRIPTION, desc); } else { this.addAttribute(CollaborillaObjectConstants.DESCRIPTION, desc); } } /** * Removes the description field of the LDAP entry. * * @throws LDAPException */ public void removeDescription() throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.DESCRIPTION)) { this.removeAttribute(CollaborillaObjectConstants.DESCRIPTION, this.getDescription()); } } /* * TYPE * * */ /** * Returns the type field of the LDAP entry. * * @return Description * @throws LDAPException */ public String getType() throws LDAPException { if (this.attributeExists(CollaborillaObjectConstants.TYPE)) { return this.readAttribute(CollaborillaObjectConstants.TYPE)[0]; } else { return null; } } /** * Sets the type field of the LDAP entry. * * @param type * Description * @throws LDAPException */ public void setType(String type) throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.TYPE)) { this.resetAttribute(CollaborillaObjectConstants.TYPE, type); } else { this.addAttribute(CollaborillaObjectConstants.TYPE, type); } } /** * Removes the type field of the LDAP entry. * * @throws LDAPException */ public void removeType() throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.TYPE)) { this.removeAttribute(CollaborillaObjectConstants.TYPE, this.getType()); } } /* * LDIF * * */ /** * Returns the entry and its attributes in LDIF format. Can be used to * export an existing entry from the LDAP directory. * * @return LDIF data * @throws LDAPException */ public String getLdif() throws LDAPException { return this.exportEntryLdif(false); } /* * Misc * * */ /** * Returns the revision number of the container file in the RCS. * * @return Revision number; value -1 if the attribute does not exist * @throws LDAPException */ public String getContainerRevision() throws LDAPException { if (this.attributeExists(CollaborillaObjectConstants.CONTAINERREVISION)) { return this.readAttribute(CollaborillaObjectConstants.CONTAINERREVISION)[0]; } else { return null; } } /** * Sets the revision number of the container file in the RCS. 
* * @param containerRevision * Revision number * @throws LDAPException */ public void setContainerRevision(String containerRevision) throws LDAPException { this.handleWriteAttempt(); if (this.attributeExists(CollaborillaObjectConstants.CONTAINERREVISION)) { this.resetAttribute(CollaborillaObjectConstants.CONTAINERREVISION, containerRevision); } else { this.addAttribute(CollaborillaObjectConstants.CONTAINERREVISION, containerRevision); } } /* * DATASET * * */ /** * Fetches all fields from the directory and returns a full dataset. * * @return Returns a CollaborillaDataSet * @throws LDAPException */ public CollaborillaDataSet getDataSet() throws LDAPException { CollaborillaDataSet data = new CollaborillaDataSet(); try { data.setAlignedLocations(CollaborillaDataSet.stringArrayToSet(getAlignedLocation())); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setContainerRevision(getContainerRevision()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setDescription(getDescription()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } data.setIdentifier(getAccessUri()); try { data.setLocations(CollaborillaDataSet.stringArrayToSet(getLocation())); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { //data.setMetaData(LDAPStringHelper.decode(getMetaData())); data.setMetaData(getMetaData()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setOptionalContainers(CollaborillaDataSet.stringArrayToSet(getOptionalContainers())); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setRequiredContainers(CollaborillaDataSet.stringArrayToSet(getRequiredContainers())); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setRevisionInfo(getRevisionInfo()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } data.setRevisionNumber(getRevision()); try { data.setTimestampCreated(getTimestampCreated()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setTimestampModified(getTimestampModified()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } try { data.setType(getType()); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE)) { throw e; } } return data; } /** * Sets all relevant fields in the directory with the values of a given dataset. * * @param dataset A Collaborilla dataset. 
* @throws LDAPException */ public void setDataSet(CollaborillaDataSet dataset) throws LDAPException { if (dataset == null) { throw new IllegalArgumentException("Dataset must not be null"); } if (!getAccessUri().equals(dataset.getIdentifier())) { if (dataset.getIdentifier() != null) { setAccessUri(dataset.getIdentifier()); } else { throw new IllegalArgumentException("Identifier must not be null"); } } // save the old data createRevision(); // remove (almost) everything removeAllAttributes(); if (dataset.getContainerRevision() != null) { setContainerRevision(dataset.getContainerRevision()); } if (dataset.getDescription() != null) { setDescription(dataset.getDescription()); } if (dataset.getLocations() != null) { Iterator it = dataset.getLocations().iterator(); while (it.hasNext()) { String location = (String) it.next(); try { addLocation(location); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.ATTRIBUTE_OR_VALUE_EXISTS)) { throw e; } } } } if (dataset.getMetaData() != null) { setMetaData(dataset.getMetaData()); } if (dataset.getRequiredContainers() != null) { Iterator it = dataset.getRequiredContainers().iterator(); while (it.hasNext()) { String container = (String) it.next(); try { addRequiredContainer(container); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.ATTRIBUTE_OR_VALUE_EXISTS)) { throw e; } } } } if (dataset.getOptionalContainers() != null) { Iterator it = dataset.getOptionalContainers().iterator(); while (it.hasNext()) { String container = (String) it.next(); try { addOptionalContainer(container); } catch (LDAPException e) { if (!(e.getResultCode() == LDAPException.ATTRIBUTE_OR_VALUE_EXISTS)) { throw e; } } } } if (dataset.getType() != null) { setType(dataset.getType()); } } }
added some debug output
src/se/kth/nada/kmr/collaborilla/ldap/CollaborillaObject.java
added some debug output
<ide><path>src/se/kth/nada/kmr/collaborilla/ldap/CollaborillaObject.java <ide> <ide> package se.kth.nada.kmr.collaborilla.ldap; <ide> <add>import java.net.URI; <add>import java.net.URISyntaxException; <ide> import java.util.Iterator; <ide> <ide> import se.kth.nada.kmr.collaborilla.client.CollaborillaDataSet; <ide> } <ide> <ide> // save the old data <add> System.out.println("createRevision() ->"); <ide> createRevision(); <add> System.out.println("<- createRevision()"); <ide> <ide> // remove (almost) everything <add> System.out.println("removeAllAttributes() ->"); <ide> removeAllAttributes(); <add> System.out.println("<- removeAllAttributes()"); <ide> <ide> if (dataset.getContainerRevision() != null) { <add> System.out.println("setContainerRevision() ->"); <ide> setContainerRevision(dataset.getContainerRevision()); <add> System.out.println("<- setContainerRevision()"); <ide> } <ide> <ide> if (dataset.getDescription() != null) { <ide> while (it.hasNext()) { <ide> String location = (String) it.next(); <ide> try { <add> System.out.println("addLocation() ->"); <add> try { <add> URI asciiURI = new URI(location); <add> location = asciiURI.toASCIIString(); <add> } catch (URISyntaxException e) { <add> } <ide> addLocation(location); <add> System.out.println("<- addLocation()"); <ide> } catch (LDAPException e) { <ide> if (!(e.getResultCode() == LDAPException.ATTRIBUTE_OR_VALUE_EXISTS)) { <ide> throw e; <ide> } <ide> <ide> if (dataset.getMetaData() != null) { <add> System.out.println("setMetadata() ->"); <ide> setMetaData(dataset.getMetaData()); <add> System.out.println("<- setMetadata()"); <ide> } <ide> <ide> if (dataset.getRequiredContainers() != null) { <ide> while (it.hasNext()) { <ide> String container = (String) it.next(); <ide> try { <add> System.out.println("addRequiredContainers() ->"); <ide> addRequiredContainer(container); <add> System.out.println("<- addRequiredContainers()"); <ide> } catch (LDAPException e) { <ide> if (!(e.getResultCode() == LDAPException.ATTRIBUTE_OR_VALUE_EXISTS)) { <ide> throw e; <ide> while (it.hasNext()) { <ide> String container = (String) it.next(); <ide> try { <add> System.out.println("addOptionalContainers() ->"); <ide> addOptionalContainer(container); <add> System.out.println("<- addOptionalContainers()"); <ide> } catch (LDAPException e) { <ide> if (!(e.getResultCode() == LDAPException.ATTRIBUTE_OR_VALUE_EXISTS)) { <ide> throw e; <ide> } <ide> <ide> if (dataset.getType() != null) { <add> System.out.println("setType() ->"); <ide> setType(dataset.getType()); <add> System.out.println("<- setType()"); <ide> } <ide> } <ide>
JavaScript
mit
2618369d7a94cb942c7d8f3e67f07a20c225e666
0
kkeh/tracker,eanpherick/tasks,eanpherick/tasks,kkeh/tracker
var GUI = (function() { //IIFE for all Views /** This view displays a single task, showing its title, description, status, creator, and assignee (if any). Each TaskView should include one or more controls (e.g. a select or set of buttons) to change its state. Each task view will be associated with exactly one task model, although a model may have more than one view instance. */ var TaskView = Backbone.View.extend({ initialize: function(opts) { _.extend(this, opts); this.render(); }, render: function() { var status = this.model.get('status'); var assignee = this.model.get('assignee'); assignee = assignee === "" ? "unassigned" : assignee; this.$el.append($("<h1>").html(this.model.get('title'))); this.$el.append($("<h2>").html(this.model.get('description'))); this.$el.append($("<p class='creator'>").html("CREATED BY: " + this.model.get('creator'))); this.$el.append($("<p class='assignee'>").html("ASSIGNED TO: " + assignee)); if (status === "unassigned") { this.$el.append($("<button class='claim'>").html("CLAIM")); } else if (assignee === app.currentUser.get("username")) { this.$el.append($("<button class='quit'>").html("QUIT")); this.$el.append($("<button class='done'>").html("DONE")); } this.$el.addClass("task-view"); }, events: { "click button.quit": "quitTask", "click button.done": "completeTask", "click button.claim": "claimTask" }, quitTask: function(e) { console.log("quitTask"); }, completeTask: function(e) { console.log("completeTask"); }, claimTask: function(e) { console.log("claimTask"); } }); /** You'll need a view with input fields for the user to fill in when creating a new task. It should probably have both a create and cancel button. The location and format of the view is up to you. */ var CreateTaskView = Backbone.View.extend({ }); var TaskCollectionView = Backbone.View.extend({ relevantTasks: [], initialize: function(opts) { _.extend(this, opts); this.filterCollection(); this.render(); }, filterCollection: function() { if (this.kind === "unassigned") { this.relevantTasks = this.collection.where({ status: "unassigned" }); } else { var assigned = this.collection.where({ assignee: app.currentUser.get("username") }); var created = this.collection.where({ creator: app.currentUser.get("username") }); this.relevantTasks = _.union(assigned, created); } }, render: function() { var title = this.kind === 'unassigned' ? 
"Unassigned Tasks" : app.currentUser.get("username") + "'s Tasks" this.$el.append($("<h1>").html(title)); // make a new TaskView for each this.relevantTasks var self = this; this.relevantTasks.forEach(function(e) { var taskView = new TaskView({ model: e, }); self.$el.append(taskView.$el); }) this.$el.addClass('task-collection'); this.$el.addClass(this.kind); } }); // would have two TaskCollectionViews (for unassigned tasks and the current user's tasks) // would also the name of the current user, a logout button, and a Create Task button var HomePageView = Backbone.View.extend({ user: null, tasks: null, initialize: function(opts) { _.extend(this, opts); this.render(); $("#app").html(this.$el); }, render: function() { this.$el.append($("<h1>").html("Hello " + this.user.get("username"))); this.$el.append($("<button id='logout'>").html("Log Out")); this.$el.append($("<button id='add-task'>").html("Add Task")); var $taskViews = $("<div id='taskViews'>"); var taskCollectionView1 = new TaskCollectionView({ collection: app.gui.tasks, kind: "unassigned" }); var taskCollectionView2 = new TaskCollectionView({ collection: app.gui.tasks, kind: "user" }); $taskViews.append(taskCollectionView1.$el); $taskViews.append(taskCollectionView2.$el); this.$el.append($taskViews); }, events: { "click button#logout": "logout", "click button#add-task": "addTask" }, logout: function(e) { var loginView = new LoginView({ collection: app.gui.users }) this.remove(); }, addTask: function(e) { console.log("add task"); } }); // a list of known users to choose from var LoginView = Backbone.View.extend({ initialize: function() { this.render(); $("#app").append(this.$el); }, events: { "click button#login": "login" }, login: function(e) { e.preventDefault(); var id = $("select#usernames").val(); var selectedUser = this.collection.get(id); app.currentUser = selectedUser; var homePageView = new HomePageView({ user: selectedUser, tasks: app.gui.tasks }) this.remove(); }, render: function() { var users = this.collection.models; var output = "<h1>Welcome!</h1><form><select id='usernames'><option></option>" users.forEach(function(user) { output += "<option value='" + user.cid + "'>" + user.get("username") + "</option>" }) output += "</select><button type='submit' name='submit' id='login'>LOG IN</button></form>" this.$el.html(output); } }); // generic ctor to represent interface: function GUI(users, tasks, el) { this.users = users; // a UsersCollection this.tasks = tasks; // an IssuesCollection var loginView = new LoginView({ collection: this.users }) } return GUI; } ())
views.js
var GUI = (function() { //IIFE for all Views /** This view displays a single task, showing its title, description, status, creator, and assignee (if any). Each TaskView should include one or more controls (e.g. a select or set of buttons) to change its state. Each task view will be associated with exactly one task model, although a model may have more than one view instance. */ var TaskView = Backbone.View.extend({ initialize: function(opts) { _.extend(this, opts); this.render(); }, render: function() { var status = this.model.get('status'); var assignee = this.model.get('assignee'); assignee = assignee === "" ? "unassigned" : assignee; var $content = $("<div>"); $content.append($("<h1>").html(this.model.get('title'))); $content.append($("<h2>").html(this.model.get('description'))); $content.append($("<p class='creator'>").html("CREATED BY: " + this.model.get('creator'))); $content.append($("<p class='assignee'>").html("ASSIGNED TO: " + assignee)); if (status === "unassigned") { $content.append($("<button class='claim'>").html("CLAIM")); } else if (assignee === app.currentUser.get("username")) { $content.append($("<button class='quit'>").html("QUIT")); $content.append($("<button class='done'>").html("DONE")); } this.$el.html($content.html()); this.$el.addClass("task-view"); }, events: { "click button.quit": "quitTask", "click button.done": "completeTask", "click button.claim": "claimTask" }, quitTask: function(e) { console.log("quitTask"); console.log(e); console.log(this); }, completeTask: function(e) { console.log("completeTask"); }, claimTask: function(e) { console.log("claimTask"); } }); /** You'll need a view with input fields for the user to fill in when creating a new task. It should probably have both a create and cancel button. The location and format of the view is up to you. */ var CreateTaskView = Backbone.View.extend({ }); var TaskCollectionView = Backbone.View.extend({ relevantTasks: [], initialize: function(opts) { _.extend(this, opts); this.filterCollection(); this.render(); }, filterCollection: function() { if (this.kind === "unassigned") { this.relevantTasks = this.collection.where({ status: "unassigned" }); } else { var assigned = this.collection.where({ assignee: app.currentUser.get("username") }); var created = this.collection.where({ creator: app.currentUser.get("username") }); this.relevantTasks = _.union(assigned, created); } }, render: function() { var $taskCollectionView = $("<div>"); var title = this.kind === 'unassigned' ? "Unassigned Tasks" : app.currentUser.get("username") + "'s Tasks" $taskCollectionView.append($("<h1>").html(title)); $taskCollectionView.append($("<button>").html("TEST BUTTON")); // make a new TaskView for each this.relevantTasks this.relevantTasks.forEach(function(e) { var taskView = new TaskView({ model: e, }); $taskCollectionView.append(taskView.$el); // $taskViews.append(taskCollectionView2.$el); }) this.$el.html($taskCollectionView.html()); this.$el.addClass('task-collection'); this.$el.addClass(this.kind); }, events: { "click button": "clickedButton" }, clickedButton: function(e) { console.log("clicked the test button"); } }); // would have two TaskCollectionViews (for unassigned tasks and the current user's tasks) // would also show the name of the current user, a logout button, and a Create Task button var HomePageView = Backbone.View.extend({ user: null, tasks: null, initialize: function(opts) { _.extend(this, opts); this.render(); $("#app").html(this.$el); }, render: function() { // var $output = "<h1>Hello " + this.user.get("username") + "</h1>"; var $output = $("<div>") $output.append($("<h1>").html("Hello " + this.user.get("username"))); $output.append($("<button id='logout'>").html("Log Out")); $output.append($("<button id='add-task'>").html("Add Task")); var $taskViews = $("<div id='taskViews'>"); var taskCollectionView1 = new TaskCollectionView({ collection: app.gui.tasks, kind: "unassigned" }); var taskCollectionView2 = new TaskCollectionView({ collection: app.gui.tasks, kind: "user" }); $taskViews.append(taskCollectionView1.$el); $taskViews.append(taskCollectionView2.$el); $output.append($taskViews); this.$el.html($output.html()); }, events: { "click button#logout": "logout", "click button#add-task": "addTask" }, logout: function(e) { console.log("log out"); var loginView = new LoginView({ collection: app.gui.users, el: "#app" }) this.remove(); }, addTask: function(e) { console.log(e); } }); // a list of known users to choose from var LoginView = Backbone.View.extend({ initialize: function() { this.render(); $("#app").append(this.$el); }, events: { "click button#login": "login" }, login: function(e) { console.log("clicked log in"); e.preventDefault(); var id = $("select#usernames").val(); var selectedUser = this.collection.get(id); app.currentUser = selectedUser; var homePageView = new HomePageView({ user: selectedUser, tasks: app.gui.tasks }) // this.remove(); }, render: function() { var users = this.collection.models; var output = "<h1>Welcome!</h1><form><select id='usernames'><option></option>" users.forEach(function(user) { output += "<option value='" + user.cid + "'>" + user.get("username") + "</option>" }) output += "</select><button type='submit' name='submit' id='login'>LOG IN</button></form>" this.$el.html(output); } }); // generic ctor to represent interface: function GUI(users, tasks, el) { this.users = users; // a UsersCollection this.tasks = tasks; // an IssuesCollection var loginView = new LoginView({ collection: this.users }) } return GUI; } ())
subview events are now firing like they should
views.js
subview events are now firing like they should
<ide><path>views.js <ide> var status = this.model.get('status'); <ide> var assignee = this.model.get('assignee'); <ide> assignee = assignee === "" ? "unassigned" : assignee; <del> var $content = $("<div>"); <del> $content.append($("<h1>").html(this.model.get('title'))); <del> $content.append($("<h2>").html(this.model.get('description'))); <del> $content.append($("<p class='creator'>").html("CREATED BY: " + this.model.get('creator'))); <del> $content.append($("<p class='assignee'>").html("ASSIGNED TO: " + assignee)); <add> this.$el.append($("<h1>").html(this.model.get('title'))); <add> this.$el.append($("<h2>").html(this.model.get('description'))); <add> this.$el.append($("<p class='creator'>").html("CREATED BY: " + this.model.get('creator'))); <add> this.$el.append($("<p class='assignee'>").html("ASSIGNED TO: " + assignee)); <ide> if (status === "unassigned") { <del> $content.append($("<button class='claim'>").html("CLAIM")); <add> this.$el.append($("<button class='claim'>").html("CLAIM")); <ide> } else if (assignee === app.currentUser.get("username")) { <del> $content.append($("<button class='quit'>").html("QUIT")); <del> $content.append($("<button class='done'>").html("DONE")); <add> this.$el.append($("<button class='quit'>").html("QUIT")); <add> this.$el.append($("<button class='done'>").html("DONE")); <ide> } <del> this.$el.html($content.html()); <ide> this.$el.addClass("task-view"); <ide> }, <ide> events: { <ide> }, <ide> quitTask: function(e) { <ide> console.log("quitTask"); <del> console.log(e); <del> console.log(this); <ide> }, <ide> completeTask: function(e) { <ide> console.log("completeTask"); <ide> } <ide> }, <ide> render: function() { <del> var $taskCollectionView = $("<div>"); <ide> var title = this.kind === 'unassigned' ? "Unassigned Tasks" : app.currentUser.get("username") + "'s Tasks" <del> $taskCollectionView.append($("<h1>").html(title)); <del> $taskCollectionView.append($("<button>").html("TEST BUTTON")); <add> this.$el.append($("<h1>").html(title)); <ide> // make a new TaskView for each this.relevantTasks <add> var self = this; <ide> this.relevantTasks.forEach(function(e) { <ide> var taskView = new TaskView({ <ide> model: e, <ide> }); <del> $taskCollectionView.append(taskView.$el); <del> // $taskViews.append(taskCollectionView2.$el); <add> self.$el.append(taskView.$el); <ide> }) <del> this.$el.html($taskCollectionView.html()); <ide> this.$el.addClass('task-collection'); <ide> this.$el.addClass(this.kind); <del> }, <del> events: { <del> "click button": "clickedButton" <del> }, <del> clickedButton: function(e) { <del> console.log("clicked the test button"); <ide> } <ide> }); <ide> <ide> $("#app").html(this.$el); <ide> }, <ide> render: function() { <del> // var $output = "<h1>Hello " + this.user.get("username") + "</h1>"; <del> var $output = $("<div>") <del> $output.append($("<h1>").html("Hello " + this.user.get("username"))); <del> $output.append($("<button id='logout'>").html("Log Out")); <del> $output.append($("<button id='add-task'>").html("Add Task")); <add> this.$el.append($("<h1>").html("Hello " + this.user.get("username"))); <add> this.$el.append($("<button id='logout'>").html("Log Out")); <add> this.$el.append($("<button id='add-task'>").html("Add Task")); <ide> var $taskViews = $("<div id='taskViews'>"); <ide> var taskCollectionView1 = new TaskCollectionView({ <ide> collection: app.gui.tasks, <ide> }); <ide> $taskViews.append(taskCollectionView1.$el); <ide> $taskViews.append(taskCollectionView2.$el); <del> $output.append($taskViews); <del> this.$el.html($output.html()); <add> this.$el.append($taskViews); <ide> }, <ide> events: { <ide> "click button#logout": "logout", <ide> "click button#add-task": "addTask" <ide> }, <ide> logout: function(e) { <del> console.log("log out"); <ide> var loginView = new LoginView({ <del> collection: app.gui.users, <del> el: "#app" <add> collection: app.gui.users <ide> }) <ide> this.remove(); <ide> }, <ide> addTask: function(e) { <del> console.log(e); <add> console.log("add task"); <ide> } <ide> }); <ide> <ide> "click button#login": "login" <ide> }, <ide> login: function(e) { <del> console.log("clicked log in"); <ide> e.preventDefault(); <ide> var id = $("select#usernames").val(); <ide> var selectedUser = this.collection.get(id); <ide> user: selectedUser, <ide> tasks: app.gui.tasks <ide> }) <del> // this.remove(); <add> this.remove(); <ide> }, <ide> render: function() { <ide> var users = this.collection.models;
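Editor's note on the diff above: Backbone wires the handlers in a view's events hash to the view's own el through jQuery event delegation, so they only fire for nodes that physically live inside that el. The old render built children in a detached $content wrapper and then copied only the serialized markup via this.$el.html($content.html()); re-parsing that HTML discards the live nodes (including subview $el elements), so the delegated handlers never saw them. Appending the live elements straight to this.$el, as the fix does, keeps them in the delegated tree. The same live-object-versus-copied-markup distinction can be illustrated in Swing; the sketch below is a hypothetical analogy written for this note, not code from any record here:

// Hypothetical Swing analogy: a listener survives only on the live component.
import javax.swing.*;

public class LiveChildDemo {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JPanel panel = new JPanel();

            JButton claim = new JButton("CLAIM");
            claim.addActionListener(e -> System.out.println("claimTask"));
            panel.add(claim); // live component: clicking it prints "claimTask"

            // Analogue of $el.html($content.html()): rebuilding a lookalike
            // from the old button's text yields a component with no listener.
            JButton copy = new JButton(claim.getText());
            panel.add(copy); // clicking this one does nothing

            JFrame frame = new JFrame("live child demo");
            frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
            frame.setContentPane(panel);
            frame.pack();
            frame.setVisible(true);
        });
    }
}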
Java
unlicense
adae596a8066bbb45642c98e07bc50dc3f3acde8
0
tifsoft/maven_build_buddy
package com.tifsoft.mavenbuildbuddy.gui; import java.awt.Font; import java.awt.GraphicsEnvironment; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.JComboBox; import javax.swing.JPanel; import javax.swing.JTextPane; import javax.swing.text.MutableAttributeSet; import javax.swing.text.SimpleAttributeSet; import javax.swing.text.StyleConstants; import javax.swing.text.StyledDocument; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.tifsoft.mavenbuildbuddy.MavenBuildBuddy; public class PreferencesPanel extends JPanel { private static final long serialVersionUID = 1L; static final Logger LOG = LoggerFactory.getLogger(PreferencesPanel.class); public static final String[] fontSizeStrings = { "10", "12", "13", "14", "16", "17", "18", "19", "20", "21", "22", "23", "24" }; public static final JComboBox fontSizeList = new JComboBox(fontSizeStrings); public static final String[] fontNameStrings = GraphicsEnvironment.getLocalGraphicsEnvironment().getAvailableFontFamilyNames(); //public static final String[] fontNameStrings = {Font.MONOSPACED, Font.SERIF, Font.SANS_SERIF}; public static final JComboBox fontNameOption = new JComboBox(fontNameStrings); public static final String[] fontWeightStrings = { "PLAIN", "BOLD", "ITALIC" }; public static final JComboBox fontWeightOption = new JComboBox(fontWeightStrings); public PreferencesPanel() { //fontSizeList.setSelectedIndex(4); this.add(fontNameOption); this.add(fontSizeList); this.add(fontWeightOption); ActionListener al = new ActionListener() { @Override public void actionPerformed(ActionEvent e) { updateFont(); } }; fontNameOption.setSelectedItem("Monospaced"); fontSizeList.setSelectedItem("16"); fontNameOption.addActionListener(al); fontSizeList.addActionListener(al); fontWeightOption.addActionListener(al); //updateFont(); } public void updateFont() { String name = fontNameOption.getSelectedItem().toString(); int size = Integer.parseInt(fontSizeList.getSelectedItem().toString()); String fws = fontWeightOption.getSelectedItem().toString(); int fw = fws.equals("BOLD") ? Font.BOLD : fws.equals("ITALIC") ? Font.ITALIC : Font.PLAIN; Font font = new Font(name, fw, size); setJTextPaneFont(MavenBuildBuddy.gui.textPane, font); } /** * Utility method for setting the font and color of a JTextPane. The * result is roughly equivalent to calling setFont(...) and * setForeground(...) on an AWT TextArea. */ public static void setJTextPaneFont(JTextPane jtp, Font font) { // Start with the current input attributes for the JTextPane. This // should ensure that we do not wipe out any existing attributes // (such as alignment or other paragraph attributes) currently // set on the text area. MutableAttributeSet attrs = new SimpleAttributeSet(); // jtp.getInputAttributes(); // Set the font family, size, and style, based on properties of // the Font object. Note that JTextPane supports a number of // character attributes beyond those supported by the Font class. // For example, underline, strike-through, super- and sub-script. setAttrs(font, attrs); // Set the font color //StyleConstants.setForeground(attrs, c); // Retrieve the pane's document object StyledDocument doc = jtp.getStyledDocument(); // Replace the style for the entire document. We exceed the length // of the document by 1 so that text entered at the end of the // document uses the attributes. doc.setCharacterAttributes(0, Integer.MAX_VALUE, attrs, false); } public static void setAttrs(Font font, MutableAttributeSet attrs) { StyleConstants.setFontFamily(attrs, font.getFamily()); StyleConstants.setFontSize(attrs, font.getSize()); StyleConstants.setItalic(attrs, (font.getStyle() & Font.ITALIC) != 0); StyleConstants.setBold(attrs, (font.getStyle() & Font.BOLD) != 0); } }
src/main/java/com/tifsoft/mavenbuildbuddy/gui/PreferencesPanel.java
package com.tifsoft.mavenbuildbuddy.gui; import java.awt.Font; import java.awt.GraphicsEnvironment; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.JComboBox; import javax.swing.JPanel; import javax.swing.JTextPane; import javax.swing.text.MutableAttributeSet; import javax.swing.text.StyleConstants; import javax.swing.text.StyledDocument; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.tifsoft.mavenbuildbuddy.MavenBuildBuddy; public class PreferencesPanel extends JPanel { private static final long serialVersionUID = 1L; static final Logger LOG = LoggerFactory.getLogger(PreferencesPanel.class); public static final String[] fontSizeStrings = { "10", "12", "13", "14", "16", "17", "18", "19", "20", "21", "22", "23", "24" }; public static final JComboBox fontSizeList = new JComboBox(fontSizeStrings); public static final String[] fontNameStrings = GraphicsEnvironment.getLocalGraphicsEnvironment().getAvailableFontFamilyNames(); //public static final String[] fontNameStrings = {Font.MONOSPACED, Font.SERIF, Font.SANS_SERIF}; public static final JComboBox fontNameOption = new JComboBox(fontNameStrings); public static final String[] fontWeightStrings = { "PLAIN", "BOLD", "ITALIC" }; public static final JComboBox fontWeightOption = new JComboBox(fontWeightStrings); public PreferencesPanel() { //fontSizeList.setSelectedIndex(4); this.add(fontNameOption); this.add(fontSizeList); this.add(fontWeightOption); ActionListener al = new ActionListener() { @Override public void actionPerformed(ActionEvent e) { updateFont(); } }; fontNameOption.setSelectedItem("Monospaced"); fontSizeList.setSelectedItem("16"); fontNameOption.addActionListener(al); fontSizeList.addActionListener(al); fontWeightOption.addActionListener(al); //updateFont(); } public void updateFont() { String name = fontNameOption.getSelectedItem().toString(); int size = Integer.parseInt(fontSizeList.getSelectedItem().toString()); String fws = fontWeightOption.getSelectedItem().toString(); int fw = fws.equals("BOLD") ? Font.BOLD : fws.equals("ITALIC") ? Font.ITALIC : Font.PLAIN; Font font = new Font(name, fw, size); setJTextPaneFont(MavenBuildBuddy.gui.textPane, font); } /** * Utility method for setting the font and color of a JTextPane. The * result is roughly equivalent to calling setFont(...) and * setForeground(...) on an AWT TextArea. */ public static void setJTextPaneFont(JTextPane jtp, Font font) { // Start with the current input attributes for the JTextPane. This // should ensure that we do not wipe out any existing attributes // (such as alignment or other paragraph attributes) currently // set on the text area. MutableAttributeSet attrs = jtp.getInputAttributes(); // Set the font family, size, and style, based on properties of // the Font object. Note that JTextPane supports a number of // character attributes beyond those supported by the Font class. // For example, underline, strike-through, super- and sub-script. setAttrs(font, attrs); // Set the font color //StyleConstants.setForeground(attrs, c); // Retrieve the pane's document object StyledDocument doc = jtp.getStyledDocument(); // Replace the style for the entire document. We exceed the length // of the document by 1 so that text entered at the end of the // document uses the attributes. doc.setCharacterAttributes(0, Integer.MAX_VALUE, attrs, false); } public static void setAttrs(Font font, MutableAttributeSet attrs) { StyleConstants.setFontFamily(attrs, font.getFamily()); StyleConstants.setFontSize(attrs, font.getSize()); StyleConstants.setItalic(attrs, (font.getStyle() & Font.ITALIC) != 0); StyleConstants.setBold(attrs, (font.getStyle() & Font.BOLD) != 0); } }
Font update bug fix
src/main/java/com/tifsoft/mavenbuildbuddy/gui/PreferencesPanel.java
Font update bug fix
<ide><path>src/main/java/com/tifsoft/mavenbuildbuddy/gui/PreferencesPanel.java <ide> import javax.swing.JPanel; <ide> import javax.swing.JTextPane; <ide> import javax.swing.text.MutableAttributeSet; <add>import javax.swing.text.SimpleAttributeSet; <ide> import javax.swing.text.StyleConstants; <ide> import javax.swing.text.StyledDocument; <ide> <ide> // should ensure that we do not wipe out any existing attributes <ide> // (such as alignment or other paragraph attributes) currently <ide> // set on the text area. <del> MutableAttributeSet attrs = jtp.getInputAttributes(); <add> MutableAttributeSet attrs = new SimpleAttributeSet(); // jtp.getInputAttributes(); <ide> <ide> // Set the font family, size, and style, based on properties of <ide> // the Font object. Note that JTextPane supports a number of
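Editor's note on the fix above: JTextPane.getInputAttributes() returns the live attribute set the pane applies to newly typed text, so mutating it and handing it to setCharacterAttributes can drag stale state from earlier edits into the document-wide restyle, and alters the pane's typing attributes as a side effect. A fresh SimpleAttributeSet, as the diff introduces, carries exactly the attributes the code sets and nothing else. A minimal standalone sketch of that pattern follows; the class and method names here are illustrative, not from the record:

import java.awt.Font;
import javax.swing.JTextPane;
import javax.swing.text.MutableAttributeSet;
import javax.swing.text.SimpleAttributeSet;
import javax.swing.text.StyleConstants;

public class FontApplier {
    /** Applies the given font to the entire document of the given pane. */
    public static void applyFont(JTextPane pane, Font font) {
        // A fresh set carries only what is assigned below, so no stale
        // state from earlier edits leaks into the restyle.
        MutableAttributeSet attrs = new SimpleAttributeSet();
        StyleConstants.setFontFamily(attrs, font.getFamily());
        StyleConstants.setFontSize(attrs, font.getSize());
        StyleConstants.setBold(attrs, (font.getStyle() & Font.BOLD) != 0);
        StyleConstants.setItalic(attrs, (font.getStyle() & Font.ITALIC) != 0);
        // A length past the end of the document is fine; 'false' merges
        // these attributes instead of wiping all others.
        pane.getStyledDocument().setCharacterAttributes(0, Integer.MAX_VALUE, attrs, false);
    }
}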
Java
apache-2.0
db00a2781208958253e832fedef6474bb8c355ef
0
MattGong/robotium,acanta2014/robotium,RobotiumTech/robotium,lczgywzyy/robotium,RobotiumTech/robotium,XRacoon/robotiumEx,darker50/robotium,darker50/robotium,shibenli/robotium,pefilekill/robotiumCode,XRacoon/robotiumEx,luohaoyu/robotium,MattGong/robotium,luohaoyu/robotium,Eva1123/robotium,lczgywzyy/robotium,lgs3137/robotium,Eva1123/robotium,hypest/robotium,NetEase/robotium,acanta2014/robotium,pefilekill/robotiumCode,zhic5352/robotium,zhic5352/robotium,SinnerSchraderMobileMirrors/robotium,NetEase/robotium,lgs3137/robotium,hypest/robotium,shibenli/robotium,activars/remote-robotium
package com.jayway.android.robotium.core.impl; import junit.framework.Assert; import android.app.Activity; import android.app.ActivityManager; /** * This class contains assertCurrentActivity() methods. * * @author Renas Reda, [email protected] * */ public class Asserter { private final ActivityUtils activityUtils; private final Sleeper sleeper; /** * Constructs this object. * * @param activityUtils the {@code ActivityUtils} instance. * @param sleeper the {@code Sleeper} instance. * */ public Asserter(ActivityUtils activityUtils, Sleeper sleeper) { this.activityUtils = activityUtils; this.sleeper = sleeper; } /** * Asserts that an expected {@link Activity} is the currently active one. * * @param message the message that should be displayed if the assert fails * @param name the name of the {@code Activity} that is expected to be active e.g. {@code "MyActivity"} * */ public void assertCurrentActivity(String message, String name) { sleeper.sleep(); Assert.assertEquals(message, name, activityUtils.getCurrentActivity() .getClass().getSimpleName()); } /** * Asserts that an expected {@link Activity} is the currently active one. * * @param message the message that should be displayed if the assert fails * @param expectedClass the {@code Class} object that is expected to be active e.g. {@code MyActivity.class} * */ public void assertCurrentActivity(String message, Class<? extends Activity> expectedClass) { sleeper.sleep(); Assert.assertEquals(message, expectedClass.getName(), activityUtils .getCurrentActivity().getClass().getName()); } /** * Asserts that an expected {@link Activity} is the currently active one, with the possibility to * verify that the expected {@code Activity} is a new instance of the {@code Activity}. * * @param message the message that should be displayed if the assert fails * @param name the name of the {@code Activity} that is expected to be active e.g. {@code "MyActivity"} * @param isNewInstance {@code true} if the expected {@code Activity} is a new instance of the {@code Activity} * */ public void assertCurrentActivity(String message, String name, boolean isNewInstance) { assertCurrentActivity(message, name); assertCurrentActivity(message, activityUtils.getCurrentActivity().getClass(), isNewInstance); } /** * Asserts that an expected {@link Activity} is the currently active one, with the possibility to * verify that the expected {@code Activity} is a new instance of the {@code Activity}. * * @param message the message that should be displayed if the assert fails * @param expectedClass the {@code Class} object that is expected to be active e.g. {@code MyActivity.class} * @param isNewInstance {@code true} if the expected {@code Activity} is a new instance of the {@code Activity} * */ public void assertCurrentActivity(String message, Class<? extends Activity> expectedClass, boolean isNewInstance) { boolean found = false; assertCurrentActivity(message, expectedClass); Activity activity = activityUtils.getCurrentActivity(); for (int i = 0; i < activityUtils.getAllOpenedActivities().size() - 1; i++) { String instanceString = activityUtils.getAllOpenedActivities().get(i).toString(); if (instanceString.equals(activity.toString())) found = true; } Assert.assertNotSame(message + ", isNewInstance: actual and ", isNewInstance, found); } /** * Asserts that the available memory in the system is not low. * */ public void assertMemoryNotLow() { ActivityManager.MemoryInfo mi = new ActivityManager.MemoryInfo(); ((ActivityManager)activityUtils.getCurrentActivity().getSystemService("activity")).getMemoryInfo(mi); Assert.assertFalse("Low memory available: " + mi.availMem + " bytes", mi.lowMemory); } }
robotium-core/src/main/java/com/jayway/android/robotium/core/impl/Asserter.java
package com.jayway.android.robotium.core.impl; import junit.framework.Assert; import android.app.Activity; import android.app.ActivityManager; /** * This class contains assertCurrentActivity() methods. * * @author Renas Reda, [email protected] * */ public class Asserter { private final ActivityUtils activityUtils; private final Sleeper sleeper; /** * Constructs this object. * * @param activityUtils the {@code ActivityUtils} instance. * @param sleeper the {@code Sleeper} instance. * */ public Asserter(ActivityUtils activityUtils, Sleeper sleeper) { this.activityUtils = activityUtils; this.sleeper = sleeper; } /** * Asserts that an expected {@link Activity} is the currently active one. * * @param message the message that should be displayed if the assert fails * @param name the name of the {@code Activity} that is expected to be active e.g. {@code "MyActivity"} * */ public void assertCurrentActivity(String message, String name) { sleeper.sleep(); Assert.assertEquals(message, name, activityUtils.getCurrentActivity() .getClass().getSimpleName()); } /** * Asserts that an expected {@link Activity} is the currently active one. * * @param message the message that should be displayed if the assert fails * @param expectedClass the {@code Class} object that is expected to be active e.g. {@code MyActivity.class} * */ public void assertCurrentActivity(String message, Class<? extends Activity> expectedClass) { sleeper.sleep(); Assert.assertEquals(message, expectedClass.getName(), activityUtils .getCurrentActivity().getClass().getName()); } /** * Asserts that an expected {@link Activity} is the currently active one, with the possibility to * verify that the expected {@code Activity} is a new instance of the {@code Activity}. * * @param message the message that should be displayed if the assert fails * @param name the name of the {@code Activity} that is expected to be active e.g. {@code "MyActivity"} * @param isNewInstance {@code true} if the expected {@code Activity} is a new instance of the {@code Activity} * */ public void assertCurrentActivity(String message, String name, boolean isNewInstance) { assertCurrentActivity(message, name); assertCurrentActivity(message, activityUtils.getCurrentActivity().getClass(), isNewInstance); } /** * Asserts that an expected {@link Activity} is the currently active one, with the possibility to * verify that the expected {@code Activity} is a new instance of the {@code Activity}. * * @param message the message that should be displayed if the assert fails * @param expectedClass the {@code Class} object that is expected to be active e.g. {@code MyActivity.class} * @param isNewInstance {@code true} if the expected {@code Activity} is a new instance of the {@code Activity} * */ public void assertCurrentActivity(String message, Class<? extends Activity> expectedClass, boolean isNewInstance) { boolean found = false; assertCurrentActivity(message, expectedClass); Activity activity = activityUtils.getCurrentActivity(); for (int i = 0; i < activityUtils.getAllOpenedActivities().size() - 1; i++) { String instanceString = activityUtils.getAllOpenedActivities().get(i).toString(); if (instanceString.equals(activity.toString())) found = true; } Assert.assertNotSame(message + ", isNewInstance: actual and ", isNewInstance, found); } /** * Asserts that the available memory in the system is not low. * */ public void assertNotLowMemory() { ActivityManager.MemoryInfo mi = new ActivityManager.MemoryInfo(); ((ActivityManager)activityUtils.getCurrentActivity().getSystemService("activity")).getMemoryInfo(mi); Assert.assertFalse("Low memory available: " + mi.availMem + " bytes", mi.lowMemory); } }
Changed method name
robotium-core/src/main/java/com/jayway/android/robotium/core/impl/Asserter.java
Changed method name
<ide><path>robotium-core/src/main/java/com/jayway/android/robotium/core/impl/Asserter.java <ide> * <ide> */ <ide> <del> public void assertNotLowMemory() <add> public void assertMemoryNotLow() <ide> { <ide> ActivityManager.MemoryInfo mi = new ActivityManager.MemoryInfo(); <ide> ((ActivityManager)activityUtils.getCurrentActivity().getSystemService("activity")).getMemoryInfo(mi);
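Editor's note on the renamed assertion above: assertMemoryNotLow() is a thin JUnit wrapper around ActivityManager.getMemoryInfo(), whose MemoryInfo struct exposes a lowMemory flag that the system sets when availMem drops below its threshold. A standalone sketch of the underlying query, outside any test, follows; the Context parameter and class name are illustrative, not from the record:

import android.app.ActivityManager;
import android.content.Context;

public final class MemoryCheck {
    private MemoryCheck() {}

    /** Returns true if the system currently reports a low-memory condition. */
    public static boolean isMemoryLow(Context context) {
        // Context.ACTIVITY_SERVICE is the constant behind the "activity"
        // string literal used in the record.
        ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        ActivityManager.MemoryInfo info = new ActivityManager.MemoryInfo();
        am.getMemoryInfo(info); // fills availMem, threshold and lowMemory
        return info.lowMemory;
    }
}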
Java
agpl-3.0
30ff2fb147fdc0ceda7089c4c93a732678365396
0
bhutchinson/kfs,kkronenb/kfs,ua-eas/kfs,ua-eas/kfs,ua-eas/kfs,UniversityOfHawaii/kfs,quikkian-ua-devops/will-financials,kuali/kfs,bhutchinson/kfs,ua-eas/kfs-devops-automation-fork,smith750/kfs,quikkian-ua-devops/kfs,kuali/kfs,quikkian-ua-devops/will-financials,smith750/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials,ua-eas/kfs,bhutchinson/kfs,UniversityOfHawaii/kfs,kkronenb/kfs,kuali/kfs,UniversityOfHawaii/kfs,kuali/kfs,bhutchinson/kfs,kkronenb/kfs,UniversityOfHawaii/kfs,quikkian-ua-devops/kfs,ua-eas/kfs-devops-automation-fork,ua-eas/kfs-devops-automation-fork,UniversityOfHawaii/kfs,smith750/kfs,quikkian-ua-devops/will-financials,kkronenb/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials,quikkian-ua-devops/will-financials,quikkian-ua-devops/kfs,quikkian-ua-devops/kfs,kuali/kfs,ua-eas/kfs-devops-automation-fork,ua-eas/kfs-devops-automation-fork,ua-eas/kfs,smith750/kfs
/* * Copyright 2007 The Kuali Foundation. * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.sys.web.struts; import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang.StringUtils; import org.apache.struts.upload.FormFile; import org.kuali.kfs.coa.businessobject.Account; import org.kuali.kfs.coa.businessobject.ObjectCode; import org.kuali.kfs.coa.businessobject.SubAccount; import org.kuali.kfs.coa.businessobject.SubObjectCode; import org.kuali.kfs.sys.KFSConstants; import org.kuali.kfs.sys.KFSPropertyConstants; import org.kuali.kfs.sys.businessobject.AccountingLine; import org.kuali.kfs.sys.businessobject.AccountingLineBase; import org.kuali.kfs.sys.businessobject.AccountingLineDecorator; import org.kuali.kfs.sys.businessobject.AccountingLineOverride; import org.kuali.kfs.sys.businessobject.SourceAccountingLine; import org.kuali.kfs.sys.businessobject.TargetAccountingLine; import org.kuali.kfs.sys.context.SpringContext; import org.kuali.kfs.sys.document.AccountingDocument; import org.kuali.kfs.sys.document.authorization.AccountingDocumentAuthorizer; import org.kuali.kfs.sys.document.web.struts.FinancialSystemTransactionalDocumentFormBase; import org.kuali.kfs.sys.service.ParameterService; import org.kuali.kfs.sys.service.impl.ParameterConstants; import org.kuali.rice.kns.exception.InfrastructureException; import org.kuali.rice.kns.service.BusinessObjectDictionaryService; import org.kuali.rice.kns.service.DocumentHelperService; import org.kuali.rice.kns.service.KualiConfigurationService; import org.kuali.rice.kns.util.GlobalVariables; import org.kuali.rice.kns.util.ObjectUtils; import org.kuali.rice.kns.web.format.CurrencyFormatter; import org.kuali.rice.kns.web.format.SimpleBooleanFormatter; /** * This class is the base action form for all financial documents. */ public class KualiAccountingDocumentFormBase extends FinancialSystemTransactionalDocumentFormBase { private SourceAccountingLine newSourceLine; private TargetAccountingLine newTargetLine; private Map editableAccounts; private Map forcedLookupOptionalFields; // TODO: FormFile isn't Serializable, so mark these fields as transient or create a Serializable subclass of FormFile protected FormFile sourceFile; protected FormFile targetFile; private boolean hideDetails = false; private List<AccountingLineDecorator> sourceLineDecorators; private List<AccountingLineDecorator> targetLineDecorators; private List baselineSourceAccountingLines; private List baselineTargetAccountingLines; /** * This constructor sets up empty instances for the dependent objects... */ public KualiAccountingDocumentFormBase() { super(); setFormatterType("sourceLineDecorator.revertible", SimpleBooleanFormatter.class); setFormatterType("targetLineDecorator.revertible", SimpleBooleanFormatter.class); // create an empty editableAccounts map, for safety's sake editableAccounts = new HashMap(); forcedReadOnlyFields = new HashMap(); forcedLookupOptionalFields = new HashMap(); // initialize accountingLine lists baselineSourceAccountingLines = new ArrayList(); baselineTargetAccountingLines = new ArrayList(); // initialize accountingLine decoration lists sourceLineDecorators = new ArrayList<AccountingLineDecorator>(); targetLineDecorators = new ArrayList<AccountingLineDecorator>(); } /** * Overrides the parent to call super.populate and then to call the accounting lines populate method that is specific to loading * the two select lists on the page. * * @see org.kuali.rice.kns.web.struts.pojo.PojoForm#populate(javax.servlet.http.HttpServletRequest) */ @Override public void populate(HttpServletRequest request) { super.populate(request); String methodToCall = this.getMethodToCall(); if (!StringUtils.equals(methodToCall, KFSConstants.RETURN_METHOD_TO_CALL)) { resetPropertyFromHtmlCheckBox(request); } populateSourceAccountingLine(getNewSourceLine()); populateTargetAccountingLine(getNewTargetLine()); // don't call populateAccountingLines if you are copying or errorCorrecting a document, // since you want the accountingLines in the copy to be "identical" to those in the original if (!StringUtils.equals(methodToCall, KFSConstants.COPY_METHOD) && !StringUtils.equals(methodToCall, KFSConstants.ERRORCORRECT_METHOD)) { populateAccountingLines(); } setDocTypeName(discoverDocumentTypeName()); } // reset the properties rendered as Struts html checkbox when the box is unchecked protected void resetPropertyFromHtmlCheckBox(HttpServletRequest request) { this.resetAccountExpiredOverride(request, "newSourceLine.accountExpiredOverride", getNewSourceLine()); this.resetAccountExpiredOverride(request, "newTargetLine.accountExpiredOverride", getNewTargetLine()); int index = 0; String propertyNamePattern = "document.{0}[{1}].accountExpiredOverride"; for(Object accountingLine : getFinancialDocument().getSourceAccountingLines()) { SourceAccountingLine sourceAccountingLine = (SourceAccountingLine)accountingLine; String propertyName = MessageFormat.format(propertyNamePattern, KFSPropertyConstants.SOURCE_ACCOUNTING_LINES, index++); this.resetAccountExpiredOverride(request, propertyName, sourceAccountingLine); } index = 0; for(Object accountingLine : getFinancialDocument().getTargetAccountingLines()) { TargetAccountingLine targetAccountingLine = (TargetAccountingLine)accountingLine; String propertyName = MessageFormat.format(propertyNamePattern, KFSPropertyConstants.TARGET_ACCOUNTING_LINES, index++); this.resetAccountExpiredOverride(request, propertyName, targetAccountingLine); } } // reset accountExpiredOverride of the given accountingLine if its corresponding request parameter is not present private void resetAccountExpiredOverride(HttpServletRequest request, String accountingLinePropertyName, AccountingLineBase accountingLine) { if (ObjectUtils.isNull(request.getParameterMap().get(accountingLinePropertyName))) { accountingLine.setAccountExpiredOverride(false); } } /** * This method iterates over all of the source lines and all of the target lines in a transactional document, and calls * the corresponding populate method on each one. This is called because a user could have updated already * existing accounting lines that had blank values in composite key fields. */ protected void populateAccountingLines() { Iterator sourceLines = getFinancialDocument().getSourceAccountingLines().iterator(); while (sourceLines.hasNext()) { SourceAccountingLine sourceLine = (SourceAccountingLine) sourceLines.next(); populateSourceAccountingLine(sourceLine); } Iterator targetLines = getFinancialDocument().getTargetAccountingLines().iterator(); while (targetLines.hasNext()) { TargetAccountingLine targetLine = (TargetAccountingLine) targetLines.next(); populateTargetAccountingLine(targetLine); } } /** * Populates a source accounting line bo using values from the struts form. This is in place to make sure that all of the * composite key objects have the correct values in them. This should be overridden by children forms in the situation where * document level attributes need to be pushed down into the accounting lines. * * @param sourceLine */ public void populateSourceAccountingLine(SourceAccountingLine sourceLine) { populateAccountingLine(sourceLine); } /** * Populates a target accounting line bo using values from the struts form. This is in place to make sure that all of the * composite key objects have the correct values in them. This should be overridden by children forms in the situation where * document level attributes need to be pushed down into the accounting lines. * * @param targetLine */ public void populateTargetAccountingLine(TargetAccountingLine targetLine) { populateAccountingLine(targetLine); } /** * Populates the dependent fields of objects contained within the given accountingLine * * @param line */ @SuppressWarnings("deprecation") private void populateAccountingLine(AccountingLineBase line) { SpringContext.getBean(BusinessObjectDictionaryService.class).performForceUppercase(line); line.setDocumentNumber(getDocument().getDocumentNumber()); if (ObjectUtils.isNull(line.getAccount())) { line.setAccount(new Account()); } line.getAccount().setChartOfAccountsCode(line.getChartOfAccountsCode()); if (ObjectUtils.isNull(line.getObjectCode())) { line.setObjectCode(new ObjectCode()); } line.getObjectCode().setUniversityFiscalYear(getFinancialDocument().getPostingYear()); line.getObjectCode().setChartOfAccountsCode(line.getChartOfAccountsCode()); if (ObjectUtils.isNull(line.getSubAccount())) { line.setSubAccount(new SubAccount()); } line.getSubAccount().setChartOfAccountsCode(line.getChartOfAccountsCode()); line.getSubAccount().setAccountNumber(line.getAccountNumber()); if (ObjectUtils.isNull(line.getSubObjectCode())) { line.setSubObjectCode(new SubObjectCode()); } line.getSubObjectCode().setChartOfAccountsCode(line.getChartOfAccountsCode()); line.getSubObjectCode().setAccountNumber(line.getAccountNumber()); line.getSubObjectCode().setFinancialObjectCode(line.getFinancialObjectCode()); line.getSubObjectCode().setUniversityFiscalYear(getFinancialDocument().getPostingYear()); AccountingLineOverride.populateFromInput(line); } /** * This method retrieves the document associated with this form, cast as an AccountingDocument. * * @return */ public AccountingDocument getFinancialDocument() { return (AccountingDocument) getDocument(); } /** * @return Returns the newTargetLine. */ public TargetAccountingLine getNewTargetLine() { if (newTargetLine == null) { newTargetLine = createNewTargetAccountingLine(getFinancialDocument()); } return newTargetLine; } /** * @param newExpenseLine The newTargetLine to set. */ public void setNewTargetLine(TargetAccountingLine newExpenseLine) { this.newTargetLine = newExpenseLine; } /** * @return Returns the newSourceLine. */ public SourceAccountingLine getNewSourceLine() { if (newSourceLine == null) { newSourceLine = createNewSourceAccountingLine(getFinancialDocument()); } return newSourceLine; } /** * @param newIncomeLine The newSourceLine to set. */ public void setNewSourceLine(SourceAccountingLine newIncomeLine) { this.newSourceLine = newIncomeLine; } /** * @return Returns the sourceFile. */ public FormFile getSourceFile() { return sourceFile; } /** * @param sourceFile The sourceFile to set. */ public void setSourceFile(FormFile sourceFile) { this.sourceFile = sourceFile; } /** * @return Returns the targetFile. */ public FormFile getTargetFile() { return targetFile; } /** * @param targetFile The targetFile to set. */ public void setTargetFile(FormFile targetFile) { this.targetFile = targetFile; } /** * @return current Map of editableAccounts */ public Map getEditableAccounts() { return editableAccounts; } /** * @param editableAccounts the account Map to set */ public void setEditableAccounts(Map editableAccounts) { this.editableAccounts = editableAccounts; } /** * @return hideDetails attribute */ public boolean isHideDetails() { return hideDetails; } /** * @return hideDetails attribute * @see #isHideDetails() */ public boolean getHideDetails() { return isHideDetails(); } /** * @param hideDetails */ public void setHideDetails(boolean hideDetails) { this.hideDetails = hideDetails; } /** * @return current List of baseline SourceAccountingLines for use in update-event generation */ public List getBaselineSourceAccountingLines() { return baselineSourceAccountingLines; } /** * Sets the current List of baseline SourceAccountingLines to the given List * * @param baselineSourceAccountingLines */ public void setBaselineSourceAccountingLines(List baselineSourceAccountingLines) { this.baselineSourceAccountingLines = baselineSourceAccountingLines; } /** * @param index * @return true if a baselineSourceAccountingLine with the given index exists */ public boolean hasBaselineSourceAccountingLine(int index) { boolean has = false; if ((index >= 0) && (index <= baselineSourceAccountingLines.size())) { has = true; } return has; } /** * Implementation creates empty SourceAccountingLines as a side-effect, so that Struts' efforts to set fields of lines which * haven't been created will succeed rather than causing a NullPointerException. * * @param index * @return baseline SourceAccountingLine at the given index */ public SourceAccountingLine getBaselineSourceAccountingLine(int index) { try { while (baselineSourceAccountingLines.size() <= index) { baselineSourceAccountingLines.add(getFinancialDocument().getSourceAccountingLineClass().newInstance()); } } catch (InstantiationException e) { throw new RuntimeException("Unable to get new source line instance for document" + e.getMessage()); } catch (IllegalAccessException e) { throw new RuntimeException("Unable to get new source line instance for document" + e.getMessage()); } return (SourceAccountingLine) baselineSourceAccountingLines.get(index); } /** * @return current List of baseline TargetAccountingLines for use in update-event generation */ public List getBaselineTargetAccountingLines() { return baselineTargetAccountingLines; } /** * Sets the current List of baseline TargetAccountingLines to the given List * * @param baselineTargetAccountingLines */ public void setBaselineTargetAccountingLines(List baselineTargetAccountingLines) { this.baselineTargetAccountingLines = baselineTargetAccountingLines; } /** * @param index * @return true if a baselineTargetAccountingLine with the given index exists */ public boolean hasBaselineTargetAccountingLine(int index) { boolean has = false; if ((index >= 0) && (index <= baselineTargetAccountingLines.size())) { has = true; } return has; } /** * Implementation creates empty TargetAccountingLines as a side-effect, so that Struts' efforts to set fields of lines which * haven't been created will succeed rather than causing a NullPointerException. * * @param index * @return baseline TargetAccountingLine at the given index */ public TargetAccountingLine getBaselineTargetAccountingLine(int index) { try { while (baselineTargetAccountingLines.size() <= index) { baselineTargetAccountingLines.add(getFinancialDocument().getTargetAccountingLineClass().newInstance()); } } catch (InstantiationException e) { throw new RuntimeException("Unable to get new target line instance for document" + e.getMessage()); } catch (IllegalAccessException e) { throw new RuntimeException("Unable to get new target line instance for document" + e.getMessage()); } return (TargetAccountingLine) baselineTargetAccountingLines.get(index); } /** * @return current List of SourceAccountingLine decorations */ public List<AccountingLineDecorator> getSourceLineDecorators() { return sourceLineDecorators; } /** * @param minSize * @return current List of SourceAccountingLine decorations, expanded to have at least minSize elements */ public List<AccountingLineDecorator> getSourceLineDecorators(int minSize) { extendSourceLineDecorators(minSize); return sourceLineDecorators; } /** * Adds default AccountingLineDecorators to sourceAccountingLineDecorators until it contains at least minSize elements * * @param minSize */ private void extendSourceLineDecorators(int minSize) { while (sourceLineDecorators.size() < minSize) { sourceLineDecorators.add(new AccountingLineDecorator()); } } /** * Sets the current List of SourceAccountingLine decorators * * @param sourceLineDecorators */ public void setSourceLineDecorators(List<AccountingLineDecorator> sourceLineDecorators) { this.sourceLineDecorators = sourceLineDecorators; } /** * Implementation creates empty AccountingLineDecorators as a side-effect, so that Struts' efforts to set fields of lines which * haven't been created will succeed rather than causing a NullPointerException. * * @param index * @return AccountingLineDecorators for sourceLine at the given index */ public AccountingLineDecorator getSourceLineDecorator(int index) { extendSourceLineDecorators(index + 1); return sourceLineDecorators.get(index); } /** * @return current List of TargetAccountingLine decorators */ public List<AccountingLineDecorator> getTargetLineDecorators() { return targetLineDecorators; } /** * @param minSize * @return current List of TargetAccountingLine decorators, expanded to have at least minSize elements */ public List<AccountingLineDecorator> getTargetLineDecorators(int minSize) { extendTargetLineDecorators(minSize); return targetLineDecorators; } /** * Adds default AccountingLineDecorators to targetAccountingLineDecorators until it contains at least minSize elements * * @param minSize */ private void extendTargetLineDecorators(int minSize) { while (targetLineDecorators.size() < minSize) { targetLineDecorators.add(new AccountingLineDecorator()); } } /** * Sets the current List of TargetAccountingLine decorators * * @param targetLineDecorators */ public void setTargetLineDecorators(List<AccountingLineDecorator> targetLineDecorators) { this.targetLineDecorators = targetLineDecorators; } /** * Implementation creates empty AccountingLineDecorators as a side-effect, so that Struts' efforts to set fields of lines which * haven't been created will succeed rather than causing a NullPointerException. * * @param index * @return AccountingLineDecorator for targetLine at the given index */ public AccountingLineDecorator getTargetLineDecorator(int index) { extendTargetLineDecorators(index + 1); return targetLineDecorators.get(index); } /** * Resets the source accounting line decorators to new and ensures that there are the given number. These decorators take very * little memory, there are few of them on the page, and they are rarely reset, so this method does it the simple way. * * @param size */ public void resetSourceLineDecorators(int size) { sourceLineDecorators.clear(); extendSourceLineDecorators(size); } /** * Resets the target accounting line decorators to new and ensures that there are the given number. These decorators take very * little memory, there are few of them on the page, and they are rarely reset, so this method does it the simple way. * * @param size */ public void resetTargetLineDecorators(int size) { targetLineDecorators.clear(); extendTargetLineDecorators(size); } /** * Retrieves the source accounting lines total in a currency format with commas. * * @return String */ public String getCurrencyFormattedSourceTotal() { return (String) new CurrencyFormatter().format(getFinancialDocument().getSourceTotal()); } /** * Retrieves the target accounting lines total in a currency format with commas. * * @return String */ public String getCurrencyFormattedTargetTotal() { return (String) new CurrencyFormatter().format(getFinancialDocument().getTargetTotal()); } /** * @return String */ public String getAccountingLineImportInstructionsUrl() { // FIXME: help URLs are all being removed return "";//SpringContext.getBean(KualiConfigurationService.class).getPropertyString(KFSConstants.EXTERNALIZABLE_HELP_URL_KEY) + SpringContext.getBean(ParameterService.class).getParameterValue(ParameterConstants.FINANCIAL_SYSTEM_DOCUMENT.class, KFSConstants.FinancialApcParms.ACCOUNTING_LINE_IMPORT_HELP); } /** * @param financialDocument * @return a new source accounting line for the document */ protected SourceAccountingLine createNewSourceAccountingLine(AccountingDocument financialDocument) { if (financialDocument == null) { throw new IllegalArgumentException("invalid (null) document"); } try { return (SourceAccountingLine) financialDocument.getSourceAccountingLineClass().newInstance(); } catch (Exception e) { throw new InfrastructureException("unable to create a new source accounting line", e); } } /** * @param financialDocument * @return a new target accounting line for the document */ protected TargetAccountingLine createNewTargetAccountingLine(AccountingDocument financialDocument) { if (financialDocument == null) { throw new IllegalArgumentException("invalid (null) document"); } try { return (TargetAccountingLine) financialDocument.getTargetAccountingLineClass().newInstance(); } catch (Exception e) { throw new InfrastructureException("unable to create a new target accounting line", e); } } /** * This method finds its appropriate document authorizer and uses that to reset the map of editable accounts, based on the * current accounting lines. */ public void refreshEditableAccounts() { AccountingDocumentAuthorizer authorizer = (AccountingDocumentAuthorizer) SpringContext.getBean(DocumentHelperService.class).getDocumentAuthorizer(this.getDocument()); this.setEditableAccounts(authorizer.getEditableAccounts(glomBaselineAccountingLines(), GlobalVariables.getUserSession().getPerson())); } /** * This method returns a list made up of accounting lines from all baseline accounting line sources. * * @return a list of accounting lines, made up of all baseline source and baseline target lines. */ private List<AccountingLine> glomBaselineAccountingLines() { List<AccountingLine> lines = new ArrayList<AccountingLine>(); lines.addAll(harvestAccountingLines(this.getBaselineSourceAccountingLines())); lines.addAll(harvestAccountingLines(this.getBaselineTargetAccountingLines())); return lines; } /** * This method takes a generic list, hopefully with some AccountingLine objects in it, and returns a list of AccountingLine * objects, because Java generics are just so wonderful. * * @param lines a list of objects * @return a list of the accounting lines that were in the lines parameter */ private List<AccountingLine> harvestAccountingLines(List lines) { List<AccountingLine> accountingLines = new ArrayList<AccountingLine>(); for (Object o : lines) { if (o instanceof AccountingLine) { accountingLines.add((AccountingLine) o); } } return accountingLines; } /** * A <code>{@link Map}</code> of names of optional accounting line fields that require a quickfinder. * * @param fieldMap a Map of fields */ public void setForcedLookupOptionalFields(Map fieldMap) { forcedLookupOptionalFields = fieldMap; } /** * A <code>{@link Map}</code> of names of optional accounting line fields that require a quickfinder. * * @return a Map of fields */ public Map getForcedLookupOptionalFields() { return forcedLookupOptionalFields; } /** * Adds the accounting line file size to the list of max file sizes. * * @see org.kuali.rice.kns.web.struts.pojo.PojoFormBase#customInitMaxUploadSizes() */ @Override protected void customInitMaxUploadSizes() { super.customInitMaxUploadSizes(); addMaxUploadSize(SpringContext.getBean(ParameterService.class).getParameterValue(ParameterConstants.FINANCIAL_SYSTEM_DOCUMENT.class, KFSConstants.ACCOUNTING_LINE_IMPORT_MAX_FILE_SIZE_PARM_NM)); } }
work/src/org/kuali/kfs/sys/web/struts/KualiAccountingDocumentFormBase.java
/* * Copyright 2007 The Kuali Foundation. * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.sys.web.struts; import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang.StringUtils; import org.apache.struts.upload.FormFile; import org.kuali.kfs.coa.businessobject.Account; import org.kuali.kfs.coa.businessobject.ObjectCode; import org.kuali.kfs.coa.businessobject.SubAccount; import org.kuali.kfs.coa.businessobject.SubObjectCode; import org.kuali.kfs.sys.KFSConstants; import org.kuali.kfs.sys.KFSPropertyConstants; import org.kuali.kfs.sys.businessobject.AccountingLine; import org.kuali.kfs.sys.businessobject.AccountingLineBase; import org.kuali.kfs.sys.businessobject.AccountingLineDecorator; import org.kuali.kfs.sys.businessobject.AccountingLineOverride; import org.kuali.kfs.sys.businessobject.SourceAccountingLine; import org.kuali.kfs.sys.businessobject.TargetAccountingLine; import org.kuali.kfs.sys.context.SpringContext; import org.kuali.kfs.sys.document.AccountingDocument; import org.kuali.kfs.sys.document.authorization.AccountingDocumentAuthorizer; import org.kuali.kfs.sys.document.web.struts.FinancialSystemTransactionalDocumentFormBase; import org.kuali.kfs.sys.service.ParameterService; import org.kuali.kfs.sys.service.impl.ParameterConstants; import org.kuali.rice.kns.exception.InfrastructureException; import org.kuali.rice.kns.service.BusinessObjectDictionaryService; import org.kuali.rice.kns.service.DocumentHelperService; import org.kuali.rice.kns.service.KualiConfigurationService; import org.kuali.rice.kns.util.GlobalVariables; import org.kuali.rice.kns.util.ObjectUtils; import org.kuali.rice.kns.web.format.CurrencyFormatter; import org.kuali.rice.kns.web.format.SimpleBooleanFormatter; /** * This class is the base action form for all financial documents. */ public class KualiAccountingDocumentFormBase extends FinancialSystemTransactionalDocumentFormBase { private SourceAccountingLine newSourceLine; private TargetAccountingLine newTargetLine; private Map editableAccounts; private Map forcedLookupOptionalFields; // TODO: FormFile isn't Serializable, so mark these fields as transient or create a Serializable subclass of FormFile protected FormFile sourceFile; protected FormFile targetFile; private boolean hideDetails = false; private List<AccountingLineDecorator> sourceLineDecorators; private List<AccountingLineDecorator> targetLineDecorators; private List baselineSourceAccountingLines; private List baselineTargetAccountingLines; /** * This constructor sets up empty instances for the dependent objects... */ public KualiAccountingDocumentFormBase() { super(); setFormatterType("sourceLineDecorator.revertible", SimpleBooleanFormatter.class); setFormatterType("targetLineDecorator.revertible", SimpleBooleanFormatter.class); // create an empty editableAccounts map, for safety's sake editableAccounts = new HashMap(); forcedReadOnlyFields = new HashMap(); forcedLookupOptionalFields = new HashMap(); // initialize accountingLine lists baselineSourceAccountingLines = new ArrayList(); baselineTargetAccountingLines = new ArrayList(); // initialize accountingLine decoration lists sourceLineDecorators = new ArrayList<AccountingLineDecorator>(); targetLineDecorators = new ArrayList<AccountingLineDecorator>(); } /** * Overrides the parent to call super.populate and then to call the accounting lines populate method that is specific to loading * the two select lists on the page. * * @see org.kuali.rice.kns.web.struts.pojo.PojoForm#populate(javax.servlet.http.HttpServletRequest) */ @Override public void populate(HttpServletRequest request) { super.populate(request); String methodToCall = this.getMethodToCall(); if (!StringUtils.equals(methodToCall, KFSConstants.RETURN_METHOD_TO_CALL)) { resetPropertyFromHtmlCheckBox(request); } populateSourceAccountingLine(getNewSourceLine()); populateTargetAccountingLine(getNewTargetLine()); // don't call populateAccountingLines if you are copying or errorCorrecting a document, // since you want the accountingLines in the copy to be "identical" to those in the original if (!StringUtils.equals(methodToCall, KFSConstants.COPY_METHOD) && !StringUtils.equals(methodToCall, KFSConstants.ERRORCORRECT_METHOD)) { populateAccountingLines(); } setDocTypeName(discoverDocumentTypeName()); } // reset the properties rendered as Struts html checkbox when the box is unchecked protected void resetPropertyFromHtmlCheckBox(HttpServletRequest request) { this.resetAccountExpiredOverride(request, "newSourceLine.accountExpiredOverride", getNewSourceLine()); this.resetAccountExpiredOverride(request, "newTargetLine.accountExpiredOverride", getNewTargetLine()); int index = 0; String propertyNamePattern = "document.{0}[{1}].accountExpiredOverride"; for(Object accountingLine : getFinancialDocument().getSourceAccountingLines()) { SourceAccountingLine sourceAccountingLine = (SourceAccountingLine)accountingLine; String propertyName = MessageFormat.format(propertyNamePattern, KFSPropertyConstants.SOURCE_ACCOUNTING_LINES, index++); this.resetAccountExpiredOverride(request, propertyName, sourceAccountingLine); } index = 0; for(Object accountingLine : getFinancialDocument().getTargetAccountingLines()) { TargetAccountingLine targetAccountingLine = (TargetAccountingLine)accountingLine; String propertyName = MessageFormat.format(propertyNamePattern, KFSPropertyConstants.TARGET_ACCOUNTING_LINES, index++); this.resetAccountExpiredOverride(request, propertyName, targetAccountingLine); } } // reset accountExpiredOverride of the given accountingLine if its corresponding request parameter is not present private void resetAccountExpiredOverride(HttpServletRequest request, String accountingLinePropertyName, AccountingLineBase accountingLine) { if (ObjectUtils.isNull(request.getParameterMap().get(accountingLinePropertyName))) { accountingLine.setAccountExpiredOverride(false); } } /** * This method iterates over all of the source lines and all of the target lines in a transactional document, and calls * the corresponding populate method on each one. This is called because a user could have updated already * existing accounting lines that had blank values in composite key fields. */ protected void populateAccountingLines() { Iterator sourceLines = getFinancialDocument().getSourceAccountingLines().iterator(); while (sourceLines.hasNext()) { SourceAccountingLine sourceLine = (SourceAccountingLine) sourceLines.next(); populateSourceAccountingLine(sourceLine); } Iterator targetLines = getFinancialDocument().getTargetAccountingLines().iterator(); while (targetLines.hasNext()) { TargetAccountingLine targetLine = (TargetAccountingLine) targetLines.next(); populateTargetAccountingLine(targetLine); } } /** * Populates a source accounting line bo using values from the struts form. This is in place to make sure that all of the * composite key objects have the correct values in them. This should be overridden by children forms in the situation where * document level attributes need to be pushed down into the accounting lines. * * @param sourceLine */ public void populateSourceAccountingLine(SourceAccountingLine sourceLine) { populateAccountingLine(sourceLine); } /** * Populates a target accounting line bo using values from the struts form. This is in place to make sure that all of the * composite key objects have the correct values in them. This should be overridden by children forms in the situation where * document level attributes need to be pushed down into the accounting lines. * * @param targetLine */ public void populateTargetAccountingLine(TargetAccountingLine targetLine) { populateAccountingLine(targetLine); } /** * Populates the dependent fields of objects contained within the given accountingLine * * @param line */ @SuppressWarnings("deprecation") private void populateAccountingLine(AccountingLineBase line) { SpringContext.getBean(BusinessObjectDictionaryService.class).performForceUppercase(line); line.setDocumentNumber(getDocument().getDocumentNumber()); if (ObjectUtils.isNull(line.getAccount())) { line.setAccount(new Account()); } line.getAccount().setChartOfAccountsCode(line.getChartOfAccountsCode()); if (ObjectUtils.isNull(line.getObjectCode())) { line.setObjectCode(new ObjectCode()); } line.getObjectCode().setUniversityFiscalYear(getFinancialDocument().getPostingYear()); line.getObjectCode().setChartOfAccountsCode(line.getChartOfAccountsCode()); if (ObjectUtils.isNull(line.getSubAccount())) { line.setSubAccount(new SubAccount()); } line.getSubAccount().setChartOfAccountsCode(line.getChartOfAccountsCode()); line.getSubAccount().setAccountNumber(line.getAccountNumber()); if (ObjectUtils.isNull(line.getSubObjectCode())) { line.setSubObjectCode(new SubObjectCode()); } line.getSubObjectCode().setChartOfAccountsCode(line.getChartOfAccountsCode()); line.getSubObjectCode().setAccountNumber(line.getAccountNumber()); line.getSubObjectCode().setFinancialObjectCode(line.getFinancialObjectCode()); line.getSubObjectCode().setUniversityFiscalYear(getFinancialDocument().getPostingYear()); AccountingLineOverride.populateFromInput(line); } /** * This method retrieves the document associated with this form, cast as an AccountingDocument. * * @return */ public AccountingDocument getFinancialDocument() { return (AccountingDocument) getDocument(); } /** * @return Returns the newTargetLine. */ public TargetAccountingLine getNewTargetLine() { if (newTargetLine == null) { newTargetLine = createNewTargetAccountingLine(getFinancialDocument()); } return newTargetLine; } /** * @param newExpenseLine The newTargetLine to set. */ public void setNewTargetLine(TargetAccountingLine newExpenseLine) { this.newTargetLine = newExpenseLine; } /** * @return Returns the newSourceLine. */ public SourceAccountingLine getNewSourceLine() { if (newSourceLine == null) { newSourceLine = createNewSourceAccountingLine(getFinancialDocument()); } return newSourceLine; } /** * @param newIncomeLine The newSourceLine to set. */ public void setNewSourceLine(SourceAccountingLine newIncomeLine) { this.newSourceLine = newIncomeLine; } /** * @return Returns the sourceFile. */ public FormFile getSourceFile() { return sourceFile; } /** * @param sourceFile The sourceFile to set. */ public void setSourceFile(FormFile sourceFile) { this.sourceFile = sourceFile; } /** * @return Returns the targetFile. */ public FormFile getTargetFile() { return targetFile; } /** * @param targetFile The targetFile to set. */ public void setTargetFile(FormFile targetFile) { this.targetFile = targetFile; } /** * @return current Map of editableAccounts */ public Map getEditableAccounts() { return editableAccounts; } /** * @param editableAccounts the account Map to set */ public void setEditableAccounts(Map editableAccounts) { this.editableAccounts = editableAccounts; } /** * @return hideDetails attribute */ public boolean isHideDetails() { return hideDetails; } /** * @return hideDetails attribute * @see #isHideDetails() */ public boolean getHideDetails() { return isHideDetails(); } /** * @param hideDetails */ public void setHideDetails(boolean hideDetails) { this.hideDetails = hideDetails; } /** * @return current List of baseline SourceAccountingLines for use in update-event generation */ public List getBaselineSourceAccountingLines() { return baselineSourceAccountingLines; } /** * Sets the current List of baseline SourceAccountingLines to the given List * * @param baselineSourceAccountingLines */ public void setBaselineSourceAccountingLines(List baselineSourceAccountingLines) { this.baselineSourceAccountingLines = baselineSourceAccountingLines; } /** * @param index * @return true if a baselineSourceAccountingLine with the given index exists */ public boolean hasBaselineSourceAccountingLine(int index) { boolean has = false; if ((index >= 0) && (index <= baselineSourceAccountingLines.size())) { has = true; } return has; } /** * Implementation creates empty SourceAccountingLines as a side-effect, so that Struts' efforts to set fields of lines which * haven't been created will succeed rather than causing a NullPointerException.
* * @param index * @return baseline SourceAccountingLine at the given index */ public SourceAccountingLine getBaselineSourceAccountingLine(int index) { try { while (baselineSourceAccountingLines.size() <= index) { baselineSourceAccountingLines.add(getFinancialDocument().getSourceAccountingLineClass().newInstance()); } } catch (InstantiationException e) { throw new RuntimeException("Unable to get new source line instance for document" + e.getMessage()); } catch (IllegalAccessException e) { throw new RuntimeException("Unable to get new source line instance for document" + e.getMessage()); } return (SourceAccountingLine) baselineSourceAccountingLines.get(index); } /** * @return current List of baseline TargetAccountingLines for use in update-event generation */ public List getBaselineTargetAccountingLines() { return baselineTargetAccountingLines; } /** * Sets the current List of baseline TargetAccountingLines to the given List * * @param baselineTargetAccountingLines */ public void setBaselineTargetAccountingLines(List baselineTargetAccountingLines) { this.baselineTargetAccountingLines = baselineTargetAccountingLines; } /** * @param index * @return true if a baselineTargetAccountingLine with the given index exists */ public boolean hasBaselineTargetAccountingLine(int index) { boolean has = false; if ((index >= 0) && (index <= baselineTargetAccountingLines.size())) { has = true; } return has; } /** * Implementation creates empty TargetAccountingLines as a side-effect, so that Struts' efforts to set fields of lines which * haven't been created will succeed rather than causing a NullPointerException. * * @param index * @return baseline TargetAccountingLine at the given index */ public TargetAccountingLine getBaselineTargetAccountingLine(int index) { try { while (baselineTargetAccountingLines.size() <= index) { baselineTargetAccountingLines.add(getFinancialDocument().getTargetAccountingLineClass().newInstance()); } } catch (InstantiationException e) { throw new RuntimeException("Unable to get new target line instance for document" + e.getMessage()); } catch (IllegalAccessException e) { throw new RuntimeException("Unable to get new target line instance for document" + e.getMessage()); } return (TargetAccountingLine) baselineTargetAccountingLines.get(index); } /** * @return current List of SourceAccountingLine decorations */ public List<AccountingLineDecorator> getSourceLineDecorators() { return sourceLineDecorators; } /** * @param minSize * @return current List of SourceAccountingLine decorations, expanded to have at least minSize elements */ public List<AccountingLineDecorator> getSourceLineDecorators(int minSize) { extendSourceLineDecorators(minSize); return sourceLineDecorators; } /** * Adds default AccountingLineDecorators to sourceAccountingLineDecorators until it contains at least minSize elements * * @param minSize */ private void extendSourceLineDecorators(int minSize) { while (sourceLineDecorators.size() < minSize) { sourceLineDecorators.add(new AccountingLineDecorator()); } } /** * Sets the current List of SourceAccountingLine decorators * * @param sourceLineDecorators */ public void setSourceLineDecorators(List<AccountingLineDecorator> sourceLineDecorators) { this.sourceLineDecorators = sourceLineDecorators; } /** * Implementation creates empty AccountingLineDecorators as a side-effect, so that Struts' efforts to set fields of lines which * haven't been created will succeed rather than causing a NullPointerException. 
* * @param index * @return AccountingLineDecorators for sourceLine at the given index */ public AccountingLineDecorator getSourceLineDecorator(int index) { extendSourceLineDecorators(index + 1); return sourceLineDecorators.get(index); } /** * @return current List of TargetAccountingLine decorators */ public List<AccountingLineDecorator> getTargetLineDecorators() { return targetLineDecorators; } /** * @param minSize * @return current List of TargetAccountingLine decorators, expanded to have at least minSize elements */ public List<AccountingLineDecorator> getTargetLineDecorators(int minSize) { extendTargetLineDecorators(minSize); return targetLineDecorators; } /** * Adds default AccountingLineDecorators to targetAccountingLineDecorators until it contains at least minSize elements * * @param minSize */ private void extendTargetLineDecorators(int minSize) { while (targetLineDecorators.size() < minSize) { targetLineDecorators.add(new AccountingLineDecorator()); } } /** * Sets the current List of TargetAccountingLine decorators * * @param targetLineDecorators */ public void setTargetLineDecorators(List<AccountingLineDecorator> targetLineDecorators) { this.targetLineDecorators = targetLineDecorators; } /** * Implementation creates empty AccountingLineDecorators as a side-effect, so that Struts' efforts to set fields of lines which * haven't been created will succeed rather than causing a NullPointerException. * * @param index * @return AccountingLineDecorator for targetLine at the given index */ public AccountingLineDecorator getTargetLineDecorator(int index) { extendTargetLineDecorators(index + 1); return targetLineDecorators.get(index); } /** * Resets the source accounting line decorators to new and ensures that there are the given number. These decorators take very * little memory, there are few of them on the page, and they are rarely reset, so this method does it the simple way. * * @param size */ public void resetSourceLineDecorators(int size) { sourceLineDecorators.clear(); extendSourceLineDecorators(size); } /** * Resets the target accounting line decorators to new and ensures that there are the given number. These decorators take very * little memory, there are few of them on the page, and they are rarely reset, so this method does it the simple way. * * @param size */ public void resetTargetLineDecorators(int size) { targetLineDecorators.clear(); extendTargetLineDecorators(size); } /** * Retrieves the source accounting lines total in a currency format with commas. * * @return String */ public String getCurrencyFormattedSourceTotal() { return (String) new CurrencyFormatter().format(getFinancialDocument().getSourceTotal()); } /** * Retrieves the source accounting lines total in a currency format with commas. 
* * @return String */ public String getCurrencyFormattedTargetTotal() { return (String) new CurrencyFormatter().format(getFinancialDocument().getTargetTotal()); } /** * @return String */ public String getAccountingLineImportInstructionsUrl() { return SpringContext.getBean(KualiConfigurationService.class).getPropertyString(KFSConstants.EXTERNALIZABLE_HELP_URL_KEY) + SpringContext.getBean(ParameterService.class).getParameterValue(ParameterConstants.FINANCIAL_SYSTEM_DOCUMENT.class, KFSConstants.FinancialApcParms.ACCOUNTING_LINE_IMPORT_HELP); } /** * @param financialDocument * @return a new source accounting line for the document */ protected SourceAccountingLine createNewSourceAccountingLine(AccountingDocument financialDocument) { if (financialDocument == null) { throw new IllegalArgumentException("invalid (null) document"); } try { return (SourceAccountingLine) financialDocument.getSourceAccountingLineClass().newInstance(); } catch (Exception e) { throw new InfrastructureException("unable to create a new source accounting line", e); } } /** * @param financialDocument * @return a new target accounting line for the document */ protected TargetAccountingLine createNewTargetAccountingLine(AccountingDocument financialDocument) { if (financialDocument == null) { throw new IllegalArgumentException("invalid (null) document"); } try { return (TargetAccountingLine) financialDocument.getTargetAccountingLineClass().newInstance(); } catch (Exception e) { throw new InfrastructureException("unable to create a new target accounting line", e); } } /** * This method finds its appropriate document authorizer and uses that to reset the map of editable accounts, based on the * current accounting lines. */ public void refreshEditableAccounts() { AccountingDocumentAuthorizer authorizer = (AccountingDocumentAuthorizer) SpringContext.getBean(DocumentHelperService.class).getDocumentAuthorizer(this.getDocument()); this.setEditableAccounts(authorizer.getEditableAccounts(glomBaselineAccountingLines(), GlobalVariables.getUserSession().getPerson())); } /** * This method returns a list made up of accounting lines from all baseline accounting line sources. * * @return a list of accounting lines, made up of all baseline source and baseline target lines. */ private List<AccountingLine> glomBaselineAccountingLines() { List<AccountingLine> lines = new ArrayList<AccountingLine>(); lines.addAll(harvestAccountingLines(this.getBaselineSourceAccountingLines())); lines.addAll(harvestAccountingLines(this.getBaselineTargetAccountingLines())); return lines; } /** * This method takes a generic list, hopefully with some AccountingLine objects in it, and returns a list of AccountingLine * objects, because Java generics are just so wonderful. * * @param lines a list of objects * @return a list of the accounting lines that were in the lines parameter */ private List<AccountingLine> harvestAccountingLines(List lines) { List<AccountingLine> accountingLines = new ArrayList<AccountingLine>(); for (Object o : lines) { if (o instanceof AccountingLine) { accountingLines.add((AccountingLine) o); } } return accountingLines; } /** * A <code>{@link Map}</code> of names of optional accounting line fields that require a quickfinder. * * @param fieldMap a Map of fields */ public void setForcedLookupOptionalFields(Map fieldMap) { forcedLookupOptionalFields = fieldMap; } /** * A <code>{@link Map}</code> of names of optional accounting line fields that require a quickfinder. 
* * @return a Map of fields */ public Map getForcedLookupOptionalFields() { return forcedLookupOptionalFields; } /** * Adds the accounting line file size to the list of max file sizes. * * @see org.kuali.rice.kns.web.struts.pojo.PojoFormBase#customInitMaxUploadSizes() */ @Override protected void customInitMaxUploadSizes() { super.customInitMaxUploadSizes(); addMaxUploadSize(SpringContext.getBean(ParameterService.class).getParameterValue(ParameterConstants.FINANCIAL_SYSTEM_DOCUMENT.class, KFSConstants.ACCOUNTING_LINE_IMPORT_MAX_FILE_SIZE_PARM_NM)); } }
Removed reference to HELP system parameter that will be removed tonight.
work/src/org/kuali/kfs/sys/web/struts/KualiAccountingDocumentFormBase.java
Removed reference to HELP system parameter that will be removed tonight.
<ide><path>work/src/org/kuali/kfs/sys/web/struts/KualiAccountingDocumentFormBase.java <ide> * @return String <ide> */ <ide> public String getAccountingLineImportInstructionsUrl() { <del> return SpringContext.getBean(KualiConfigurationService.class).getPropertyString(KFSConstants.EXTERNALIZABLE_HELP_URL_KEY) + SpringContext.getBean(ParameterService.class).getParameterValue(ParameterConstants.FINANCIAL_SYSTEM_DOCUMENT.class, KFSConstants.FinancialApcParms.ACCOUNTING_LINE_IMPORT_HELP); <add> // FIXME: help URLs are all being removed <add> return "";//SpringContext.getBean(KualiConfigurationService.class).getPropertyString(KFSConstants.EXTERNALIZABLE_HELP_URL_KEY) + SpringContext.getBean(ParameterService.class).getParameterValue(ParameterConstants.FINANCIAL_SYSTEM_DOCUMENT.class, KFSConstants.FinancialApcParms.ACCOUNTING_LINE_IMPORT_HELP); <ide> } <ide> <ide> /**
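The form class in this record leans on a grow-on-access idiom: getBaselineSourceAccountingLine and getSourceLineDecorator pad their backing lists with freshly created elements so that Struts' indexed property setters never hit a missing element or a NullPointerException. Below is a minimal standalone sketch of that idiom; the class and method names are hypothetical and not part of the commit.

import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;

// Hypothetical illustration of the grow-on-access list used by the form above.
class GrowOnAccessList<T> {
    private final List<T> backing = new ArrayList<T>();
    private final Supplier<T> factory;

    GrowOnAccessList(final Supplier<T> factory) {
        this.factory = factory;
    }

    // Pads the list with freshly created elements up to and including index, so
    // framework-driven indexed access never sees a gap or a null element.
    T get(final int index) {
        while (backing.size() <= index) {
            backing.add(factory.get());
        }
        return backing.get(index);
    }
}

For example, new GrowOnAccessList<>(StringBuilder::new).get(5) would create six empty elements on first access, mirroring what extendSourceLineDecorators does for AccountingLineDecorator instances.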
Java
apache-2.0
19bb27ca7d209178966704cf0a96e5787119cd80
0
afaucher/see-the-dark,afaucher/see-the-dark,afaucher/see-the-dark
package com.mygdx.game; import java.util.ArrayList; import java.util.List; public abstract class AbstractBodyData implements BodyData { private List<Emission> receivedEmissions = new ArrayList<Emission>(); private boolean accumlateEmissions = false; private float temperature = 0; private static float EMISSION_UNITS_PER_HEAT = 100.0f; public AbstractBodyData(boolean accumlateEmissions) { this.accumlateEmissions = accumlateEmissions; } @Override public void receiveEmission(Emission emission) { // TODO: Take duration into account?, push into emission? this.accumlateHeat(emission.power / EMISSION_UNITS_PER_HEAT); if (!accumlateEmissions) { return; } receivedEmissions.add(emission); // TODO: Cause actual damage } @Override public void resetEmissions() { receivedEmissions.clear(); } @Override public List<Emission> getEmissions() { return receivedEmissions; } @Override public float getTemperature() { return temperature; } public void disapateHeat(float energy) { // TODO: Scale for mass temperature = Math.max(temperature - energy, 0); } public void accumlateHeat(float energy) { // TODO: Scale for mass temperature += energy; } }
core/src/com/mygdx/game/AbstractBodyData.java
package com.mygdx.game; import java.util.ArrayList; import java.util.List; public abstract class AbstractBodyData implements BodyData { private List<Emission> receivedEmissions = new ArrayList<Emission>(); private boolean accumlateEmissions = false; private float temperature = 0; private static float EMISSION_UNITS_PER_HEAT = 100.0f; public AbstractBodyData(boolean accumlateEmissions) { this.accumlateEmissions = accumlateEmissions; } @Override public void receiveEmission(Emission emission) { // TODO: Take duration into account, push into emission? if (!accumlateEmissions) return; this.accumlateHeat(emission.power / EMISSION_UNITS_PER_HEAT); receivedEmissions.add(emission); // TODO: Cause actual damage } @Override public void resetEmissions() { receivedEmissions.clear(); } @Override public List<Emission> getEmissions() { return receivedEmissions; } @Override public float getTemperature() { return temperature; } public void disapateHeat(float energy) { // TODO: Scale for mass temperature = Math.max(temperature - energy, 0); } public void accumlateHeat(float energy) { // TODO: Scale for mass temperature += energy; } }
Cleanup if
core/src/com/mygdx/game/AbstractBodyData.java
Cleanup if
<ide><path>core/src/com/mygdx/game/AbstractBodyData.java <ide> <ide> @Override <ide> public void receiveEmission(Emission emission) { <del> // TODO: Take duration into account, push into emission? <del> if (!accumlateEmissions) <add> // TODO: Take duration into account?, push into emission? <add> this.accumlateHeat(emission.power / EMISSION_UNITS_PER_HEAT); <add> if (!accumlateEmissions) { <ide> return; <del> this.accumlateHeat(emission.power / EMISSION_UNITS_PER_HEAT); <add> } <ide> receivedEmissions.add(emission); <ide> // TODO: Cause actual damage <ide> }
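Beyond adding braces, this diff changes behavior: the accumlateHeat call now runs before the accumlateEmissions guard, so a body warms up from every emission even when it does not record the emission itself. The following self-contained sketch reproduces the post-commit behavior; the class name and the float-based emission are stand-ins for the project's Emission type.

import java.util.ArrayList;
import java.util.List;

// Stand-in for AbstractBodyData after this commit (names mirror the fields above).
class HeatAccumulatorSketch {
    private static final float EMISSION_UNITS_PER_HEAT = 100.0f;
    private final boolean accumulateEmissions;
    private final List<Float> receivedEmissions = new ArrayList<Float>();
    private float temperature = 0;

    HeatAccumulatorSketch(final boolean accumulateEmissions) {
        this.accumulateEmissions = accumulateEmissions;
    }

    void receiveEmission(final float power) {
        temperature += power / EMISSION_UNITS_PER_HEAT; // heat accrues unconditionally
        if (!accumulateEmissions) {
            return; // the emission itself is not recorded
        }
        receivedEmissions.add(power);
    }

    float getTemperature() {
        return temperature;
    }
}

With this version, new HeatAccumulatorSketch(false).receiveEmission(200f) leaves the emission list empty but raises the temperature to 2.0, which would not have happened before the commit.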
Java
apache-2.0
error: pathspec 'foo-collection/src/main/java/com/github/foo/Trees.java' did not match any file(s) known to git
cb38fd222b4d1124a20f64710df34831885fb6ba
1
gauravrmazra/ds-algorithms
/** * */ package com.github.foo; /** * Class consists of static methods that operate on * Tree. * <p>The methods of this class all throw a <tt>NullPointerException</tt> * if the collections or class objects provided to them are null.</p> * * @author Sachin * @author Gaurav Rai Mazra */ public class Trees { }
foo-collection/src/main/java/com/github/foo/Trees.java
Trees class consisting of static methods that operate on Tree
foo-collection/src/main/java/com/github/foo/Trees.java
Trees class consisting of static methods that operate on Tree
<ide><path>foo-collection/src/main/java/com/github/foo/Trees.java <add>/** <add> * <add> */ <add>package com.github.foo; <add> <add>/** <add> * Class consists of static methods that operate on <add> * Tree. <add> * <p>The methods of this class all throw a <tt>NullPointerException</tt> <add> * if the collections or class objects provided to them are null.</p> <add> * <add> * @author Sachin <add> * @author Gaurav Rai Mazra <add> */ <add>public class Trees { <add> <add> <add>}
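The committed class is still an empty shell, so the sketch below is purely illustrative: one plausible shape for a Trees static helper that honors the NullPointerException contract promised in the Javadoc. TreesSketch, TreeNode, and height are hypothetical names, not part of the commit.

import java.util.Objects;

// Hypothetical example of the kind of static helper Trees could grow.
final class TreesSketch {
    private TreesSketch() {
    }

    // Minimal binary tree node, defined here only to keep the sketch self-contained.
    static final class TreeNode<T> {
        T value;
        TreeNode<T> left;
        TreeNode<T> right;
    }

    // Height of a binary tree; throws NullPointerException for a null argument,
    // matching the documented contract of the Trees class.
    static <T> int height(final TreeNode<T> root) {
        Objects.requireNonNull(root, "root");
        return heightOrZero(root);
    }

    private static <T> int heightOrZero(final TreeNode<T> node) {
        if (node == null) {
            return 0;
        }
        return 1 + Math.max(heightOrZero(node.left), heightOrZero(node.right));
    }
}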
Java
apache-2.0
0272c8ba4d23e638c076521b805e0ff3586d1308
0
sidorovis/stsc.fundamental.analysis
package stsc.fundamental.analysis; import java.io.IOException; import java.nio.file.Path; import java.text.ParseException; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Map.Entry; import java.util.TreeMap; import stsc.algorithms.fundamental.analysis.statistics.eod.LeftToRightMovingPearsonCorrelation; import stsc.common.BadSignalException; import stsc.common.FromToPeriod; import stsc.common.algorithms.BadAlgorithmException; import stsc.common.signals.SerieSignal; import stsc.common.signals.SignalContainer; import stsc.common.storage.SignalsStorage; import stsc.common.storage.StockStorage; import stsc.general.simulator.Simulator; import stsc.general.simulator.SimulatorImpl; import stsc.general.simulator.SimulatorConfiguration; import stsc.general.simulator.SimulatorConfigurationImpl; import stsc.general.trading.TradeProcessorInit; import stsc.signals.MapKeyPairToDoubleSignal; import stsc.signals.commons.KeyPair; import stsc.stocks.indexes.CountryMarketIndex; import stsc.stocks.indexes.GlobalMarketIndex; import stsc.stocks.indexes.MarketIndex; import stsc.stocks.indexes.RegionMarketIndex; import stsc.stocks.repo.MetaIndicesRepository; import stsc.stocks.repo.MetaIndicesRepositoryIncodeImpl; import stsc.yahoo.YahooDatafeedSettings; import stsc.yahoo.YahooFileStockStorage; /** * This application calculates correlations between different indexes from {@link MetaIndicesRepository}. */ public final class CorrelationCalculator { public static final String DATA_FOLDER = "./data/"; public static final String FILTER_DATA_FOLDER = "./filtered_data/"; private final MetaIndicesRepository metaIndicesRepository; private final StockStorage stockStorage; private int id = 0; public CorrelationCalculator(final CorrelationCalculatorSettings settings, final MetaIndicesRepository metaIndicesRepository) throws IOException, InterruptedException, BadAlgorithmException, BadSignalException, ParseException { this.metaIndicesRepository = metaIndicesRepository; final Path dataFolder = settings.getDatafeedFolder().resolve(DATA_FOLDER); final Path filteredDataFolder = settings.getDatafeedFolder().resolve(FILTER_DATA_FOLDER); this.stockStorage = new YahooFileStockStorage(new YahooDatafeedSettings(dataFolder, filteredDataFolder), true).waitForBackgroundProcess(); calculate(); } private void calculate() throws BadAlgorithmException, BadSignalException, ParseException { final Map<KeyPair, Double> cc = getCorrelationCoefficient(); for (Entry<KeyPair, Double> e : cc.entrySet()) { System.out.print(e + " ||| "); findType(e.getKey().getLeft()); System.out.print("- "); findType(e.getKey().getRight()); System.out.println(); } } private void findType(final String instrumentName) { final int leftCountryIndex = Collections.binarySearch(metaIndicesRepository.getCountryMarketIndices(), CountryMarketIndex.createForSearch(instrumentName)); if (leftCountryIndex >= 0) { final CountryMarketIndex index = metaIndicesRepository.getCountryMarketIndices().get(leftCountryIndex); System.out.print(index.getFilesystemName() + " (" + index.getCountry().name() + ") "); return; } final int leftRegionIndex = Collections.binarySearch(metaIndicesRepository.getRegionMarketIndices(), RegionMarketIndex.createForSearch(instrumentName)); if (leftRegionIndex >= 0) { final RegionMarketIndex index = metaIndicesRepository.getRegionMarketIndices().get(leftRegionIndex); System.out.print(index.getWorldSector().name() + " "); return; } final int leftGlobalIndex = 
Collections.binarySearch(metaIndicesRepository.getGlobalMarketIndices(), GlobalMarketIndex.createForSearch(instrumentName)); if (leftGlobalIndex >= 0) { System.out.print("GL "); return; } } private <T extends MarketIndex<T>> String joinForParameter(Collection<T> col) { String r = ""; for (MarketIndex<T> s : col) { r += s.getInstrumentName() + "|"; } return r; } private Map<KeyPair, Double> getCorrelationCoefficient() throws BadAlgorithmException, ParseException, BadSignalException { final String executionName = "correlation"; final String leftElements = "spy|^n225|^ftse|^ixic|msci|efa"; final String rightElements = joinForParameter(metaIndicesRepository.getCountryMarketIndices()); final TradeProcessorInit tradeProcessorInit = new TradeProcessorInit(stockStorage, new FromToPeriod("01-01-1900", "01-01-2100"), // "EodExecutions = " + executionName + "\n" + // executionName + ".loadLine = ." + LeftToRightMovingPearsonCorrelation.class.getSimpleName() + // "(size=10000i, N=104i, " + // "LE=" + leftElements + ", " + // "RE=" + rightElements + ")\n"); final SimulatorConfiguration simulatorSettings = new SimulatorConfigurationImpl(id++, tradeProcessorInit); final Simulator simulator = new SimulatorImpl(); simulator.simulateMarketTrading(simulatorSettings); final SignalsStorage signalsStorage = simulator.getSignalsStorage(); final int size = signalsStorage.getIndexSize(executionName); final Map<KeyPair, Double> result = new TreeMap<KeyPair, Double>(); collectData(executionName, signalsStorage, size, result); return result; } private void collectData(final String executionName, final SignalsStorage signalsStorage, final int size, final Map<KeyPair, Double> result) { if (size > 0) { for (int i = size - 1; i >= 0; --i) { final SignalContainer<? extends SerieSignal> sc = signalsStorage.getEodSignal(executionName, i); if (!sc.isPresent()) { continue; } final Map<KeyPair, Double> v = sc.getContent(MapKeyPairToDoubleSignal.class).getValues(); for (Entry<KeyPair, Double> e : v.entrySet()) { if (!result.containsKey(e.getKey())) { result.put(e.getKey(), e.getValue()); } } } } } public static void main(final String[] args) { try { final CorrelationCalculatorSettings settings = new CorrelationCalculatorSettings(args); new CorrelationCalculator(settings, new MetaIndicesRepositoryIncodeImpl()); } catch (final Exception e) { e.printStackTrace(); } } }
src/main/java/stsc/fundamental/analysis/CorrelationCalculator.java
package stsc.fundamental.analysis; import java.io.IOException; import java.nio.file.Path; import java.text.ParseException; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Map.Entry; import java.util.TreeMap; import stsc.algorithms.fundamental.analysis.statistics.eod.LeftToRightMovingPearsonCorrelation; import stsc.common.BadSignalException; import stsc.common.FromToPeriod; import stsc.common.algorithms.BadAlgorithmException; import stsc.common.signals.SerieSignal; import stsc.common.signals.SignalContainer; import stsc.common.storage.SignalsStorage; import stsc.common.storage.StockStorage; import stsc.general.simulator.Simulator; import stsc.general.simulator.SimulatorImpl; import stsc.general.simulator.SimulatorConfiguration; import stsc.general.simulator.SimulatorConfigurationImpl; import stsc.general.trading.TradeProcessorInit; import stsc.signals.MapKeyPairToDoubleSignal; import stsc.signals.commons.KeyPair; import stsc.stocks.indexes.CountryMarketIndex; import stsc.stocks.indexes.GlobalMarketIndex; import stsc.stocks.indexes.MarketIndex; import stsc.stocks.indexes.RegionMarketIndex; import stsc.stocks.repo.MetaIndicesRepository; import stsc.stocks.repo.MetaIndicesRepositoryIncodeImpl; import stsc.yahoo.YahooDatafeedSettings; import stsc.yahoo.YahooFileStockStorage; /** * This application calculates correlations between different indexes from {@link MetaIndicesRepository}. */ public final class CorrelationCalculator { public static final String DATA_FOLDER = "./data/"; public static final String FILTER_DATA_FOLDER = "./filtered_data/"; private final MetaIndicesRepository metaIndicesRepository; private final StockStorage stockStorage; private long id = 0; public CorrelationCalculator(final CorrelationCalculatorSettings settings, final MetaIndicesRepository metaIndicesRepository) throws IOException, InterruptedException, BadAlgorithmException, BadSignalException, ParseException { this.metaIndicesRepository = metaIndicesRepository; final Path dataFolder = settings.getDatafeedFolder().resolve(DATA_FOLDER); final Path filteredDataFolder = settings.getDatafeedFolder().resolve(FILTER_DATA_FOLDER); this.stockStorage = new YahooFileStockStorage(new YahooDatafeedSettings(dataFolder, filteredDataFolder), true).waitForBackgroundProcess(); calculate(); } private void calculate() throws BadAlgorithmException, BadSignalException, ParseException { final Map<KeyPair, Double> cc = getCorrelationCoefficient(); for (Entry<KeyPair, Double> e : cc.entrySet()) { System.out.print(e + " ||| "); findType(e.getKey().getLeft()); System.out.print("- "); findType(e.getKey().getRight()); System.out.println(); } } private void findType(final String instrumentName) { final int leftCountryIndex = Collections.binarySearch(metaIndicesRepository.getCountryMarketIndices(), CountryMarketIndex.createForSearch(instrumentName)); if (leftCountryIndex >= 0) { final CountryMarketIndex index = metaIndicesRepository.getCountryMarketIndices().get(leftCountryIndex); System.out.print(index.getFilesystemName() + " (" + index.getCountry().name() + ") "); return; } final int leftRegionIndex = Collections.binarySearch(metaIndicesRepository.getRegionMarketIndices(), RegionMarketIndex.createForSearch(instrumentName)); if (leftRegionIndex >= 0) { final RegionMarketIndex index = metaIndicesRepository.getRegionMarketIndices().get(leftRegionIndex); System.out.print(index.getWorldSector().name() + " "); return; } final int leftGlobalIndex = 
Collections.binarySearch(metaIndicesRepository.getGlobalMarketIndices(), GlobalMarketIndex.createForSearch(instrumentName)); if (leftGlobalIndex >= 0) { System.out.print("GL "); return; } } private <T extends MarketIndex<T>> String joinForParameter(Collection<T> col) { String r = ""; for (MarketIndex<T> s : col) { r += s.getInstrumentName() + "|"; } return r; } private Map<KeyPair, Double> getCorrelationCoefficient() throws BadAlgorithmException, ParseException, BadSignalException { final String executionName = "correlation"; final String leftElements = "spy|^n225|^ftse|^ixic|msci|efa"; final String rightElements = joinForParameter(metaIndicesRepository.getCountryMarketIndices()); final TradeProcessorInit tradeProcessorInit = new TradeProcessorInit(stockStorage, new FromToPeriod("01-01-1900", "01-01-2100"), // "EodExecutions = " + executionName + "\n" + // executionName + ".loadLine = ." + LeftToRightMovingPearsonCorrelation.class.getSimpleName() + // "(size=10000i, N=104i, " + // "LE=" + leftElements + ", " + // "RE=" + rightElements + ")\n"); final SimulatorConfiguration simulatorSettings = new SimulatorConfigurationImpl(id++, tradeProcessorInit); final Simulator simulator = new SimulatorImpl(); simulator.simulateMarketTrading(simulatorSettings); final SignalsStorage signalsStorage = simulator.getSignalsStorage(); final int size = signalsStorage.getIndexSize(executionName); final Map<KeyPair, Double> result = new TreeMap<KeyPair, Double>(); collectData(executionName, signalsStorage, size, result); return result; } private void collectData(final String executionName, final SignalsStorage signalsStorage, final int size, final Map<KeyPair, Double> result) { if (size > 0) { for (int i = size - 1; i >= 0; --i) { final SignalContainer<? extends SerieSignal> sc = signalsStorage.getEodSignal(executionName, i); if (!sc.isPresent()) { continue; } final Map<KeyPair, Double> v = sc.getContent(MapKeyPairToDoubleSignal.class).getValues(); for (Entry<KeyPair, Double> e : v.entrySet()) { if (!result.containsKey(e.getKey())) { result.put(e.getKey(), e.getValue()); } } } } } public static void main(final String[] args) { try { final CorrelationCalculatorSettings settings = new CorrelationCalculatorSettings(args); new CorrelationCalculator(settings, new MetaIndicesRepositoryIncodeImpl()); } catch (final Exception e) { e.printStackTrace(); } } }
added int id for CorrelationCalculator
src/main/java/stsc/fundamental/analysis/CorrelationCalculator.java
added int id for CorrelationCalculator
<ide><path>src/main/java/stsc/fundamental/analysis/CorrelationCalculator.java <ide> <ide> private final MetaIndicesRepository metaIndicesRepository; <ide> private final StockStorage stockStorage; <del> private long id = 0; <add> private int id = 0; <ide> <ide> public CorrelationCalculator(final CorrelationCalculatorSettings settings, final MetaIndicesRepository metaIndicesRepository) <ide> throws IOException, InterruptedException, BadAlgorithmException, BadSignalException, ParseException {
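The collectData method in this record walks signal indices from newest to oldest and keeps only the first value seen per KeyPair, so the most recent signal wins. Here is a self-contained sketch of that strategy on plain maps; the class and method names are hypothetical.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Hypothetical distillation of the collectData strategy in CorrelationCalculator.
class LatestPerKeySketch {
    // Walks snapshots from the newest to the oldest and keeps the first value seen
    // per key, mirroring the "if (!result.containsKey(...))" guard above.
    static <K, V> Map<K, V> latestPerKey(final List<Map<K, V>> snapshotsOldestFirst) {
        final Map<K, V> result = new LinkedHashMap<K, V>();
        for (int i = snapshotsOldestFirst.size() - 1; i >= 0; --i) {
            for (final Map.Entry<K, V> e : snapshotsOldestFirst.get(i).entrySet()) {
                if (!result.containsKey(e.getKey())) {
                    result.put(e.getKey(), e.getValue());
                }
            }
        }
        return result;
    }
}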
Java
apache-2.0
8ed4159160d1a297d0fb20beb98dc4798351bdd7
0
ieb/sling,roele/sling,awadheshv/sling,nleite/sling,tteofili/sling,ieb/sling,tmaret/sling,tyge68/sling,ist-dresden/sling,tyge68/sling,ist-dresden/sling,ffromm/sling,mmanski/sling,JEBailey/sling,awadheshv/sling,mcdan/sling,ist-dresden/sling,cleliameneghin/sling,mmanski/sling,ieb/sling,mcdan/sling,tyge68/sling,mmanski/sling,mcdan/sling,Sivaramvt/sling,trekawek/sling,tyge68/sling,sdmcraft/sling,Sivaramvt/sling,trekawek/sling,plutext/sling,headwirecom/sling,dulvac/sling,nleite/sling,ieb/sling,tteofili/sling,vladbailescu/sling,tmaret/sling,plutext/sling,trekawek/sling,nleite/sling,Nimco/sling,mikibrv/sling,JEBailey/sling,wimsymons/sling,dulvac/sling,wimsymons/sling,tyge68/sling,sdmcraft/sling,anchela/sling,anchela/sling,plutext/sling,wimsymons/sling,Nimco/sling,mmanski/sling,nleite/sling,labertasch/sling,sdmcraft/sling,roele/sling,roele/sling,anchela/sling,klcodanr/sling,tmaret/sling,tteofili/sling,mmanski/sling,Sivaramvt/sling,klcodanr/sling,dulvac/sling,Sivaramvt/sling,mcdan/sling,vladbailescu/sling,labertasch/sling,mikibrv/sling,tmaret/sling,vladbailescu/sling,awadheshv/sling,klcodanr/sling,mmanski/sling,dulvac/sling,ffromm/sling,wimsymons/sling,klcodanr/sling,cleliameneghin/sling,vladbailescu/sling,tmaret/sling,SylvesterAbreu/sling,sdmcraft/sling,mikibrv/sling,plutext/sling,gutsy/sling,cleliameneghin/sling,ist-dresden/sling,gutsy/sling,gutsy/sling,dulvac/sling,ist-dresden/sling,nleite/sling,anchela/sling,tyge68/sling,mcdan/sling,ffromm/sling,wimsymons/sling,mikibrv/sling,klcodanr/sling,SylvesterAbreu/sling,awadheshv/sling,labertasch/sling,JEBailey/sling,tteofili/sling,klcodanr/sling,Nimco/sling,awadheshv/sling,ffromm/sling,cleliameneghin/sling,vladbailescu/sling,Nimco/sling,Nimco/sling,headwirecom/sling,JEBailey/sling,mikibrv/sling,Sivaramvt/sling,sdmcraft/sling,headwirecom/sling,nleite/sling,Nimco/sling,cleliameneghin/sling,wimsymons/sling,anchela/sling,gutsy/sling,Sivaramvt/sling,SylvesterAbreu/sling,ffromm/sling,gutsy/sling,mcdan/sling,SylvesterAbreu/sling,mikibrv/sling,ieb/sling,awadheshv/sling,SylvesterAbreu/sling,labertasch/sling,trekawek/sling,headwirecom/sling,ieb/sling,ffromm/sling,plutext/sling,JEBailey/sling,roele/sling,headwirecom/sling,trekawek/sling,dulvac/sling,sdmcraft/sling,plutext/sling,tteofili/sling,tteofili/sling,trekawek/sling,gutsy/sling,roele/sling,SylvesterAbreu/sling,labertasch/sling
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.apache.sling.hc.jmx.impl; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.management.Attribute; import javax.management.AttributeList; import javax.management.AttributeNotFoundException; import javax.management.DynamicMBean; import javax.management.InvalidAttributeValueException; import javax.management.MBeanAttributeInfo; import javax.management.MBeanException; import javax.management.MBeanInfo; import javax.management.ReflectionException; import javax.management.openmbean.CompositeDataSupport; import javax.management.openmbean.CompositeType; import javax.management.openmbean.OpenDataException; import javax.management.openmbean.OpenMBeanAttributeInfoSupport; import javax.management.openmbean.OpenType; import javax.management.openmbean.SimpleType; import javax.management.openmbean.TabularData; import javax.management.openmbean.TabularDataSupport; import javax.management.openmbean.TabularType; import org.apache.sling.hc.api.HealthCheck; import org.apache.sling.hc.api.Result; import org.apache.sling.hc.api.ResultLog; import org.osgi.framework.Constants; import org.osgi.framework.ServiceReference; /** A {@link DynamicMBean} used to execute a {@link HealthCheck} service */ public class HealthCheckMBean implements DynamicMBean { public static final String HC_OK_ATTRIBUTE_NAME = "ok"; public static final String HC_STATUS_ATTRIBUTE_NAME = "status"; public static final String HC_LOG_ATTRIBUTE_NAME = "log"; private static CompositeType LOG_ROW_TYPE; private static TabularType LOG_TABLE_TYPE; public static final String INDEX_COLUMN = "index"; public static final String LEVEL_COLUMN = "level"; public static final String MESSAGE_COLUMN = "message"; public static final String JMX_TYPE_NAME = "HealthCheck"; public static final String JMX_DOMAIN = "org.apache.sling.healthcheck"; /** The health check service to call. */ private final HealthCheck healthCheck; /** The mbean info. */ private final MBeanInfo mbeanInfo; /** The default attributes. 
*/ private final Map<String, Object> defaultAttributes; static { try { // Define the log row and table types LOG_ROW_TYPE = new CompositeType( "LogLine", "A line in the result log", new String [] { INDEX_COLUMN, LEVEL_COLUMN, MESSAGE_COLUMN }, new String [] { "log line index", "log level", "log message"}, new OpenType[] { SimpleType.INTEGER, SimpleType.STRING, SimpleType.STRING } ); final String [] indexes = { INDEX_COLUMN }; LOG_TABLE_TYPE = new TabularType("LogTable", "Result log messages", LOG_ROW_TYPE, indexes); } catch(Exception ignore) { // row or table type will be null if this happens } } public HealthCheckMBean(final ServiceReference ref, final HealthCheck hc) { this.healthCheck = hc; this.mbeanInfo = this.createMBeanInfo(ref); this.defaultAttributes = this.createDefaultAttributes(ref); } @Override public Object getAttribute(final String attribute) throws AttributeNotFoundException, MBeanException, ReflectionException { // we should call getAttributes - and not vice versa to have the result // of a single check call - and not do a check call for each attribute final AttributeList result = this.getAttributes(new String[] {attribute}); if ( result.size() == 0 ) { throw new AttributeNotFoundException(attribute); } final Attribute attr = (Attribute) result.get(0); return attr.getValue(); } private TabularData logData(final Result er) throws OpenDataException { final TabularDataSupport result = new TabularDataSupport(LOG_TABLE_TYPE); int i = 1; for(final ResultLog.Entry e : er) { final Map<String, Object> data = new HashMap<String, Object>(); data.put(INDEX_COLUMN, i++); data.put(LEVEL_COLUMN, e.getStatus().toString()); data.put(MESSAGE_COLUMN, e.getMessage()); result.put(new CompositeDataSupport(LOG_ROW_TYPE, data)); } return result; } @Override public AttributeList getAttributes(final String[] attributes) { final AttributeList result = new AttributeList(); if ( attributes != null ) { Result hcResult = null; for(final String key : attributes) { final Object defaultValue = this.defaultAttributes.get(key); if ( defaultValue != null ) { result.add(new Attribute(key, defaultValue)); } else { // we assume that a valid attribute name is used // which is requesting a hc result if ( hcResult == null ) { hcResult = this.healthCheck.execute(); } if ( HC_OK_ATTRIBUTE_NAME.equals(key) ) { result.add(new Attribute(key, hcResult.isOk())); } else if ( HC_LOG_ATTRIBUTE_NAME.equals(key) ) { try { result.add(new Attribute(key, logData(hcResult))); } catch ( final OpenDataException ignore ) { // we ignore this and simply don't add the attribute } } else if ( HC_STATUS_ATTRIBUTE_NAME.equals(key) ) { result.add(new Attribute(key, hcResult.getStatus().toString())); } } } } return result; } /** * Create the mbean info */ private MBeanInfo createMBeanInfo(final ServiceReference serviceReference) { final List<MBeanAttributeInfo> attrs = new ArrayList<MBeanAttributeInfo>(); // add relevant service properties if ( serviceReference.getProperty(HealthCheck.NAME) != null ) { attrs.add(new MBeanAttributeInfo(HealthCheck.NAME, String.class.getName(), "The name of the health check service.", true, false, false)); } if ( serviceReference.getProperty(HealthCheck.TAGS) != null ) { attrs.add(new MBeanAttributeInfo(HealthCheck.TAGS, String.class.getName(), "The tags of the health check service.", true, false, false)); } if ( serviceReference.getProperty(Constants.SERVICE_PID) != null ) { attrs.add(new MBeanAttributeInfo(Constants.SERVICE_PID, String.class.getName(), "The persistence identifier of the service.", true, 
false, false)); } // add standard attributes attrs.add(new MBeanAttributeInfo(HC_OK_ATTRIBUTE_NAME, Boolean.class.getName(), "The health check result", true, false, false)); attrs.add(new MBeanAttributeInfo(HC_STATUS_ATTRIBUTE_NAME, String.class.getName(), "The health check status", true, false, false)); attrs.add(new OpenMBeanAttributeInfoSupport(HC_LOG_ATTRIBUTE_NAME, "The health check result log", LOG_TABLE_TYPE, true, false, false)); final String description; if ( serviceReference.getProperty(Constants.SERVICE_DESCRIPTION) != null ) { description = serviceReference.getProperty(Constants.SERVICE_DESCRIPTION).toString(); } else { description = "Health check"; } return new MBeanInfo(this.getClass().getName(), description, attrs.toArray(new MBeanAttributeInfo[attrs.size()]), null, null, null); } /** * Create the default attributes. */ private Map<String, Object> createDefaultAttributes(final ServiceReference serviceReference) { final Map<String, Object> list = new HashMap<String, Object>(); if ( serviceReference.getProperty(HealthCheck.NAME) != null ) { list.put(HealthCheck.NAME, serviceReference.getProperty(HealthCheck.NAME).toString()); } if ( serviceReference.getProperty(HealthCheck.TAGS) != null ) { final Object value = serviceReference.getProperty(HealthCheck.TAGS); if ( value instanceof String[] ) { list.put(HealthCheck.TAGS, Arrays.toString((String[])value)); } else { list.put(HealthCheck.TAGS, value.toString()); } } if ( serviceReference.getProperty(Constants.SERVICE_PID) != null ) { list.put(Constants.SERVICE_PID, serviceReference.getProperty(Constants.SERVICE_PID).toString()); } return list; } @Override public MBeanInfo getMBeanInfo() { return this.mbeanInfo; } @Override public Object invoke(final String actionName, final Object[] params, final String[] signature) throws MBeanException, ReflectionException { throw new MBeanException(new UnsupportedOperationException(getClass().getSimpleName() + " does not support operations.")); } @Override public void setAttribute(final Attribute attribute) throws AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException { throw new MBeanException(new UnsupportedOperationException(getClass().getSimpleName() + " does not support setting attributes.")); } @Override public AttributeList setAttributes(final AttributeList attributes) { return new AttributeList(); } @Override public String toString() { return "HealthCheckMBean [healthCheck=" + healthCheck + "]"; } }
contrib/extensions/healthcheck/jmx/src/main/java/org/apache/sling/hc/jmx/impl/HealthCheckMBean.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.apache.sling.hc.jmx.impl; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.management.Attribute; import javax.management.AttributeList; import javax.management.AttributeNotFoundException; import javax.management.DynamicMBean; import javax.management.InvalidAttributeValueException; import javax.management.MBeanAttributeInfo; import javax.management.MBeanException; import javax.management.MBeanInfo; import javax.management.ReflectionException; import javax.management.openmbean.CompositeDataSupport; import javax.management.openmbean.CompositeType; import javax.management.openmbean.OpenDataException; import javax.management.openmbean.OpenMBeanAttributeInfoSupport; import javax.management.openmbean.OpenType; import javax.management.openmbean.SimpleType; import javax.management.openmbean.TabularData; import javax.management.openmbean.TabularDataSupport; import javax.management.openmbean.TabularType; import org.apache.sling.hc.api.HealthCheck; import org.apache.sling.hc.api.Result; import org.apache.sling.hc.api.ResultLog; import org.osgi.framework.Constants; import org.osgi.framework.ServiceReference; /** A {@link DynamicMBean} used to execute a {@link HealthCheck} service */ public class HealthCheckMBean implements DynamicMBean { public static final String HC_OK_ATTRIBUTE_NAME = "ok"; public static final String HC_STATUS_ATTRIBUTE_NAME = "status"; public static final String HC_LOG_ATTRIBUTE_NAME = "log"; private static CompositeType LOG_ROW_TYPE; private static TabularType LOG_TABLE_TYPE; public static final String INDEX_COLUMN = "index"; public static final String LEVEL_COLUMN = "level"; public static final String MESSAGE_COLUMN = "message"; public static final String JMX_TYPE_NAME = "HealthCheck"; public static final String JMX_DOMAIN = "org.apache.sling.healthcheck"; /** The health check service to call. */ private final HealthCheck healthCheck; /** The mbean info. */ private final MBeanInfo mbeanInfo; /** The default attributes. 
*/ private final Map<String, Object> defaultAttributes; static { try { // Define the log row and table types LOG_ROW_TYPE = new CompositeType( "LogLine", "A line in the result log", new String [] { INDEX_COLUMN, LEVEL_COLUMN, MESSAGE_COLUMN }, new String [] { "log line index", "log level", "log message"}, new OpenType[] { SimpleType.INTEGER, SimpleType.STRING, SimpleType.STRING } ); final String [] indexes = { INDEX_COLUMN }; LOG_TABLE_TYPE = new TabularType("LogTable", "Result log messages", LOG_ROW_TYPE, indexes); } catch(Exception ignore) { // row or table type will be null if this happens } } public HealthCheckMBean(final ServiceReference ref, final HealthCheck hc) { this.healthCheck = hc; this.mbeanInfo = this.createMBeanInfo(ref); this.defaultAttributes = this.createDefaultAttributes(ref); } @Override public Object getAttribute(final String attribute) throws AttributeNotFoundException, MBeanException, ReflectionException { // we should call getAttributes - and not vice versa to have the result // of a single check call - and not do a check call for each attribute final AttributeList result = this.getAttributes(new String[] {attribute}); if ( result.size() == 0 ) { throw new AttributeNotFoundException(attribute); } final Attribute attr = (Attribute) result.get(0); return attr.getValue(); } private TabularData logData(final Result er) throws OpenDataException { final TabularDataSupport result = new TabularDataSupport(LOG_TABLE_TYPE); int i = 1; for(final ResultLog.Entry e : er) { final Map<String, Object> data = new HashMap<String, Object>(); data.put(INDEX_COLUMN, i++); data.put(LEVEL_COLUMN, e.getStatus().toString()); data.put(MESSAGE_COLUMN, e.getMessage()); result.put(new CompositeDataSupport(LOG_ROW_TYPE, data)); } return result; } @Override public AttributeList getAttributes(final String[] attributes) { final AttributeList result = new AttributeList(); if ( attributes != null ) { Result hcResult = null; for(final String key : attributes) { final Object defaultValue = this.defaultAttributes.get(key); if ( defaultValue != null ) { result.add(new Attribute(key, defaultValue)); } else { // we assume that a valid attribute name is used // which is requesting a hc result if ( hcResult == null ) { hcResult = this.healthCheck.execute(); } if ( HC_OK_ATTRIBUTE_NAME.equals(key) ) { result.add(new Attribute(key, hcResult.isOk())); } else if ( HC_LOG_ATTRIBUTE_NAME.equals(key) ) { try { result.add(new Attribute(key, logData(hcResult))); } catch ( final OpenDataException ignore ) { // we ignore this and simply don't add the attribute } } else if ( HC_STATUS_ATTRIBUTE_NAME.equals(key) ) { result.add(new Attribute(key, hcResult.getStatus().toString())); } } } } return result; } /** * Create the mbean info */ private MBeanInfo createMBeanInfo(final ServiceReference serviceReference) { final List<MBeanAttributeInfo> attrs = new ArrayList<MBeanAttributeInfo>(); // add relevant service properties if ( serviceReference.getProperty(HealthCheck.NAME) != null ) { attrs.add(new MBeanAttributeInfo(HealthCheck.NAME, String.class.getName(), "The name of the health check service.", true, false, false)); } if ( serviceReference.getProperty(HealthCheck.TAGS) != null ) { attrs.add(new MBeanAttributeInfo(HealthCheck.TAGS, String.class.getName(), "The tags of the health check service.", true, false, false)); } // add standard attributes attrs.add(new MBeanAttributeInfo(HC_OK_ATTRIBUTE_NAME, Boolean.class.getName(), "The health check result", true, false, false)); attrs.add(new 
MBeanAttributeInfo(HC_STATUS_ATTRIBUTE_NAME, String.class.getName(), "The health check status", true, false, false)); attrs.add(new OpenMBeanAttributeInfoSupport(HC_LOG_ATTRIBUTE_NAME, "The health check result log", LOG_TABLE_TYPE, true, false, false)); final String description; if ( serviceReference.getProperty(Constants.SERVICE_DESCRIPTION) != null ) { description = serviceReference.getProperty(Constants.SERVICE_DESCRIPTION).toString(); } else { description = "Health check"; } return new MBeanInfo(this.getClass().getName(), description, attrs.toArray(new MBeanAttributeInfo[attrs.size()]), null, null, null); } /** * Create the default attributes. */ private Map<String, Object> createDefaultAttributes(final ServiceReference serviceReference) { final Map<String, Object> list = new HashMap<String, Object>(); if ( serviceReference.getProperty(HealthCheck.NAME) != null ) { list.put(HealthCheck.NAME, serviceReference.getProperty(HealthCheck.NAME).toString()); } if ( serviceReference.getProperty(HealthCheck.TAGS) != null ) { final Object value = serviceReference.getProperty(HealthCheck.TAGS); if ( value instanceof String[] ) { list.put(HealthCheck.TAGS, Arrays.toString((String[])value)); } else { list.put(HealthCheck.TAGS, value.toString()); } } return list; } @Override public MBeanInfo getMBeanInfo() { return this.mbeanInfo; } @Override public Object invoke(final String actionName, final Object[] params, final String[] signature) throws MBeanException, ReflectionException { throw new MBeanException(new UnsupportedOperationException(getClass().getSimpleName() + " does not support operations.")); } @Override public void setAttribute(final Attribute attribute) throws AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException { throw new MBeanException(new UnsupportedOperationException(getClass().getSimpleName() + " does not support setting attributes.")); } @Override public AttributeList setAttributes(final AttributeList attributes) { return new AttributeList(); } @Override public String toString() { return "HealthCheckMBean [healthCheck=" + healthCheck + "]"; } }
Add persistence id as mbean attribute git-svn-id: 6eed74fe9a15c8da84b9a8d7f2960c0406113ece@1519581 13f79535-47bb-0310-9956-ffa450edef68
contrib/extensions/healthcheck/jmx/src/main/java/org/apache/sling/hc/jmx/impl/HealthCheckMBean.java
Add persistence id as mbean attribute
<ide><path>contrib/extensions/healthcheck/jmx/src/main/java/org/apache/sling/hc/jmx/impl/HealthCheckMBean.java <ide> if ( serviceReference.getProperty(HealthCheck.TAGS) != null ) { <ide> attrs.add(new MBeanAttributeInfo(HealthCheck.TAGS, String.class.getName(), "The tags of the health check service.", true, false, false)); <ide> } <add> if ( serviceReference.getProperty(Constants.SERVICE_PID) != null ) { <add> attrs.add(new MBeanAttributeInfo(Constants.SERVICE_PID, String.class.getName(), "The persistence identifier of the service.", true, false, false)); <add> } <ide> <ide> // add standard attributes <ide> attrs.add(new MBeanAttributeInfo(HC_OK_ATTRIBUTE_NAME, Boolean.class.getName(), "The health check result", true, false, false)); <ide> list.put(HealthCheck.TAGS, value.toString()); <ide> } <ide> } <add> if ( serviceReference.getProperty(Constants.SERVICE_PID) != null ) { <add> list.put(Constants.SERVICE_PID, serviceReference.getProperty(Constants.SERVICE_PID).toString()); <add> } <ide> return list; <ide> } <ide>
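After this commit the OSGi persistence identifier is exposed as a read-only MBean attribute next to the existing ones. A minimal client sketch for reading it over JMX follows; the ObjectName is an assumption built from the JMX_DOMAIN and JMX_TYPE_NAME constants in the class (the actual name is chosen by whatever code registers the bean), and "exampleCheck" is a hypothetical health check name.

import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;

// Hypothetical reader for the attribute added by this commit.
class HealthCheckPidReaderSketch {
    public static void main(final String[] args) throws Exception {
        final MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        // Assumed registration pattern built from JMX_DOMAIN/JMX_TYPE_NAME above.
        final ObjectName name = new ObjectName(
                "org.apache.sling.healthcheck:type=HealthCheck,name=exampleCheck");
        // Constants.SERVICE_PID resolves to "service.pid" in the OSGi framework API.
        final Object pid = server.getAttribute(name, "service.pid");
        System.out.println("service.pid = " + pid);
    }
}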
Java
mit
df8ae3db2b93dc34fd03e577e272f18d687ecc42
0
CreaRo/dawebmail
package com.sigmobile.dawebmail.network; import android.content.Context; import android.util.Base64; import com.sigmobile.dawebmail.R; import com.sigmobile.dawebmail.database.EmailMessage; import com.sigmobile.dawebmail.database.User; import com.sigmobile.dawebmail.utils.BasePath; import com.sigmobile.dawebmail.utils.Constants; import org.json.JSONObject; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; import java.util.ArrayList; /** * Created by rish on 7/1/16. */ public class RestAPI { private static final String TAG = "RESTAPI"; private static final int TIME_OUT = 10 * 1000; private User user; private Context context; private ArrayList<EmailMessage> allNewEmails = new ArrayList<>(); public RestAPI(User user, Context context) { this.user = user; this.context = context; } public boolean logIn() { return makeLoginRequest(); } public boolean refresh(String folder) { return handleRefreshAndLoadMoreRequest(folder, Constants.REFRESH_TYPE_REFRESH); } public boolean loadMore(String folder, int lengthToLoad) { return handleRefreshAndLoadMoreRequest(folder, Constants.REFRESH_TYPE_LOAD_MORE, lengthToLoad); } public EmailMessage fetchEmailContent(EmailMessage emailMessage) { return makeFetchRequest(emailMessage); } private boolean makeLoginRequest() { try { URL url = new URL(context.getString(R.string.rest_url_login)); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); String userPassword = user.username + ":" + user.password; String encoding = Base64.encodeToString(userPassword.getBytes(), Base64.DEFAULT); conn.setRequestProperty("Authorization", "Basic " + encoding); conn.setReadTimeout(TIME_OUT); conn.connect(); if (conn.getResponseCode() == 200) { InputStream in = new BufferedInputStream(conn.getInputStream()); BufferedReader r = new BufferedReader(new InputStreamReader(in)); StringBuilder total = new StringBuilder(); String line; while ((line = r.readLine()) != null) { total.append(line); } in.close(); return true; } else { return false; } } catch (Exception e) { e.printStackTrace(); return false; } } private ArrayList<EmailMessage> fetchMailsOfFolder(String folder) { ArrayList<EmailMessage> parsedMails = new ArrayList<>(); URL url = null; try { if (folder.equals(Constants.INBOX)) url = new URL(context.getString(R.string.rest_url_inbox)); else if (folder.equals(Constants.SENT)) url = new URL(context.getString(R.string.rest_url_sent)); else if (folder.equals(Constants.TRASH)) url = new URL(context.getString(R.string.rest_url_trash)); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); String userPassword = user.username + ":" + user.password; String encoding = Base64.encodeToString(userPassword.getBytes(), Base64.DEFAULT); conn.setRequestProperty("Authorization", "Basic " + encoding); conn.setReadTimeout(TIME_OUT); conn.connect(); if (conn.getResponseCode() == 200) { InputStream in = new BufferedInputStream(conn.getInputStream()); BufferedReader r = new BufferedReader(new InputStreamReader(in)); StringBuilder total = new StringBuilder(); String line; while ((line = r.readLine()) != null) { total.append(line); } in.close(); JSONObject responseObject = new JSONObject(total.toString()); for (int i = 0; i < responseObject.getJSONArray("m").length(); i++) { JSONObject webmailObject = (JSONObject) 
responseObject.getJSONArray("m").get(i); int contentID = Integer.parseInt(webmailObject.getString("id")); int totalAttachments = 0; String fromName = "fromName"; String fromAddress = "fromAddress"; String subject = webmailObject.getString("su"); String readUnread = Constants.WEBMAIL_READ; boolean important = false; if (webmailObject.has("f")) { if (webmailObject.getString("f").contains("u")) readUnread = Constants.WEBMAIL_UNREAD; if (webmailObject.getString("f").contains("a")) totalAttachments = 1; if (webmailObject.getString("f").contains("!")) important = true; else important = false; } String dateInMillis = webmailObject.getString("d"); for (int j = 0; j < webmailObject.getJSONArray("e").length(); j++) { JSONObject fromToObject = (JSONObject) webmailObject.getJSONArray("e").get(j); if (fromToObject.getString("t").equals("f")) { fromAddress = fromToObject.getString("a"); if (fromToObject.has("p")) fromName = fromToObject.getString("p"); else fromName = fromToObject.getString("d"); } } EmailMessage emailMessage = new EmailMessage(user.username, contentID, fromName, fromAddress, subject, dateInMillis, readUnread, "", totalAttachments, important); parsedMails.add(emailMessage); } return parsedMails; } else { return parsedMails; } } catch (Exception e) { e.printStackTrace(); return parsedMails; } } private boolean handleRefreshAndLoadMoreRequest(String folder, String refreshType, int lengthToLoad) { allNewEmails = new ArrayList<>(); if (folder.equals(Constants.INBOX)) { /** * Traverse through all stored emails, and delete those that aren't there in fetchedList */ ArrayList<EmailMessage> fetchedEmails = fetchMailsOfFolder(folder); for (EmailMessage storedEmail : EmailMessage.getAllMailsOfUser(user)) { boolean storedEmailFound = false; for (EmailMessage fetchedEmail : fetchedEmails) { if (fetchedEmail.contentID == storedEmail.contentID) { storedEmailFound = true; break; } } if (!storedEmailFound) { storedEmail.delete(); } } EmailMessage lastWebmail = EmailMessage.getLastWebmailOfUser(user); EmailMessage latestWebmail = EmailMessage.getLatestWebmailOfUser(user); int indexOfLastEmailInFetchedList = 0; int indexOfLatestEmailInFetchedList = 0; /** * Find index of latest and last webmails in the fetched list * All emails above latestEmails are ones to be saved in refresh * lengthToLoad emails below lastEmail are ones to be saved in loadmore */ for (int i = 0; i < fetchedEmails.size(); i++) { if (lastWebmail != null) if (fetchedEmails.get(i).contentID == lastWebmail.contentID) indexOfLastEmailInFetchedList = i; if (latestWebmail != null) if (fetchedEmails.get(i).contentID == latestWebmail.contentID) indexOfLatestEmailInFetchedList = i; } /** * Two cases : Refresh or Load More */ if (refreshType.equals(Constants.REFRESH_TYPE_REFRESH)) { for (int m = 0; m < indexOfLatestEmailInFetchedList; m++) { EmailMessage fetchedEmail = fetchedEmails.get(m); EmailMessage emailMessage = EmailMessage.saveNewEmailMessage(user, fetchedEmail.contentID, fetchedEmail.fromName, fetchedEmail.fromAddress, fetchedEmail.subject, fetchedEmail.dateInMillis, fetchedEmail.readUnread, fetchedEmail.totalAttachments, fetchedEmail.important); allNewEmails.add(emailMessage); } } else if (refreshType.equals(Constants.REFRESH_TYPE_LOAD_MORE)) { /* Check if fetchedEmailSize is big enough to load lengthToLoad */ lengthToLoad = (lengthToLoad + indexOfLastEmailInFetchedList) <= (fetchedEmails.size()) ? 
(lengthToLoad) : (fetchedEmails.size() - indexOfLastEmailInFetchedList); for (int m = indexOfLastEmailInFetchedList; m < indexOfLastEmailInFetchedList + lengthToLoad; m++) { EmailMessage fetchedEmail = fetchedEmails.get(m); EmailMessage emailMessage = EmailMessage.saveNewEmailMessage(user, fetchedEmail.contentID, fetchedEmail.fromName, fetchedEmail.fromAddress, fetchedEmail.subject, fetchedEmail.dateInMillis, fetchedEmail.readUnread, fetchedEmail.totalAttachments, fetchedEmail.important); allNewEmails.add(emailMessage); } } } else { allNewEmails.addAll(fetchMailsOfFolder(folder)); } return false; } private boolean handleRefreshAndLoadMoreRequest(String folder, String refreshType) { return handleRefreshAndLoadMoreRequest(folder, refreshType, Integer.MAX_VALUE); } private EmailMessage makeFetchRequest(EmailMessage emailMessage) { try { URL url = new URL(context.getString(R.string.rest_url_view_webmail) + emailMessage.contentID); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); String userPassword = user.username + ":" + user.password; String encoding = Base64.encodeToString(userPassword.getBytes(), Base64.DEFAULT); conn.setRequestProperty("Authorization", "Basic " + encoding); conn.setReadTimeout(TIME_OUT); conn.connect(); if (conn.getResponseCode() == 200) { InputStream in = new BufferedInputStream(conn.getInputStream()); BufferedReader r = new BufferedReader(new InputStreamReader(in)); StringBuilder total = new StringBuilder(); String line; while ((line = r.readLine()) != null) { total.append(line + "\n"); } in.close(); writeStringAsFile(context, total.toString()); MailParser mailParser = new MailParser(); mailParser.newMailParser(context, emailMessage.contentID, total.toString()); emailMessage.content = mailParser.getContentHTML(); emailMessage.totalAttachments = mailParser.getTotalAttachments(); return emailMessage; } else { return null; } } catch (Exception e) { e.printStackTrace(); return null; } } public static void writeStringAsFile(Context context, final String fileContents) { try { FileWriter out = new FileWriter(new File(BasePath.getBasePath(context), "email.txt")); out.write(fileContents); out.close(); } catch (IOException e) { } } public ArrayList<EmailMessage> getNewEmails() { return allNewEmails; } }
app/src/main/java/com/sigmobile/dawebmail/network/RestAPI.java
package com.sigmobile.dawebmail.network; import android.content.Context; import android.util.Base64; import android.util.Log; import com.sigmobile.dawebmail.R; import com.sigmobile.dawebmail.database.EmailMessage; import com.sigmobile.dawebmail.database.User; import com.sigmobile.dawebmail.utils.BasePath; import com.sigmobile.dawebmail.utils.Constants; import org.json.JSONObject; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; import java.util.ArrayList; /** * Created by rish on 7/1/16. */ public class RestAPI { private static final String TAG = "RESTAPI"; private static final int TIME_OUT = 10 * 1000; private User user; private Context context; private ArrayList<EmailMessage> allNewEmails = new ArrayList<>(); public RestAPI(User user, Context context) { this.user = user; this.context = context; } public boolean logIn() { return makeLoginRequest(); } public boolean refresh(String folder) { return handleRefreshAndLoadMoreRequest(folder, Constants.REFRESH_TYPE_REFRESH); } public boolean loadMore(String folder, int lengthToLoad) { return handleRefreshAndLoadMoreRequest(folder, Constants.REFRESH_TYPE_LOAD_MORE, lengthToLoad); } public EmailMessage fetchEmailContent(EmailMessage emailMessage) { return makeFetchRequest(emailMessage); } private boolean makeLoginRequest() { try { URL url = new URL(context.getString(R.string.rest_url_login)); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); String userPassword = user.username + ":" + user.password; String encoding = Base64.encodeToString(userPassword.getBytes(), Base64.DEFAULT); conn.setRequestProperty("Authorization", "Basic " + encoding); conn.setReadTimeout(TIME_OUT); conn.connect(); Log.d(TAG, "Response Code: " + conn.getResponseCode()); if (conn.getResponseCode() == 200) { Log.d(TAG, "Authenticated User Successfully"); InputStream in = new BufferedInputStream(conn.getInputStream()); BufferedReader r = new BufferedReader(new InputStreamReader(in)); StringBuilder total = new StringBuilder(); String line; while ((line = r.readLine()) != null) { total.append(line); } Log.d(TAG, "" + total.toString()); in.close(); return true; } else { Log.d(TAG, "Unable to Authenticate User"); return false; } } catch (Exception e) { e.printStackTrace(); return false; } } private ArrayList<EmailMessage> fetchMailsOfFolder(String folder) { ArrayList<EmailMessage> parsedMails = new ArrayList<>(); URL url = null; try { if (folder.equals(Constants.INBOX)) url = new URL(context.getString(R.string.rest_url_inbox)); else if (folder.equals(Constants.SENT)) url = new URL(context.getString(R.string.rest_url_sent)); else if (folder.equals(Constants.TRASH)) url = new URL(context.getString(R.string.rest_url_trash)); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); String userPassword = user.username + ":" + user.password; String encoding = Base64.encodeToString(userPassword.getBytes(), Base64.DEFAULT); conn.setRequestProperty("Authorization", "Basic " + encoding); conn.setReadTimeout(TIME_OUT); conn.connect(); if (conn.getResponseCode() == 200) { InputStream in = new BufferedInputStream(conn.getInputStream()); BufferedReader r = new BufferedReader(new InputStreamReader(in)); StringBuilder total = new StringBuilder(); String line; while ((line = r.readLine()) != null) { 
total.append(line); } in.close(); JSONObject responseObject = new JSONObject(total.toString()); for (int i = 0; i < responseObject.getJSONArray("m").length(); i++) { JSONObject webmailObject = (JSONObject) responseObject.getJSONArray("m").get(i); int contentID = Integer.parseInt(webmailObject.getString("id")); int totalAttachments = 0; String fromName = "fromName"; String fromAddress = "fromAddress"; String subject = webmailObject.getString("su"); String readUnread = Constants.WEBMAIL_READ; boolean important = false; if (webmailObject.has("f")) { if (webmailObject.getString("f").contains("u")) readUnread = Constants.WEBMAIL_UNREAD; if (webmailObject.getString("f").contains("a")) totalAttachments = 1; if (webmailObject.getString("f").contains("!")) important = true; else important = false; } String dateInMillis = webmailObject.getString("d"); for (int j = 0; j < webmailObject.getJSONArray("e").length(); j++) { JSONObject fromToObject = (JSONObject) webmailObject.getJSONArray("e").get(j); if (fromToObject.getString("t").equals("f")) { fromAddress = fromToObject.getString("a"); if (fromToObject.has("p")) fromName = fromToObject.getString("p"); else fromName = fromToObject.getString("d"); } } EmailMessage emailMessage = new EmailMessage(user.username, contentID, fromName, fromAddress, subject, dateInMillis, readUnread, "", totalAttachments, important); parsedMails.add(emailMessage); } return parsedMails; } else { Log.d(TAG, "Unable to Authenticate User"); return parsedMails; } } catch (Exception e) { e.printStackTrace(); return parsedMails; } } private boolean handleRefreshAndLoadMoreRequest(String folder, String refreshType, int lengthToLoad) { allNewEmails = new ArrayList<>(); if (folder.equals(Constants.INBOX)) { /** * Traverse through all stored emails, and delete those that aren't there in fetchedList */ ArrayList<EmailMessage> fetchedEmails = fetchMailsOfFolder(folder); for (EmailMessage storedEmail : EmailMessage.getAllMailsOfUser(user)) { boolean storedEmailFound = false; for (EmailMessage fetchedEmail : fetchedEmails) { if (fetchedEmail.contentID == storedEmail.contentID) { storedEmailFound = true; break; } } if (!storedEmailFound) { Log.d(TAG, "Not found, deleted"); storedEmail.delete(); } } EmailMessage lastWebmail = EmailMessage.getLastWebmailOfUser(user); EmailMessage latestWebmail = EmailMessage.getLatestWebmailOfUser(user); int indexOfLastEmailInFetchedList = 0; int indexOfLatestEmailInFetchedList = 0; /** * Find index of latest and last webmails in the fetched list * All emails above latestEmails are ones to be saved in refresh * lengthToLoad emails below lastEmail are ones to be saved in loadmore */ for (int i = 0; i < fetchedEmails.size(); i++) { if (lastWebmail != null) if (fetchedEmails.get(i).contentID == lastWebmail.contentID) indexOfLastEmailInFetchedList = i; if (latestWebmail != null) if (fetchedEmails.get(i).contentID == latestWebmail.contentID) indexOfLatestEmailInFetchedList = i; } Log.d(TAG, "indexOfLastEmailInFetchedList" + indexOfLastEmailInFetchedList + " indexOfLatestEmailInFetchedList " + indexOfLatestEmailInFetchedList); /** * Two cases : Refresh or Load More */ if (refreshType.equals(Constants.REFRESH_TYPE_REFRESH)) { Log.d(TAG, "Type refresh"); for (int m = 0; m < indexOfLatestEmailInFetchedList; m++) { EmailMessage fetchedEmail = fetchedEmails.get(m); EmailMessage emailMessage = EmailMessage.saveNewEmailMessage(user, fetchedEmail.contentID, fetchedEmail.fromName, fetchedEmail.fromAddress, fetchedEmail.subject, fetchedEmail.dateInMillis, 
fetchedEmail.readUnread, fetchedEmail.totalAttachments, fetchedEmail.important); allNewEmails.add(emailMessage); } } else if (refreshType.equals(Constants.REFRESH_TYPE_LOAD_MORE)) { /* Check if fetchedEmailSize is big enough to load lengthToLoad */ lengthToLoad = (lengthToLoad + indexOfLastEmailInFetchedList) <= (fetchedEmails.size()) ? (lengthToLoad) : (fetchedEmails.size() - indexOfLastEmailInFetchedList); Log.d(TAG, "Length to load is " + lengthToLoad + " starting from " + indexOfLastEmailInFetchedList); for (int m = indexOfLastEmailInFetchedList; m < indexOfLastEmailInFetchedList + lengthToLoad; m++) { EmailMessage fetchedEmail = fetchedEmails.get(m); EmailMessage emailMessage = EmailMessage.saveNewEmailMessage(user, fetchedEmail.contentID, fetchedEmail.fromName, fetchedEmail.fromAddress, fetchedEmail.subject, fetchedEmail.dateInMillis, fetchedEmail.readUnread, fetchedEmail.totalAttachments, fetchedEmail.important); allNewEmails.add(emailMessage); } } } else { allNewEmails.addAll(fetchMailsOfFolder(folder)); } return false; } private boolean handleRefreshAndLoadMoreRequest(String folder, String refreshType) { return handleRefreshAndLoadMoreRequest(folder, refreshType, Integer.MAX_VALUE); } private EmailMessage makeFetchRequest(EmailMessage emailMessage) { try { URL url = new URL(context.getString(R.string.rest_url_view_webmail) + emailMessage.contentID); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("GET"); String userPassword = user.username + ":" + user.password; String encoding = Base64.encodeToString(userPassword.getBytes(), Base64.DEFAULT); conn.setRequestProperty("Authorization", "Basic " + encoding); conn.setReadTimeout(TIME_OUT); conn.connect(); if (conn.getResponseCode() == 200) { InputStream in = new BufferedInputStream(conn.getInputStream()); BufferedReader r = new BufferedReader(new InputStreamReader(in)); StringBuilder total = new StringBuilder(); String line; while ((line = r.readLine()) != null) { total.append(line + "\n"); } in.close(); writeStringAsFile(context, total.toString()); MailParser mailParser = new MailParser(); mailParser.newMailParser(context, emailMessage.contentID, total.toString()); emailMessage.content = mailParser.getContentHTML(); emailMessage.totalAttachments = mailParser.getTotalAttachments(); return emailMessage; } else { Log.d(TAG, "Unable to Authenticate User"); return null; } } catch (Exception e) { e.printStackTrace(); return null; } } public static void writeStringAsFile(Context context, final String fileContents) { try { FileWriter out = new FileWriter(new File(BasePath.getBasePath(context), "email.txt")); out.write(fileContents); out.close(); } catch (IOException e) { } } public ArrayList<EmailMessage> getNewEmails() { return allNewEmails; } }
Lazy loading works 100%
app/src/main/java/com/sigmobile/dawebmail/network/RestAPI.java
Lazy loading works 100%
<ide><path>app/src/main/java/com/sigmobile/dawebmail/network/RestAPI.java <ide> <ide> import android.content.Context; <ide> import android.util.Base64; <del>import android.util.Log; <ide> <ide> import com.sigmobile.dawebmail.R; <ide> import com.sigmobile.dawebmail.database.EmailMessage; <ide> conn.setReadTimeout(TIME_OUT); <ide> conn.connect(); <ide> <del> Log.d(TAG, "Response Code: " + conn.getResponseCode()); <ide> if (conn.getResponseCode() == 200) { <del> Log.d(TAG, "Authenticated User Successfully"); <ide> InputStream in = new BufferedInputStream(conn.getInputStream()); <ide> BufferedReader r = new BufferedReader(new InputStreamReader(in)); <ide> StringBuilder total = new StringBuilder(); <ide> while ((line = r.readLine()) != null) { <ide> total.append(line); <ide> } <del> Log.d(TAG, "" + total.toString()); <ide> in.close(); <ide> return true; <ide> } else { <del> Log.d(TAG, "Unable to Authenticate User"); <ide> return false; <ide> } <ide> } catch (Exception e) { <ide> } <ide> return parsedMails; <ide> } else { <del> Log.d(TAG, "Unable to Authenticate User"); <ide> return parsedMails; <ide> } <ide> } catch (Exception e) { <ide> } <ide> } <ide> if (!storedEmailFound) { <del> Log.d(TAG, "Not found, deleted"); <ide> storedEmail.delete(); <ide> } <ide> } <ide> indexOfLatestEmailInFetchedList = i; <ide> } <ide> <del> Log.d(TAG, "indexOfLastEmailInFetchedList" + indexOfLastEmailInFetchedList + " indexOfLatestEmailInFetchedList " + indexOfLatestEmailInFetchedList); <del> <ide> /** <ide> * Two cases : Refresh or Load More <ide> */ <ide> if (refreshType.equals(Constants.REFRESH_TYPE_REFRESH)) { <del> Log.d(TAG, "Type refresh"); <ide> for (int m = 0; m < indexOfLatestEmailInFetchedList; m++) { <ide> EmailMessage fetchedEmail = fetchedEmails.get(m); <ide> EmailMessage emailMessage = EmailMessage.saveNewEmailMessage(user, fetchedEmail.contentID, fetchedEmail.fromName, fetchedEmail.fromAddress, fetchedEmail.subject, fetchedEmail.dateInMillis, fetchedEmail.readUnread, fetchedEmail.totalAttachments, fetchedEmail.important); <ide> } else if (refreshType.equals(Constants.REFRESH_TYPE_LOAD_MORE)) { <ide> /* Check if fetchedEmailSize is big enough to load lengthToLoad */ <ide> lengthToLoad = (lengthToLoad + indexOfLastEmailInFetchedList) <= (fetchedEmails.size()) ? (lengthToLoad) : (fetchedEmails.size() - indexOfLastEmailInFetchedList); <del> Log.d(TAG, "Length to load is " + lengthToLoad + " starting from " + indexOfLastEmailInFetchedList); <ide> for (int m = indexOfLastEmailInFetchedList; m < indexOfLastEmailInFetchedList + lengthToLoad; m++) { <ide> EmailMessage fetchedEmail = fetchedEmails.get(m); <ide> EmailMessage emailMessage = EmailMessage.saveNewEmailMessage(user, fetchedEmail.contentID, fetchedEmail.fromName, fetchedEmail.fromAddress, fetchedEmail.subject, fetchedEmail.dateInMillis, fetchedEmail.readUnread, fetchedEmail.totalAttachments, fetchedEmail.important); <ide> <ide> return emailMessage; <ide> } else { <del> Log.d(TAG, "Unable to Authenticate User"); <ide> return null; <ide> } <ide> } catch (Exception e) {
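The diff above removes every android.util.Log.d(...) call from RestAPI outright. A minimal sketch of an alternative, assuming a standard Android Gradle build (the DLog class name and its placement are hypothetical and not part of this repository), is to gate the calls on the generated BuildConfig.DEBUG flag so they remain available in debug builds while becoming no-ops in release builds:

package com.sigmobile.dawebmail.utils;

import android.util.Log;

import com.sigmobile.dawebmail.BuildConfig;

// Hypothetical helper: forwards to Log.d only in debug builds, so verbose
// request/response logging need not be deleted before release.
public final class DLog {

    private DLog() {
    }

    public static void d(String tag, String message) {
        // BuildConfig.DEBUG is generated by the Android Gradle plugin and is
        // false in release variants, making this call a cheap no-op there.
        if (BuildConfig.DEBUG) {
            Log.d(tag, message);
        }
    }
}

With this shape, makeLoginRequest could keep a line such as DLog.d(TAG, "Response Code: " + conn.getResponseCode()) without that output shipping in release builds.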
Java
apache-2.0
bb7fca99d71e95249cecfe1929d7fd9c0638194c
0
gurbuzali/hazelcast-jet,gurbuzali/hazelcast-jet
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.jet.impl.connector; import com.hazelcast.cache.ICache; import com.hazelcast.cache.journal.EventJournalCacheEvent; import com.hazelcast.config.CacheSimpleConfig; import com.hazelcast.config.Config; import com.hazelcast.config.EventJournalConfig; import com.hazelcast.jet.JetInstance; import com.hazelcast.jet.Job; import com.hazelcast.jet.config.JetConfig; import com.hazelcast.jet.core.DAG; import com.hazelcast.jet.core.JetTestSupport; import com.hazelcast.jet.core.Vertex; import com.hazelcast.jet.core.processor.SourceProcessors; import com.hazelcast.jet.IListJet; import com.hazelcast.jet.IMapJet; import com.hazelcast.jet.ICacheJet; import com.hazelcast.jet.JetCacheManager; import com.hazelcast.map.journal.EventJournalMapEvent; import com.hazelcast.projection.Projections; import com.hazelcast.query.Predicates; import com.hazelcast.query.TruePredicate; import com.hazelcast.test.HazelcastParallelClassRunner; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import java.util.Map; import java.util.Map.Entry; import java.util.stream.IntStream; import static com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST; import static com.hazelcast.jet.Util.mapPutEvents; import static com.hazelcast.jet.core.Edge.between; import static com.hazelcast.jet.core.WatermarkEmissionPolicy.suppressDuplicates; import static com.hazelcast.jet.core.WatermarkGenerationParams.noWatermarks; import static com.hazelcast.jet.core.WatermarkGenerationParams.wmGenParams; import static com.hazelcast.jet.core.WatermarkPolicies.limitingLag; import static com.hazelcast.jet.core.processor.SinkProcessors.writeCacheP; import static com.hazelcast.jet.core.processor.SinkProcessors.writeListP; import static com.hazelcast.jet.core.processor.SinkProcessors.writeMapP; import static com.hazelcast.jet.core.processor.SourceProcessors.readCacheP; import static com.hazelcast.jet.core.processor.SourceProcessors.readListP; import static com.hazelcast.jet.core.processor.SourceProcessors.readMapP; import static com.hazelcast.jet.core.processor.SourceProcessors.streamCacheP; import static com.hazelcast.jet.core.processor.SourceProcessors.streamMapP; import static com.hazelcast.query.impl.predicates.PredicateTestUtils.entry; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toList; import static java.util.stream.IntStream.range; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @RunWith(HazelcastParallelClassRunner.class) public class HazelcastConnectorTest extends JetTestSupport { private static final int ENTRY_COUNT = 100; private JetInstance jetInstance; private String sourceName; private String sinkName; private String streamSourceName; private String streamSinkName; @Before public void setup() { JetConfig jetConfig = new JetConfig(); Config hazelcastConfig = 
jetConfig.getHazelcastConfig(); hazelcastConfig.addCacheConfig(new CacheSimpleConfig().setName("*")); hazelcastConfig.addEventJournalConfig(new EventJournalConfig().setCacheName("stream*").setMapName("stream*")); jetInstance = createJetMember(jetConfig); JetInstance jetInstance2 = createJetMember(jetConfig); sourceName = randomString(); sinkName = randomString(); streamSourceName = "stream" + sourceName; streamSinkName = "stream" + sinkName; // workaround for `cache is not created` exception, create cache locally on all nodes JetCacheManager cacheManager = jetInstance2.getCacheManager(); cacheManager.getCache(sourceName); cacheManager.getCache(sinkName); cacheManager.getCache(streamSourceName); cacheManager.getCache(streamSinkName); } @Test public void when_readMap_and_writeMap() { IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(sourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i)); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readMapP(sourceName)); Vertex sink = dag.newVertex("sink", writeMapP(sinkName)); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); assertEquals(ENTRY_COUNT, jetInstance.getMap(sinkName).size()); } @Test public void when_readMap_withNativePredicateAndProjection() { IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(sourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i)); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readMapP(sourceName, Predicates.greaterThan("this", "0"), Projections.singleAttribute("value") ) ); Vertex sink = dag.newVertex("sink", writeListP(sinkName)); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); IListJet<Object> list = jetInstance.getList(sinkName); assertEquals(ENTRY_COUNT - 1, list.size()); for (int i = 0; i < ENTRY_COUNT; i++) { assertEquals(i != 0, list.contains(i)); } } @Test public void when_readMap_withProjectionToNull_then_nullsSkipped() { IMapJet<Integer, Entry<Integer, String>> sourceMap = jetInstance.getMap(sourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, entry(i, i % 2 == 0 ? 
null : String.valueOf(i)))); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readMapP(sourceName, new TruePredicate<>(), Projections.singleAttribute("value") )); Vertex sink = dag.newVertex("sink", writeListP(sinkName)); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); checkContents_projectedToNull(sinkName); } public void checkContents_projectedToNull(String sinkName) { assertEquals( IntStream.range(0, ENTRY_COUNT) .filter(i -> i % 2 != 0) .mapToObj(String::valueOf) .sorted() .collect(joining("\n")), jetInstance.getHazelcastInstance().<String>getList(sinkName).stream() .sorted() .collect(joining("\n"))); } @Test public void when_readMap_withPredicateAndDistributedFunction() { IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(sourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i)); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readMapP(sourceName, e -> !e.getKey().equals(0), Map.Entry::getKey)); Vertex sink = dag.newVertex("sink", writeListP(sinkName)); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); IListJet<Object> list = jetInstance.getList(sinkName); assertEquals(ENTRY_COUNT - 1, list.size()); assertFalse(list.contains(0)); assertTrue(list.contains(1)); } @Test public void when_streamMap() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", streamMapP(streamSourceName, START_FROM_OLDEST, wmGenParams(Entry<Integer, Integer>::getValue, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(streamSourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i)); assertSizeEventually(ENTRY_COUNT, jetInstance.getList(streamSinkName)); job.cancel(); } @Test public void when_streamMap_withProjectionToNull_then_nullsSkipped() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", SourceProcessors.streamMapP(streamSourceName, mapPutEvents(), (EventJournalMapEvent<Integer, Entry<Integer, String>> entry) -> entry.getNewValue().getValue(), START_FROM_OLDEST, noWatermarks())); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); IMapJet<Integer, Entry<Integer, String>> sourceMap = jetInstance.getMap(streamSourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, entry(i, i % 2 == 0 ? 
null : String.valueOf(i)))); assertTrueEventually(() -> checkContents_projectedToNull(streamSinkName), 10); job.cancel(); } @Test public void when_streamMap_withFilterAndProjection() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", SourceProcessors.<Integer, Integer, Integer>streamMapP(streamSourceName, event -> event.getKey() != 0, EventJournalMapEvent::getKey, START_FROM_OLDEST, wmGenParams(i -> i, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(streamSourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i)); assertSizeEventually(ENTRY_COUNT - 1, jetInstance.getList(streamSinkName)); assertFalse(jetInstance.getList(streamSinkName).contains(0)); assertTrue(jetInstance.getList(streamSinkName).contains(1)); job.cancel(); } @Test public void when_readCache_and_writeCache() { ICache<Integer, Integer> sourceCache = jetInstance.getCacheManager().getCache(sourceName); range(0, ENTRY_COUNT).forEach(i -> sourceCache.put(i, i)); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readCacheP(sourceName)); Vertex sink = dag.newVertex("sink", writeCacheP(sinkName)); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); assertEquals(ENTRY_COUNT, jetInstance.getCacheManager().getCache(sinkName).size()); } @Test public void when_streamCache() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", streamCacheP(streamSourceName, START_FROM_OLDEST, wmGenParams(Entry<Integer, Integer>::getValue, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); ICacheJet<Integer, Integer> sourceCache = jetInstance.getCacheManager().getCache(streamSourceName); range(0, ENTRY_COUNT).forEach(i -> sourceCache.put(i, i)); assertSizeEventually(ENTRY_COUNT, jetInstance.getList(streamSinkName)); job.cancel(); } @Test public void when_streamCache_withFilterAndProjection() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", SourceProcessors.<Integer, Integer, Integer>streamCacheP(streamSourceName, event -> !event.getKey().equals(0), EventJournalCacheEvent::getKey, START_FROM_OLDEST, wmGenParams(i -> i, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); ICacheJet<Integer, Integer> sourceCache = jetInstance.getCacheManager().getCache(streamSourceName); range(0, ENTRY_COUNT).forEach(i -> sourceCache.put(i, i)); assertSizeEventually(ENTRY_COUNT - 1, jetInstance.getList(streamSinkName)); assertFalse(jetInstance.getList(streamSinkName).contains(0)); assertTrue(jetInstance.getList(streamSinkName).contains(1)); job.cancel(); } @Test public void when_readList_and_writeList() { IListJet<Integer> list = jetInstance.getList(sourceName); list.addAll(range(0, ENTRY_COUNT).boxed().collect(toList())); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readListP(sourceName)).localParallelism(1); Vertex sink = dag.newVertex("sink", writeListP(sinkName)).localParallelism(1); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); assertEquals(ENTRY_COUNT, jetInstance.getList(sinkName).size()); } @Test public void test_defaultFilter_mapJournal() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", 
streamMapP(streamSourceName, START_FROM_OLDEST, wmGenParams(Entry<Integer, Integer>::getValue, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(streamSourceName); sourceMap.put(1, 1); // ADDED sourceMap.remove(1); // REMOVED - filtered out sourceMap.put(1, 2); // ADDED IListJet<Entry<Integer, Integer>> sinkList = jetInstance.getList(streamSinkName); assertTrueEventually(() -> { assertEquals(2, sinkList.size()); Entry<Integer, Integer> e = sinkList.get(0); assertEquals(Integer.valueOf(1), e.getKey()); assertEquals(Integer.valueOf(1), e.getValue()); e = sinkList.get(1); assertEquals(Integer.valueOf(1), e.getKey()); assertEquals(Integer.valueOf(2), e.getValue()); }, 10); job.cancel(); } @Test public void test_defaultFilter_cacheJournal() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", streamCacheP(streamSourceName, START_FROM_OLDEST, wmGenParams(Entry<Integer, Integer>::getValue, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); ICacheJet<Object, Object> sourceCache = jetInstance.getCacheManager().getCache(streamSourceName); sourceCache.put(1, 1); // ADDED sourceCache.remove(1); // REMOVED - filtered out sourceCache.put(1, 2); // UPDATED IListJet<Entry<Integer, Integer>> sinkList = jetInstance.getList(streamSinkName); assertTrueEventually(() -> { assertEquals(2, sinkList.size()); Entry<Integer, Integer> e = sinkList.get(0); assertEquals(Integer.valueOf(1), e.getKey()); assertEquals(Integer.valueOf(1), e.getValue()); e = sinkList.get(1); assertEquals(Integer.valueOf(1), e.getKey()); assertEquals(Integer.valueOf(2), e.getValue()); }, 10); job.cancel(); } }
hazelcast-jet-core/src/test/java/com/hazelcast/jet/impl/connector/HazelcastConnectorTest.java
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.jet.impl.connector; import com.hazelcast.cache.ICache; import com.hazelcast.cache.journal.EventJournalCacheEvent; import com.hazelcast.config.CacheSimpleConfig; import com.hazelcast.config.Config; import com.hazelcast.config.EventJournalConfig; import com.hazelcast.jet.JetInstance; import com.hazelcast.jet.Job; import com.hazelcast.jet.config.JetConfig; import com.hazelcast.jet.core.DAG; import com.hazelcast.jet.core.JetTestSupport; import com.hazelcast.jet.core.Vertex; import com.hazelcast.jet.core.processor.SourceProcessors; import com.hazelcast.jet.IListJet; import com.hazelcast.jet.IMapJet; import com.hazelcast.jet.ICacheJet; import com.hazelcast.jet.JetCacheManager; import com.hazelcast.map.journal.EventJournalMapEvent; import com.hazelcast.projection.Projections; import com.hazelcast.query.Predicates; import com.hazelcast.query.TruePredicate; import com.hazelcast.test.HazelcastParallelClassRunner; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import java.util.Map; import java.util.Map.Entry; import java.util.stream.IntStream; import static com.hazelcast.jet.pipeline.JournalInitialPosition.START_FROM_OLDEST; import static com.hazelcast.jet.Util.mapPutEvents; import static com.hazelcast.jet.core.Edge.between; import static com.hazelcast.jet.core.WatermarkEmissionPolicy.suppressDuplicates; import static com.hazelcast.jet.core.WatermarkGenerationParams.noWatermarks; import static com.hazelcast.jet.core.WatermarkGenerationParams.wmGenParams; import static com.hazelcast.jet.core.WatermarkPolicies.limitingLag; import static com.hazelcast.jet.core.processor.SinkProcessors.writeCacheP; import static com.hazelcast.jet.core.processor.SinkProcessors.writeListP; import static com.hazelcast.jet.core.processor.SinkProcessors.writeMapP; import static com.hazelcast.jet.core.processor.SourceProcessors.readCacheP; import static com.hazelcast.jet.core.processor.SourceProcessors.readListP; import static com.hazelcast.jet.core.processor.SourceProcessors.readMapP; import static com.hazelcast.jet.core.processor.SourceProcessors.streamCacheP; import static com.hazelcast.jet.core.processor.SourceProcessors.streamMapP; import static com.hazelcast.query.impl.predicates.PredicateTestUtils.entry; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toList; import static java.util.stream.IntStream.range; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @RunWith(HazelcastParallelClassRunner.class) public class HazelcastConnectorTest extends JetTestSupport { private static final int ENTRY_COUNT = 100; private JetInstance jetInstance; private String sourceName; private String sinkName; private String streamSourceName; private String streamSinkName; @Before public void setup() { JetConfig jetConfig = new JetConfig(); Config hazelcastConfig = 
jetConfig.getHazelcastConfig(); hazelcastConfig.addCacheConfig(new CacheSimpleConfig().setName("*")); hazelcastConfig.addEventJournalConfig(new EventJournalConfig().setCacheName("stream*").setMapName("stream*")); jetInstance = createJetMember(jetConfig); JetInstance jetInstance2 = createJetMember(jetConfig); sourceName = randomString(); sinkName = randomString(); streamSourceName = "stream" + sourceName; streamSinkName = "stream" + sinkName; // workaround for `cache is not created` exception, create cache locally on all nodes JetCacheManager cacheManager = jetInstance2.getCacheManager(); cacheManager.getCache(sourceName); cacheManager.getCache(sinkName); cacheManager.getCache(streamSourceName); cacheManager.getCache(streamSinkName); } @Test public void when_readMap_and_writeMap() { IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(sourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i)); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readMapP(sourceName)); Vertex sink = dag.newVertex("sink", writeMapP(sinkName)); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); assertEquals(ENTRY_COUNT, jetInstance.getMap(sinkName).size()); } @Test public void when_readMap_withNativePredicateAndProjection() { IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(sourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i)); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readMapP(sourceName, Predicates.greaterThan("this", "0"), Projections.singleAttribute("value") ) ); Vertex sink = dag.newVertex("sink", writeListP(sinkName)); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); IListJet<Object> list = jetInstance.getList(sinkName); assertEquals(ENTRY_COUNT - 1, list.size()); for (int i = 0; i < ENTRY_COUNT; i++) { assertEquals(i != 0, list.contains(i)); } } @Test public void when_readMap_withProjectionToNull_then_nullsSkipped() { IMapJet<Integer, Entry<Integer, String>> sourceMap = jetInstance.getMap(sourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, entry(i, i % 2 == 0 ? 
null : String.valueOf(i)))); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readMapP(sourceName, new TruePredicate<>(), Projections.singleAttribute("value") )); Vertex sink = dag.newVertex("sink", writeListP(sinkName)); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); checkContents_projectedToNull(sinkName); } public void checkContents_projectedToNull(String sinkName) { assertEquals( IntStream.range(0, ENTRY_COUNT) .filter(i -> i % 2 != 0) .mapToObj(String::valueOf) .sorted() .collect(joining("\n")), jetInstance.getHazelcastInstance().<String>getList(sinkName).stream() .sorted() .collect(joining("\n"))); } @Test public void when_readMap_withPredicateAndDistributedFunction() { IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(sourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i)); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readMapP(sourceName, e -> !e.getKey().equals(0), Map.Entry::getKey)); Vertex sink = dag.newVertex("sink", writeListP(sinkName)); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); IListJet<Object> list = jetInstance.getList(sinkName); assertEquals(ENTRY_COUNT - 1, list.size()); assertFalse(list.contains(0)); assertTrue(list.contains(1)); } @Test public void when_streamMap() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", streamMapP(streamSourceName, START_FROM_OLDEST, wmGenParams(Entry<Integer, Integer>::getValue, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(streamSourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i)); assertSizeEventually(ENTRY_COUNT, jetInstance.getList(streamSinkName)); job.cancel(); } @Test public void when_streamMap_withProjectionToNull_then_nullsSkipped() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", SourceProcessors.streamMapP(streamSourceName, mapPutEvents(), (EventJournalMapEvent<Integer, Entry<Integer, String>> entry) -> entry.getNewValue().getValue(), START_FROM_OLDEST, noWatermarks())); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); IMapJet<Integer, Entry<Integer, String>> sourceMap = jetInstance.getMap(streamSourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, entry(i, i % 2 == 0 ? 
null : String.valueOf(i)))); assertTrueEventually(() -> checkContents_projectedToNull(streamSinkName), 3); job.cancel(); } @Test public void when_streamMap_withFilterAndProjection() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", SourceProcessors.<Integer, Integer, Integer>streamMapP(streamSourceName, event -> event.getKey() != 0, EventJournalMapEvent::getKey, START_FROM_OLDEST, wmGenParams(i -> i, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(streamSourceName); range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, i)); assertSizeEventually(ENTRY_COUNT - 1, jetInstance.getList(streamSinkName)); assertFalse(jetInstance.getList(streamSinkName).contains(0)); assertTrue(jetInstance.getList(streamSinkName).contains(1)); job.cancel(); } @Test public void when_readCache_and_writeCache() { ICache<Integer, Integer> sourceCache = jetInstance.getCacheManager().getCache(sourceName); range(0, ENTRY_COUNT).forEach(i -> sourceCache.put(i, i)); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readCacheP(sourceName)); Vertex sink = dag.newVertex("sink", writeCacheP(sinkName)); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); assertEquals(ENTRY_COUNT, jetInstance.getCacheManager().getCache(sinkName).size()); } @Test public void when_streamCache() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", streamCacheP(streamSourceName, START_FROM_OLDEST, wmGenParams(Entry<Integer, Integer>::getValue, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); ICacheJet<Integer, Integer> sourceCache = jetInstance.getCacheManager().getCache(streamSourceName); range(0, ENTRY_COUNT).forEach(i -> sourceCache.put(i, i)); assertSizeEventually(ENTRY_COUNT, jetInstance.getList(streamSinkName)); job.cancel(); } @Test public void when_streamCache_withFilterAndProjection() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", SourceProcessors.<Integer, Integer, Integer>streamCacheP(streamSourceName, event -> !event.getKey().equals(0), EventJournalCacheEvent::getKey, START_FROM_OLDEST, wmGenParams(i -> i, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); ICacheJet<Integer, Integer> sourceCache = jetInstance.getCacheManager().getCache(streamSourceName); range(0, ENTRY_COUNT).forEach(i -> sourceCache.put(i, i)); assertSizeEventually(ENTRY_COUNT - 1, jetInstance.getList(streamSinkName)); assertFalse(jetInstance.getList(streamSinkName).contains(0)); assertTrue(jetInstance.getList(streamSinkName).contains(1)); job.cancel(); } @Test public void when_readList_and_writeList() { IListJet<Integer> list = jetInstance.getList(sourceName); list.addAll(range(0, ENTRY_COUNT).boxed().collect(toList())); DAG dag = new DAG(); Vertex source = dag.newVertex("source", readListP(sourceName)).localParallelism(1); Vertex sink = dag.newVertex("sink", writeListP(sinkName)).localParallelism(1); dag.edge(between(source, sink)); jetInstance.newJob(dag).join(); assertEquals(ENTRY_COUNT, jetInstance.getList(sinkName).size()); } @Test public void test_defaultFilter_mapJournal() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", 
streamMapP(streamSourceName, START_FROM_OLDEST, wmGenParams(Entry<Integer, Integer>::getValue, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); IMapJet<Integer, Integer> sourceMap = jetInstance.getMap(streamSourceName); sourceMap.put(1, 1); // ADDED sourceMap.remove(1); // REMOVED - filtered out sourceMap.put(1, 2); // ADDED IListJet<Entry<Integer, Integer>> sinkList = jetInstance.getList(streamSinkName); assertTrueEventually(() -> { assertEquals(2, sinkList.size()); Entry<Integer, Integer> e = sinkList.get(0); assertEquals(Integer.valueOf(1), e.getKey()); assertEquals(Integer.valueOf(1), e.getValue()); e = sinkList.get(1); assertEquals(Integer.valueOf(1), e.getKey()); assertEquals(Integer.valueOf(2), e.getValue()); }, 3); job.cancel(); } @Test public void test_defaultFilter_cacheJournal() { DAG dag = new DAG(); Vertex source = dag.newVertex("source", streamCacheP(streamSourceName, START_FROM_OLDEST, wmGenParams(Entry<Integer, Integer>::getValue, limitingLag(0), suppressDuplicates(), 10_000))); Vertex sink = dag.newVertex("sink", writeListP(streamSinkName)); dag.edge(between(source, sink)); Job job = jetInstance.newJob(dag); ICacheJet<Object, Object> sourceCache = jetInstance.getCacheManager().getCache(streamSourceName); sourceCache.put(1, 1); // ADDED sourceCache.remove(1); // REMOVED - filtered out sourceCache.put(1, 2); // UPDATED IListJet<Entry<Integer, Integer>> sinkList = jetInstance.getList(streamSinkName); assertTrueEventually(() -> { assertEquals(2, sinkList.size()); Entry<Integer, Integer> e = sinkList.get(0); assertEquals(Integer.valueOf(1), e.getKey()); assertEquals(Integer.valueOf(1), e.getValue()); e = sinkList.get(1); assertEquals(Integer.valueOf(1), e.getKey()); assertEquals(Integer.valueOf(2), e.getValue()); }, 3); job.cancel(); } }
Attempt to fix test failure (#738) Fixes #735
hazelcast-jet-core/src/test/java/com/hazelcast/jet/impl/connector/HazelcastConnectorTest.java
Attempt to fix test failure (#738)
<ide><path>hazelcast-jet-core/src/test/java/com/hazelcast/jet/impl/connector/HazelcastConnectorTest.java <ide> IMapJet<Integer, Entry<Integer, String>> sourceMap = jetInstance.getMap(streamSourceName); <ide> range(0, ENTRY_COUNT).forEach(i -> sourceMap.put(i, entry(i, i % 2 == 0 ? null : String.valueOf(i)))); <ide> <del> assertTrueEventually(() -> checkContents_projectedToNull(streamSinkName), 3); <add> assertTrueEventually(() -> checkContents_projectedToNull(streamSinkName), 10); <ide> job.cancel(); <ide> } <ide> <ide> e = sinkList.get(1); <ide> assertEquals(Integer.valueOf(1), e.getKey()); <ide> assertEquals(Integer.valueOf(2), e.getValue()); <del> }, 3); <add> }, 10); <ide> <ide> job.cancel(); <ide> } <ide> e = sinkList.get(1); <ide> assertEquals(Integer.valueOf(1), e.getKey()); <ide> assertEquals(Integer.valueOf(2), e.getValue()); <del> }, 3); <add> }, 10); <ide> <ide> job.cancel(); <ide> }
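The commit above addresses a flaky test by raising each assertTrueEventually timeout from 3 to 10 seconds, giving the journal-backed stream sources more time to emit before the assertion is declared failed. For context, a minimal self-contained sketch of the polling pattern behind such an "eventually" assertion (an illustration of the technique only; the helper below is hypothetical and is not Hazelcast's actual test-support implementation):

import java.util.concurrent.TimeUnit;

public final class EventuallyAssert {

    private EventuallyAssert() {
    }

    // Repeatedly runs the assertion until it stops throwing AssertionError
    // or the timeout elapses; on timeout, rethrows the last failure seen.
    public static void assertEventually(Runnable assertion, long timeoutSeconds)
            throws InterruptedException {
        long deadline = System.nanoTime() + TimeUnit.SECONDS.toNanos(timeoutSeconds);
        AssertionError lastFailure = null;
        while (System.nanoTime() < deadline) {
            try {
                assertion.run();   // throws AssertionError while the condition is not yet met
                return;            // condition held: done
            } catch (AssertionError e) {
                lastFailure = e;   // keep the most recent failure for diagnostics
                Thread.sleep(100); // brief back-off before the next poll
            }
        }
        throw lastFailure != null ? lastFailure : new AssertionError("assertion was never evaluated");
    }
}

With this shape, a larger timeout simply allows more retry iterations; the assertion still returns as soon as the condition first holds, so passing runs do not get slower.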
Java
apache-2.0
2e1e0b3ea57af56fccbc1d7fc275b693337c1e41
0
Distrotech/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,semonte/intellij-community,diorcety/intellij-community,xfournet/intellij-community,apixandru/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,FHannes/intellij-community,samthor/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,signed/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,asedunov/intellij-community,xfournet/intellij-community,da1z/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,joewalnes/idea-community,youdonghai/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,signed/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,jexp/idea2,diorcety/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,joewalnes/idea-community,FHannes/intellij-community,orekyuu/intellij-community,caot/intellij-community,pwoodworth/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,jagguli/intellij-community,petteyg/intellij-community,hurricup/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,kool79/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,consulo/consulo,robovm/robovm-studio,blademainer/intellij-community,clumsy/intellij-community,jagguli/intellij-
community,muntasirsyed/intellij-community,vladmm/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,ibinti/intellij-community,blademainer/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,kdwink/intellij-community,amith01994/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,pwoodworth/intellij-community,nicolargo/intellij-community,clumsy/intellij-community,izonder/intellij-community,fnouama/intellij-community,da1z/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,asedunov/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,ernestp/consulo,ibinti/intellij-community,Lekanich/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,kdwink/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,nicolargo/intellij-community,da1z/intellij-community,vladmm/intellij-community,wreckJ/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,jexp/idea2,jexp/idea2,akosyakov/intellij-community,fitermay/intellij-community,akosyakov/intellij-community,ol-loginov/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,clumsy/intellij-community,tmpgit/intellij-community,izonder/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,ivan-fedorov/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,jexp/idea2,ahb0327/intellij-community,signed/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,da1z/intellij-community,nicolargo/intellij-community,signed/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,holmes/intellij-community,hurricup/intellij-community,hurricup/intellij-community,semonte/intellij-community,Distrotech/intellij-community,kool79/intellij-community,apixandru/intellij-community,da1z/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,nicolargo/intellij-community,samthor/i
ntellij-community,wreckJ/intellij-community,robovm/robovm-studio,izonder/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,caot/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,allotria/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,kool79/intellij-community,dslomov/intellij-community,fnouama/intellij-community,holmes/intellij-community,hurricup/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,fnouama/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,consulo/consulo,slisson/intellij-community,adedayo/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,samthor/intellij-community,caot/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,ernestp/consulo,TangHao1987/intellij-community,supersven/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,wreckJ/intellij-community,joewalnes/idea-community,retomerz/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,izonder/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,fengbaicanhe/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,allotria/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,MER-GROUP/intellij-community,slisson/intellij-community,retomerz/intellij-community,joewalnes/idea-community,signed/intellij-community,jexp/idea2,amith01994/intellij-community,gnuhub/intellij-community,ibinti/intellij-community,consulo/consulo,MER-GROUP/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,ibinti/intellij-community,kdwink/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,muntasirsyed/intellij-community,consulo/consulo,ibinti/intellij-community,jagguli/intellij-community,caot/intellij-community,kool79/intellij-community,da1z/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,kool79/intellij-community,izonder/intellij-community,robovm/robovm-studio,adedayo/intellij-community,caot/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,vladmm/intellij-community,FHannes/intellij-community,tmpgit/intellij-community,jexp/idea2,salguarnieri/intellij-community,kdwink/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,ryano144/intellij-community,ernestp/consulo,suncycheng/intellij-community,pwoodworth/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,clumsy/intellij-community
,Distrotech/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,amith01994/intellij-community,allotria/intellij-community,izonder/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,consulo/consulo,caot/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,FHannes/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,signed/intellij-community,Lekanich/intellij-community,allotria/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,izonder/intellij-community,asedunov/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,holmes/intellij-community,caot/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,allotria/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,vladmm/intellij-community,diorcety/intellij-community,jexp/idea2,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,semonte/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,robovm/robovm-studio,retomerz/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,joewalnes/idea-community,dslomov/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,clumsy/intellij-community,kool79/intellij-community,fitermay/intellij-community,xfournet/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,vladmm/intellij-community,holmes/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,amith01994/intellij-community,blademainer/intellij-community,fnouama/intellij-community,signed/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,kdwink/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,apixandru/intellij-community,dslomov/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,signed/intellij-community,consulo/consulo,muntasirsyed/intellij-community,gnuhub/intellij-community,ol-loginov/intellij-community,supersven/intellij-community,semonte/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,izonder/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,blademainer/intellij-community,ernestp/consulo,signed/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,dslomov/intellij-community,clumsy/intellij-community,ernestp/consulo,muntasirsyed/intellij-community,wreckJ/intellij-community,amith01994/intellij-community,robovm/robovm-studio,joewalnes/idea-community,jexp/idea2,dslomov/intellij-community,kool79/intellij-community,suncycheng/intellij-community,da1z/i
ntellij-community,joewalnes/idea-community,MER-GROUP/intellij-community,supersven/intellij-community,xfournet/intellij-community,fnouama/intellij-community,hurricup/intellij-community,kool79/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,petteyg/intellij-community,caot/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,holmes/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,slisson/intellij-community,ibinti/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,da1z/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,semonte/intellij-community,samthor/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,diorcety/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,clumsy/intellij-community,kool79/intellij-community,robovm/robovm-studio,retomerz/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,da1z/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,petteyg/intellij-community,holmes/intellij-community,supersven/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,semonte/intellij-community,fitermay/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,slisson/intellij-community,adedayo/intellij-community,supersven/intellij-community,akosyakov/intellij-community,samthor/intellij-community,slisson/intellij-community,apixandru/intellij-community,petteyg/intellij-community,joewalnes/idea-community,supersven/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,ernestp/consulo,fnouama/intellij-community,kool79/intellij-community,supersven/intellij-community,kdwink/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,allotria/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,clumsy/intellij-community,petteyg/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,FHannes/intellij-community,ibinti/intellij-community,caot/intellij-community,ahb0327/intellij-community,da1z/intellij-community,clumsy/intellij-community,TangHao1987/intellij-community,clu
msy/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,joewalnes/idea-community,gnuhub/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,holmes/intellij-community,supersven/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,caot/intellij-community,supersven/intellij-community,Lekanich/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,FHannes/intellij-community,fitermay/intellij-community,kool79/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,jagguli/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,ibinti/intellij-community,holmes/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,semonte/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,semonte/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,ahb0327/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,youdonghai/intellij-community,izonder/intellij-community,allotria/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,kdwink/intellij-community,caot/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,da1z/intellij-community,slisson/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,allotria/intellij-community,semonte/intellij-community,caot/intellij-community,apixandru/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,diorcety/intellij-community,ibinti/intellij-community,semonte/intellij-community,asedunov/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,hurricup/intellij-community,ryano144/intellij-community,jagguli/intellij-community,izonder/intellij-community,jagguli/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,ftomassetti/intellij
-community,retomerz/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-community,xfournet/intellij-community,samthor/intellij-community,kdwink/intellij-community
package com.intellij.psi.impl.source.resolve;

import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.filters.ClassFilter;
import com.intellij.psi.infos.CandidateInfo;
import com.intellij.psi.scope.ElementClassHint;
import com.intellij.psi.scope.NameHint;
import com.intellij.psi.scope.PsiConflictResolver;
import com.intellij.psi.scope.conflictResolvers.JavaVariableConflictResolver;
import com.intellij.psi.scope.processor.ConflictFilterProcessor;
import com.intellij.psi.util.PsiUtil;

import java.util.ArrayList;

/**
 * @author ik, dsl
 */
public class VariableResolverProcessor extends ConflictFilterProcessor implements NameHint, ElementClassHint {
  private static final ClassFilter ourFilter = new ClassFilter(PsiVariable.class);

  private final PsiElement myFromElement;
  private boolean myStaticScopeFlag = false;
  private PsiClass myAccessClass = null;
  private PsiElement myCurrentFileContext = null;

  public VariableResolverProcessor(String name, PsiElement place, PsiClass accessClass) {
    super(name, null, ourFilter, new PsiConflictResolver[]{new JavaVariableConflictResolver()}, new ArrayList());
    myFromElement = place;
    myAccessClass = accessClass;
  }

  public VariableResolverProcessor(PsiJavaCodeReferenceElement fromElement) {
    super(fromElement.getText(), null, ourFilter, new PsiConflictResolver[]{new JavaVariableConflictResolver()}, new ArrayList());
    myFromElement = fromElement;

    PsiElement qualifier = fromElement.getQualifier();
    PsiElement referenceName = fromElement.getReferenceNameElement();
    if (referenceName instanceof PsiIdentifier) {
      setName(referenceName.getText());
    }
    if (qualifier instanceof PsiExpression) {
      final JavaResolveResult accessClass = PsiUtil.getAccessObjectClass((PsiExpression)qualifier);
      final PsiElement element = accessClass.getElement();
      if (element instanceof PsiTypeParameter) {
        final PsiManager manager = element.getManager();
        final PsiClassType type = manager.getElementFactory().createType((PsiTypeParameter)element);
        final PsiType accessType = accessClass.getSubstitutor().substitute(type);
        if (accessType instanceof PsiArrayType) {
          LanguageLevel languageLevel = PsiUtil.getLanguageLevel(qualifier);
          myAccessClass = manager.getElementFactory().getArrayClass(languageLevel);
        }
        else if (accessType instanceof PsiClassType) {
          myAccessClass = ((PsiClassType)accessType).resolve();
        }
      }
      else if (element instanceof PsiClass) {
        myAccessClass = (PsiClass)element;
      }
    }
  }

  public final void handleEvent(Event event, Object associated) {
    super.handleEvent(event, associated);
    if (event == Event.START_STATIC) {
      myStaticScopeFlag = true;
    }
    else if (Event.SET_CURRENT_FILE_CONTEXT.equals(event)) {
      myCurrentFileContext = (PsiElement)associated;
    }
  }

  public void add(PsiElement element, PsiSubstitutor substitutor) {
    final boolean staticProblem = myStaticScopeFlag && !(((PsiVariable)element).hasModifierProperty(PsiModifier.STATIC));
    super.add(new CandidateInfo(element, substitutor, myFromElement, myAccessClass, staticProblem, myCurrentFileContext));
  }

  public boolean shouldProcess(Class elementClass) {
    return PsiVariable.class.isAssignableFrom(elementClass);
  }

  public boolean execute(PsiElement element, PsiSubstitutor substitutor) {
    if (!(element instanceof PsiField)) {
      super.execute(element, substitutor);
      return myResults.size() == 0;
    }
    return super.execute(element, substitutor);
  }
}
source/com/intellij/psi/impl/source/resolve/VariableResolverProcessor.java
package com.intellij.psi.impl.source.resolve;

import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.filters.ClassFilter;
import com.intellij.psi.infos.CandidateInfo;
import com.intellij.psi.scope.ElementClassHint;
import com.intellij.psi.scope.NameHint;
import com.intellij.psi.scope.PsiConflictResolver;
import com.intellij.psi.scope.conflictResolvers.JavaVariableConflictResolver;
import com.intellij.psi.scope.processor.ConflictFilterProcessor;
import com.intellij.psi.util.PsiUtil;

import java.util.ArrayList;

/**
 * @author ik, dsl
 */
public class VariableResolverProcessor extends ConflictFilterProcessor implements NameHint, ElementClassHint {
  private static final ClassFilter ourFilter = new ClassFilter(PsiVariable.class);

  private final PsiElement myFromElement;
  private boolean myStaticScopeFlag = false;
  private PsiClass myAccessClass = null;
  private PsiElement myCurrentFileContext = null;

  public VariableResolverProcessor(String name, PsiElement place, PsiClass accessClass) {
    super(name, null, ourFilter, new PsiConflictResolver[]{new JavaVariableConflictResolver()}, new ArrayList());
    myFromElement = place;
    myAccessClass = accessClass;
  }

  public VariableResolverProcessor(PsiJavaCodeReferenceElement fromElement) {
    super(fromElement.getText(), null, ourFilter, new PsiConflictResolver[]{new JavaVariableConflictResolver()}, new ArrayList());
    myFromElement = fromElement;

    PsiElement qualifier = fromElement.getQualifier();
    PsiElement referenceName = fromElement.getReferenceNameElement();
    if (referenceName instanceof PsiIdentifier) {
      setName(referenceName.getText());
    }
    if (qualifier instanceof PsiExpression) {
      final JavaResolveResult accessClass = PsiUtil.getAccessObjectClass((PsiExpression)qualifier);
      final PsiElement element = accessClass.getElement();
      if (element instanceof PsiTypeParameter) {
        final PsiManager manager = element.getManager();
        final PsiClassType type = manager.getElementFactory().createType((PsiTypeParameter)element);
        final PsiType accessType = accessClass.getSubstitutor().substitute(type);
        if (accessType instanceof PsiArrayType) {
          LanguageLevel languageLevel = PsiUtil.getLanguageLevel(qualifier);
          myAccessClass = manager.getElementFactory().getArrayClass(languageLevel);
        }
        else if (accessType instanceof PsiClassType) {
          myAccessClass = ((PsiClassType)accessType).resolve();
        }
      }
      else if (element instanceof PsiClass) {
        myAccessClass = (PsiClass)element;
      }
    }
  }

  public final void handleEvent(Event event, Object associated) {
    super.handleEvent(event, associated);
    if (event == Event.START_STATIC) {
      myStaticScopeFlag = true;
    }
    else if (Event.SET_CURRENT_FILE_CONTEXT.equals(event)) {
      myCurrentFileContext = (PsiElement)associated;
    }
  }

  public void add(PsiElement element, PsiSubstitutor substitutor) {
    final boolean staticProblem = myStaticScopeFlag && !(((PsiVariable)element).hasModifierProperty(PsiModifier.STATIC));
    super.add(new CandidateInfo(element, substitutor, myFromElement, myAccessClass, staticProblem, myCurrentFileContext));
  }

  public boolean shouldProcess(Class elementClass) {
    return PsiVariable.class.isAssignableFrom(elementClass);
  }

  public boolean execute(PsiElement element, PsiSubstitutor substitutor) {
    if (!(element instanceof PsiField) && (myName == null || PsiUtil.checkName(element, myName))) {
      super.execute(element, substitutor);
      return myResults.size() == 0;
    }
    return super.execute(element, substitutor);
  }
}
resolve fix
source/com/intellij/psi/impl/source/resolve/VariableResolverProcessor.java
resolve fix
<ide><path>source/com/intellij/psi/impl/source/resolve/VariableResolverProcessor.java
<ide>   }
<ide>
<ide>   public boolean execute(PsiElement element, PsiSubstitutor substitutor) {
<del>     if (!(element instanceof PsiField) && (myName == null || PsiUtil.checkName(element, myName))) {
<add>     if (!(element instanceof PsiField)) {
<ide>       super.execute(element, substitutor);
<ide>       return myResults.size() == 0;
<ide>     }
Java
apache-2.0
1e5caeb87699d51c7702e733d56c4cee2408f273
0
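For readers skimming this record: the diff above drops the explicit name check from VariableResolverProcessor.execute() for non-field variables, leaving name filtering to the base ConflictFilterProcessor and letting the scope walk short-circuit on the recorded results. The snippet below is a minimal, self-contained sketch of one plausible reading of that guard change; ResolveGuard, Element, and superExecute are hypothetical stand-ins for the PSI machinery, not IntelliJ platform APIs.

// Hypothetical model of the "resolve fix" guard change; not IntelliJ PSI code.
import java.util.ArrayList;
import java.util.List;

class ResolveGuard {
    static final class Element {
        final String name;
        final boolean field;
        Element(String name, boolean field) { this.name = name; this.field = field; }
    }

    private final String nameHint;                        // may be null, like the processor's name hint
    private final List<Element> results = new ArrayList<>();

    ResolveGuard(String nameHint) { this.nameHint = nameHint; }

    // Stands in for ConflictFilterProcessor.execute(): applies the name hint,
    // records a candidate, and returns true meaning "keep processing".
    private boolean superExecute(Element e) {
        if (nameHint == null || nameHint.equals(e.name)) {
            results.add(e);
        }
        return true;
    }

    // Old guard: a non-field took the short-circuit path only when it also
    // passed the name check repeated here.
    boolean executeOld(Element e) {
        if (!e.field && (nameHint == null || nameHint.equals(e.name))) {
            superExecute(e);
            return results.isEmpty();                     // false once a hit exists: stop the scope walk
        }
        return superExecute(e);
    }

    // New guard (after the fix): every non-field takes the short-circuit path,
    // so the walk stops as soon as any result has been recorded.
    boolean executeNew(Element e) {
        if (!e.field) {
            superExecute(e);
            return results.isEmpty();
        }
        return superExecute(e);
    }

    public static void main(String[] args) {
        // Probe the guards in isolation: after a hit for "x", a later
        // non-matching local left the old guard scanning outer scopes,
        // while the new guard halts the walk.
        ResolveGuard oldGuard = new ResolveGuard("x");
        oldGuard.executeOld(new Element("x", false));
        System.out.println(oldGuard.executeOld(new Element("y", false))); // true: kept walking
        ResolveGuard newGuard = new ResolveGuard("x");
        newGuard.executeNew(new Element("x", false));
        System.out.println(newGuard.executeNew(new Element("y", false))); // false: stopped
    }
}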
mengmoya/onos,osinstom/onos,opennetworkinglab/onos,maheshraju-Huawei/actn,mengmoya/onos,VinodKumarS-Huawei/ietf96yang,opennetworkinglab/onos,sonu283304/onos,sdnwiselab/onos,osinstom/onos,kuujo/onos,kuujo/onos,oplinkoms/onos,sonu283304/onos,lsinfo3/onos,oplinkoms/onos,Shashikanth-Huawei/bmp,Shashikanth-Huawei/bmp,sdnwiselab/onos,oplinkoms/onos,kuujo/onos,y-higuchi/onos,donNewtonAlpha/onos,oplinkoms/onos,mengmoya/onos,gkatsikas/onos,sdnwiselab/onos,VinodKumarS-Huawei/ietf96yang,y-higuchi/onos,oplinkoms/onos,kuujo/onos,y-higuchi/onos,mengmoya/onos,donNewtonAlpha/onos,maheshraju-Huawei/actn,gkatsikas/onos,oplinkoms/onos,LorenzReinhart/ONOSnew,kuujo/onos,maheshraju-Huawei/actn,kuujo/onos,osinstom/onos,donNewtonAlpha/onos,sonu283304/onos,gkatsikas/onos,Shashikanth-Huawei/bmp,Shashikanth-Huawei/bmp,gkatsikas/onos,lsinfo3/onos,y-higuchi/onos,gkatsikas/onos,sdnwiselab/onos,gkatsikas/onos,osinstom/onos,lsinfo3/onos,VinodKumarS-Huawei/ietf96yang,donNewtonAlpha/onos,kuujo/onos,donNewtonAlpha/onos,VinodKumarS-Huawei/ietf96yang,sdnwiselab/onos,lsinfo3/onos,sdnwiselab/onos,opennetworkinglab/onos,maheshraju-Huawei/actn,opennetworkinglab/onos,oplinkoms/onos,Shashikanth-Huawei/bmp,LorenzReinhart/ONOSnew,LorenzReinhart/ONOSnew,y-higuchi/onos,LorenzReinhart/ONOSnew,opennetworkinglab/onos,maheshraju-Huawei/actn,sonu283304/onos,VinodKumarS-Huawei/ietf96yang,mengmoya/onos,LorenzReinhart/ONOSnew,osinstom/onos,opennetworkinglab/onos
/*
 * Copyright 2014-2015 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.cordvtn;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onlab.packet.Ethernet;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onlab.packet.VlanId;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.dhcp.DhcpService;
import org.onosproject.mastership.MastershipService;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.DefaultAnnotations;
import org.onosproject.net.Host;
import org.onosproject.net.HostId;
import org.onosproject.net.HostLocation;
import org.onosproject.net.Port;
import org.onosproject.net.config.ConfigFactory;
import org.onosproject.net.config.NetworkConfigEvent;
import org.onosproject.net.config.NetworkConfigListener;
import org.onosproject.net.config.NetworkConfigRegistry;
import org.onosproject.net.config.NetworkConfigService;
import org.onosproject.net.config.basics.SubjectFactories;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.driver.DriverService;
import org.onosproject.net.flow.FlowRuleService;
import org.onosproject.net.group.GroupService;
import org.onosproject.net.host.DefaultHostDescription;
import org.onosproject.net.host.HostDescription;
import org.onosproject.net.host.HostEvent;
import org.onosproject.net.host.HostListener;
import org.onosproject.net.host.HostProvider;
import org.onosproject.net.host.HostProviderRegistry;
import org.onosproject.net.host.HostProviderService;
import org.onosproject.net.host.HostService;
import org.onosproject.net.packet.PacketContext;
import org.onosproject.net.packet.PacketProcessor;
import org.onosproject.net.packet.PacketService;
import org.onosproject.net.provider.AbstractProvider;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.openstacknetworking.OpenstackNetworkingService;
import org.onosproject.openstacknetworking.OpenstackNetwork;
import org.onosproject.openstacknetworking.OpenstackPort;
import org.onosproject.openstacknetworking.OpenstackSubnet;
import org.slf4j.Logger;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.stream.Collectors;

import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
import static org.onlab.util.Tools.groupedThreads;
import static org.slf4j.LoggerFactory.getLogger;

/**
 * Provisions virtual tenant networks with service chaining capability
 * in OpenStack environment.
 */
@Component(immediate = true)
@Service
public class CordVtn extends AbstractProvider implements CordVtnService, HostProvider {

    protected final Logger log = getLogger(getClass());

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected CoreService coreService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected NetworkConfigRegistry configRegistry;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected NetworkConfigService configService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected HostProviderRegistry hostProviderRegistry;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DeviceService deviceService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected HostService hostService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DriverService driverService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected FlowRuleService flowRuleService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected PacketService packetService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected MastershipService mastershipService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected GroupService groupService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected OpenstackNetworkingService openstackService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DhcpService dhcpService;

    private final ConfigFactory configFactory =
            new ConfigFactory(SubjectFactories.APP_SUBJECT_FACTORY, CordVtnConfig.class, "cordvtn") {
                @Override
                public CordVtnConfig createConfig() {
                    return new CordVtnConfig();
                }
            };

    private static final String DEFAULT_TUNNEL = "vxlan";
    private static final String SERVICE_ID = "serviceId";
    private static final String OPENSTACK_VM_ID = "openstackVmId";
    private static final String OPENSTACK_PORT_ID = "openstackPortId";
    private static final String DATA_PLANE_IP = "dataPlaneIp";
    private static final String DATA_PLANE_INTF = "dataPlaneIntf";
    private static final String S_TAG = "stag";

    private static final Ip4Address DEFAULT_DNS = Ip4Address.valueOf("8.8.8.8");

    private final ExecutorService eventExecutor =
            newSingleThreadScheduledExecutor(groupedThreads("onos/cordvtn", "event-handler"));

    private final PacketProcessor packetProcessor = new InternalPacketProcessor();
    private final HostListener hostListener = new InternalHostListener();
    private final NetworkConfigListener configListener = new InternalConfigListener();

    private ApplicationId appId;
    private HostProviderService hostProvider;
    private CordVtnRuleInstaller ruleInstaller;
    private CordVtnArpProxy arpProxy;
    private volatile MacAddress privateGatewayMac = MacAddress.NONE;

    /**
     * Creates an cordvtn host location provider.
     */
    public CordVtn() {
        super(new ProviderId("host", CORDVTN_APP_ID));
    }

    @Activate
    protected void activate() {
        appId = coreService.registerApplication("org.onosproject.cordvtn");
        ruleInstaller = new CordVtnRuleInstaller(appId, flowRuleService,
                                                 deviceService,
                                                 driverService,
                                                 groupService,
                                                 mastershipService,
                                                 DEFAULT_TUNNEL);

        arpProxy = new CordVtnArpProxy(appId, packetService, hostService);
        packetService.addProcessor(packetProcessor, PacketProcessor.director(0));
        arpProxy.requestPacket();

        hostService.addListener(hostListener);
        hostProvider = hostProviderRegistry.register(this);

        configRegistry.registerConfigFactory(configFactory);
        configService.addListener(configListener);
        readConfiguration();

        log.info("Started");
    }

    @Deactivate
    protected void deactivate() {
        hostProviderRegistry.unregister(this);
        hostService.removeListener(hostListener);

        packetService.removeProcessor(packetProcessor);

        configRegistry.unregisterConfigFactory(configFactory);
        configService.removeListener(configListener);

        eventExecutor.shutdown();

        log.info("Stopped");
    }

    @Override
    public void triggerProbe(Host host) {
        /*
         * Note: In CORD deployment, we assume that all hosts are configured.
         * Therefore no probe is required.
         */
    }

    @Override
    public void createServiceDependency(CordServiceId tServiceId, CordServiceId pServiceId,
                                        boolean isBidirectional) {
        CordService tService = getCordService(tServiceId);
        CordService pService = getCordService(pServiceId);

        if (tService == null || pService == null) {
            log.error("Failed to create CordService for {}", tServiceId.id());
            return;
        }

        log.info("Service dependency from {} to {} created.", tService.id().id(), pService.id().id());
        ruleInstaller.populateServiceDependencyRules(tService, pService, isBidirectional);
    }

    @Override
    public void removeServiceDependency(CordServiceId tServiceId, CordServiceId pServiceId) {
        CordService tService = getCordService(tServiceId);
        CordService pService = getCordService(pServiceId);

        if (tService == null || pService == null) {
            log.error("Failed to create CordService for {}", tServiceId.id());
            return;
        }

        log.info("Service dependency from {} to {} removed.", tService.id().id(), pService.id().id());
        ruleInstaller.removeServiceDependencyRules(tService, pService);
    }

    @Override
    public void addServiceVm(CordVtnNode node, ConnectPoint connectPoint) {
        Port port = deviceService.getPort(connectPoint.deviceId(), connectPoint.port());
        OpenstackPort vPort = openstackService.port(port);
        if (vPort == null) {
            log.warn("Failed to get OpenstackPort for {}", getPortName(port));
            return;
        }

        MacAddress mac = vPort.macAddress();
        HostId hostId = HostId.hostId(mac);

        Host host = hostService.getHost(hostId);
        if (host != null) {
            // Host is already known to the system, no HOST_ADDED event is triggered in this case.
            // It happens when the application is restarted.
            String vmId = host.annotations().value(OPENSTACK_VM_ID);
            if (vmId != null && vmId.equals(vPort.deviceId())) {
                serviceVmAdded(host);
                return;
            } else {
                hostProvider.hostVanished(host.id());
            }
        }

        Set<IpAddress> ip = Sets.newHashSet(vPort.fixedIps().values());
        DefaultAnnotations.Builder annotations = DefaultAnnotations.builder()
                .set(SERVICE_ID, vPort.networkId())
                .set(OPENSTACK_VM_ID, vPort.deviceId())
                .set(OPENSTACK_PORT_ID, vPort.id())
                .set(DATA_PLANE_IP, node.dpIp().ip().toString())
                .set(DATA_PLANE_INTF, node.dpIntf());

        String serviceVlan = getServiceVlan(vPort);
        if (serviceVlan != null) {
            annotations.set(S_TAG, serviceVlan);
        }

        HostDescription hostDesc = new DefaultHostDescription(
                mac,
                VlanId.NONE,
                new HostLocation(connectPoint, System.currentTimeMillis()),
                ip,
                annotations.build());

        hostProvider.hostDetected(hostId, hostDesc, false);
    }

    @Override
    public void removeServiceVm(ConnectPoint connectPoint) {
        hostService.getConnectedHosts(connectPoint)
                .stream()
                .forEach(host -> hostProvider.hostVanished(host.id()));
    }

    @Override
    public void updateVirtualSubscriberGateways(HostId vSgHostId, String serviceVlan,
                                                Map<IpAddress, MacAddress> vSgs) {
        Host vSgVm = hostService.getHost(vSgHostId);

        if (vSgVm == null || !vSgVm.annotations().value(S_TAG).equals(serviceVlan)) {
            log.debug("Invalid vSG updates for {}", serviceVlan);
            return;
        }

        log.info("Updates vSGs in {} with {}", vSgVm.id(), vSgs.toString());

        vSgs.entrySet().stream()
                .forEach(entry -> addVirtualSubscriberGateway(
                        vSgVm,
                        entry.getKey(),
                        entry.getValue(),
                        serviceVlan));

        hostService.getConnectedHosts(vSgVm.location()).stream()
                .filter(host -> !host.mac().equals(vSgVm.mac()))
                .filter(host -> !vSgs.values().contains(host.mac()))
                .forEach(host -> {
                    log.info("Removed vSG {}", host.toString());
                    hostProvider.hostVanished(host.id());
                });

        ruleInstaller.populateSubscriberGatewayRules(vSgVm, vSgs.keySet());
    }

    /**
     * Adds virtual subscriber gateway to the system.
     *
     * @param vSgHost host virtual machine of this vSG
     * @param vSgIp vSG ip address
     * @param vSgMac vSG mac address
     * @param serviceVlan service vlan
     */
    private void addVirtualSubscriberGateway(Host vSgHost, IpAddress vSgIp, MacAddress vSgMac,
                                             String serviceVlan) {
        HostId hostId = HostId.hostId(vSgMac);
        Host host = hostService.getHost(hostId);
        if (host != null) {
            log.trace("vSG with {} already exists", vSgMac.toString());
            return;
        }

        log.info("vSG with IP({}) MAC({}) detected", vSgIp.toString(), vSgMac.toString());

        DefaultAnnotations.Builder annotations = DefaultAnnotations.builder()
                .set(S_TAG, serviceVlan);

        HostDescription hostDesc = new DefaultHostDescription(
                vSgMac,
                VlanId.NONE,
                vSgHost.location(),
                Sets.newHashSet(vSgIp),
                annotations.build());

        hostProvider.hostDetected(hostId, hostDesc, false);
    }

    /**
     * Returns CordService by service ID.
     *
     * @param serviceId service id
     * @return cord service, or null if it fails to get network from OpenStack
     */
    private CordService getCordService(CordServiceId serviceId) {
        OpenstackNetwork vNet = openstackService.network(serviceId.id());
        if (vNet == null) {
            log.warn("Couldn't find OpenStack network for service {}", serviceId.id());
            return null;
        }

        OpenstackSubnet subnet = vNet.subnets().stream()
                .findFirst()
                .orElse(null);
        if (subnet == null) {
            log.warn("Couldn't find OpenStack subnet for service {}", serviceId.id());
            return null;
        }

        Set<CordServiceId> tServices = Sets.newHashSet();
        // TODO get tenant services from XOS

        Map<Host, IpAddress> hosts = getHostsWithOpenstackNetwork(vNet)
                .stream()
                .collect(Collectors.toMap(host -> host, this::getTunnelIp));

        return new CordService(vNet, subnet, hosts, tServices);
    }

    /**
     * Returns CordService by OpenStack network.
     *
     * @param vNet OpenStack network
     * @return cord service
     */
    private CordService getCordService(OpenstackNetwork vNet) {
        checkNotNull(vNet);

        CordServiceId serviceId = CordServiceId.of(vNet.id());
        OpenstackSubnet subnet = vNet.subnets().stream()
                .findFirst()
                .orElse(null);
        if (subnet == null) {
            log.warn("Couldn't find OpenStack subnet for service {}", serviceId);
            return null;
        }

        Set<CordServiceId> tServices = Sets.newHashSet();
        // TODO get tenant services from XOS

        Map<Host, IpAddress> hosts = getHostsWithOpenstackNetwork(vNet)
                .stream()
                .collect(Collectors.toMap(host -> host, this::getTunnelIp));

        return new CordService(vNet, subnet, hosts, tServices);
    }

    /**
     * Returns IP address for tunneling for a given host.
     *
     * @param host host
     * @return ip address, or null
     */
    private IpAddress getTunnelIp(Host host) {
        String ip = host.annotations().value(DATA_PLANE_IP);
        return ip == null ? null : IpAddress.valueOf(ip);
    }

    /**
     * Returns port name.
     *
     * @param port port
     * @return port name
     */
    private String getPortName(Port port) {
        return port.annotations().value("portName");
    }

    /**
     * Returns s-tag from a given OpenStack port.
     *
     * @param vPort openstack port
     * @return s-tag string
     */
    private String getServiceVlan(OpenstackPort vPort) {
        checkNotNull(vPort);

        if (vPort.name() != null && vPort.name().startsWith(S_TAG)) {
            return vPort.name().split("-")[1];
        } else {
            return null;
        }
    }

    /**
     * Returns hosts associated with a given OpenStack network.
     *
     * @param vNet openstack network
     * @return set of hosts
     */
    private Set<Host> getHostsWithOpenstackNetwork(OpenstackNetwork vNet) {
        checkNotNull(vNet);

        Set<Host> hosts = openstackService.ports(vNet.id()).stream()
                .filter(port -> port.deviceOwner().contains("compute"))
                .map(port -> hostService.getHostsByMac(port.macAddress())
                        .stream()
                        .findFirst()
                        .orElse(null))
                .collect(Collectors.toSet());

        hosts.remove(null);
        return hosts;
    }

    /**
     * Returns public ip addresses of vSGs running inside a give vSG host.
     *
     * @param vSgHost vSG host
     * @return map of ip and mac address, or empty map
     */
    private Map<IpAddress, MacAddress> getSubscriberGateways(Host vSgHost) {
        String vPortId = vSgHost.annotations().value(OPENSTACK_PORT_ID);
        String serviceVlan = vSgHost.annotations().value(S_TAG);

        OpenstackPort vPort = openstackService.port(vPortId);
        if (vPort == null) {
            log.warn("Failed to get OpenStack port {} for VM {}", vPortId, vSgHost.id());
            return Maps.newHashMap();
        }

        if (!serviceVlan.equals(getServiceVlan(vPort))) {
            log.error("Host({}) s-tag does not match with vPort s-tag", vSgHost.id());
            return Maps.newHashMap();
        }

        return vPort.allowedAddressPairs();
    }

    /**
     * Registers static DHCP lease for a given host.
     *
     * @param host host
     * @param service cord service
     */
    private void registerDhcpLease(Host host, CordService service) {
        List<Ip4Address> options = Lists.newArrayList();
        options.add(Ip4Address.makeMaskPrefix(service.serviceIpRange().prefixLength()));
        options.add(service.serviceIp().getIp4Address());
        options.add(service.serviceIp().getIp4Address());
        options.add(DEFAULT_DNS);

        log.debug("Set static DHCP mapping for {}", host.mac());
        dhcpService.setStaticMapping(host.mac(),
                                     host.ipAddresses().stream().findFirst().get().getIp4Address(),
                                     true,
                                     options);
    }

    /**
     * Handles VM detected situation.
     *
     * @param host host
     */
    private void serviceVmAdded(Host host) {
        String vNetId = host.annotations().value(SERVICE_ID);
        if (vNetId == null) {
            // ignore this host, it is not the service VM, or it's a vSG
            return;
        }

        OpenstackNetwork vNet = openstackService.network(vNetId);
        if (vNet == null) {
            log.warn("Failed to get OpenStack network {} for VM {}({}).",
                     vNetId, host.id(), host.annotations().value(OPENSTACK_VM_ID));
            return;
        }

        log.info("VM {} is detected, MAC: {} IP: {}",
                 host.annotations().value(OPENSTACK_VM_ID),
                 host.mac(),
                 host.ipAddresses().stream().findFirst().get());

        CordService service = getCordService(vNet);
        if (service == null) {
            return;
        }

        switch (service.serviceType()) {
            case MANAGEMENT:
                ruleInstaller.populateManagementNetworkRules(host, service);
                break;
            case PRIVATE:
            case PRIVATE_INDIRECT:
            case PRIVATE_DIRECT:
                arpProxy.addGateway(service.serviceIp(), privateGatewayMac);
            case PUBLIC_INDIRECT:
            case PUBLIC_DIRECT:
            default:
                // TODO check if the service needs an update on its group buckets after done CORD-433
                ruleInstaller.updateServiceGroup(service);
                // sends gratuitous ARP here for the case of adding existing VMs
                // when ONOS or cordvtn app is restarted
                arpProxy.sendGratuitousArpForGateway(service.serviceIp(), Sets.newHashSet(host));
                break;
        }

        registerDhcpLease(host, service);

        ruleInstaller.populateBasicConnectionRules(host, getTunnelIp(host), vNet);

        String serviceVlan = host.annotations().value(S_TAG);
        if (serviceVlan != null) {
            log.debug("vSG VM detected {}", host.id());

            Map<IpAddress, MacAddress> vSgs = getSubscriberGateways(host);
            vSgs.entrySet().stream()
                    .forEach(entry -> addVirtualSubscriberGateway(
                            host,
                            entry.getKey(),
                            entry.getValue(),
                            serviceVlan));

            ruleInstaller.populateSubscriberGatewayRules(host, vSgs.keySet());
        }
    }

    /**
     * Handles VM removed situation.
     *
     * @param host host
     */
    private void serviceVmRemoved(Host host) {
        String vNetId = host.annotations().value(SERVICE_ID);
        if (vNetId == null) {
            // ignore it, it's not the service VM or it's a vSG
            String serviceVlan = host.annotations().value(S_TAG);
            if (serviceVlan != null) {
                log.info("vSG {} removed", host.id());
            }
            return;
        }

        OpenstackNetwork vNet = openstackService.network(vNetId);
        if (vNet == null) {
            log.warn("Failed to get OpenStack network {} for VM {}({}).",
                     vNetId, host.id(), host.annotations().value(OPENSTACK_VM_ID));
            return;
        }

        log.info("VM {} is vanished, MAC: {} IP: {}",
                 host.annotations().value(OPENSTACK_VM_ID),
                 host.mac(),
                 host.ipAddresses().stream().findFirst().get());

        ruleInstaller.removeBasicConnectionRules(host);
        dhcpService.removeStaticMapping(host.mac());

        CordService service = getCordService(vNet);
        if (service == null) {
            return;
        }

        switch (service.serviceType()) {
            case MANAGEMENT:
                ruleInstaller.removeManagementNetworkRules(host, service);
                break;
            case PRIVATE:
            case PRIVATE_INDIRECT:
            case PRIVATE_DIRECT:
                if (getHostsWithOpenstackNetwork(vNet).isEmpty()) {
                    arpProxy.removeGateway(service.serviceIp());
                }
            case PUBLIC_INDIRECT:
            case PUBLIC_DIRECT:
            default:
                // TODO check if the service needs an update on its group buckets after done CORD-433
                ruleInstaller.updateServiceGroup(service);
                break;
        }
    }

    /**
     * Sets service network gateway MAC address and sends out gratuitous ARP to all
     * VMs to update the gateway MAC address.
     *
     * @param newMac mac address to update
     */
    private void setPrivateGatewayMac(MacAddress newMac) {
        if (newMac == null || newMac.equals(privateGatewayMac)) {
            // no updates, do nothing
            return;
        }

        privateGatewayMac = newMac;
        log.debug("Set service gateway MAC address to {}", privateGatewayMac.toString());

        // TODO get existing service list from XOS and replace the loop below
        Set<String> vNets = Sets.newHashSet();
        hostService.getHosts().forEach(host -> vNets.add(host.annotations().value(SERVICE_ID)));
        vNets.remove(null);

        vNets.stream().forEach(vNet -> {
            CordService service = getCordService(CordServiceId.of(vNet));
            if (service != null) {
                arpProxy.addGateway(service.serviceIp(), privateGatewayMac);
                arpProxy.sendGratuitousArpForGateway(service.serviceIp(), service.hosts().keySet());
            }
        });
    }

    /**
     * Sets public gateway MAC address.
     *
     * @param publicGateways gateway ip and mac address pairs
     */
    private void setPublicGatewayMac(Map<IpAddress, MacAddress> publicGateways) {
        publicGateways.entrySet()
                .stream()
                .forEach(entry -> {
                    arpProxy.addGateway(entry.getKey(), entry.getValue());
                    log.debug("Added public gateway IP {}, MAC {}",
                              entry.getKey().toString(), entry.getValue().toString());
                });
        // TODO notice gateway MAC change to VMs holds this gateway IP
    }

    /**
     * Updates configurations.
     */
    private void readConfiguration() {
        CordVtnConfig config = configRegistry.getConfig(appId, CordVtnConfig.class);
        if (config == null) {
            log.debug("No configuration found");
            return;
        }

        setPrivateGatewayMac(config.privateGatewayMac());
        setPublicGatewayMac(config.publicGateways());
    }

    private class InternalHostListener implements HostListener {

        @Override
        public void event(HostEvent event) {
            Host host = event.subject();

            switch (event.type()) {
                case HOST_ADDED:
                    eventExecutor.submit(() -> serviceVmAdded(host));
                    break;
                case HOST_REMOVED:
                    eventExecutor.submit(() -> serviceVmRemoved(host));
                    break;
                default:
                    break;
            }
        }
    }

    private class InternalPacketProcessor implements PacketProcessor {

        @Override
        public void process(PacketContext context) {
            if (context.isHandled()) {
                return;
            }

            Ethernet ethPacket = context.inPacket().parsed();
            if (ethPacket == null || ethPacket.getEtherType() != Ethernet.TYPE_ARP) {
                return;
            }

            arpProxy.processArpPacket(context, ethPacket);
        }
    }

    private class InternalConfigListener implements NetworkConfigListener {

        @Override
        public void event(NetworkConfigEvent event) {
            if (!event.configClass().equals(CordVtnConfig.class)) {
                return;
            }

            switch (event.type()) {
                case CONFIG_ADDED:
                case CONFIG_UPDATED:
                    log.info("Network configuration changed");
                    eventExecutor.execute(CordVtn.this::readConfiguration);
                    break;
                default:
                    break;
            }
        }
    }
}
apps/cordvtn/src/main/java/org/onosproject/cordvtn/CordVtn.java
/*
 * Copyright 2014-2015 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.cordvtn;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onlab.packet.Ethernet;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onlab.packet.VlanId;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.dhcp.DhcpService;
import org.onosproject.mastership.MastershipService;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.DefaultAnnotations;
import org.onosproject.net.Host;
import org.onosproject.net.HostId;
import org.onosproject.net.HostLocation;
import org.onosproject.net.Port;
import org.onosproject.net.config.ConfigFactory;
import org.onosproject.net.config.NetworkConfigEvent;
import org.onosproject.net.config.NetworkConfigListener;
import org.onosproject.net.config.NetworkConfigRegistry;
import org.onosproject.net.config.NetworkConfigService;
import org.onosproject.net.config.basics.SubjectFactories;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.driver.DriverService;
import org.onosproject.net.flow.FlowRuleService;
import org.onosproject.net.group.GroupService;
import org.onosproject.net.host.DefaultHostDescription;
import org.onosproject.net.host.HostDescription;
import org.onosproject.net.host.HostEvent;
import org.onosproject.net.host.HostListener;
import org.onosproject.net.host.HostProvider;
import org.onosproject.net.host.HostProviderRegistry;
import org.onosproject.net.host.HostProviderService;
import org.onosproject.net.host.HostService;
import org.onosproject.net.packet.PacketContext;
import org.onosproject.net.packet.PacketProcessor;
import org.onosproject.net.packet.PacketService;
import org.onosproject.net.provider.AbstractProvider;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.openstacknetworking.OpenstackNetworkingService;
import org.onosproject.openstacknetworking.OpenstackNetwork;
import org.onosproject.openstacknetworking.OpenstackPort;
import org.onosproject.openstacknetworking.OpenstackSubnet;
import org.slf4j.Logger;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.stream.Collectors;

import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
import static org.onlab.util.Tools.groupedThreads;
import static org.slf4j.LoggerFactory.getLogger;

/**
 * Provisions virtual tenant networks with service chaining capability
 * in OpenStack environment.
 */
@Component(immediate = true)
@Service
public class CordVtn extends AbstractProvider implements CordVtnService, HostProvider {

    protected final Logger log = getLogger(getClass());

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected CoreService coreService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected NetworkConfigRegistry configRegistry;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected NetworkConfigService configService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected HostProviderRegistry hostProviderRegistry;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DeviceService deviceService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected HostService hostService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DriverService driverService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected FlowRuleService flowRuleService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected PacketService packetService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected MastershipService mastershipService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected GroupService groupService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected OpenstackNetworkingService openstackService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DhcpService dhcpService;

    private final ConfigFactory configFactory =
            new ConfigFactory(SubjectFactories.APP_SUBJECT_FACTORY, CordVtnConfig.class, "cordvtn") {
                @Override
                public CordVtnConfig createConfig() {
                    return new CordVtnConfig();
                }
            };

    private static final String DEFAULT_TUNNEL = "vxlan";
    private static final String SERVICE_ID = "serviceId";
    private static final String OPENSTACK_VM_ID = "openstackVmId";
    private static final String OPENSTACK_PORT_ID = "openstackPortId";
    private static final String DATA_PLANE_IP = "dataPlaneIp";
    private static final String DATA_PLANE_INTF = "dataPlaneIntf";
    private static final String S_TAG = "stag";

    private static final Ip4Address DEFAULT_DNS = Ip4Address.valueOf("8.8.8.8");

    private final ExecutorService eventExecutor =
            newSingleThreadScheduledExecutor(groupedThreads("onos/cordvtn", "event-handler"));

    private final PacketProcessor packetProcessor = new InternalPacketProcessor();
    private final HostListener hostListener = new InternalHostListener();
    private final NetworkConfigListener configListener = new InternalConfigListener();

    private ApplicationId appId;
    private HostProviderService hostProvider;
    private CordVtnRuleInstaller ruleInstaller;
    private CordVtnArpProxy arpProxy;
    private volatile MacAddress privateGatewayMac = MacAddress.NONE;

    /**
     * Creates an cordvtn host location provider.
     */
    public CordVtn() {
        super(new ProviderId("host", CORDVTN_APP_ID));
    }

    @Activate
    protected void activate() {
        appId = coreService.registerApplication("org.onosproject.cordvtn");
        ruleInstaller = new CordVtnRuleInstaller(appId, flowRuleService,
                                                 deviceService,
                                                 driverService,
                                                 groupService,
                                                 mastershipService,
                                                 DEFAULT_TUNNEL);

        arpProxy = new CordVtnArpProxy(appId, packetService, hostService);
        packetService.addProcessor(packetProcessor, PacketProcessor.director(0));
        arpProxy.requestPacket();

        hostService.addListener(hostListener);
        hostProvider = hostProviderRegistry.register(this);

        configRegistry.registerConfigFactory(configFactory);
        configService.addListener(configListener);
        readConfiguration();

        log.info("Started");
    }

    @Deactivate
    protected void deactivate() {
        hostProviderRegistry.unregister(this);
        hostService.removeListener(hostListener);

        packetService.removeProcessor(packetProcessor);

        configRegistry.unregisterConfigFactory(configFactory);
        configService.removeListener(configListener);

        eventExecutor.shutdown();

        log.info("Stopped");
    }

    @Override
    public void triggerProbe(Host host) {
        /*
         * Note: In CORD deployment, we assume that all hosts are configured.
         * Therefore no probe is required.
         */
    }

    @Override
    public void createServiceDependency(CordServiceId tServiceId, CordServiceId pServiceId,
                                        boolean isBidirectional) {
        CordService tService = getCordService(tServiceId);
        CordService pService = getCordService(pServiceId);

        if (tService == null || pService == null) {
            log.error("Failed to create CordService for {}", tServiceId.id());
            return;
        }

        log.info("Service dependency from {} to {} created.", tService.id().id(), pService.id().id());
        ruleInstaller.populateServiceDependencyRules(tService, pService, isBidirectional);
    }

    @Override
    public void removeServiceDependency(CordServiceId tServiceId, CordServiceId pServiceId) {
        CordService tService = getCordService(tServiceId);
        CordService pService = getCordService(pServiceId);

        if (tService == null || pService == null) {
            log.error("Failed to create CordService for {}", tServiceId.id());
            return;
        }

        log.info("Service dependency from {} to {} removed.", tService.id().id(), pService.id().id());
        ruleInstaller.removeServiceDependencyRules(tService, pService);
    }

    @Override
    public void addServiceVm(CordVtnNode node, ConnectPoint connectPoint) {
        Port port = deviceService.getPort(connectPoint.deviceId(), connectPoint.port());
        OpenstackPort vPort = openstackService.port(port);
        if (vPort == null) {
            log.warn("Failed to get OpenstackPort for {}", getPortName(port));
            return;
        }

        MacAddress mac = vPort.macAddress();
        HostId hostId = HostId.hostId(mac);

        Host host = hostService.getHost(hostId);
        if (host != null) {
            // Host is already known to the system, no HOST_ADDED event is triggered in this case.
            // It happens when the application is restarted.
            String vmId = host.annotations().value(OPENSTACK_VM_ID);
            if (vmId != null && vmId.equals(vPort.deviceId())) {
                serviceVmAdded(host);
                return;
            } else {
                hostProvider.hostVanished(host.id());
            }
        }

        Set<IpAddress> ip = Sets.newHashSet(vPort.fixedIps().values());
        DefaultAnnotations.Builder annotations = DefaultAnnotations.builder()
                .set(SERVICE_ID, vPort.networkId())
                .set(OPENSTACK_VM_ID, vPort.deviceId())
                .set(OPENSTACK_PORT_ID, vPort.id())
                .set(DATA_PLANE_IP, node.dpIp().ip().toString())
                .set(DATA_PLANE_INTF, node.dpIntf());

        String serviceVlan = getServiceVlan(vPort);
        if (serviceVlan != null) {
            annotations.set(S_TAG, serviceVlan);
        }

        HostDescription hostDesc = new DefaultHostDescription(
                mac,
                VlanId.NONE,
                new HostLocation(connectPoint, System.currentTimeMillis()),
                ip,
                annotations.build());

        hostProvider.hostDetected(hostId, hostDesc, false);
    }

    @Override
    public void removeServiceVm(ConnectPoint connectPoint) {
        hostService.getConnectedHosts(connectPoint)
                .stream()
                .forEach(host -> hostProvider.hostVanished(host.id()));
    }

    @Override
    public void updateVirtualSubscriberGateways(HostId vSgHostId, String serviceVlan,
                                                Map<IpAddress, MacAddress> vSgs) {
        Host vSgVm = hostService.getHost(vSgHostId);

        if (vSgVm == null || !vSgVm.annotations().value(S_TAG).equals(serviceVlan)) {
            log.debug("Invalid vSG updates for {}", serviceVlan);
            return;
        }

        log.info("Updates vSGs in {} with {}", vSgVm.id(), vSgs.toString());

        vSgs.entrySet().stream()
                .forEach(entry -> addVirtualSubscriberGateway(
                        vSgVm,
                        entry.getKey(),
                        entry.getValue(),
                        serviceVlan));

        hostService.getConnectedHosts(vSgVm.location()).stream()
                .filter(host -> !host.mac().equals(vSgVm.mac()))
                .filter(host -> !vSgs.values().contains(host.mac()))
                .forEach(host -> {
                    log.info("Removed vSG {}", host.toString());
                    hostProvider.hostVanished(host.id());
                });

        ruleInstaller.populateSubscriberGatewayRules(vSgVm, vSgs.keySet());
    }

    /**
     * Adds virtual subscriber gateway to the system.
     *
     * @param vSgHost host virtual machine of this vSG
     * @param vSgIp vSG ip address
     * @param vSgMac vSG mac address
     * @param serviceVlan service vlan
     */
    private void addVirtualSubscriberGateway(Host vSgHost, IpAddress vSgIp, MacAddress vSgMac,
                                             String serviceVlan) {
        HostId hostId = HostId.hostId(vSgMac);
        Host host = hostService.getHost(hostId);
        if (host != null) {
            log.trace("vSG with {} already exists", vSgMac.toString());
            return;
        }

        log.info("vSG with IP({}) MAC({}) detected", vSgIp.toString(), vSgMac.toString());

        DefaultAnnotations.Builder annotations = DefaultAnnotations.builder()
                .set(S_TAG, serviceVlan);

        HostDescription hostDesc = new DefaultHostDescription(
                vSgMac,
                VlanId.NONE,
                vSgHost.location(),
                Sets.newHashSet(vSgIp),
                annotations.build());

        hostProvider.hostDetected(hostId, hostDesc, false);
    }

    /**
     * Returns CordService by service ID.
     *
     * @param serviceId service id
     * @return cord service, or null if it fails to get network from OpenStack
     */
    private CordService getCordService(CordServiceId serviceId) {
        OpenstackNetwork vNet = openstackService.network(serviceId.id());
        if (vNet == null) {
            log.warn("Couldn't find OpenStack network for service {}", serviceId.id());
            return null;
        }

        OpenstackSubnet subnet = vNet.subnets().stream()
                .findFirst()
                .orElse(null);
        if (subnet == null) {
            log.warn("Couldn't find OpenStack subnet for service {}", serviceId.id());
            return null;
        }

        Set<CordServiceId> tServices = Sets.newHashSet();
        // TODO get tenant services from XOS

        Map<Host, IpAddress> hosts = getHostsWithOpenstackNetwork(vNet)
                .stream()
                .collect(Collectors.toMap(host -> host, this::getTunnelIp));

        return new CordService(vNet, subnet, hosts, tServices);
    }

    /**
     * Returns CordService by OpenStack network.
     *
     * @param vNet OpenStack network
     * @return cord service
     */
    private CordService getCordService(OpenstackNetwork vNet) {
        checkNotNull(vNet);

        CordServiceId serviceId = CordServiceId.of(vNet.id());
        OpenstackSubnet subnet = vNet.subnets().stream()
                .findFirst()
                .orElse(null);
        if (subnet == null) {
            log.warn("Couldn't find OpenStack subnet for service {}", serviceId);
            return null;
        }

        Set<CordServiceId> tServices = Sets.newHashSet();
        // TODO get tenant services from XOS

        Map<Host, IpAddress> hosts = getHostsWithOpenstackNetwork(vNet)
                .stream()
                .collect(Collectors.toMap(host -> host, this::getTunnelIp));

        return new CordService(vNet, subnet, hosts, tServices);
    }

    /**
     * Returns IP address for tunneling for a given host.
     *
     * @param host host
     * @return ip address, or null
     */
    private IpAddress getTunnelIp(Host host) {
        String ip = host.annotations().value(DATA_PLANE_IP);
        return ip == null ? null : IpAddress.valueOf(ip);
    }

    /**
     * Returns port name.
     *
     * @param port port
     * @return port name
     */
    private String getPortName(Port port) {
        return port.annotations().value("portName");
    }

    /**
     * Returns s-tag from a given OpenStack port.
     *
     * @param vPort openstack port
     * @return s-tag string
     */
    private String getServiceVlan(OpenstackPort vPort) {
        checkNotNull(vPort);

        if (vPort.name() != null && vPort.name().startsWith(S_TAG)) {
            return vPort.name().split("-")[1];
        } else {
            return null;
        }
    }

    /**
     * Returns hosts associated with a given OpenStack network.
     *
     * @param vNet openstack network
     * @return set of hosts
     */
    private Set<Host> getHostsWithOpenstackNetwork(OpenstackNetwork vNet) {
        checkNotNull(vNet);

        Set<Host> hosts = openstackService.ports(vNet.id()).stream()
                .filter(port -> port.deviceOwner().contains("compute"))
                .map(port -> hostService.getHostsByMac(port.macAddress())
                        .stream()
                        .findFirst()
                        .orElse(null))
                .collect(Collectors.toSet());

        hosts.remove(null);
        return hosts;
    }

    /**
     * Returns public ip addresses of vSGs running inside a give vSG host.
     *
     * @param vSgHost vSG host
     * @return map of ip and mac address, or empty map
     */
    private Map<IpAddress, MacAddress> getSubscriberGateways(Host vSgHost) {
        String vPortId = vSgHost.annotations().value(OPENSTACK_PORT_ID);
        String serviceVlan = vSgHost.annotations().value(S_TAG);

        OpenstackPort vPort = openstackService.port(vPortId);
        if (vPort == null) {
            log.warn("Failed to get OpenStack port {} for VM {}", vPortId, vSgHost.id());
            return Maps.newHashMap();
        }

        if (!serviceVlan.equals(getServiceVlan(vPort))) {
            log.error("Host({}) s-tag does not match with vPort s-tag", vSgHost.id());
            return Maps.newHashMap();
        }

        return vPort.allowedAddressPairs();
    }

    /**
     * Registers static DHCP lease for a given host.
     *
     * @param host host
     * @param service cord service
     */
    private void registerDhcpLease(Host host, CordService service) {
        List<Ip4Address> options = Lists.newArrayList();
        options.add(Ip4Address.makeMaskPrefix(service.serviceIpRange().prefixLength()));
        options.add(service.serviceIp().getIp4Address());
        options.add(service.serviceIp().getIp4Address());
        options.add(DEFAULT_DNS);

        log.debug("Set static DHCP mapping for {}", host.mac());
        dhcpService.setStaticMapping(host.mac(),
                                     host.ipAddresses().stream().findFirst().get().getIp4Address(),
                                     true,
                                     options);
    }

    /**
     * Handles VM detected situation.
     *
     * @param host host
     */
    private void serviceVmAdded(Host host) {
        String vNetId = host.annotations().value(SERVICE_ID);
        if (vNetId == null) {
            // ignore this host, it is not the service VM, or it's a vSG
            return;
        }

        OpenstackNetwork vNet = openstackService.network(vNetId);
        if (vNet == null) {
            log.warn("Failed to get OpenStack network {} for VM {}({}).",
                     vNetId, host.id(), host.annotations().value(OPENSTACK_VM_ID));
            return;
        }

        log.info("VM {} is detected, MAC: {} IP: {}",
                 host.annotations().value(OPENSTACK_VM_ID),
                 host.mac(),
                 host.ipAddresses().stream().findFirst().get());

        CordService service = getCordService(vNet);
        if (service == null) {
            return;
        }

        if (service.serviceType().equals(CordService.ServiceType.MANAGEMENT)) {
            ruleInstaller.populateManagementNetworkRules(host, service);
        } else {
            // TODO check if the service needs an update on its group buckets after done CORD-433
            ruleInstaller.updateServiceGroup(service);
            arpProxy.addGateway(service.serviceIp(), privateGatewayMac);

            // sends gratuitous ARP here for the case of adding existing VMs
            // when ONOS or cordvtn app is restarted
            arpProxy.sendGratuitousArpForGateway(service.serviceIp(), Sets.newHashSet(host));
        }

        registerDhcpLease(host, service);

        ruleInstaller.populateBasicConnectionRules(host, getTunnelIp(host), vNet);

        String serviceVlan = host.annotations().value(S_TAG);
        if (serviceVlan != null) {
            log.debug("vSG VM detected {}", host.id());

            Map<IpAddress, MacAddress> vSgs = getSubscriberGateways(host);
            vSgs.entrySet().stream()
                    .forEach(entry -> addVirtualSubscriberGateway(
                            host,
                            entry.getKey(),
                            entry.getValue(),
                            serviceVlan));

            ruleInstaller.populateSubscriberGatewayRules(host, vSgs.keySet());
        }
    }

    /**
     * Handles VM removed situation.
     *
     * @param host host
     */
    private void serviceVmRemoved(Host host) {
        String vNetId = host.annotations().value(SERVICE_ID);
        if (vNetId == null) {
            // ignore it, it's not the service VM or it's a vSG
            String serviceVlan = host.annotations().value(S_TAG);
            if (serviceVlan != null) {
                log.info("vSG {} removed", host.id());
            }
            return;
        }

        OpenstackNetwork vNet = openstackService.network(vNetId);
        if (vNet == null) {
            log.warn("Failed to get OpenStack network {} for VM {}({}).",
                     vNetId, host.id(), host.annotations().value(OPENSTACK_VM_ID));
            return;
        }

        log.info("VM {} is vanished, MAC: {} IP: {}",
                 host.annotations().value(OPENSTACK_VM_ID),
                 host.mac(),
                 host.ipAddresses().stream().findFirst().get());

        ruleInstaller.removeBasicConnectionRules(host);
        dhcpService.removeStaticMapping(host.mac());

        CordService service = getCordService(vNet);
        if (service == null) {
            return;
        }

        if (service.serviceType().equals(CordService.ServiceType.MANAGEMENT)) {
            ruleInstaller.removeManagementNetworkRules(host, service);
        } else {
            // TODO check if the service needs an update on its group buckets after done CORD-433
            ruleInstaller.updateServiceGroup(service);

            if (getHostsWithOpenstackNetwork(vNet).isEmpty()) {
                arpProxy.removeGateway(service.serviceIp());
            }
        }
    }

    /**
     * Sets service network gateway MAC address and sends out gratuitous ARP to all
     * VMs to update the gateway MAC address.
     *
     * @param newMac mac address to update
     */
    private void setPrivateGatewayMac(MacAddress newMac) {
        if (newMac == null || newMac.equals(privateGatewayMac)) {
            // no updates, do nothing
            return;
        }

        privateGatewayMac = newMac;
        log.debug("Set service gateway MAC address to {}", privateGatewayMac.toString());

        // TODO get existing service list from XOS and replace the loop below
        Set<String> vNets = Sets.newHashSet();
        hostService.getHosts().forEach(host -> vNets.add(host.annotations().value(SERVICE_ID)));
        vNets.remove(null);

        vNets.stream().forEach(vNet -> {
            CordService service = getCordService(CordServiceId.of(vNet));
            if (service != null) {
                arpProxy.addGateway(service.serviceIp(), privateGatewayMac);
                arpProxy.sendGratuitousArpForGateway(service.serviceIp(), service.hosts().keySet());
            }
        });
    }

    /**
     * Sets public gateway MAC address.
     *
     * @param publicGateways gateway ip and mac address pairs
     */
    private void setPublicGatewayMac(Map<IpAddress, MacAddress> publicGateways) {
        publicGateways.entrySet()
                .stream()
                .forEach(entry -> {
                    arpProxy.addGateway(entry.getKey(), entry.getValue());
                    log.debug("Added public gateway IP {}, MAC {}",
                              entry.getKey().toString(), entry.getValue().toString());
                });
        // TODO notice gateway MAC change to VMs holds this gateway IP
    }

    /**
     * Updates configurations.
*/ private void readConfiguration() { CordVtnConfig config = configRegistry.getConfig(appId, CordVtnConfig.class); if (config == null) { log.debug("No configuration found"); return; } setPrivateGatewayMac(config.privateGatewayMac()); setPublicGatewayMac(config.publicGateways()); } private class InternalHostListener implements HostListener { @Override public void event(HostEvent event) { Host host = event.subject(); switch (event.type()) { case HOST_ADDED: eventExecutor.submit(() -> serviceVmAdded(host)); break; case HOST_REMOVED: eventExecutor.submit(() -> serviceVmRemoved(host)); break; default: break; } } } private class InternalPacketProcessor implements PacketProcessor { @Override public void process(PacketContext context) { if (context.isHandled()) { return; } Ethernet ethPacket = context.inPacket().parsed(); if (ethPacket == null || ethPacket.getEtherType() != Ethernet.TYPE_ARP) { return; } arpProxy.processArpPacket(context, ethPacket); } } private class InternalConfigListener implements NetworkConfigListener { @Override public void event(NetworkConfigEvent event) { if (!event.configClass().equals(CordVtnConfig.class)) { return; } switch (event.type()) { case CONFIG_ADDED: case CONFIG_UPDATED: log.info("Network configuration changed"); eventExecutor.execute(CordVtn.this::readConfiguration); break; default: break; } } } }
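// A minimal, framework-free sketch of the pattern that setPrivateGatewayMac and
// setPublicGatewayMac above rely on: keep a gateway IP -> MAC table and re-announce
// (gratuitous ARP) a gateway only when its mapping actually changes. All names below
// are illustrative plain Java, not the ONOS/cordvtn API.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiConsumer;

final class GatewayTable {
    private final Map<String, String> macByGatewayIp = new ConcurrentHashMap<>();
    private final BiConsumer<String, String> announce; // stands in for sending a gratuitous ARP

    GatewayTable(BiConsumer<String, String> announce) {
        this.announce = announce;
    }

    /** Adds or updates a gateway mapping; announces only on an actual change. */
    void putGateway(String ip, String mac) {
        String previous = macByGatewayIp.put(ip, mac);
        if (!mac.equals(previous)) {
            announce.accept(ip, mac);
        }
    }

    /** Removes a gateway once no host on its network remains (cf. serviceVmRemoved). */
    void removeGateway(String ip) {
        macByGatewayIp.remove(ip);
    }
}
// Usage sketch:
//   GatewayTable table = new GatewayTable((ip, mac) -> System.out.println("GARP " + ip + " is-at " + mac));
//   table.putGateway("10.0.0.1", "fa:00:00:00:00:01"); // announces once; repeating the call is silent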
Don't proxy ARP with the private gateway MAC address for public networks. Change-Id: Ice16a39cc9fe349e22f61131d24f898744a765db
apps/cordvtn/src/main/java/org/onosproject/cordvtn/CordVtn.java
Don't proxy ARP with the private gateway MAC address for public networks.
<ide><path>apps/cordvtn/src/main/java/org/onosproject/cordvtn/CordVtn.java
<ide>         return;
<ide>     }
<ide>
<del>        if (service.serviceType().equals(CordService.ServiceType.MANAGEMENT)) {
<del>            ruleInstaller.populateManagementNetworkRules(host, service);
<del>        } else {
<del>            // TODO check if the service needs an update on its group buckets after done CORD-433
<del>            ruleInstaller.updateServiceGroup(service);
<del>            arpProxy.addGateway(service.serviceIp(), privateGatewayMac);
<del>
<del>            // sends gratuitous ARP here for the case of adding existing VMs
<del>            // when ONOS or cordvtn app is restarted
<del>            arpProxy.sendGratuitousArpForGateway(service.serviceIp(), Sets.newHashSet(host));
<add>        switch (service.serviceType()) {
<add>            case MANAGEMENT:
<add>                ruleInstaller.populateManagementNetworkRules(host, service);
<add>                break;
<add>            case PRIVATE:
<add>            case PRIVATE_INDIRECT:
<add>            case PRIVATE_DIRECT:
<add>                arpProxy.addGateway(service.serviceIp(), privateGatewayMac);
<add>            case PUBLIC_INDIRECT:
<add>            case PUBLIC_DIRECT:
<add>            default:
<add>                // TODO check if the service needs an update on its group buckets after done CORD-433
<add>                ruleInstaller.updateServiceGroup(service);
<add>                // sends gratuitous ARP here for the case of adding existing VMs
<add>                // when ONOS or cordvtn app is restarted
<add>                arpProxy.sendGratuitousArpForGateway(service.serviceIp(), Sets.newHashSet(host));
<add>                break;
<ide>     }
<ide>
<ide>         registerDhcpLease(host, service);
<ide>         return;
<ide>     }
<ide>
<del>        if (service.serviceType().equals(CordService.ServiceType.MANAGEMENT)) {
<del>            ruleInstaller.removeManagementNetworkRules(host, service);
<del>        } else {
<del>            // TODO check if the service needs an update on its group buckets after done CORD-433
<del>            ruleInstaller.updateServiceGroup(service);
<del>
<del>            if (getHostsWithOpenstackNetwork(vNet).isEmpty()) {
<del>                arpProxy.removeGateway(service.serviceIp());
<del>            }
<add>        switch (service.serviceType()) {
<add>            case MANAGEMENT:
<add>                ruleInstaller.removeManagementNetworkRules(host, service);
<add>                break;
<add>            case PRIVATE:
<add>            case PRIVATE_INDIRECT:
<add>            case PRIVATE_DIRECT:
<add>                if (getHostsWithOpenstackNetwork(vNet).isEmpty()) {
<add>                    arpProxy.removeGateway(service.serviceIp());
<add>                }
<add>            case PUBLIC_INDIRECT:
<add>            case PUBLIC_DIRECT:
<add>            default:
<add>                // TODO check if the service needs an update on its group buckets after done CORD-433
<add>                ruleInstaller.updateServiceGroup(service);
<add>                break;
<ide>     }
<ide> }
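// The diff above swaps the if/else on the service type for a switch that deliberately
// falls through from the PRIVATE* cases (which first register the proxied private
// gateway MAC) into the group-update and gratuitous-ARP handling shared with the
// PUBLIC* cases. A runnable sketch of that intentional fall-through, using an
// illustrative enum rather than the real CordService.ServiceType:
enum DemoServiceType { MANAGEMENT, PRIVATE, PRIVATE_INDIRECT, PRIVATE_DIRECT, PUBLIC_INDIRECT, PUBLIC_DIRECT }

final class FallThroughDemo {
    static void onServiceVmAdded(DemoServiceType type) {
        switch (type) {
            case MANAGEMENT:
                System.out.println("populate management network rules");
                break;
            case PRIVATE:
            case PRIVATE_INDIRECT:
            case PRIVATE_DIRECT:
                System.out.println("proxy ARP for the private gateway");
                // intentional fall-through: private types also need the shared handling below
            case PUBLIC_INDIRECT:
            case PUBLIC_DIRECT:
            default:
                System.out.println("update service group and send gratuitous ARP");
                break;
        }
    }

    public static void main(String[] args) {
        onServiceVmAdded(DemoServiceType.PRIVATE);       // prints the private line, then the shared line
        onServiceVmAdded(DemoServiceType.PUBLIC_DIRECT); // prints only the shared line
    }
}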
Java
apache-2.0
a896e0539ce36225676e279b4907ac899e62afa4
0
ricepanda/rice-git3,ricepanda/rice-git2,ricepanda/rice-git2,kuali/rice-playground,ricepanda/rice-git2,ricepanda/rice-git3,kuali/rice-playground,kuali/rice-playground,ricepanda/rice-git3,kuali/rice-playground,ricepanda/rice-git3,ricepanda/rice-git2
/** * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krad.web.form; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.UUID; import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang.StringUtils; import org.codehaus.jackson.map.ObjectMapper; import org.kuali.rice.krad.service.KRADServiceLocatorWeb; import org.kuali.rice.krad.uif.UifConstants; import org.kuali.rice.krad.uif.UifConstants.ViewType; import org.kuali.rice.krad.uif.UifParameters; import org.kuali.rice.krad.uif.UifPropertyPaths; import org.kuali.rice.krad.uif.component.Component; import org.kuali.rice.krad.uif.lifecycle.ViewPostMetadata; import org.kuali.rice.krad.uif.service.ViewHelperService; import org.kuali.rice.krad.uif.service.ViewService; import org.kuali.rice.krad.uif.util.SessionTransient; import org.kuali.rice.krad.uif.view.View; import org.kuali.rice.krad.uif.view.ViewModel; import org.kuali.rice.krad.util.KRADUtils; import org.kuali.rice.krad.web.bind.RequestAccessible; import org.springframework.web.multipart.MultipartFile; /** * Base form class for views within the KRAD User Interface Framework. 
* * <p>Holds properties necessary to determine the {@link org.kuali.rice.krad.uif.view.View} instance that * will be used to render the user interface</p> * * @author Kuali Rice Team ([email protected]) */ public class UifFormBase implements ViewModel { private static final long serialVersionUID = 8432543267099454434L; @RequestAccessible protected String viewId; @RequestAccessible protected String viewName; @RequestAccessible protected ViewType viewTypeName; @RequestAccessible protected String pageId; @RequestAccessible protected String methodToCall; @RequestAccessible protected String formKey; @RequestAccessible @SessionTransient protected String requestedFormKey; @RequestAccessible protected String flowKey; protected String sessionId; protected int sessionTimeoutInterval; @SessionTransient protected HistoryFlow historyFlow; @SessionTransient protected HistoryManager historyManager; @RequestAccessible @SessionTransient protected String jumpToId; @SessionTransient protected String jumpToName; @RequestAccessible @SessionTransient protected String focusId; @RequestAccessible @SessionTransient protected boolean dirtyForm; protected String formPostUrl; protected String controllerMapping; @SessionTransient private String requestUrl; private Map<String, String[]> initialRequestParameters; protected String state; protected List<String> viewsThatNeedDefaultValuesApplied; @RequestAccessible protected boolean renderedInLightBox; @RequestAccessible protected boolean renderedInIframe; @SessionTransient protected String growlScript; @SessionTransient protected View view; protected ViewPostMetadata viewPostMetadata; protected Map<String, String> viewRequestParameters; protected List<String> readOnlyFieldsList; protected Map<String, Object> newCollectionLines; @RequestAccessible @SessionTransient protected String triggerActionId; @RequestAccessible @SessionTransient protected Map<String, String> actionParameters; protected Map<String, Object> clientStateForSyncing; @SessionTransient protected Map<String, Set<String>> selectedCollectionLines; protected Set<String> selectedLookupResultsCache; protected List<Object> addedCollectionItems; @SessionTransient protected MultipartFile attachmentFile; // navigation @RequestAccessible protected String returnLocation; @RequestAccessible protected String returnFormKey; @RequestAccessible @SessionTransient protected boolean ajaxRequest; @RequestAccessible @SessionTransient protected String ajaxReturnType; @SessionTransient private String requestJsonTemplate; @SessionTransient private boolean collectionPagingRequest; // dialog fields @RequestAccessible @SessionTransient protected String returnDialogId; @SessionTransient protected String returnDialogResponse; protected Map<String, String> dialogExplanations; protected Map<String, DialogResponse> dialogResponses; @SessionTransient protected boolean requestRedirected; @RequestAccessible @SessionTransient protected String updateComponentId; @SessionTransient private Component updateComponent; @RequestAccessible protected Map<String, Object> extensionData; protected Map<String, String> queryParameters; public UifFormBase() { renderedInLightBox = false; renderedInIframe = false; requestRedirected = false; readOnlyFieldsList = new ArrayList<String>(); viewRequestParameters = new HashMap<String, String>(); newCollectionLines = new HashMap<String, Object>(); actionParameters = new HashMap<String, String>(); clientStateForSyncing = new HashMap<String, Object>(); selectedCollectionLines = new HashMap<String, Set<String>>(); 
selectedLookupResultsCache = new HashSet<String>(); addedCollectionItems = new ArrayList<Object>(); dialogExplanations = new HashMap<String, String>(); dialogResponses = new HashMap<String,DialogResponse>(); extensionData = new HashMap<String, Object>(); queryParameters = new HashMap<String, String>(); } /** * {@inheritDoc} */ @Override public void preBind(HttpServletRequest request) { // do nothing - here for framework } /** * {@inheritDoc} */ @Override public void postBind(HttpServletRequest request) { // assign form key if this is a new form or the requested form key is not in session UifFormManager uifFormManager = (UifFormManager) request.getSession().getAttribute(UifParameters.FORM_MANAGER); if (StringUtils.isBlank(formKey) || !uifFormManager.hasSessionForm(formKey)) { formKey = generateFormKey(); } // default form post URL to request URL formPostUrl = request.getRequestURL().toString(); if (request.getSession() != null) { sessionId = request.getSession().getId(); sessionTimeoutInterval = request.getSession().getMaxInactiveInterval(); } //set controller mapping property controllerMapping = request.getPathInfo(); // get any sent client view state and parse into map if (request.getParameterMap().containsKey(UifParameters.CLIENT_VIEW_STATE)) { String clientStateJSON = request.getParameter(UifParameters.CLIENT_VIEW_STATE); if (StringUtils.isNotBlank(clientStateJSON)) { // change single quotes to double quotes (necessary because the reverse was done for sending) clientStateJSON = StringUtils.replace(clientStateJSON, "\\'", "\""); clientStateJSON = StringUtils.replace(clientStateJSON, "\\[", "["); clientStateJSON = StringUtils.replace(clientStateJSON, "\\]", "]"); clientStateJSON = StringUtils.replace(clientStateJSON, "'", "\""); ObjectMapper mapper = new ObjectMapper(); try { clientStateForSyncing = mapper.readValue(clientStateJSON, Map.class); } catch (IOException e) { throw new RuntimeException("Unable to decode client side state JSON: " + clientStateJSON, e); } } } // populate read only fields list if (request.getParameter(UifParameters.READ_ONLY_FIELDS) != null) { String readOnlyFields = request.getParameter(UifParameters.READ_ONLY_FIELDS); setReadOnlyFieldsList(KRADUtils.convertStringParameterToList(readOnlyFields)); } // collect dialog response, or initialize new map of responses if (request.getParameter(UifParameters.RETURN_FROM_DIALOG) != null) { String dialogExplanation = null; if ((dialogExplanations != null) && dialogExplanations.containsKey(returnDialogId)) { dialogExplanation = dialogExplanations.get(returnDialogId); } DialogResponse response = new DialogResponse(returnDialogId, returnDialogResponse, dialogExplanation); this.dialogResponses.put(this.returnDialogId, response); } else { this.dialogResponses = new HashMap<String, DialogResponse>(); } // clean parameters from XSS attacks that will be written out as hiddens this.pageId = KRADUtils.stripXSSPatterns(this.pageId); this.methodToCall = KRADUtils.stripXSSPatterns(this.methodToCall); this.formKey = KRADUtils.stripXSSPatterns(this.formKey); this.requestedFormKey = KRADUtils.stripXSSPatterns(this.requestedFormKey); this.flowKey = KRADUtils.stripXSSPatterns(this.flowKey); this.sessionId = KRADUtils.stripXSSPatterns(this.sessionId); this.formPostUrl = KRADUtils.stripXSSPatterns(this.formPostUrl); this.returnLocation = KRADUtils.stripXSSPatterns(this.returnLocation); this.returnFormKey = KRADUtils.stripXSSPatterns(this.returnFormKey); this.requestUrl = KRADUtils.stripXSSPatterns(this.requestUrl); } /** * {@inheritDoc} */ 
@Override public void preRender(HttpServletRequest request) { // clear dialog properties so previous values do not appear for new dialogs this.returnDialogId = null; this.returnDialogResponse = null; this.dialogExplanations = new HashMap<String, String>(); } /** * Creates the unique id used to store this "conversation" in the session. * The default method generates a java UUID. * * @return UUID */ protected String generateFormKey() { return UUID.randomUUID().toString(); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getViewId() */ @Override public String getViewId() { return this.viewId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setViewId(String) */ @Override public void setViewId(String viewId) { this.viewId = viewId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getViewName() */ @Override public String getViewName() { return this.viewName; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setViewName(String) */ @Override public void setViewName(String viewName) { this.viewName = viewName; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getViewTypeName() */ @Override public ViewType getViewTypeName() { return this.viewTypeName; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setViewTypeName(org.kuali.rice.krad.uif.UifConstants.ViewType) */ @Override public void setViewTypeName(ViewType viewTypeName) { this.viewTypeName = viewTypeName; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getPageId() */ @Override public String getPageId() { return this.pageId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setPageId(String) */ @Override public void setPageId(String pageId) { this.pageId = pageId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getFormPostUrl() */ @Override public String getFormPostUrl() { return this.formPostUrl; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setFormPostUrl(String) */ @Override public void setFormPostUrl(String formPostUrl) { this.formPostUrl = formPostUrl; } /** * Name of the controllerMapping for this form (includes slash) * * @return the controllerMapping string */ public String getControllerMapping() { return controllerMapping; } /** * The current {@link HistoryFlow} for this form which stores a trail of urls/breadcrumbs primarily used for * path-based breadcrumb display * * @return the {@link HistoryFlow} */ public HistoryFlow getHistoryFlow() { return historyFlow; } /** * Set the current HistoryFlow for this form * * @param historyFlow */ public void setHistoryFlow(HistoryFlow historyFlow) { this.historyFlow = historyFlow; } /** * The current {@link HistoryManager} that was pulled from session which store all {@link HistoryFlow} objects in * the current session to keep track of the path the user has taken across views (primarily used by path-based * breadcrumbs) * * @return the HistoryManager */ public HistoryManager getHistoryManager() { return historyManager; } /** * Set the current HistoryManager * * @param historyManager */ public void setHistoryManager(HistoryManager historyManager) { this.historyManager = historyManager; } /** * The flowKey representing the HistoryFlow this form may be in. * * <p>This allows for a flow to continue by key or start (if set to "start"). 
* If null or blank, no flow (or path based * breadcrumbs) are being tracked.</p> * * @return the flowKey */ public String getFlowKey() { return flowKey; } /** * Set the flowKey * * @param flowKey */ public void setFlowKey(String flowKey) { this.flowKey = flowKey; } /** * The original requestUrl for the View represented by this form (url received by the controller for initial * request) * * @return the requestUrl */ public String getRequestUrl() { return requestUrl; } /** * Set the requestUrl * * @param requestUrl */ public void setRequestUrl(String requestUrl) { this.requestUrl = requestUrl; } /** * The requestParameters represent all the parameters in the query string that were initially passed to this View * by the initial request * * @return the requestParameters */ public Map<String, String[]> getInitialRequestParameters() { return initialRequestParameters; } /** * Set the requestParameters * * @param requestParameters */ public void setInitialRequestParameters(Map<String, String[]> requestParameters) { this.initialRequestParameters = requestParameters; } public String getReturnLocation() { return this.returnLocation; } public void setReturnLocation(String returnLocation) { this.returnLocation = returnLocation; } public String getReturnFormKey() { return this.returnFormKey; } public void setReturnFormKey(String returnFormKey) { this.returnFormKey = returnFormKey; } /** * Holds the id for the user's current session * * <p> * The user's session id is used to track when a timeout has occurred and enforce the policy * configured with the {@link org.kuali.rice.krad.uif.view.ViewSessionPolicy}. This property gets initialized * in the {@link #postBind(javax.servlet.http.HttpServletRequest)} method and then is written out as a * hidden on the view. Therefore each post done on the view will send back the session id when the view was * rendering, and the {@link org.kuali.rice.krad.web.filter.UifSessionTimeoutFilter} can use that to determine * if a timeout has occurred * </p> * * @return id for the user's current session */ public String getSessionId() { return sessionId; } /** * Holds the configured session timeout interval * * <p> * Holds the session timeout interval so it can be referenced to give the user notifications (for example the * session timeout warning reads this property). This is initialized from the session object in * {@link #postBind(javax.servlet.http.HttpServletRequest)} * </p> * * @return amount of time in milliseconds before the session will timeout */ public int getSessionTimeoutInterval() { return sessionTimeoutInterval; } /** * Identifies the controller method that should be invoked to fulfill a * request. 
The value will be matched up against the 'params' setting on the * {@code RequestMapping} annotation for the controller method * * @return String method to call */ public String getMethodToCall() { return this.methodToCall; } /** * Setter for the method to call * * @param methodToCall */ public void setMethodToCall(String methodToCall) { this.methodToCall = methodToCall; } /** * {@inheritDoc} */ @Override public Map<String, String> getViewRequestParameters() { return this.viewRequestParameters; } /** * {@inheritDoc} */ @Override public void setViewRequestParameters(Map<String, String> viewRequestParameters) { this.viewRequestParameters = viewRequestParameters; } /** * {@inheritDoc} */ @Override public List<String> getReadOnlyFieldsList() { return readOnlyFieldsList; } /** * {@inheritDoc} */ @Override public void setReadOnlyFieldsList(List<String> readOnlyFieldsList) { this.readOnlyFieldsList = readOnlyFieldsList; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getNewCollectionLines() */ @Override public Map<String, Object> getNewCollectionLines() { return this.newCollectionLines; } /** * {@inheritDoc} */ @Override public void setNewCollectionLines(Map<String, Object> newCollectionLines) { this.newCollectionLines = newCollectionLines; } /** * {@inheritDoc} */ @Override public String getTriggerActionId() { return triggerActionId; } /** * {@inheritDoc} */ @Override public void setTriggerActionId(String triggerActionId) { this.triggerActionId = triggerActionId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getActionParameters() */ @Override public Map<String, String> getActionParameters() { return this.actionParameters; } /** * Returns the action parameters map as a {@code Properties} instance * * @return Properties action parameters */ public Properties getActionParametersAsProperties() { return KRADUtils.convertMapToProperties(actionParameters); } /** * {@inheritDoc} */ @Override public void setActionParameters(Map<String, String> actionParameters) { this.actionParameters = actionParameters; } /** * Retrieves the value for the given action parameter, or empty string if * not found * * @param actionParameterName - name of the action parameter to retrieve value for * @return String parameter value or empty string */ public String getActionParamaterValue(String actionParameterName) { if ((actionParameters != null) && actionParameters.containsKey(actionParameterName)) { return actionParameters.get(actionParameterName); } return ""; } /** * Returns the action event that was sent in the action parameters (if any) * * <p> * The action event is a special action parameter that can be sent to indicate a type of action being taken. 
This * can be looked at by the view or components to render differently * </p> * * TODO: make sure action parameters are getting reinitialized on each request * * @return String action event name or blank if action event was not sent */ public String getActionEvent() { if ((actionParameters != null) && actionParameters.containsKey(UifConstants.UrlParams.ACTION_EVENT)) { return actionParameters.get(UifConstants.UrlParams.ACTION_EVENT); } return ""; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getClientStateForSyncing() */ @Override public Map<String, Object> getClientStateForSyncing() { return clientStateForSyncing; } /** * Setter for the client state * * @param clientStateForSyncing */ public void setClientStateForSyncing(Map<String, Object> clientStateForSyncing) { this.clientStateForSyncing = clientStateForSyncing; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getSelectedCollectionLines() */ @Override public Map<String, Set<String>> getSelectedCollectionLines() { return selectedCollectionLines; } /** * {@inheritDoc} */ @Override public void setSelectedCollectionLines(Map<String, Set<String>> selectedCollectionLines) { this.selectedCollectionLines = selectedCollectionLines; } /** * Holds Set of String identifiers for lines that were selected in a lookup collection results * across multiple pages. * The value in the cache is preserved in the session across multiple requests. This allows for the * server side paging of results to retain the user choices as they move through the pages. * * @return set of identifiers */ public Set<String> getSelectedLookupResultsCache() { return selectedLookupResultsCache; } /** * Sets the lookup result selection cache values * * @param selectedLookupResultsCache */ public void setSelectedLookupResultsCache(Set<String> selectedLookupResultsCache) { this.selectedLookupResultsCache = selectedLookupResultsCache; } /** * Key string that identifies the form instance in session storage * * <p> * When the view is posted, the previous form instance is retrieved and then * populated from the request parameters. This key string is retrieve the * session form from the session service * </p> * * @return String form session key */ public String getFormKey() { return this.formKey; } /** * Setter for the form's session key * * @param formKey */ public void setFormKey(String formKey) { this.formKey = formKey; } /** * This is the formKey sent on the original request. It may differ from the actual form key stored in formKey * based on if the form still exists in session by this key or not. * * @return the original requested form key */ public String getRequestedFormKey() { return requestedFormKey; } /** * Set the requestedFormKey * * @param requestedFormKey */ public void setRequestedFormKey(String requestedFormKey) { this.requestedFormKey = requestedFormKey; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getViewsThatNeedDefaultValuesApplied() */ @Override public List<String> getViewsThatNeedDefaultValuesApplied() { if(viewsThatNeedDefaultValuesApplied == null) { viewsThatNeedDefaultValuesApplied = new ArrayList<String>(); } return viewsThatNeedDefaultValuesApplied; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setgetViewsThatNeedDefaultValuesApplied(List<String>) */ @Override public void setViewsThatNeedDefaultValuesApplied(List<String> viewsThatNeedDefaultValuesApplied) { this.viewsThatNeedDefaultValuesApplied = viewsThatNeedDefaultValuesApplied; } /** * Adds unique view id to list of views that need default values applied. 
* * @param viewid */ public void addViewThatNeedsDefaultValuesApplied(String viewId) { if(!getViewsThatNeedDefaultValuesApplied().contains(viewId)) { viewsThatNeedDefaultValuesApplied.add(viewId); } } /** * Indicates whether a redirect has been requested for the view * * @return boolean true if redirect was requested, false if not */ public boolean isRequestRedirected() { return requestRedirected; } /** * Setter for the request redirect indicator * * @param requestRedirected */ public void setRequestRedirected(boolean requestRedirected) { this.requestRedirected = requestRedirected; } /** * Holder for files that are attached through the view * * @return MultipartFile representing the attachment */ public MultipartFile getAttachmentFile() { return this.attachmentFile; } /** * Setter for the form's attachment file * * @param attachmentFile */ public void setAttachmentFile(MultipartFile attachmentFile) { this.attachmentFile = attachmentFile; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getUpdateComponentId() */ @Override public String getUpdateComponentId() { return updateComponentId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setUpdateComponentId(java.lang.String) */ @Override public void setUpdateComponentId(String updateComponentId) { this.updateComponentId = updateComponentId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getUpdateComponent() */ public Component getUpdateComponent() { return updateComponent; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setUpdateComponent(org.kuali.rice.krad.uif.component.Component) */ public void setUpdateComponent(Component updateComponent) { this.updateComponent = updateComponent; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getView() */ @Override public View getView() { return this.view; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setView(org.kuali.rice.krad.uif.view.View) */ @Override public void setView(View view) { this.view = view; } /** * Returns an instance of the view's configured view helper service. * * <p>First checks if there is an initialized view containing a view helper instance. 
If not, and there is * a view id on the form, a call is made to retrieve the view helper instance or class configuration.</p> * * {@inheritDoc} */ @Override public ViewHelperService getViewHelperService() { if ((getView() != null) && (getView().getViewHelperService() != null)) { return getView().getViewHelperService(); } String viewId = getViewId(); if (StringUtils.isBlank(viewId) && (getView() != null)) { viewId = getView().getId(); } if (StringUtils.isBlank(viewId)) { return null; } ViewHelperService viewHelperService = (ViewHelperService) KRADServiceLocatorWeb.getDataDictionaryService().getDictionaryBeanProperty(viewId, UifPropertyPaths.VIEW_HELPER_SERVICE); if (viewHelperService == null) { Class<?> viewHelperServiceClass = (Class<?>) KRADServiceLocatorWeb.getDataDictionaryService().getDictionaryBeanProperty(viewId, UifPropertyPaths.VIEW_HELPER_SERVICE_CLASS); if (viewHelperServiceClass != null) { try { viewHelperService = (ViewHelperService) viewHelperServiceClass.newInstance(); } catch (Exception e) { throw new RuntimeException("Unable to instantiate view helper class: " + viewHelperServiceClass, e); } } } return viewHelperService; } /** * {@inheritDoc} */ @Override public ViewPostMetadata getViewPostMetadata() { return viewPostMetadata; } /** * @see UifFormBase#getViewPostMetadata() */ @Override public void setViewPostMetadata(ViewPostMetadata viewPostMetadata) { this.viewPostMetadata = viewPostMetadata; } /** * Instance of the {@code ViewService} that can be used to retrieve * {@code View} instances * * @return ViewService implementation */ protected ViewService getViewService() { return KRADServiceLocatorWeb.getViewService(); } /** * The jumpToId for this form, the element with this id will be jumped to automatically * when the form is loaded in the view. * Using "TOP" or "BOTTOM" will jump to the top or the bottom of the resulting page. * jumpToId always takes precedence over jumpToName, if set. * * @return the jumpToId */ public String getJumpToId() { return this.jumpToId; } /** * @param jumpToId the jumpToId to set */ public void setJumpToId(String jumpToId) { this.jumpToId = jumpToId; } /** * The jumpToName for this form, the element with this name will be jumped to automatically * when the form is loaded in the view. * WARNING: jumpToId always takes precedence over jumpToName, if set. * * @return the jumpToName */ public String getJumpToName() { return this.jumpToName; } /** * @param jumpToName the jumpToName to set */ public void setJumpToName(String jumpToName) { this.jumpToName = jumpToName; } /** * Field to place focus on when the page loads * An empty focusId will result in focusing on the first visible input element by default. * * @return the focusId */ public String getFocusId() { return this.focusId; } /** * @param focusId the focusId to set */ public void setFocusId(String focusId) { this.focusId = focusId; } /** * True when the form is considered dirty (data has changed from original value), false otherwise * * <p>For most scenarios, this flag should NOT be set to true. * If this is set, it must be managed explicitly by the application. This flag exists for marking a * form dirty from a server call, so it must be changed to false when the form is no longer considered dirty. 
* The krad save Action and navigate methodToCall resets this flag back to false, but any other setting of * this flag must be managed by custom configuration/methods, if custom dirtyForm management is needed.</p> * * @return true if the form is considered dirty, false otherwise */ public boolean isDirtyForm() { return dirtyForm; } /** * Sets the dirtyForm flag * * <p>For most scenarios, this flag should NOT be set to true. * If this is set, it must be managed explicitly by the application. This flag exists for marking a * form dirty from a server call, so it must be changed to false when the form is no longer considered dirty. * The krad save Action and navigate methodToCall resets this flag back to false, but any other setting of * this flag must be managed by custom configuration/methods, if custom dirtyForm management is needed.</p> * * @param dirtyForm */ public void setDirtyForm(boolean dirtyForm) { this.dirtyForm = dirtyForm; } /** * Set the dirtyForm flag using a String that will be converted to boolean * * @param dirtyForm */ public void setDirtyForm(String dirtyForm) { if(dirtyForm != null){ this.dirtyForm = Boolean.parseBoolean(dirtyForm); } } /** * Indicates whether the view is rendered within a lightbox * * <p> * Some discussion (for example how a close button behaves) need to change based on whether the * view is rendered within a lightbox or the standard browser window. This boolean is true when it is * within a lightbox * </p> * * @return boolean true if view is rendered within a lightbox, false if not */ public boolean isRenderedInLightBox() { return this.renderedInLightBox; } /** * Setter for the rendered within lightbox indicator * * @param renderedInLightBox */ public void setRenderedInLightBox(boolean renderedInLightBox) { this.renderedInLightBox = renderedInLightBox; } /** * Indicates whether the view is rendered within an iframe (this setting must be passed to the View on the url) * * @return boolean true if view is rendered within a iframe, false if not */ public boolean isRenderedInIframe() { return renderedInIframe; } /** * @see org.kuali.rice.krad.web.form.UifFormBase#isRenderedInIframe() */ public void setRenderedInIframe(boolean renderedInIframe) { this.renderedInIframe = renderedInIframe; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getGrowlScript() */ @Override public String getGrowlScript() { return growlScript; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setGrowlScript(String) */ @Override public void setGrowlScript(String growlScript) { this.growlScript = growlScript; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getState() */ @Override public String getState() { return state; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setState(String) */ @Override public void setState(String state) { this.state = state; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isAjaxRequest() */ @Override public boolean isAjaxRequest() { return ajaxRequest; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setAjaxRequest(boolean) */ @Override public void setAjaxRequest(boolean ajaxRequest) { this.ajaxRequest = ajaxRequest; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getAjaxReturnType() */ @Override public String getAjaxReturnType() { return ajaxReturnType; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setAjaxReturnType(String) */ @Override public void setAjaxReturnType(String ajaxReturnType) { this.ajaxReturnType = ajaxReturnType; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isUpdateComponentRequest() */ @Override public 
boolean isUpdateComponentRequest() { return isAjaxRequest() && StringUtils.isNotBlank(getAjaxReturnType()) && getAjaxReturnType().equals( UifConstants.AjaxReturnTypes.UPDATECOMPONENT.getKey()); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isUpdateDialogRequest() */ @Override public boolean isUpdateDialogRequest() { return isAjaxRequest() && StringUtils.isNotBlank(getAjaxReturnType()) && getAjaxReturnType().equals( UifConstants.AjaxReturnTypes.UPDATEDIALOG.getKey()); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isUpdatePageRequest() */ @Override public boolean isUpdatePageRequest() { return StringUtils.isNotBlank(getAjaxReturnType()) && getAjaxReturnType().equals( UifConstants.AjaxReturnTypes.UPDATEPAGE.getKey()); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isUpdateNoneRequest() */ @Override public boolean isUpdateNoneRequest() { //return isAjaxRequest() && StringUtils.isNotBlank(getAjaxReturnType()) && getAjaxReturnType().equals( // UifConstants.AjaxReturnTypes.UPDATENONE.getKey()); return StringUtils.isNotBlank(getAjaxReturnType()) && getAjaxReturnType().equals( UifConstants.AjaxReturnTypes.UPDATENONE.getKey()); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isJsonRequest() */ @Override public boolean isJsonRequest() { return StringUtils.isNotBlank(getRequestJsonTemplate()); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getRequestJsonTemplate() */ @Override public String getRequestJsonTemplate() { return requestJsonTemplate; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setRequestJsonTemplate */ @Override public void setRequestJsonTemplate(String requestJsonTemplate) { this.requestJsonTemplate = requestJsonTemplate; } /** * {@inheritDoc} */ @Override public boolean isCollectionPagingRequest() { return collectionPagingRequest; } /** * {@inheritDoc} */ @Override public void setCollectionPagingRequest(boolean collectionPagingRequest) { this.collectionPagingRequest = collectionPagingRequest; } /** * Used by the dialog framework to set the dialog id for a return dialog call (when the server has * triggered a dialog). * * <p>Note this is a request only property. On a return call the value for this gets pulled and used to * create an entry in {@link UifFormBase#getDialogResponses()}</p> * * @return String id for the dialog being returned from */ public String getReturnDialogId() { return returnDialogId; } /** * @see UifFormBase#getReturnDialogId() */ public void setReturnDialogId(String returnDialogId) { this.returnDialogId = returnDialogId; } /** * Used by the dialog framework to set the dialog response for a return dialog call (when the server has * triggered a dialog). * * <p>Note this is a request only property. 
On a return call the value for this gets pulled and used to * create an entry in {@link UifFormBase#getDialogResponses()}</p> * * @return String response for the dialog being returned from */ public String getReturnDialogResponse() { return returnDialogResponse; } /** * @see UifFormBase#getReturnDialogResponse() */ public void setReturnDialogResponse(String returnDialogResponse) { this.returnDialogResponse = returnDialogResponse; } /** * {@inheritDoc} */ @Override public Map<String, String> getDialogExplanations() { return dialogExplanations; } /** * {@inheritDoc} */ @Override public void setDialogExplanations(Map<String, String> dialogExplanations) { this.dialogExplanations = dialogExplanations; } /** * {@inheritDoc} */ @Override public Map<String, DialogResponse> getDialogResponses() { return dialogResponses; } /** * {@inheritDoc} */ @Override public DialogResponse getDialogResponse(String dialogId) { if ((dialogResponses != null) && dialogResponses.containsKey(dialogId)) { return dialogResponses.get(dialogId); } return null; } /** * {@inheritDoc} */ @Override public void setDialogResponses(Map<String, DialogResponse> dialogResponses) { this.dialogResponses = dialogResponses; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getExtensionData() */ @Override public Map<String, Object> getExtensionData() { return extensionData; } /** * {@inheritDoc} */ @Override public void setExtensionData(Map<String, Object> extensionData) { this.extensionData = extensionData; } /** * A generic map for query parameters * * @return Map<String, String> */ public Map<String, String> getQueryParameters() { return queryParameters; } /** * Setter for the generic query parameters * * @param queryParameters */ public void setQueryParameters(Map<String, String> queryParameters) { this.queryParameters = queryParameters; } /** * The {@code List} that contains all newly added items for the collections on the model * * <p> * This list contains the new items for all the collections on the model. * </p> * * @return List of the newly added item lists */ public List getAddedCollectionItems() { return addedCollectionItems; } /** * Setter for the newly added item list * * @param addedCollectionItems */ public void setAddedCollectionItems(List addedCollectionItems) { this.addedCollectionItems = addedCollectionItems; } /** * Indicates whether an collection item has been newly added * * <p> * Tests collection items against the list of newly added items on the model. This list gets cleared when the view * is submitted and the items are persisted. * </p> * * @param item - the item to test against list of newly added items * @return boolean true if the item has been newly added */ public boolean isAddedCollectionItem(Object item) { return addedCollectionItems.contains(item); } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append( getClass().getSimpleName() ).append(" [viewId=").append(this.viewId).append(", viewName=").append(this.viewName) .append(", viewTypeName=").append(this.viewTypeName).append(", pageId=").append(this.pageId) .append(", methodToCall=").append(this.methodToCall).append(", formKey=").append(this.formKey) .append(", requestedFormKey=").append(this.requestedFormKey).append("]"); return builder.toString(); } }
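// UifFormBase keys each form "conversation" with a UUID (generateFormKey) and, during
// postBind, keeps the requested key only while UifFormManager still holds a session
// form under it. A minimal, framework-free sketch of that key/lookup pattern follows;
// the names below are illustrative, not the Rice API.
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

final class ConversationStore<F> {
    private final Map<String, F> formsByKey = new ConcurrentHashMap<>();

    /** Reuses a still-live key, otherwise stores the form under a fresh UUID (cf. postBind). */
    String bind(String requestedKey, F form) {
        String key = (requestedKey != null && formsByKey.containsKey(requestedKey))
                ? requestedKey
                : UUID.randomUUID().toString(); // mirrors generateFormKey()
        formsByKey.put(key, form);
        return key;
    }

    /** Retrieves the form for a key, or null when the conversation has expired. */
    F lookup(String key) {
        return formsByKey.get(key);
    }
}
// Usage sketch: String key = store.bind(requestedFormKey, form); ... F form = store.lookup(key);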
rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/web/form/UifFormBase.java
/** * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krad.web.form; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.UUID; import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang.StringUtils; import org.codehaus.jackson.map.ObjectMapper; import org.kuali.rice.krad.service.KRADServiceLocatorWeb; import org.kuali.rice.krad.uif.UifConstants; import org.kuali.rice.krad.uif.UifConstants.ViewType; import org.kuali.rice.krad.uif.UifParameters; import org.kuali.rice.krad.uif.UifPropertyPaths; import org.kuali.rice.krad.uif.component.Component; import org.kuali.rice.krad.uif.lifecycle.ViewPostMetadata; import org.kuali.rice.krad.uif.service.ViewHelperService; import org.kuali.rice.krad.uif.service.ViewService; import org.kuali.rice.krad.uif.util.SessionTransient; import org.kuali.rice.krad.uif.view.View; import org.kuali.rice.krad.uif.view.ViewModel; import org.kuali.rice.krad.util.KRADUtils; import org.kuali.rice.krad.web.bind.RequestAccessible; import org.springframework.web.multipart.MultipartFile; /** * Base form class for views within the KRAD User Interface Framework. 
* * <p>Holds properties necessary to determine the {@link org.kuali.rice.krad.uif.view.View} instance that * will be used to render the user interface</p> * * @author Kuali Rice Team ([email protected]) */ public class UifFormBase implements ViewModel { private static final long serialVersionUID = 8432543267099454434L; @RequestAccessible protected String viewId; @RequestAccessible protected String viewName; @RequestAccessible protected ViewType viewTypeName; @RequestAccessible protected String pageId; @RequestAccessible protected String methodToCall; @RequestAccessible protected String formKey; @RequestAccessible @SessionTransient protected String requestedFormKey; @RequestAccessible protected String flowKey; protected String sessionId; protected int sessionTimeoutInterval; @SessionTransient protected HistoryFlow historyFlow; @SessionTransient protected HistoryManager historyManager; @RequestAccessible @SessionTransient protected String jumpToId; @SessionTransient protected String jumpToName; @RequestAccessible @SessionTransient protected String focusId; @RequestAccessible @SessionTransient protected boolean dirtyForm; protected String formPostUrl; protected String controllerMapping; @SessionTransient private String requestUrl; private Map<String, String[]> initialRequestParameters; protected String state; protected List<String> viewsThatNeedDefaultValuesApplied; @RequestAccessible protected boolean renderedInLightBox; @RequestAccessible protected boolean renderedInIframe; @SessionTransient protected String growlScript; @SessionTransient protected View view; protected ViewPostMetadata viewPostMetadata; protected Map<String, String> viewRequestParameters; protected List<String> readOnlyFieldsList; protected Map<String, Object> newCollectionLines; @RequestAccessible @SessionTransient protected String triggerActionId; @RequestAccessible @SessionTransient protected Map<String, String> actionParameters; protected Map<String, Object> clientStateForSyncing; @SessionTransient protected Map<String, Set<String>> selectedCollectionLines; protected Set<String> selectedLookupResultsCache; protected List<Object> addedCollectionItems; @SessionTransient protected MultipartFile attachmentFile; // navigation @RequestAccessible protected String returnLocation; @RequestAccessible protected String returnFormKey; @RequestAccessible @SessionTransient protected boolean ajaxRequest; @RequestAccessible @SessionTransient protected String ajaxReturnType; @SessionTransient private String requestJsonTemplate; @SessionTransient private boolean collectionPagingRequest; // dialog fields @RequestAccessible @SessionTransient protected String returnDialogId; @SessionTransient protected String returnDialogResponse; protected Map<String, String> dialogExplanations; protected Map<String, DialogResponse> dialogResponses; @SessionTransient protected boolean requestRedirected; @RequestAccessible @SessionTransient protected String updateComponentId; @SessionTransient private Component updateComponent; @RequestAccessible protected Map<String, Object> extensionData; protected Map<String, String> queryParameters; public UifFormBase() { renderedInLightBox = false; renderedInIframe = false; requestRedirected = false; readOnlyFieldsList = new ArrayList<String>(); viewRequestParameters = new HashMap<String, String>(); newCollectionLines = new HashMap<String, Object>(); actionParameters = new HashMap<String, String>(); clientStateForSyncing = new HashMap<String, Object>(); selectedCollectionLines = new HashMap<String, Set<String>>(); 
selectedLookupResultsCache = new HashSet<String>(); addedCollectionItems = new ArrayList<Object>(); dialogExplanations = new HashMap<String, String>(); dialogResponses = new HashMap<String,DialogResponse>(); extensionData = new HashMap<String, Object>(); queryParameters = new HashMap<String, String>(); } /** * {@inheritDoc} */ @Override public void preBind(HttpServletRequest request) { // do nothing - here for framework } /** * {@inheritDoc} */ @Override public void postBind(HttpServletRequest request) { // assign form key if this is a new form or the requested form key is not in session UifFormManager uifFormManager = (UifFormManager) request.getSession().getAttribute(UifParameters.FORM_MANAGER); if (StringUtils.isBlank(formKey) || !uifFormManager.hasSessionForm(formKey)) { formKey = generateFormKey(); } // default form post URL to request URL formPostUrl = request.getRequestURL().toString(); if (request.getSession() != null) { sessionId = request.getSession().getId(); sessionTimeoutInterval = request.getSession().getMaxInactiveInterval(); } //set controller mapping property controllerMapping = request.getPathInfo(); // get any sent client view state and parse into map if (request.getParameterMap().containsKey(UifParameters.CLIENT_VIEW_STATE)) { String clientStateJSON = request.getParameter(UifParameters.CLIENT_VIEW_STATE); if (StringUtils.isNotBlank(clientStateJSON)) { // change single quotes to double quotes (necessary because the reverse was done for sending) clientStateJSON = StringUtils.replace(clientStateJSON, "'", "\""); ObjectMapper mapper = new ObjectMapper(); try { clientStateForSyncing = mapper.readValue(clientStateJSON, Map.class); } catch (IOException e) { throw new RuntimeException("Unable to decode client side state JSON: " + clientStateJSON, e); } } } // populate read only fields list if (request.getParameter(UifParameters.READ_ONLY_FIELDS) != null) { String readOnlyFields = request.getParameter(UifParameters.READ_ONLY_FIELDS); setReadOnlyFieldsList(KRADUtils.convertStringParameterToList(readOnlyFields)); } // collect dialog response, or initialize new map of responses if (request.getParameter(UifParameters.RETURN_FROM_DIALOG) != null) { String dialogExplanation = null; if ((dialogExplanations != null) && dialogExplanations.containsKey(returnDialogId)) { dialogExplanation = dialogExplanations.get(returnDialogId); } DialogResponse response = new DialogResponse(returnDialogId, returnDialogResponse, dialogExplanation); this.dialogResponses.put(this.returnDialogId, response); } else { this.dialogResponses = new HashMap<String, DialogResponse>(); } // clean parameters from XSS attacks that will be written out as hiddens this.pageId = KRADUtils.stripXSSPatterns(this.pageId); this.methodToCall = KRADUtils.stripXSSPatterns(this.methodToCall); this.formKey = KRADUtils.stripXSSPatterns(this.formKey); this.requestedFormKey = KRADUtils.stripXSSPatterns(this.requestedFormKey); this.flowKey = KRADUtils.stripXSSPatterns(this.flowKey); this.sessionId = KRADUtils.stripXSSPatterns(this.sessionId); this.formPostUrl = KRADUtils.stripXSSPatterns(this.formPostUrl); this.returnLocation = KRADUtils.stripXSSPatterns(this.returnLocation); this.returnFormKey = KRADUtils.stripXSSPatterns(this.returnFormKey); this.requestUrl = KRADUtils.stripXSSPatterns(this.requestUrl); } /** * {@inheritDoc} */ @Override public void preRender(HttpServletRequest request) { // clear dialog properties so previous values do not appear for new dialogs this.returnDialogId = null; this.returnDialogResponse = null; 
this.dialogExplanations = new HashMap<String, String>(); } /** * Creates the unique id used to store this "conversation" in the session. * The default method generates a java UUID. * * @return UUID */ protected String generateFormKey() { return UUID.randomUUID().toString(); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getViewId() */ @Override public String getViewId() { return this.viewId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setViewId(String) */ @Override public void setViewId(String viewId) { this.viewId = viewId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getViewName() */ @Override public String getViewName() { return this.viewName; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setViewName(String) */ @Override public void setViewName(String viewName) { this.viewName = viewName; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getViewTypeName() */ @Override public ViewType getViewTypeName() { return this.viewTypeName; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setViewTypeName(org.kuali.rice.krad.uif.UifConstants.ViewType) */ @Override public void setViewTypeName(ViewType viewTypeName) { this.viewTypeName = viewTypeName; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getPageId() */ @Override public String getPageId() { return this.pageId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setPageId(String) */ @Override public void setPageId(String pageId) { this.pageId = pageId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getFormPostUrl() */ @Override public String getFormPostUrl() { return this.formPostUrl; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setFormPostUrl(String) */ @Override public void setFormPostUrl(String formPostUrl) { this.formPostUrl = formPostUrl; } /** * Name of the controllerMapping for this form (includes slash) * * @return the controllerMapping string */ public String getControllerMapping() { return controllerMapping; } /** * The current {@link HistoryFlow} for this form which stores a trail of urls/breadcrumbs primarily used for * path-based breadcrumb display * * @return the {@link HistoryFlow} */ public HistoryFlow getHistoryFlow() { return historyFlow; } /** * Set the current HistoryFlow for this form * * @param historyFlow */ public void setHistoryFlow(HistoryFlow historyFlow) { this.historyFlow = historyFlow; } /** * The current {@link HistoryManager} that was pulled from session which store all {@link HistoryFlow} objects in * the current session to keep track of the path the user has taken across views (primarily used by path-based * breadcrumbs) * * @return the HistoryManager */ public HistoryManager getHistoryManager() { return historyManager; } /** * Set the current HistoryManager * * @param historyManager */ public void setHistoryManager(HistoryManager historyManager) { this.historyManager = historyManager; } /** * The flowKey representing the HistoryFlow this form may be in. * * <p>This allows for a flow to continue by key or start (if set to "start"). 
* If null or blank, no flow (or path based * breadcrumbs) are being tracked.</p> * * @return the flowKey */ public String getFlowKey() { return flowKey; } /** * Set the flowKey * * @param flowKey */ public void setFlowKey(String flowKey) { this.flowKey = flowKey; } /** * The original requestUrl for the View represented by this form (url received by the controller for initial * request) * * @return the requestUrl */ public String getRequestUrl() { return requestUrl; } /** * Set the requestUrl * * @param requestUrl */ public void setRequestUrl(String requestUrl) { this.requestUrl = requestUrl; } /** * The requestParameters represent all the parameters in the query string that were initially passed to this View * by the initial request * * @return the requestParameters */ public Map<String, String[]> getInitialRequestParameters() { return initialRequestParameters; } /** * Set the requestParameters * * @param requestParameters */ public void setInitialRequestParameters(Map<String, String[]> requestParameters) { this.initialRequestParameters = requestParameters; } public String getReturnLocation() { return this.returnLocation; } public void setReturnLocation(String returnLocation) { this.returnLocation = returnLocation; } public String getReturnFormKey() { return this.returnFormKey; } public void setReturnFormKey(String returnFormKey) { this.returnFormKey = returnFormKey; } /** * Holds the id for the user's current session * * <p> * The user's session id is used to track when a timeout has occurred and enforce the policy * configured with the {@link org.kuali.rice.krad.uif.view.ViewSessionPolicy}. This property gets initialized * in the {@link #postBind(javax.servlet.http.HttpServletRequest)} method and then is written out as a * hidden on the view. Therefore each post done on the view will send back the session id from when the view was * rendered, and the {@link org.kuali.rice.krad.web.filter.UifSessionTimeoutFilter} can use that to determine * if a timeout has occurred * </p> * * @return id for the user's current session */ public String getSessionId() { return sessionId; } /** * Holds the configured session timeout interval * * <p> * Holds the session timeout interval so it can be referenced to give the user notifications (for example the * session timeout warning reads this property). This is initialized from the session object in * {@link #postBind(javax.servlet.http.HttpServletRequest)} * </p> * * @return amount of time in seconds before the session will time out */ public int getSessionTimeoutInterval() { return sessionTimeoutInterval; } /** * Identifies the controller method that should be invoked to fulfill a * request.
The value will be matched up against the 'params' setting on the * {@code RequestMapping} annotation for the controller method * * @return String method to call */ public String getMethodToCall() { return this.methodToCall; } /** * Setter for the method to call * * @param methodToCall */ public void setMethodToCall(String methodToCall) { this.methodToCall = methodToCall; } /** * {@inheritDoc} */ @Override public Map<String, String> getViewRequestParameters() { return this.viewRequestParameters; } /** * {@inheritDoc} */ @Override public void setViewRequestParameters(Map<String, String> viewRequestParameters) { this.viewRequestParameters = viewRequestParameters; } /** * {@inheritDoc} */ @Override public List<String> getReadOnlyFieldsList() { return readOnlyFieldsList; } /** * {@inheritDoc} */ @Override public void setReadOnlyFieldsList(List<String> readOnlyFieldsList) { this.readOnlyFieldsList = readOnlyFieldsList; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getNewCollectionLines() */ @Override public Map<String, Object> getNewCollectionLines() { return this.newCollectionLines; } /** * {@inheritDoc} */ @Override public void setNewCollectionLines(Map<String, Object> newCollectionLines) { this.newCollectionLines = newCollectionLines; } /** * {@inheritDoc} */ @Override public String getTriggerActionId() { return triggerActionId; } /** * {@inheritDoc} */ @Override public void setTriggerActionId(String triggerActionId) { this.triggerActionId = triggerActionId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getActionParameters() */ @Override public Map<String, String> getActionParameters() { return this.actionParameters; } /** * Returns the action parameters map as a {@code Properties} instance * * @return Properties action parameters */ public Properties getActionParametersAsProperties() { return KRADUtils.convertMapToProperties(actionParameters); } /** * {@inheritDoc} */ @Override public void setActionParameters(Map<String, String> actionParameters) { this.actionParameters = actionParameters; } /** * Retrieves the value for the given action parameter, or empty string if * not found * * @param actionParameterName - name of the action parameter to retrieve value for * @return String parameter value or empty string */ public String getActionParamaterValue(String actionParameterName) { if ((actionParameters != null) && actionParameters.containsKey(actionParameterName)) { return actionParameters.get(actionParameterName); } return ""; } /** * Returns the action event that was sent in the action parameters (if any) * * <p> * The action event is a special action parameter that can be sent to indicate a type of action being taken. 
This * can be looked at by the view or components to render differently * </p> * * TODO: make sure action parameters are getting reinitialized on each request * * @return String action event name or blank if action event was not sent */ public String getActionEvent() { if ((actionParameters != null) && actionParameters.containsKey(UifConstants.UrlParams.ACTION_EVENT)) { return actionParameters.get(UifConstants.UrlParams.ACTION_EVENT); } return ""; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getClientStateForSyncing() */ @Override public Map<String, Object> getClientStateForSyncing() { return clientStateForSyncing; } /** * Setter for the client state * * @param clientStateForSyncing */ public void setClientStateForSyncing(Map<String, Object> clientStateForSyncing) { this.clientStateForSyncing = clientStateForSyncing; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getSelectedCollectionLines() */ @Override public Map<String, Set<String>> getSelectedCollectionLines() { return selectedCollectionLines; } /** * {@inheritDoc} */ @Override public void setSelectedCollectionLines(Map<String, Set<String>> selectedCollectionLines) { this.selectedCollectionLines = selectedCollectionLines; } /** * Holds the Set of String identifiers for lines that were selected in lookup collection results * across multiple pages. * The value in the cache is preserved in the session across multiple requests. This allows the * server side paging of results to retain the user choices as they move through the pages. * * @return set of identifiers */ public Set<String> getSelectedLookupResultsCache() { return selectedLookupResultsCache; } /** * Sets the lookup result selection cache values * * @param selectedLookupResultsCache */ public void setSelectedLookupResultsCache(Set<String> selectedLookupResultsCache) { this.selectedLookupResultsCache = selectedLookupResultsCache; } /** * Key string that identifies the form instance in session storage * * <p> * When the view is posted, the previous form instance is retrieved and then * populated from the request parameters. This key string is used to retrieve the * session form from the session service * </p> * * @return String form session key */ public String getFormKey() { return this.formKey; } /** * Setter for the form's session key * * @param formKey */ public void setFormKey(String formKey) { this.formKey = formKey; } /** * This is the formKey sent on the original request. It may differ from the actual form key stored in formKey, * depending on whether the form still exists in session under this key. * * @return the original requested form key */ public String getRequestedFormKey() { return requestedFormKey; } /** * Set the requestedFormKey * * @param requestedFormKey */ public void setRequestedFormKey(String requestedFormKey) { this.requestedFormKey = requestedFormKey; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getViewsThatNeedDefaultValuesApplied() */ @Override public List<String> getViewsThatNeedDefaultValuesApplied() { if(viewsThatNeedDefaultValuesApplied == null) { viewsThatNeedDefaultValuesApplied = new ArrayList<String>(); } return viewsThatNeedDefaultValuesApplied; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setViewsThatNeedDefaultValuesApplied(List<String>) */ @Override public void setViewsThatNeedDefaultValuesApplied(List<String> viewsThatNeedDefaultValuesApplied) { this.viewsThatNeedDefaultValuesApplied = viewsThatNeedDefaultValuesApplied; } /** * Adds a unique view id to the list of views that need default values applied.
* * @param viewid */ public void addViewThatNeedsDefaultValuesApplied(String viewId) { if(!getViewsThatNeedDefaultValuesApplied().contains(viewId)) { viewsThatNeedDefaultValuesApplied.add(viewId); } } /** * Indicates whether a redirect has been requested for the view * * @return boolean true if redirect was requested, false if not */ public boolean isRequestRedirected() { return requestRedirected; } /** * Setter for the request redirect indicator * * @param requestRedirected */ public void setRequestRedirected(boolean requestRedirected) { this.requestRedirected = requestRedirected; } /** * Holder for files that are attached through the view * * @return MultipartFile representing the attachment */ public MultipartFile getAttachmentFile() { return this.attachmentFile; } /** * Setter for the form's attachment file * * @param attachmentFile */ public void setAttachmentFile(MultipartFile attachmentFile) { this.attachmentFile = attachmentFile; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getUpdateComponentId() */ @Override public String getUpdateComponentId() { return updateComponentId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setUpdateComponentId(java.lang.String) */ @Override public void setUpdateComponentId(String updateComponentId) { this.updateComponentId = updateComponentId; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getUpdateComponent() */ public Component getUpdateComponent() { return updateComponent; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setUpdateComponent(org.kuali.rice.krad.uif.component.Component) */ public void setUpdateComponent(Component updateComponent) { this.updateComponent = updateComponent; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getView() */ @Override public View getView() { return this.view; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setView(org.kuali.rice.krad.uif.view.View) */ @Override public void setView(View view) { this.view = view; } /** * Returns an instance of the view's configured view helper service. * * <p>First checks if there is an initialized view containing a view helper instance. 
If not, and there is * a view id on the form, a call is made to retrieve the view helper instance or class configuration.</p> * * {@inheritDoc} */ @Override public ViewHelperService getViewHelperService() { if ((getView() != null) && (getView().getViewHelperService() != null)) { return getView().getViewHelperService(); } String viewId = getViewId(); if (StringUtils.isBlank(viewId) && (getView() != null)) { viewId = getView().getId(); } if (StringUtils.isBlank(viewId)) { return null; } ViewHelperService viewHelperService = (ViewHelperService) KRADServiceLocatorWeb.getDataDictionaryService().getDictionaryBeanProperty(viewId, UifPropertyPaths.VIEW_HELPER_SERVICE); if (viewHelperService == null) { Class<?> viewHelperServiceClass = (Class<?>) KRADServiceLocatorWeb.getDataDictionaryService().getDictionaryBeanProperty(viewId, UifPropertyPaths.VIEW_HELPER_SERVICE_CLASS); if (viewHelperServiceClass != null) { try { viewHelperService = (ViewHelperService) viewHelperServiceClass.newInstance(); } catch (Exception e) { throw new RuntimeException("Unable to instantiate view helper class: " + viewHelperServiceClass, e); } } } return viewHelperService; } /** * {@inheritDoc} */ @Override public ViewPostMetadata getViewPostMetadata() { return viewPostMetadata; } /** * @see UifFormBase#getViewPostMetadata() */ @Override public void setViewPostMetadata(ViewPostMetadata viewPostMetadata) { this.viewPostMetadata = viewPostMetadata; } /** * Instance of the {@code ViewService} that can be used to retrieve * {@code View} instances * * @return ViewService implementation */ protected ViewService getViewService() { return KRADServiceLocatorWeb.getViewService(); } /** * The jumpToId for this form, the element with this id will be jumped to automatically * when the form is loaded in the view. * Using "TOP" or "BOTTOM" will jump to the top or the bottom of the resulting page. * jumpToId always takes precedence over jumpToName, if set. * * @return the jumpToId */ public String getJumpToId() { return this.jumpToId; } /** * @param jumpToId the jumpToId to set */ public void setJumpToId(String jumpToId) { this.jumpToId = jumpToId; } /** * The jumpToName for this form, the element with this name will be jumped to automatically * when the form is loaded in the view. * WARNING: jumpToId always takes precedence over jumpToName, if set. * * @return the jumpToName */ public String getJumpToName() { return this.jumpToName; } /** * @param jumpToName the jumpToName to set */ public void setJumpToName(String jumpToName) { this.jumpToName = jumpToName; } /** * Field to place focus on when the page loads * An empty focusId will result in focusing on the first visible input element by default. * * @return the focusId */ public String getFocusId() { return this.focusId; } /** * @param focusId the focusId to set */ public void setFocusId(String focusId) { this.focusId = focusId; } /** * True when the form is considered dirty (data has changed from original value), false otherwise * * <p>For most scenarios, this flag should NOT be set to true. * If this is set, it must be managed explicitly by the application. This flag exists for marking a * form dirty from a server call, so it must be changed to false when the form is no longer considered dirty. 
* The krad save Action and navigate methodToCall resets this flag back to false, but any other setting of * this flag must be managed by custom configuration/methods, if custom dirtyForm management is needed.</p> * * @return true if the form is considered dirty, false otherwise */ public boolean isDirtyForm() { return dirtyForm; } /** * Sets the dirtyForm flag * * <p>For most scenarios, this flag should NOT be set to true. * If this is set, it must be managed explicitly by the application. This flag exists for marking a * form dirty from a server call, so it must be changed to false when the form is no longer considered dirty. * The krad save Action and navigate methodToCall resets this flag back to false, but any other setting of * this flag must be managed by custom configuration/methods, if custom dirtyForm management is needed.</p> * * @param dirtyForm */ public void setDirtyForm(boolean dirtyForm) { this.dirtyForm = dirtyForm; } /** * Set the dirtyForm flag using a String that will be converted to boolean * * @param dirtyForm */ public void setDirtyForm(String dirtyForm) { if(dirtyForm != null){ this.dirtyForm = Boolean.parseBoolean(dirtyForm); } } /** * Indicates whether the view is rendered within a lightbox * * <p> * Some discussion (for example how a close button behaves) need to change based on whether the * view is rendered within a lightbox or the standard browser window. This boolean is true when it is * within a lightbox * </p> * * @return boolean true if view is rendered within a lightbox, false if not */ public boolean isRenderedInLightBox() { return this.renderedInLightBox; } /** * Setter for the rendered within lightbox indicator * * @param renderedInLightBox */ public void setRenderedInLightBox(boolean renderedInLightBox) { this.renderedInLightBox = renderedInLightBox; } /** * Indicates whether the view is rendered within an iframe (this setting must be passed to the View on the url) * * @return boolean true if view is rendered within a iframe, false if not */ public boolean isRenderedInIframe() { return renderedInIframe; } /** * @see org.kuali.rice.krad.web.form.UifFormBase#isRenderedInIframe() */ public void setRenderedInIframe(boolean renderedInIframe) { this.renderedInIframe = renderedInIframe; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getGrowlScript() */ @Override public String getGrowlScript() { return growlScript; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setGrowlScript(String) */ @Override public void setGrowlScript(String growlScript) { this.growlScript = growlScript; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getState() */ @Override public String getState() { return state; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setState(String) */ @Override public void setState(String state) { this.state = state; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isAjaxRequest() */ @Override public boolean isAjaxRequest() { return ajaxRequest; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setAjaxRequest(boolean) */ @Override public void setAjaxRequest(boolean ajaxRequest) { this.ajaxRequest = ajaxRequest; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getAjaxReturnType() */ @Override public String getAjaxReturnType() { return ajaxReturnType; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setAjaxReturnType(String) */ @Override public void setAjaxReturnType(String ajaxReturnType) { this.ajaxReturnType = ajaxReturnType; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isUpdateComponentRequest() */ @Override public 
boolean isUpdateComponentRequest() { return isAjaxRequest() && StringUtils.isNotBlank(getAjaxReturnType()) && getAjaxReturnType().equals( UifConstants.AjaxReturnTypes.UPDATECOMPONENT.getKey()); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isUpdateDialogRequest() */ @Override public boolean isUpdateDialogRequest() { return isAjaxRequest() && StringUtils.isNotBlank(getAjaxReturnType()) && getAjaxReturnType().equals( UifConstants.AjaxReturnTypes.UPDATEDIALOG.getKey()); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isUpdatePageRequest() */ @Override public boolean isUpdatePageRequest() { return StringUtils.isNotBlank(getAjaxReturnType()) && getAjaxReturnType().equals( UifConstants.AjaxReturnTypes.UPDATEPAGE.getKey()); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isUpdateNoneRequest() */ @Override public boolean isUpdateNoneRequest() { //return isAjaxRequest() && StringUtils.isNotBlank(getAjaxReturnType()) && getAjaxReturnType().equals( // UifConstants.AjaxReturnTypes.UPDATENONE.getKey()); return StringUtils.isNotBlank(getAjaxReturnType()) && getAjaxReturnType().equals( UifConstants.AjaxReturnTypes.UPDATENONE.getKey()); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#isJsonRequest() */ @Override public boolean isJsonRequest() { return StringUtils.isNotBlank(getRequestJsonTemplate()); } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getRequestJsonTemplate() */ @Override public String getRequestJsonTemplate() { return requestJsonTemplate; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#setRequestJsonTemplate */ @Override public void setRequestJsonTemplate(String requestJsonTemplate) { this.requestJsonTemplate = requestJsonTemplate; } /** * {@inheritDoc} */ @Override public boolean isCollectionPagingRequest() { return collectionPagingRequest; } /** * {@inheritDoc} */ @Override public void setCollectionPagingRequest(boolean collectionPagingRequest) { this.collectionPagingRequest = collectionPagingRequest; } /** * Used by the dialog framework to set the dialog id for a return dialog call (when the server has * triggered a dialog). * * <p>Note this is a request only property. On a return call the value for this gets pulled and used to * create an entry in {@link UifFormBase#getDialogResponses()}</p> * * @return String id for the dialog being returned from */ public String getReturnDialogId() { return returnDialogId; } /** * @see UifFormBase#getReturnDialogId() */ public void setReturnDialogId(String returnDialogId) { this.returnDialogId = returnDialogId; } /** * Used by the dialog framework to set the dialog response for a return dialog call (when the server has * triggered a dialog). * * <p>Note this is a request only property. 
On a return call the value for this gets pulled and used to * create an entry in {@link UifFormBase#getDialogResponses()}</p> * * @return String response for the dialog being returned from */ public String getReturnDialogResponse() { return returnDialogResponse; } /** * @see UifFormBase#getReturnDialogResponse() */ public void setReturnDialogResponse(String returnDialogResponse) { this.returnDialogResponse = returnDialogResponse; } /** * {@inheritDoc} */ @Override public Map<String, String> getDialogExplanations() { return dialogExplanations; } /** * {@inheritDoc} */ @Override public void setDialogExplanations(Map<String, String> dialogExplanations) { this.dialogExplanations = dialogExplanations; } /** * {@inheritDoc} */ @Override public Map<String, DialogResponse> getDialogResponses() { return dialogResponses; } /** * {@inheritDoc} */ @Override public DialogResponse getDialogResponse(String dialogId) { if ((dialogResponses != null) && dialogResponses.containsKey(dialogId)) { return dialogResponses.get(dialogId); } return null; } /** * {@inheritDoc} */ @Override public void setDialogResponses(Map<String, DialogResponse> dialogResponses) { this.dialogResponses = dialogResponses; } /** * @see org.kuali.rice.krad.uif.view.ViewModel#getExtensionData() */ @Override public Map<String, Object> getExtensionData() { return extensionData; } /** * {@inheritDoc} */ @Override public void setExtensionData(Map<String, Object> extensionData) { this.extensionData = extensionData; } /** * A generic map for query parameters * * @return Map<String, String> */ public Map<String, String> getQueryParameters() { return queryParameters; } /** * Setter for the generic query parameters * * @param queryParameters */ public void setQueryParameters(Map<String, String> queryParameters) { this.queryParameters = queryParameters; } /** * The {@code List} that contains all newly added items for the collections on the model * * <p> * This list contains the new items for all the collections on the model. * </p> * * @return List of the newly added item lists */ public List getAddedCollectionItems() { return addedCollectionItems; } /** * Setter for the newly added item list * * @param addedCollectionItems */ public void setAddedCollectionItems(List addedCollectionItems) { this.addedCollectionItems = addedCollectionItems; } /** * Indicates whether an collection item has been newly added * * <p> * Tests collection items against the list of newly added items on the model. This list gets cleared when the view * is submitted and the items are persisted. * </p> * * @param item - the item to test against list of newly added items * @return boolean true if the item has been newly added */ public boolean isAddedCollectionItem(Object item) { return addedCollectionItems.contains(item); } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append( getClass().getSimpleName() ).append(" [viewId=").append(this.viewId).append(", viewName=").append(this.viewName) .append(", viewTypeName=").append(this.viewTypeName).append(", pageId=").append(this.pageId) .append(", methodToCall=").append(this.methodToCall).append(", formKey=").append(this.formKey) .append(", requestedFormKey=").append(this.requestedFormKey).append("]"); return builder.toString(); } }
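The dialog accessors above imply a simple consumption pattern on the controller side. A minimal sketch follows; the helper, the dialog id, and the DialogResponse getter name are assumptions inferred from the constructor call in postBind(), not confirmed KRAD API:

    // Hypothetical helper: true once the user has answered the (assumed) confirmation
    // dialog. getDialogResponse() returns null until postBind() records a response.
    static boolean userConfirmed(UifFormBase form) {
        DialogResponse confirm = form.getDialogResponse("confirmActionDialog"); // id is illustrative
        return confirm != null && "true".equals(confirm.getResponse()); // getter name assumed
    }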
KULRICE-12672 AFT Failure LabsLookupWithUserControlAft myPrincipalName not present in the managed type git-svn-id: 2a5d2b5a02908a0c4ba7967b726d8c4198d1b9ed@46547 7a7aa7f6-c479-11dc-97e2-85a2497f191d
rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/web/form/UifFormBase.java
KULRICE-12672 AFT Failure LabsLookupWithUserControlAft myPrincipalName not present in the managed type
<ide><path>rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/web/form/UifFormBase.java <ide> String clientStateJSON = request.getParameter(UifParameters.CLIENT_VIEW_STATE); <ide> if (StringUtils.isNotBlank(clientStateJSON)) { <ide> // change single quotes to double quotes (necessary because the reverse was done for sending) <del> clientStateJSON = StringUtils.replace(clientStateJSON, "'", "\""); <add> clientStateJSON = StringUtils.replace(clientStateJSON, "\\'", "\""); <add> clientStateJSON = StringUtils.replace(clientStateJSON, "\\[", "["); <add> clientStateJSON = StringUtils.replace(clientStateJSON, "\\]", "]"); <add> clientStateJSON = StringUtils.replace(clientStateJSON, "'", "\""); <ide> <ide> ObjectMapper mapper = new ObjectMapper(); <ide> try {
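Read standalone, the patch reverses the escaping applied when the client view state was written out (escaped quotes and brackets first, then remaining single quotes) before handing the string to Jackson. A minimal sketch of that normalization; the class name is illustrative, and the imports assume commons-lang plus the Jackson ObjectMapper variant used by Rice:

    import java.io.IOException;
    import java.util.Map;
    import org.apache.commons.lang.StringUtils;
    import org.codehaus.jackson.map.ObjectMapper;

    public class ClientStateDecoder {
        // Undo the client-side escaping, then parse the result as a JSON object.
        public static Map<String, Object> decode(String clientStateJSON) throws IOException {
            clientStateJSON = StringUtils.replace(clientStateJSON, "\\'", "\"");
            clientStateJSON = StringUtils.replace(clientStateJSON, "\\[", "[");
            clientStateJSON = StringUtils.replace(clientStateJSON, "\\]", "]");
            clientStateJSON = StringUtils.replace(clientStateJSON, "'", "\"");
            return new ObjectMapper().readValue(clientStateJSON, Map.class);
        }
    }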
Java
apache-2.0
65fdcf6181d13b33e7452b49315f0a5a1f1a967c
0
gerashegalov/Impala,theyaa/Impala,tempbottle/Impala,XiaominZhang/Impala,ImpalaToGo/ImpalaToGo,theyaa/Impala,scalingdata/Impala,bowlofstew/Impala,gerashegalov/Impala,ImpalaToGo/ImpalaToGo,theyaa/Impala,gerashegalov/Impala,XiaominZhang/Impala,cloudera/recordservice,cchanning/Impala,cgvarela/Impala,gerashegalov/Impala,cchanning/Impala,cloudera/recordservice,caseyching/Impala,cchanning/Impala,brightchen/Impala,rdblue/Impala,bowlofstew/Impala,grundprinzip/Impala,lirui-intel/Impala,bowlofstew/Impala,rdblue/Impala,kapilrastogi/Impala,lnliuxing/Impala,brightchen/Impala,placrosse/ImpalaToGo,scalingdata/Impala,ImpalaToGo/ImpalaToGo,tempbottle/Impala,brightchen/Impala,bowlofstew/Impala,cloudera/recordservice,caseyching/Impala,caseyching/Impala,rdblue/Impala,bowlofstew/Impala,kapilrastogi/Impala,lirui-intel/Impala,lnliuxing/Impala,bratatidas9/Impala-1,placrosse/ImpalaToGo,kapilrastogi/Impala,gerashegalov/Impala,theyaa/Impala,XiaominZhang/Impala,bratatidas9/Impala-1,bratatidas9/Impala-1,kapilrastogi/Impala,bratatidas9/Impala-1,theyaa/Impala,cgvarela/Impala,henryr/Impala,kapilrastogi/Impala,brightchen/Impala,caseyching/Impala,placrosse/ImpalaToGo,grundprinzip/Impala,cgvarela/Impala,bowlofstew/Impala,lirui-intel/Impala,XiaominZhang/Impala,bratatidas9/Impala-1,rdblue/Impala,placrosse/ImpalaToGo,tempbottle/Impala,lnliuxing/Impala,scalingdata/Impala,tempbottle/Impala,XiaominZhang/Impala,tempbottle/Impala,gerashegalov/Impala,cchanning/Impala,lirui-intel/Impala,lnliuxing/Impala,henryr/Impala,ImpalaToGo/ImpalaToGo,lnliuxing/Impala,XiaominZhang/Impala,cloudera/recordservice,kapilrastogi/Impala,lirui-intel/Impala,lirui-intel/Impala,caseyching/Impala,brightchen/Impala,lnliuxing/Impala,grundprinzip/Impala,caseyching/Impala,rdblue/Impala,tempbottle/Impala,ImpalaToGo/ImpalaToGo,bowlofstew/Impala,cloudera/recordservice,cchanning/Impala,rdblue/Impala,grundprinzip/Impala,placrosse/ImpalaToGo,lnliuxing/Impala,henryr/Impala,scalingdata/Impala,ImpalaToGo/ImpalaToGo,brightchen/Impala,henryr/Impala,cchanning/Impala,cgvarela/Impala,gerashegalov/Impala,theyaa/Impala,bratatidas9/Impala-1,cloudera/recordservice,henryr/Impala,XiaominZhang/Impala,caseyching/Impala,bratatidas9/Impala-1,lirui-intel/Impala,scalingdata/Impala,cgvarela/Impala,henryr/Impala,cgvarela/Impala,theyaa/Impala,grundprinzip/Impala,cgvarela/Impala,placrosse/ImpalaToGo,rdblue/Impala,kapilrastogi/Impala,grundprinzip/Impala,cchanning/Impala,brightchen/Impala,scalingdata/Impala,tempbottle/Impala,cloudera/recordservice
// Copyright (c) 2012 Cloudera, Inc. All rights reserved. package com.cloudera.impala.datagenerator; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.NavigableMap; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.PairOfSameType; import org.apache.hadoop.hbase.util.Threads; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; /** * Splits HBase tables into regions and deterministically assigns regions to region * servers. */ class HBaseTestDataRegionAssigment { public class TableNotFoundException extends Exception { public TableNotFoundException(String s) { super(s); } } private final static Logger LOG = LoggerFactory.getLogger( HBaseTestDataRegionAssigment.class); private final Configuration conf; private final HBaseAdmin hbaseAdmin; private final List<ServerName> sortedRS; // sorted list of region server name private final String[] splitPoints = { "1", "3", "5", "7", "9"}; // Number of times to retry a series region-split/wait-for-split calls. private final static int MAX_SPLIT_ATTEMPTS = 10; // Maximum time in ms to wait for a region to be split. private final static int WAIT_FOR_SPLIT_TIMEOUT = 10000; public HBaseTestDataRegionAssigment() throws IOException { conf = new Configuration(); hbaseAdmin = new HBaseAdmin(conf); ClusterStatus clusterStatus = hbaseAdmin.getClusterStatus(); Collection<ServerName> regionServerNames = clusterStatus.getServers(); sortedRS = new ArrayList<ServerName>(regionServerNames); Collections.sort(sortedRS); } public void close() throws IOException { hbaseAdmin.close(); } /** * Split the table regions according to splitPoints and pair up adjacent regions to the * same server. Each region pair in ([unbound:1,1:3], [3:5,5:7], [7:9,9:unbound]) * will be on the same server. * The table must have data loaded and only a single region. */ public void performAssigment(String tableName) throws IOException, InterruptedException, TableNotFoundException { HTableDescriptor[] desc = hbaseAdmin.listTables(tableName); if (desc == null || desc.length == 0) { throw new TableNotFoundException("Table " + tableName + " not found."); } if (hbaseAdmin.getTableRegions(tableName.getBytes()).size() == 1) { // Split into regions // The table has one region only to begin with. The logic of // blockUntilRegionSplit requires that the input regionName has performed a split. // If the table has already been split (i.e. regions count > 1), the same split // call will be a no-op and this will cause blockUntilRegionSplit to break. 
for (int i = 0; i < splitPoints.length; ++i) { hbaseAdmin.majorCompact(tableName); List<HRegionInfo> regions = hbaseAdmin.getTableRegions(tableName.getBytes()); HRegionInfo splitRegion = regions.get(regions.size() - 1); int attempt = 1; boolean done = false; while (!done && attempt < MAX_SPLIT_ATTEMPTS) { // HBase seems to not always properly receive/process this split RPC, // so we need to retry the split/block several times. hbaseAdmin.split(splitRegion.getRegionNameAsString(), splitPoints[i]); done = blockUntilRegionSplit(conf, WAIT_FOR_SPLIT_TIMEOUT, splitRegion.getRegionName(), true); Thread.sleep(100); ++attempt; } if (!done) { throw new IllegalStateException( String.format("Failed to split region '%s' after %s attempts.", splitRegion.getRegionNameAsString(), MAX_SPLIT_ATTEMPTS)); } LOG.info(String.format("Split region '%s' after %s attempts.", splitRegion.getRegionNameAsString(), attempt)); } } // Sort the regions by start key List<HRegionInfo> regions = hbaseAdmin.getTableRegions(tableName.getBytes()); Preconditions.checkArgument(regions.size() == splitPoints.length + 1); Collections.sort(regions); // Pair up two adjacent regions to the same region server. That is, // region server 1 <- regions (unbound:1), (1:3) // region server 2 <- regions (3:5), (5:7) // region server 3 <- regions (7:9), (9:unbound) NavigableMap<HRegionInfo, ServerName> expectedLocs = Maps.newTreeMap(); for (int i = 0; i < regions.size(); ++i) { HRegionInfo regionInfo = regions.get(i); int rsIdx = (i / 2) % sortedRS.size(); ServerName regionServerName = sortedRS.get(rsIdx); hbaseAdmin.move(regionInfo.getEncodedNameAsBytes(), regionServerName.getServerName().getBytes()); expectedLocs.put(regionInfo, regionServerName); } // hbaseAdmin.move() is an asynchronous operation. HBase tests use sleep to wait for // the move to complete. It should be done in 10sec. int sleepCnt = 0; HTable hbaseTable = new HTable(conf, tableName); try { while(!expectedLocs.equals(hbaseTable.getRegionLocations()) && sleepCnt < 100) { Thread.sleep(100); ++sleepCnt; } NavigableMap<HRegionInfo, ServerName> actualLocs = hbaseTable.getRegionLocations(); Preconditions.checkArgument(expectedLocs.equals(actualLocs)); // Log the actual region location map for (Map.Entry<HRegionInfo, ServerName> entry: actualLocs.entrySet()) { LOG.info(printKey(entry.getKey().getStartKey()) + " -> " + entry.getValue().getHostAndPort()); } // Force a major compaction such that the HBase table is backed by deterministic // physical artifacts (files, WAL, etc.). Our #rows estimate relies on the sizes of // these physical artifacts. LOG.info("Major compacting HBase table: " + tableName); hbaseAdmin.majorCompact(tableName); } finally { IOUtils.closeQuietly(hbaseTable); } } /** * Returns non-printable characters in escaped octal, otherwise returns the characters. */ public static String printKey(byte[] key) { StringBuilder result = new StringBuilder(); for (int i = 0; i < key.length; ++i) { if (!Character.isISOControl(key[i])) { result.append((char) key[i]); } else { result.append("\\"); result.append(Integer.toOctalString(key[i])); } } return result.toString(); } /** * The following static methods blockUntilRegionSplit, getRegionRow, * blockUntilRegionIsOpened and blockUntilRegionIsInMeta are copied from * org.apache.hadoop.hbase.regionserver.TestEndToEndSplitTransaction * to help block until a region split is completed. * * The original code was modified to return true/false to indicate success or failure.
* * Blocks until the region split is complete in META and region server opens the * daughters */ private static boolean blockUntilRegionSplit(Configuration conf, long timeout, final byte[] regionName, boolean waitForDaughters) throws IOException, InterruptedException { long start = System.currentTimeMillis(); HRegionInfo daughterA = null, daughterB = null; HTable metaTable = new HTable(conf, TableName.META_TABLE_NAME); try { while (System.currentTimeMillis() - start < timeout) { Result result = getRegionRow(metaTable, regionName); if (result == null) { break; } HRegionInfo region = HRegionInfo.getHRegionInfo(result); if(region.isSplitParent()) { PairOfSameType<HRegionInfo> pair = HRegionInfo.getDaughterRegions(result); daughterA = pair.getFirst(); daughterB = pair.getSecond(); break; } Threads.sleep(100); } if (daughterA == null || daughterB == null) return false; //if we are here, this means the region split is complete or timed out if (waitForDaughters) { long rem = timeout - (System.currentTimeMillis() - start); blockUntilRegionIsInMeta(metaTable, rem, daughterA); rem = timeout - (System.currentTimeMillis() - start); blockUntilRegionIsInMeta(metaTable, rem, daughterB); rem = timeout - (System.currentTimeMillis() - start); blockUntilRegionIsOpened(conf, rem, daughterA); rem = timeout - (System.currentTimeMillis() - start); blockUntilRegionIsOpened(conf, rem, daughterB); } } finally { IOUtils.closeQuietly(metaTable); } return true; } private static Result getRegionRow(HTable metaTable, byte[] regionName) throws IOException { Get get = new Get(regionName); return metaTable.get(get); } private static void blockUntilRegionIsInMeta(HTable metaTable, long timeout, HRegionInfo hri) throws IOException, InterruptedException { long start = System.currentTimeMillis(); while (System.currentTimeMillis() - start < timeout) { Result result = getRegionRow(metaTable, hri.getRegionName()); if (result != null) { HRegionInfo info = HRegionInfo.getHRegionInfo(result); if (info != null && !info.isOffline()) { break; } } Threads.sleep(10); } } /** * Starting with HBase 0.95.2 the Get class' c'tor no longer accepts * empty key strings leading to the rather undesirable behavior that this method * is not guaranteed to succeed. This method repeatedly attempts to 'get' the start key * of the given region from the region server to detect when the region server becomes * available. However, the first region has an empty array as the start key causing the * Get c'tor to throw an exception as stated above. The end key cannot be used instead * because it is an exclusive upper bound. */ private static void blockUntilRegionIsOpened(Configuration conf, long timeout, HRegionInfo hri) throws IOException, InterruptedException { long start = System.currentTimeMillis(); HTable table = new HTable(conf, hri.getTableName()); try { byte [] row = hri.getStartKey(); // Check for null/empty row. If we find one, use a key that is likely to // be in first region. If key '0' happens not to be in the given region // then an exception will be thrown. if (row == null || row.length <= 0) row = new byte [] {'0'}; Get get = new Get(row); while (System.currentTimeMillis() - start < timeout) { try { table.get(get); break; } catch(IOException ex) { //wait some more } Threads.sleep(10); } } finally { IOUtils.closeQuietly(table); } } /** * args contains a list of hbase table names. This program will split the hbase tables * into regions and assign each region to a specific region server. 
*/ public static void main(String args[]) throws IOException, InterruptedException, TableNotFoundException { HBaseTestDataRegionAssigment assignment = new HBaseTestDataRegionAssigment(); for (String htable: args) { assignment.performAssigment(htable); } assignment.close(); // Exit forcefully because of HDFS-6057. Otherwise, the JVM won't exit due to a // non-daemon thread still being up. System.exit(0); } }
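For context, a sketch of driving the assigner programmatically instead of through main(); the table name is purely illustrative, and since the class is package-private the caller must live in the same package:

    package com.cloudera.impala.datagenerator;

    // Illustrative driver: the table must already exist in HBase with data loaded
    // (and a single region) before performAssigment() is called. Unlike main(),
    // this does not force-exit the JVM (see the HDFS-6057 note above).
    public class SplitDriver {
        public static void main(String[] args) throws Exception {
            HBaseTestDataRegionAssigment assignment = new HBaseTestDataRegionAssigment();
            try {
                assignment.performAssigment("functional_hbase.alltypes"); // hypothetical table
            } finally {
                assignment.close();
            }
        }
    }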
testdata/src/main/java/com/cloudera/impala/datagenerator/HBaseTestDataRegionAssigment.java
// Copyright (c) 2012 Cloudera, Inc. All rights reserved. package com.cloudera.impala.datagenerator; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.NavigableMap; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.PairOfSameType; import org.apache.hadoop.hbase.util.Threads; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; /** * Splits HBase tables into regions and deterministically assigns regions to region * servers. */ class HBaseTestDataRegionAssigment { public class TableNotFoundException extends Exception { public TableNotFoundException(String s) { super(s); } } private final static Logger LOG = LoggerFactory.getLogger( HBaseTestDataRegionAssigment.class); private final Configuration conf; private final HBaseAdmin hbaseAdmin; private final List<ServerName> sortedRS; // sorted list of region server name private final String[] splitPoints = { "1", "3", "5", "7", "9"}; public HBaseTestDataRegionAssigment() throws IOException { conf = new Configuration(); hbaseAdmin = new HBaseAdmin(conf); ClusterStatus clusterStatus = hbaseAdmin.getClusterStatus(); Collection<ServerName> regionServerNames = clusterStatus.getServers(); sortedRS = new ArrayList<ServerName>(regionServerNames); Collections.sort(sortedRS); } public void close() throws IOException { hbaseAdmin.close(); } /** * Split the table regions according to splitPoints and pair up adjacent regions to the * same server. Each region pair in ([unbound:1,1:3], [3:5,5:7], [7:9,9:unbound]) * will be on the same server. * The table must have data loaded and only a single region. */ public void performAssigment(String tableName) throws IOException, InterruptedException, TableNotFoundException { HTableDescriptor[] desc = hbaseAdmin.listTables(tableName); if (desc == null || desc.length == 0) { throw new TableNotFoundException("Table " + tableName + " not found."); } if (hbaseAdmin.getTableRegions(tableName.getBytes()).size() == 1) { // Split into regions // The table has one region only to begin with. The logic of // blockUntilRegionSplit requires that the input regionName has performed a split. // If the table has already been split (i.e. regions count > 1), the same split // call will be a no-op and this will cause blockUntilRegionSplit to break. for (int i = 0; i < splitPoints.length; ++i) { List<HRegionInfo> regions = hbaseAdmin.getTableRegions(tableName.getBytes()); HRegionInfo splitRegion = regions.get(regions.size() - 1); hbaseAdmin.split(splitRegion.getRegionNameAsString(), splitPoints[i]); blockUntilRegionSplit(conf, 50000, splitRegion.getRegionName(), true); } } // Sort the region by start key List<HRegionInfo> regions = hbaseAdmin.getTableRegions(tableName.getBytes()); Preconditions.checkArgument(regions.size() == splitPoints.length + 1); Collections.sort(regions); // Pair up two adjacent regions to the same region server. 
That is, // region server 1 <- regions (unbound:1), (1:3) // region server 2 <- regions (3:5), (5:7) // region server 3 <- regions (7:9), (9:unbound) NavigableMap<HRegionInfo, ServerName> expectedLocs = Maps.newTreeMap(); for (int i = 0; i < regions.size(); ++i) { HRegionInfo regionInfo = regions.get(i); int rsIdx = (i / 2) % sortedRS.size(); ServerName regionServerName = sortedRS.get(rsIdx); hbaseAdmin.move(regionInfo.getEncodedNameAsBytes(), regionServerName.getServerName().getBytes()); expectedLocs.put(regionInfo, regionServerName); } // hbaseAdmin.move() is an asynchronous operation. HBase tests use sleep to wait for // the move to complete. It should be done in 10sec. int sleepCnt = 0; HTable hbaseTable = new HTable(conf, tableName); try { while(!expectedLocs.equals(hbaseTable.getRegionLocations()) && sleepCnt < 100) { Thread.sleep(100); ++sleepCnt; } NavigableMap<HRegionInfo, ServerName> actualLocs = hbaseTable.getRegionLocations(); Preconditions.checkArgument(expectedLocs.equals(actualLocs)); // Log the actual region location map for (Map.Entry<HRegionInfo, ServerName> entry: actualLocs.entrySet()) { LOG.info(printKey(entry.getKey().getStartKey()) + " -> " + entry.getValue().getHostAndPort()); } // Force a major compaction such that the HBase table is backed by deterministic // physical artifacts (files, WAL, etc.). Our #rows estimate relies on the sizes of // these physical artifacts. LOG.info("Major compacting HBase table: " + tableName); hbaseAdmin.majorCompact(tableName); } finally { IOUtils.closeQuietly(hbaseTable); } } /** * Returns non-printable characters in escaped octal, otherwise returns the characters. */ public static String printKey(byte[] key) { StringBuilder result = new StringBuilder(); for (int i = 0; i < key.length; ++i) { if (!Character.isISOControl(key[i])) { result.append((char) key[i]); } else { result.append("\\"); result.append(Integer.toOctalString(key[i])); } } return result.toString(); } /** * The following static methods blockUntilRegionSplit, getRegionRow, * blockUntilRegionIsOpened and blockUntilRegionIsInMeta are copied from * org.apache.hadoop.hbase.regionserver.TestEndToEndSplitTransaction * to help block until a region split is completed. 
* * Blocks until the region split is complete in META and region server opens the * daughters */ private static void blockUntilRegionSplit(Configuration conf, long timeout, final byte[] regionName, boolean waitForDaughters) throws IOException, InterruptedException { long start = System.currentTimeMillis(); HRegionInfo daughterA = null, daughterB = null; HTable metaTable = new HTable(conf, TableName.META_TABLE_NAME); try { while (System.currentTimeMillis() - start < timeout) { Result result = getRegionRow(metaTable, regionName); if (result == null) { break; } HRegionInfo region = HRegionInfo.getHRegionInfo(result); if(region.isSplitParent()) { PairOfSameType<HRegionInfo> pair = HRegionInfo.getDaughterRegions(result); daughterA = pair.getFirst(); daughterB = pair.getSecond(); break; } Threads.sleep(100); } //if we are here, this means the region split is complete or timed out if (waitForDaughters) { long rem = timeout - (System.currentTimeMillis() - start); blockUntilRegionIsInMeta(metaTable, rem, daughterA); rem = timeout - (System.currentTimeMillis() - start); blockUntilRegionIsInMeta(metaTable, rem, daughterB); rem = timeout - (System.currentTimeMillis() - start); blockUntilRegionIsOpened(conf, rem, daughterA); rem = timeout - (System.currentTimeMillis() - start); blockUntilRegionIsOpened(conf, rem, daughterB); } } finally { IOUtils.closeQuietly(metaTable); } } private static Result getRegionRow(HTable metaTable, byte[] regionName) throws IOException { Get get = new Get(regionName); return metaTable.get(get); } private static void blockUntilRegionIsInMeta(HTable metaTable, long timeout, HRegionInfo hri) throws IOException, InterruptedException { long start = System.currentTimeMillis(); while (System.currentTimeMillis() - start < timeout) { Result result = getRegionRow(metaTable, hri.getRegionName()); if (result != null) { HRegionInfo info = HRegionInfo.getHRegionInfo(result); if (info != null && !info.isOffline()) { break; } } Threads.sleep(10); } } /** * Starting with HBase 0.95.2 the Get class' c'tor no longer accepts * empty key strings leading to the rather undesirable behavior that this method * is not guaranteed to succeed. This method repeatedly attempts to 'get' the start key * of the given region from the region server to detect when the region server becomes * available. However, the first region has an empty array as the start key causing the * Get c'tor to throw an exception as stated above. The end key cannot be used instead * because it is an exclusive upper bound. */ private static void blockUntilRegionIsOpened(Configuration conf, long timeout, HRegionInfo hri) throws IOException, InterruptedException { long start = System.currentTimeMillis(); HTable table = new HTable(conf, hri.getTableName()); try { byte [] row = hri.getStartKey(); // Check for null/empty row. If we find one, use a key that is likely to // be in first region. If key '0' happens not to be in the given region // then an exception will be thrown. if (row == null || row.length <= 0) row = new byte [] {'0'}; Get get = new Get(row); while (System.currentTimeMillis() - start < timeout) { try { table.get(get); break; } catch(IOException ex) { //wait some more } Threads.sleep(10); } } finally { IOUtils.closeQuietly(table); } } /** * args contains a list of hbase table names. This program will split the hbase tables * into regions and assign each region to a specific region server. 
*/ public static void main(String args[]) throws IOException, InterruptedException, TableNotFoundException { HBaseTestDataRegionAssigment assignment = new HBaseTestDataRegionAssigment(); for (String htable: args) { assignment.performAssigment(htable); } assignment.close(); // Exit forcefully because of HDFS-6057. Otherwise, the JVM won't exit due to a // non-daemon thread still being up. System.exit(0); } }
Fix HBase region splitting for tests. It appears that HBase sometimes ignores an admin.splitRegion() RPC, which made our region splitting fail. As a workaround, this patch adds another retry loop such that the split/wait sequence is attempted multiple times. Change-Id: I9aa8ab87bba79ea11b79c50f15328b8be844924d Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4557 Reviewed-by: Lenni Kuff <[email protected]> Tested-by: Alex Behm <[email protected]>
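The pattern this message describes is general: re-issue an asynchronous request until a bounded wait confirms it took effect, giving up after a fixed number of rounds. A minimal sketch under hypothetical names (this is not Impala's or HBase's API):

    import java.io.IOException;

    // Hypothetical abstraction over a fire-and-forget RPC plus a bounded check.
    interface AsyncOp {
        void request() throws IOException;                        // may be silently dropped
        boolean waitForEffect(long timeoutMs) throws IOException; // true once visible
    }

    final class RetryUtil {
        static void retryUntilConfirmed(AsyncOp op, int maxAttempts, long timeoutMs)
                throws IOException, InterruptedException {
            for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
                op.request();
                if (op.waitForEffect(timeoutMs)) {
                    return; // effect observed; done
                }
                Thread.sleep(100); // brief pause before re-issuing
            }
            throw new IllegalStateException(
                    "operation not confirmed after " + maxAttempts + " attempts");
        }
    }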
testdata/src/main/java/com/cloudera/impala/datagenerator/HBaseTestDataRegionAssigment.java
Fix HBase region splitting for tests.
<ide><path>testdata/src/main/java/com/cloudera/impala/datagenerator/HBaseTestDataRegionAssigment.java <ide> private final List<ServerName> sortedRS; // sorted list of region server name <ide> private final String[] splitPoints = { "1", "3", "5", "7", "9"}; <ide> <add> // Number of times to retry a series region-split/wait-for-split calls. <add> private final static int MAX_SPLIT_ATTEMPTS = 10; <add> <add> // Maximum time in ms to wait for a region to be split. <add> private final static int WAIT_FOR_SPLIT_TIMEOUT = 10000; <add> <ide> public HBaseTestDataRegionAssigment() throws IOException { <ide> conf = new Configuration(); <ide> hbaseAdmin = new HBaseAdmin(conf); <ide> // If the table has already been split (i.e. regions count > 1), the same split <ide> // call will be a no-op and this will cause blockUntilRegionSplit to break. <ide> for (int i = 0; i < splitPoints.length; ++i) { <add> hbaseAdmin.majorCompact(tableName); <ide> List<HRegionInfo> regions = hbaseAdmin.getTableRegions(tableName.getBytes()); <ide> HRegionInfo splitRegion = regions.get(regions.size() - 1); <del> hbaseAdmin.split(splitRegion.getRegionNameAsString(), splitPoints[i]); <del> blockUntilRegionSplit(conf, 50000, splitRegion.getRegionName(), true); <add> int attempt = 1; <add> boolean done = false; <add> while (!done && attempt < MAX_SPLIT_ATTEMPTS) { <add> // HBase seems to not always properly receive/process this split RPC, <add> // so we need to retry the split/block several times. <add> hbaseAdmin.split(splitRegion.getRegionNameAsString(), splitPoints[i]); <add> done = blockUntilRegionSplit(conf, WAIT_FOR_SPLIT_TIMEOUT, <add> splitRegion.getRegionName(), true); <add> Thread.sleep(100); <add> ++attempt; <add> } <add> if (!done) { <add> throw new IllegalStateException( <add> String.format("Failed to split region '%s' after %s attempts.", <add> splitRegion.getRegionNameAsString(), MAX_SPLIT_ATTEMPTS)); <add> } <add> LOG.info(String.format("Split region '%s' after %s attempts.", <add> splitRegion.getRegionNameAsString(), attempt)); <ide> } <ide> } <ide> <ide> * org.apache.hadoop.hbase.regionserver.TestEndToEndSplitTransaction <ide> * to help block until a region split is completed. <ide> * <add> * The original code was modified to return true/false to indicate success or failure. <add> * <ide> * Blocks until the region split is complete in META and region server opens the <ide> * daughters <ide> */ <del> private static void blockUntilRegionSplit(Configuration conf, long timeout, <add> private static boolean blockUntilRegionSplit(Configuration conf, long timeout, <ide> final byte[] regionName, boolean waitForDaughters) <ide> throws IOException, InterruptedException { <ide> long start = System.currentTimeMillis(); <ide> } <ide> Threads.sleep(100); <ide> } <add> if (daughterA == null || daughterB == null) return false; <ide> <ide> //if we are here, this means the region split is complete or timed out <ide> if (waitForDaughters) { <ide> } finally { <ide> IOUtils.closeQuietly(metaTable); <ide> } <add> return true; <ide> } <ide> <ide> private static Result getRegionRow(HTable metaTable, byte[] regionName)
Java
apache-2.0
d9023abfd03f98d7bc10deb706bf0be92417a1ad
0
nknize/elasticsearch,uschindler/elasticsearch,uschindler/elasticsearch,GlenRSmith/elasticsearch,gingerwizard/elasticsearch,strapdata/elassandra,robin13/elasticsearch,GlenRSmith/elasticsearch,uschindler/elasticsearch,coding0011/elasticsearch,coding0011/elasticsearch,nknize/elasticsearch,coding0011/elasticsearch,scorpionvicky/elasticsearch,vroyer/elassandra,HonzaKral/elasticsearch,scorpionvicky/elasticsearch,GlenRSmith/elasticsearch,robin13/elasticsearch,scorpionvicky/elasticsearch,gfyoung/elasticsearch,robin13/elasticsearch,gfyoung/elasticsearch,gfyoung/elasticsearch,gingerwizard/elasticsearch,vroyer/elassandra,HonzaKral/elasticsearch,vroyer/elassandra,strapdata/elassandra,nknize/elasticsearch,gingerwizard/elasticsearch,strapdata/elassandra,GlenRSmith/elasticsearch,uschindler/elasticsearch,robin13/elasticsearch,gfyoung/elasticsearch,scorpionvicky/elasticsearch,nknize/elasticsearch,gingerwizard/elasticsearch,coding0011/elasticsearch,HonzaKral/elasticsearch,gingerwizard/elasticsearch,GlenRSmith/elasticsearch,scorpionvicky/elasticsearch,gingerwizard/elasticsearch,coding0011/elasticsearch,gfyoung/elasticsearch,robin13/elasticsearch,uschindler/elasticsearch,strapdata/elassandra,gingerwizard/elasticsearch,HonzaKral/elasticsearch,strapdata/elassandra,nknize/elasticsearch
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.test; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.support.Headers; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.plugin.LicensePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.shield.ShieldPlugin; import org.elasticsearch.shield.authc.esusers.ESUsersRealm; import org.elasticsearch.shield.authc.support.SecuredString; import org.elasticsearch.shield.authc.support.UsernamePasswordToken; import org.elasticsearch.shield.signature.InternalSignatureService; import org.elasticsearch.shield.test.ShieldTestUtils; import org.elasticsearch.shield.transport.netty.NettySecuredTransport; import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration; import java.io.File; import java.net.URISyntaxException; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; import static org.elasticsearch.shield.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.elasticsearch.shield.test.ShieldTestUtils.writeFile; /** * {@link org.elasticsearch.test.SettingsSource} subclass that allows to set all needed settings for shield. * Unicast discovery is configured through {@link org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration.UnicastZen}, * also shield is installed with all the needed configuration and files. * To avoid conflicts, every cluster should have its own instance of this class as some configuration files need to be created. */ public class ShieldSettingsSource extends ClusterDiscoveryConfiguration.UnicastZen { public static final Settings DEFAULT_SETTINGS = ImmutableSettings.builder() .put("node.mode", "network") .put("plugins.load_classpath_plugins", false) .build(); public static final String DEFAULT_USER_NAME = "test_user"; public static final String DEFAULT_PASSWORD = "changeme"; public static final String DEFAULT_ROLE = "user"; public static final String DEFAULT_TRANSPORT_CLIENT_ROLE = "trans_client_user"; public static final String DEFAULT_TRANSPORT_CLIENT_USER_NAME = "test_trans_client_user"; public static final String CONFIG_STANDARD_USER = DEFAULT_USER_NAME + ":{plain}" + DEFAULT_PASSWORD + "\n" + DEFAULT_TRANSPORT_CLIENT_USER_NAME + ":{plain}" + DEFAULT_PASSWORD + "\n"; public static final String CONFIG_STANDARD_USER_ROLES = DEFAULT_ROLE + ":" + DEFAULT_USER_NAME + "," + DEFAULT_TRANSPORT_CLIENT_USER_NAME + "\n" + DEFAULT_TRANSPORT_CLIENT_ROLE + ":" + DEFAULT_TRANSPORT_CLIENT_USER_NAME+ "\n"; public static final String CONFIG_ROLE_ALLOW_ALL = DEFAULT_ROLE + ":\n" + " cluster: ALL\n" + " indices:\n" + " '*': ALL\n" + DEFAULT_TRANSPORT_CLIENT_ROLE + ":\n" + " cluster:\n" + " - cluster:monitor/nodes/info\n" + " - cluster:monitor/state"; private final File parentFolder; private final String subfolderPrefix; private final byte[] systemKey; private final boolean sslTransportEnabled; private final boolean hostnameVerificationEnabled; private final boolean hostnameVerificationResolveNameEnabled; /** * Creates a new {@link org.elasticsearch.test.SettingsSource} for the shield configuration. 
* * @param numOfNodes the number of nodes for proper unicast configuration (can be more than actually available) * @param sslTransportEnabled whether ssl should be enabled on the transport layer or not * @param parentFolder the parent folder that will contain all of the configuration files that need to be created * @param scope the scope of the test that is requiring an instance of ShieldSettingsSource */ public ShieldSettingsSource(int numOfNodes, boolean sslTransportEnabled, File parentFolder, ElasticsearchIntegrationTest.Scope scope) { this(numOfNodes, sslTransportEnabled, generateKey(), parentFolder, scope); } /** * Creates a new {@link org.elasticsearch.test.SettingsSource} for the shield configuration. * * @param numOfNodes the number of nodes for proper unicast configuration (can be more than actually available) * @param sslTransportEnabled whether ssl should be enabled on the transport layer or not * @param systemKey the system key that all of the nodes will use to sign messages * @param parentFolder the parent folder that will contain all of the configuration files that need to be created * @param scope the scope of the test that is requiring an instance of ShieldSettingsSource */ public ShieldSettingsSource(int numOfNodes, boolean sslTransportEnabled, byte[] systemKey, File parentFolder, ElasticsearchIntegrationTest.Scope scope) { super(numOfNodes, DEFAULT_SETTINGS, scope); this.systemKey = systemKey; this.parentFolder = parentFolder; this.subfolderPrefix = scope.name(); this.sslTransportEnabled = sslTransportEnabled; this.hostnameVerificationEnabled = randomBoolean(); this.hostnameVerificationResolveNameEnabled = randomBoolean(); } @Override public Settings node(int nodeOrdinal) { File folder = ShieldTestUtils.createFolder(parentFolder, subfolderPrefix + "-" + nodeOrdinal); ImmutableSettings.Builder builder = ImmutableSettings.builder().put(super.node(nodeOrdinal)) .put("plugin.types", ShieldPlugin.class.getName() + "," + licensePluginClass().getName()) .put("shield.audit.enabled", randomBoolean()) .put(InternalSignatureService.FILE_SETTING, writeFile(folder, "system_key", systemKey)) .put("shield.authc.realms.esusers.type", ESUsersRealm.TYPE) .put("shield.authc.realms.esusers.order", 0) .put("shield.authc.realms.esusers.files.users", writeFile(folder, "users", configUsers())) .put("shield.authc.realms.esusers.files.users_roles", writeFile(folder, "users_roles", configUsersRoles())) .put("shield.authz.store.files.roles", writeFile(folder, "roles.yml", configRoles())) .put(getNodeSSLSettings()); setUser(builder, nodeClientUsername(), nodeClientPassword()); return builder.build(); } @Override public Settings transportClient() { ImmutableSettings.Builder builder = ImmutableSettings.builder().put(super.transportClient()) .put("plugin.types", ShieldPlugin.class.getName()) .put(getClientSSLSettings()); setUser(builder, transportClientUsername(), transportClientPassword()); return builder.build(); } protected String configUsers() { return CONFIG_STANDARD_USER; } protected String configUsersRoles() { return CONFIG_STANDARD_USER_ROLES; } protected String configRoles() { return CONFIG_ROLE_ALLOW_ALL; } protected String nodeClientUsername() { return DEFAULT_USER_NAME; } protected SecuredString nodeClientPassword() { return new SecuredString(DEFAULT_PASSWORD.toCharArray()); } protected String transportClientUsername() { return DEFAULT_TRANSPORT_CLIENT_USER_NAME; } protected SecuredString transportClientPassword() { return new SecuredString(DEFAULT_PASSWORD.toCharArray()); } protected 
Class<? extends Plugin> licensePluginClass() { return LicensePlugin.class; } protected String licensePluginName() { return LicensePlugin.NAME; } private void setUser(ImmutableSettings.Builder builder, String username, SecuredString password) { if (randomBoolean()) { builder.put(Headers.PREFIX + "." + UsernamePasswordToken.BASIC_AUTH_HEADER, basicAuthHeaderValue(username, password)); } else { builder.put("shield.user", username + ":" + new String(password.internalChars())); } } private static byte[] generateKey() { try { return InternalSignatureService.generateKey(); } catch (Exception e) { throw new ElasticsearchException("exception while generating the system key", e); } } private Settings getNodeSSLSettings() { return getSSLSettingsForStore("/org/elasticsearch/shield/transport/ssl/certs/simple/testnode.jks", "testnode", sslTransportEnabled, hostnameVerificationEnabled, hostnameVerificationResolveNameEnabled); } private Settings getClientSSLSettings() { return getSSLSettingsForStore("/org/elasticsearch/shield/transport/ssl/certs/simple/testclient.jks", "testclient", sslTransportEnabled, hostnameVerificationEnabled, hostnameVerificationResolveNameEnabled); } /** * Returns the configuration settings given the location of a certificate and its password * * @param resourcePathToStore the location of the keystore or truststore * @param password the password * @return the configuration settings */ public static Settings getSSLSettingsForStore(String resourcePathToStore, String password) { return getSSLSettingsForStore(resourcePathToStore, password, true, true, true); } private static Settings getSSLSettingsForStore(String resourcePathToStore, String password, boolean sslTransportEnabled, boolean hostnameVerificationEnabled, boolean hostnameVerificationResolveNameEnabled) { File store; try { store = new File(ShieldSettingsSource.class.getResource(resourcePathToStore).toURI()); } catch (URISyntaxException e) { throw new ElasticsearchException("exception while reading the store", e); } if (!store.exists()) { throw new ElasticsearchException("store path doesn't exist"); } ImmutableSettings.Builder builder = settingsBuilder() .put("shield.transport.ssl", sslTransportEnabled) .put("shield.http.ssl", false); if (sslTransportEnabled) { builder.put("shield.ssl.keystore.path", store.getPath()) .put("shield.ssl.keystore.password", password) .put(NettySecuredTransport.HOSTNAME_VERIFICATION_SETTING, hostnameVerificationEnabled) .put(NettySecuredTransport.HOSTNAME_VERIFICATION_RESOLVE_NAME_SETTING, hostnameVerificationResolveNameEnabled); } if (sslTransportEnabled && randomBoolean()) { builder.put("shield.ssl.truststore.path", store.getPath()) .put("shield.ssl.truststore.password", password); } return builder.build(); } }
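The public getSSLSettingsForStore(String, String) overload defined above is the entry point other tests consume. A minimal, hypothetical caller follows: the store path and password are the fixtures the file itself references, while the wrapper class and the printout are illustrative only and not part of the Shield sources.

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ShieldSettingsSource;

public class SSLSettingsUsage {
    public static void main(String[] args) {
        // Build transport-SSL settings from the bundled test keystore.
        Settings ssl = ShieldSettingsSource.getSSLSettingsForStore(
                "/org/elasticsearch/shield/transport/ssl/certs/simple/testnode.jks",
                "testnode");
        // shield.transport.ssl is always TRUE through this overload; the
        // truststore entries appear only on a random subset of runs, because
        // the private overload guards them with randomBoolean().
        System.out.println(ssl.getAsMap());
    }
}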
src/test/java/org/elasticsearch/test/ShieldSettingsSource.java
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.test; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.support.Headers; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.os.OsUtils; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.plugin.LicensePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.shield.ShieldPlugin; import org.elasticsearch.shield.authc.esusers.ESUsersRealm; import org.elasticsearch.shield.authc.support.SecuredString; import org.elasticsearch.shield.authc.support.UsernamePasswordToken; import org.elasticsearch.shield.signature.InternalSignatureService; import org.elasticsearch.shield.test.ShieldTestUtils; import org.elasticsearch.shield.transport.netty.NettySecuredTransport; import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration; import java.io.File; import java.net.URISyntaxException; import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; import static org.elasticsearch.shield.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.elasticsearch.shield.test.ShieldTestUtils.writeFile; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; /** * {@link org.elasticsearch.test.SettingsSource} subclass that allows to set all needed settings for shield. * Unicast discovery is configured through {@link org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration.UnicastZen}, * also shield is installed with all the needed configuration and files. * To avoid conflicts, every cluster should have its own instance of this class as some configuration files need to be created. 
*/ public class ShieldSettingsSource extends ClusterDiscoveryConfiguration.UnicastZen { public static final Settings DEFAULT_SETTINGS = ImmutableSettings.builder() .put("node.mode", "network") .put("plugins.load_classpath_plugins", false) .build(); public static final String DEFAULT_USER_NAME = "test_user"; public static final String DEFAULT_PASSWORD = "changeme"; public static final String DEFAULT_ROLE = "user"; public static final String DEFAULT_TRANSPORT_CLIENT_ROLE = "trans_client_user"; public static final String DEFAULT_TRANSPORT_CLIENT_USER_NAME = "test_trans_client_user"; public static final String CONFIG_STANDARD_USER = DEFAULT_USER_NAME + ":{plain}" + DEFAULT_PASSWORD + "\n" + DEFAULT_TRANSPORT_CLIENT_USER_NAME + ":{plain}" + DEFAULT_PASSWORD + "\n"; public static final String CONFIG_STANDARD_USER_ROLES = DEFAULT_ROLE + ":" + DEFAULT_USER_NAME + "," + DEFAULT_TRANSPORT_CLIENT_USER_NAME + "\n" + DEFAULT_TRANSPORT_CLIENT_ROLE + ":" + DEFAULT_TRANSPORT_CLIENT_USER_NAME+ "\n"; public static final String CONFIG_ROLE_ALLOW_ALL = DEFAULT_ROLE + ":\n" + " cluster: ALL\n" + " indices:\n" + " '*': ALL\n" + DEFAULT_TRANSPORT_CLIENT_ROLE + ":\n" + " cluster:\n" + " - cluster:monitor/nodes/info\n" + " - cluster:monitor/state"; private final File parentFolder; private final String subfolderPrefix; private final byte[] systemKey; private final boolean sslTransportEnabled; private final boolean hostnameVerificationEnabled; private final boolean hostnameVerificationResolveNameEnabled; /** * Creates a new {@link org.elasticsearch.test.SettingsSource} for the shield configuration. * * @param numOfNodes the number of nodes for proper unicast configuration (can be more than actually available) * @param sslTransportEnabled whether ssl should be enabled on the transport layer or not * @param parentFolder the parent folder that will contain all of the configuration files that need to be created * @param scope the scope of the test that is requiring an instance of ShieldSettingsSource */ public ShieldSettingsSource(int numOfNodes, boolean sslTransportEnabled, File parentFolder, ElasticsearchIntegrationTest.Scope scope) { this(numOfNodes, sslTransportEnabled, generateKey(), parentFolder, scope); } /** * Creates a new {@link org.elasticsearch.test.SettingsSource} for the shield configuration. 
* * @param numOfNodes the number of nodes for proper unicast configuration (can be more than actually available) * @param sslTransportEnabled whether ssl should be enabled on the transport layer or not * @param systemKey the system key that all of the nodes will use to sign messages * @param parentFolder the parent folder that will contain all of the configuration files that need to be created * @param scope the scope of the test that is requiring an instance of ShieldSettingsSource */ public ShieldSettingsSource(int numOfNodes, boolean sslTransportEnabled, byte[] systemKey, File parentFolder, ElasticsearchIntegrationTest.Scope scope) { super(numOfNodes, DEFAULT_SETTINGS, scope); this.systemKey = systemKey; this.parentFolder = parentFolder; this.subfolderPrefix = scope.name(); this.sslTransportEnabled = sslTransportEnabled; this.hostnameVerificationEnabled = randomBoolean(); this.hostnameVerificationResolveNameEnabled = randomBoolean(); } @Override public Settings node(int nodeOrdinal) { File folder = ShieldTestUtils.createFolder(parentFolder, subfolderPrefix + "-" + nodeOrdinal); ImmutableSettings.Builder builder = ImmutableSettings.builder().put(super.node(nodeOrdinal)) .put("plugin.types", ShieldPlugin.class.getName() + "," + licensePluginClass().getName()) .put("shield.audit.enabled", randomBoolean()) .put(InternalSignatureService.FILE_SETTING, writeFile(folder, "system_key", systemKey)) .put("shield.authc.realms.esusers.type", ESUsersRealm.TYPE) .put("shield.authc.realms.esusers.order", 0) .put("shield.authc.realms.esusers.files.users", writeFile(folder, "users", configUsers())) .put("shield.authc.realms.esusers.files.users_roles", writeFile(folder, "users_roles", configUsersRoles())) .put("shield.authz.store.files.roles", writeFile(folder, "roles.yml", configRoles())) .put(getNodeSSLSettings()); //the random call has to happen all the time for repeatability String networkHost = randomBoolean() ? "127.0.0.1" : "::1"; if (OsUtils.MAC) { builder.put("network.host", networkHost); } setUser(builder, nodeClientUsername(), nodeClientPassword()); return builder.build(); } @Override public Settings transportClient() { ImmutableSettings.Builder builder = ImmutableSettings.builder().put(super.transportClient()) .put("plugin.types", ShieldPlugin.class.getName()) .put(getClientSSLSettings()); setUser(builder, transportClientUsername(), transportClientPassword()); return builder.build(); } protected String configUsers() { return CONFIG_STANDARD_USER; } protected String configUsersRoles() { return CONFIG_STANDARD_USER_ROLES; } protected String configRoles() { return CONFIG_ROLE_ALLOW_ALL; } protected String nodeClientUsername() { return DEFAULT_USER_NAME; } protected SecuredString nodeClientPassword() { return new SecuredString(DEFAULT_PASSWORD.toCharArray()); } protected String transportClientUsername() { return DEFAULT_TRANSPORT_CLIENT_USER_NAME; } protected SecuredString transportClientPassword() { return new SecuredString(DEFAULT_PASSWORD.toCharArray()); } protected Class<? extends Plugin> licensePluginClass() { return LicensePlugin.class; } protected String licensePluginName() { return LicensePlugin.NAME; } private void setUser(ImmutableSettings.Builder builder, String username, SecuredString password) { if (randomBoolean()) { builder.put(Headers.PREFIX + "." 
+ UsernamePasswordToken.BASIC_AUTH_HEADER, basicAuthHeaderValue(username, password)); } else { builder.put("shield.user", username + ":" + new String(password.internalChars())); } } private static byte[] generateKey() { try { return InternalSignatureService.generateKey(); } catch (Exception e) { throw new ElasticsearchException("exception while generating the system key", e); } } private Settings getNodeSSLSettings() { return getSSLSettingsForStore("/org/elasticsearch/shield/transport/ssl/certs/simple/testnode.jks", "testnode", sslTransportEnabled, hostnameVerificationEnabled, hostnameVerificationResolveNameEnabled); } private Settings getClientSSLSettings() { return getSSLSettingsForStore("/org/elasticsearch/shield/transport/ssl/certs/simple/testclient.jks", "testclient", sslTransportEnabled, hostnameVerificationEnabled, hostnameVerificationResolveNameEnabled); } /** * Returns the configuration settings given the location of a certificate and its password * * @param resourcePathToStore the location of the keystore or truststore * @param password the password * @return the configuration settings */ public static Settings getSSLSettingsForStore(String resourcePathToStore, String password) { return getSSLSettingsForStore(resourcePathToStore, password, true, true, true); } private static Settings getSSLSettingsForStore(String resourcePathToStore, String password, boolean sslTransportEnabled, boolean hostnameVerificationEnabled, boolean hostnameVerificationResolveNameEnabled) { File store; try { store = new File(ShieldSettingsSource.class.getResource(resourcePathToStore).toURI()); } catch (URISyntaxException e) { throw new ElasticsearchException("exception while reading the store", e); } if (!store.exists()) { throw new ElasticsearchException("store path doesn't exist"); } ImmutableSettings.Builder builder = settingsBuilder() .put("shield.transport.ssl", sslTransportEnabled) .put("shield.http.ssl", false); if (sslTransportEnabled) { builder.put("shield.ssl.keystore.path", store.getPath()) .put("shield.ssl.keystore.password", password) .put(NettySecuredTransport.HOSTNAME_VERIFICATION_SETTING, hostnameVerificationEnabled) .put(NettySecuredTransport.HOSTNAME_VERIFICATION_RESOLVE_NAME_SETTING, hostnameVerificationResolveNameEnabled); } if (sslTransportEnabled && randomBoolean()) { builder.put("shield.ssl.truststore.path", store.getPath()) .put("shield.ssl.truststore.password", password); } return builder.build(); } }
Testing: Remove randomization on osx The randomization of the `network.host` property, applied only on OSX, could lead to connecting to the wrong HTTP port in our functional tests. As this randomization is not really needed, we can simply remove it. Closes elastic/elasticsearch#586 Original commit: elastic/x-pack-elasticsearch@fb16bd864404a325207b113af07f0a0674d3312b
src/test/java/org/elasticsearch/test/ShieldSettingsSource.java
Testing: Remove randomization on osx
<ide><path>src/test/java/org/elasticsearch/test/ShieldSettingsSource.java <ide> <ide> import org.elasticsearch.ElasticsearchException; <ide> import org.elasticsearch.client.support.Headers; <del>import org.elasticsearch.common.io.FileSystemUtils; <del>import org.elasticsearch.common.os.OsUtils; <ide> import org.elasticsearch.common.settings.ImmutableSettings; <ide> import org.elasticsearch.common.settings.Settings; <ide> import org.elasticsearch.license.plugin.LicensePlugin; <ide> import java.io.File; <ide> import java.net.URISyntaxException; <ide> <add>import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; <ide> import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; <ide> import static org.elasticsearch.shield.authc.support.UsernamePasswordToken.basicAuthHeaderValue; <ide> import static org.elasticsearch.shield.test.ShieldTestUtils.writeFile; <del>import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; <ide> <ide> /** <ide> * {@link org.elasticsearch.test.SettingsSource} subclass that allows to set all needed settings for shield. <ide> .put("shield.authz.store.files.roles", writeFile(folder, "roles.yml", configRoles())) <ide> .put(getNodeSSLSettings()); <ide> <del> //the random call has to happen all the time for repeatability <del> String networkHost = randomBoolean() ? "127.0.0.1" : "::1"; <del> if (OsUtils.MAC) { <del> builder.put("network.host", networkHost); <del> } <del> <ide> setUser(builder, nodeClientUsername(), nodeClientPassword()); <ide> <ide> return builder.build();
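The deleted block above illustrates a general rule for seeded, randomized test suites: every draw from the shared random source must happen unconditionally, or runs on different platforms consume different random sequences and the seed stops reproducing the same test. Below is a minimal, hypothetical sketch of the pitfall and of the workaround the deleted code had been using; plain java.util.Random and the os.name check stand in for the test framework's infrastructure, and none of these names come from the Shield sources.

import java.util.Random;

public class SeededRepeatability {
    public static void main(String[] args) {
        long seed = 42L;

        // BROKEN: the draw happens on one platform only, so the same seed
        // yields different follow-up values on Mac than on Linux.
        Random broken = new Random(seed);
        if (System.getProperty("os.name").startsWith("Mac")) {
            String host = broken.nextBoolean() ? "127.0.0.1" : "::1";
            System.out.println("network.host = " + host);
        }
        System.out.println("next draw (platform-dependent): " + broken.nextInt());

        // WORKAROUND (what the deleted code did): always consume the value
        // and only apply it conditionally, keeping the sequence identical
        // on every platform.
        Random stable = new Random(seed);
        String host = stable.nextBoolean() ? "127.0.0.1" : "::1";
        if (System.getProperty("os.name").startsWith("Mac")) {
            System.out.println("network.host = " + host);
        }
        System.out.println("next draw (stable): " + stable.nextInt());
    }
}

The commit goes one step further: because the setting was not actually needed, deleting the draw outright is simpler than keeping a dummy call alive purely for sequence stability.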
Java
apache-2.0
f992300d8449f96a7274ba3e1152632ada0e7dc5
0
jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim
/* * JaamSim Discrete Event Simulation * Copyright (C) 2002-2011 Ausenco Engineering Canada Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. */ package com.jaamsim.basicsim; import java.io.File; import java.text.SimpleDateFormat; import java.util.Calendar; import javax.swing.JFrame; import com.jaamsim.Samples.SampleConstant; import com.jaamsim.Samples.SampleExpInput; import com.jaamsim.events.Conditional; import com.jaamsim.events.EventManager; import com.jaamsim.input.BooleanInput; import com.jaamsim.input.DirInput; import com.jaamsim.input.EntityListInput; import com.jaamsim.input.Input; import com.jaamsim.input.InputAgent; import com.jaamsim.input.IntegerInput; import com.jaamsim.input.Keyword; import com.jaamsim.input.Output; import com.jaamsim.input.ValueInput; import com.jaamsim.math.Vec3d; import com.jaamsim.ui.AboutBox; import com.jaamsim.ui.EditBox; import com.jaamsim.ui.EntityPallet; import com.jaamsim.ui.FrameBox; import com.jaamsim.ui.GUIFrame; import com.jaamsim.ui.LogBox; import com.jaamsim.ui.ObjectSelector; import com.jaamsim.ui.OutputBox; import com.jaamsim.ui.PropertyBox; import com.jaamsim.units.DimensionlessUnit; import com.jaamsim.units.DistanceUnit; import com.jaamsim.units.TimeUnit; import com.jaamsim.units.Unit; /** * Simulation provides the basic structure for the Entity model lifetime of earlyInit, * startUp and doEndAt. The initial processtargets required to start the model are * added to the eventmanager here. This class also acts as a bridge to the UI by * providing controls for the various windows. */ public class Simulation extends Entity { // Key Inputs tab @Keyword(description = "The duration of the simulation run in which all statistics will be recorded.", example = "Simulation Duration { 8760 h }") private static final ValueInput runDuration; @Keyword(description = "The initialization interval for the simulation run. The model will run " + "for the InitializationDuration interval and then clear the statistics and execute for the " + "specified RunDuration interval. The total length of the simulation run will be the sum of " + "the InitializationDuration and RunDuration inputs.", example = "Simulation Initialization { 720 h }") private static final ValueInput initializationTime; @Keyword(description = "An optional expression that pauses the run when TRUE is returned.", example = "Simulation PauseCondition { '[Queue1].QueueLength > 20'}") private static final SampleExpInput pauseConditionInput; @Keyword(description = "If TRUE, the simulation run will be terminated when the " + "PauseCondition expression returns TRUE.", example = "Simulation ExitAtPauseCondition { TRUE }") private static final BooleanInput exitAtPauseCondition; @Keyword(description = "Indicates whether to close the program on completion of the simulation run.", example = "Simulation ExitAtStop { TRUE }") private static final BooleanInput exitAtStop; @Keyword(description = "Global seed that sets the substream for each probability " + "distribution. Must be an integer >= 0. 
GlobalSubstreamSeed works " + "together with each probability distribution's RandomSeed keyword to " + "determine its random sequence. It allows the user to change all the " + "random sequences in a model with a single input. To run multiple " + "replications, set the appropriate inputs under the Multiple Runs tab " + "and then set the GlobalSubstreamSeed input to the run number or to " + "one of the run indices.", example = "Simulation GlobalSubstreamSeed { 5 }\n" + "Simulation GlobalSubstreamSeed { [Simulation].RunNumber }\n" + "Simulation GlobalSubstreamSeed { [Simulation].RunIndex(3) }") private static final SampleExpInput globalSeedInput; @Keyword(description = "Indicates whether an output report will be printed at the end of the simulation run.", example = "Simulation PrintReport { TRUE }") private static final BooleanInput printReport; @Keyword(description = "The directory in which to place the output report. Defaults to the " + "directory containing the configuration file for the run.", example = "Simulation ReportDirectory { 'c:\reports\' }") private static final DirInput reportDirectory; @Keyword(description = "The length of time represented by one simulation tick.", example = "Simulation TickLength { 1e-6 s }") private static final ValueInput tickLengthInput; // GUI tab @Keyword(description = "An optional list of units to be used for displaying model outputs.", example = "Simulation DisplayedUnits { h kt }") private static final EntityListInput<? extends Unit> displayedUnits; @Keyword(description = "If TRUE, a dragged object will be positioned to the nearest grid point.", example = "Simulation SnapToGrid { TRUE }") private static final BooleanInput snapToGrid; @Keyword(description = "The distance between snap grid points.", example = "Simulation SnapGridSpacing { 1 m }") private static final ValueInput snapGridSpacing; @Keyword(description = "The distance moved by the selected entity when the an arrow key is pressed.", example = "Simulation IncrementSize { 1 cm }") private static final ValueInput incrementSize; @Keyword(description = "A Boolean to turn on or off real time in the simulation run", example = "Simulation RealTime { TRUE }") private static final BooleanInput realTime; @Keyword(description = "The real time speed up factor", example = "Simulation RealTimeFactor { 1200 }") private static final IntegerInput realTimeFactor; public static final int DEFAULT_REAL_TIME_FACTOR = 1; public static final int MIN_REAL_TIME_FACTOR = 1; public static final int MAX_REAL_TIME_FACTOR= 1000000; @Keyword(description = "The time at which the simulation will be paused.", example = "Simulation PauseTime { 200 h }") private static final ValueInput pauseTime; @Keyword(description = "Indicates whether the Model Builder tool should be shown on startup.", example = "Simulation ShowModelBuilder { TRUE }") private static final BooleanInput showModelBuilder; @Keyword(description = "Indicates whether the Object Selector tool should be shown on startup.", example = "Simulation ShowObjectSelector { TRUE }") private static final BooleanInput showObjectSelector; @Keyword(description = "Indicates whether the Input Editor tool should be shown on startup.", example = "Simulation ShowInputEditor { TRUE }") private static final BooleanInput showInputEditor; @Keyword(description = "Indicates whether the Output Viewer tool should be shown on startup.", example = "Simulation ShowOutputViewer { TRUE }") private static final BooleanInput showOutputViewer; @Keyword(description = "Indicates whether the Output 
Viewer tool should be shown on startup.", example = "Simulation ShowPropertyViewer { TRUE }") private static final BooleanInput showPropertyViewer; @Keyword(description = "Indicates whether the Log Viewer tool should be shown on startup.", example = "Simulation ShowLogViewer { TRUE }") private static final BooleanInput showLogViewer; @Keyword(description = "Time at which the simulation run is started (hh:mm).", example = "Simulation StartTime { 2160 h }") private static final ValueInput startTimeInput; // Hidden keywords @Keyword(description = "If the value is TRUE, then the input report file will be printed after " + "loading the configuration file. The input report can always be generated when " + "needed by selecting \"Print Input Report\" under the File menu.", example = "Simulation PrintInputReport { TRUE }") private static final BooleanInput printInputReport; @Keyword(description = "This is placeholder description text", example = "This is placeholder example text") private static final BooleanInput traceEventsInput; @Keyword(description = "This is placeholder description text", example = "This is placeholder example text") private static final BooleanInput verifyEventsInput; private static double timeScale; // the scale from discrete to continuous time private static double startTime; // simulation time (seconds) for the start of the run (not necessarily zero) private static double endTime; // simulation time (seconds) for the end of the run private static Simulation myInstance; private static String modelName = "JaamSim"; static { // Key Inputs tab runDuration = new ValueInput("RunDuration", "Key Inputs", 31536000.0d); runDuration.setUnitType(TimeUnit.class); runDuration.setValidRange(1e-15d, Double.POSITIVE_INFINITY); initializationTime = new ValueInput("InitializationDuration", "Key Inputs", 0.0); initializationTime.setUnitType(TimeUnit.class); initializationTime.setValidRange(0.0d, Double.POSITIVE_INFINITY); pauseConditionInput = new SampleExpInput("PauseCondition", "Key Inputs", null); pauseConditionInput.setUnitType(DimensionlessUnit.class); exitAtPauseCondition = new BooleanInput("ExitAtPauseCondition", "Key Inputs", false); exitAtStop = new BooleanInput("ExitAtStop", "Key Inputs", false); globalSeedInput = new SampleExpInput("GlobalSubstreamSeed", "Key Inputs", new SampleConstant(0)); globalSeedInput.setUnitType(DimensionlessUnit.class); globalSeedInput.setValidRange(0, Integer.MAX_VALUE); printReport = new BooleanInput("PrintReport", "Key Inputs", false); reportDirectory = new DirInput("ReportDirectory", "Key Inputs", null); reportDirectory.setDefaultText("Configuration File Directory"); tickLengthInput = new ValueInput("TickLength", "Key Inputs", 1e-6d); tickLengthInput.setUnitType(TimeUnit.class); tickLengthInput.setValidRange(1e-9d, 5.0d); // GUI tab displayedUnits = new EntityListInput<>(Unit.class, "DisplayedUnits", "GUI", null); displayedUnits.setDefaultText("SI Units"); displayedUnits.setPromptReqd(false); realTime = new BooleanInput("RealTime", "GUI", false); realTime.setPromptReqd(false); snapToGrid = new BooleanInput("SnapToGrid", "GUI", false); snapToGrid.setPromptReqd(false); snapGridSpacing = new ValueInput("SnapGridSpacing", "GUI", 0.1d); snapGridSpacing.setUnitType(DistanceUnit.class); snapGridSpacing.setValidRange(1.0e-6, Double.POSITIVE_INFINITY); snapGridSpacing.setPromptReqd(false); incrementSize = new ValueInput("IncrementSize", "GUI", 0.1d); incrementSize.setUnitType(DistanceUnit.class); incrementSize.setValidRange(1.0e-6, Double.POSITIVE_INFINITY); 
incrementSize.setPromptReqd(false); realTimeFactor = new IntegerInput("RealTimeFactor", "GUI", DEFAULT_REAL_TIME_FACTOR); realTimeFactor.setValidRange(MIN_REAL_TIME_FACTOR, MAX_REAL_TIME_FACTOR); realTimeFactor.setPromptReqd(false); pauseTime = new ValueInput("PauseTime", "GUI", Double.POSITIVE_INFINITY); pauseTime.setUnitType(TimeUnit.class); pauseTime.setValidRange(0.0d, Double.POSITIVE_INFINITY); pauseTime.setPromptReqd(false); showModelBuilder = new BooleanInput("ShowModelBuilder", "GUI", false); showModelBuilder.setPromptReqd(false); showObjectSelector = new BooleanInput("ShowObjectSelector", "GUI", false); showObjectSelector.setPromptReqd(false); showInputEditor = new BooleanInput("ShowInputEditor", "GUI", false); showInputEditor.setPromptReqd(false); showOutputViewer = new BooleanInput("ShowOutputViewer", "GUI", false); showOutputViewer.setPromptReqd(false); showPropertyViewer = new BooleanInput("ShowPropertyViewer", "GUI", false); showPropertyViewer.setPromptReqd(false); showLogViewer = new BooleanInput("ShowLogViewer", "GUI", false); showLogViewer.setPromptReqd(false); // Hidden keywords startTimeInput = new ValueInput("StartTime", "Key Inputs", 0.0d); startTimeInput.setUnitType(TimeUnit.class); startTimeInput.setValidRange(0.0d, Double.POSITIVE_INFINITY); traceEventsInput = new BooleanInput("TraceEvents", "Key Inputs", false); verifyEventsInput = new BooleanInput("VerifyEvents", "Key Inputs", false); printInputReport = new BooleanInput("PrintInputReport", "Key Inputs", false); // Initialize basic model information startTime = 0.0; endTime = 8760.0*3600.0; } { // Key Inputs tab this.addInput(runDuration); this.addInput(initializationTime); this.addInput(pauseConditionInput); this.addInput(exitAtPauseCondition); this.addInput(exitAtStop); this.addInput(globalSeedInput); this.addInput(printReport); this.addInput(reportDirectory); this.addInput(tickLengthInput); // GUI tab this.addInput(displayedUnits); this.addInput(snapToGrid); this.addInput(snapGridSpacing); this.addInput(incrementSize); this.addInput(realTime); this.addInput(realTimeFactor); this.addInput(pauseTime); this.addInput(showModelBuilder); this.addInput(showObjectSelector); this.addInput(showInputEditor); this.addInput(showOutputViewer); this.addInput(showPropertyViewer); this.addInput(showLogViewer); // Hidden keywords this.addInput(startTimeInput); this.addInput(traceEventsInput); this.addInput(verifyEventsInput); this.addInput(printInputReport); // Hide various keywords startTimeInput.setHidden(true); traceEventsInput.setHidden(true); verifyEventsInput.setHidden(true); printInputReport.setHidden(true); } public Simulation() {} public static Simulation getInstance() { if (myInstance == null) { for (Entity ent : Entity.getAll()) { if (ent instanceof Simulation ) { myInstance = (Simulation) ent; break; } } } return myInstance; } @Override public void updateForInput( Input<?> in ) { super.updateForInput( in ); if(in == realTimeFactor || in == realTime) { updateRealTime(); return; } if (in == pauseTime) { updatePauseTime(); return; } if (in == reportDirectory) { InputAgent.setReportDirectory(reportDirectory.getDir()); return; } if (in == displayedUnits) { if (displayedUnits.getValue() == null) return; for (Unit u : displayedUnits.getValue()) { Unit.setPreferredUnit(u.getClass(), u); } return; } if (in == showModelBuilder) { setWindowVisible(EntityPallet.getInstance(), showModelBuilder.getValue()); return; } if (in == showObjectSelector) { setWindowVisible(ObjectSelector.getInstance(), showObjectSelector.getValue()); 
return; } if (in == showInputEditor) { setWindowVisible(EditBox.getInstance(), showInputEditor.getValue()); FrameBox.reSelectEntity(); return; } if (in == showOutputViewer) { setWindowVisible(OutputBox.getInstance(), showOutputViewer.getValue()); FrameBox.reSelectEntity(); return; } if (in == showPropertyViewer) { setWindowVisible(PropertyBox.getInstance(), showPropertyViewer.getValue()); FrameBox.reSelectEntity(); return; } if (in == showLogViewer) { setWindowVisible(LogBox.getInstance(), showLogViewer.getValue()); FrameBox.reSelectEntity(); return; } } public static void clear() { initializationTime.reset(); runDuration.reset(); pauseTime.reset(); tickLengthInput.reset(); traceEventsInput.reset(); verifyEventsInput.reset(); printInputReport.reset(); realTimeFactor.reset(); realTime.reset(); updateRealTime(); exitAtStop.reset(); startTimeInput.reset(); showModelBuilder.reset(); showObjectSelector.reset(); showInputEditor.reset(); showOutputViewer.reset(); showPropertyViewer.reset(); showLogViewer.reset(); // Initialize basic model information startTime = 0.0; endTime = 8760.0*3600.0; myInstance = null; // close warning/error trace file InputAgent.closeLogFile(); // Kill all entities except simulation while(Entity.getAll().size() > 0) { Entity ent = Entity.getAll().get(Entity.getAll().size()-1); ent.kill(); } } /** * Initializes and starts the model * 1) Initializes EventManager to accept events. * 2) calls startModel() to allow the model to add its starting events to EventManager * 3) start EventManager processing events */ public static void start(EventManager evt) { // Validate each entity based on inputs only for (int i = 0; i < Entity.getAll().size(); i++) { try { Entity.getAll().get(i).validate(); } catch (Throwable e) { LogBox.format("%s: Validation error- %s", Entity.getAll().get(i).getName(), e.getMessage()); GUIFrame.showErrorDialog("Input Error Detected During Validation", "%s: %-70s", Entity.getAll().get(i).getName(), e.getMessage()); GUIFrame.instance().updateForSimulationState(GUIFrame.SIM_STATE_CONFIGURED); return; } } InputAgent.prepareReportDirectory(); evt.clear(); evt.setTraceListener(null); if( Simulation.traceEvents() ) { String evtName = InputAgent.getConfigFile().getParentFile() + File.separator + InputAgent.getRunName() + ".evt"; EventRecorder rec = new EventRecorder(evtName); evt.setTraceListener(rec); } else if( Simulation.verifyEvents() ) { String evtName = InputAgent.getConfigFile().getParentFile() + File.separator + InputAgent.getRunName() + ".evt"; EventTracer trc = new EventTracer(evtName); evt.setTraceListener(trc); } evt.setTickLength(tickLengthInput.getValue()); setSimTimeScale(evt.secondsToNearestTick(3600.0d)); FrameBox.setSecondsPerTick(tickLengthInput.getValue()); startTime = startTimeInput.getValue(); endTime = startTime + Simulation.getInitializationTime() + Simulation.getRunDuration(); evt.scheduleProcessExternal(0, 0, false, new InitModelTarget(), null); evt.resume(evt.secondsToNearestTick(Simulation.getPauseTime())); } public static void end() { for (int i = 0; i < Entity.getAll().size(); i++) { Entity.getAll().get(i).doEnd(); } // Print the output report if (printReport.getValue()) InputAgent.printReport(Simulation.getInstance().getSimTime()); // Close warning/error trace file InputAgent.logMessage("Made it to do end at"); InputAgent.closeLogFile(); // Always terminate the run when in batch mode if (InputAgent.getBatch()) GUIFrame.shutdown(0); EventManager.current().pause(); } public static int getSubstreamNumber() { return 
(int)globalSeedInput.getValue().getNextSample(0.0); } public static boolean getPrintReport() { return printReport.getValue(); } public static boolean traceEvents() { return traceEventsInput.getValue(); } public static boolean verifyEvents() { return verifyEventsInput.getValue(); } static void setSimTimeScale(double scale) { timeScale = scale; } public static double getSimTimeFactor() { return timeScale; } public static double getEventTolerance() { return (1.0d / getSimTimeFactor()); } public static double getTickLength() { return tickLengthInput.getValue(); } public static double getPauseTime() { return pauseTime.getValue(); } /** * Returns the start time of the run. * @return - simulation time in seconds for the start of the run. */ public static double getStartTime() { return startTime; } /** * Returns the end time of the run. * @return - simulation time in seconds when the current run will stop. */ public static double getEndTime() { return endTime; } /** * Returns the duration of the run (not including intialization) */ public static double getRunDuration() { return runDuration.getValue(); } /** * Returns the duration of the initialization period */ public static double getInitializationTime() { return initializationTime.getValue(); } public static double getIncrementSize() { return incrementSize.getValue(); } public static boolean isSnapToGrid() { return snapToGrid.getValue(); } public static double getSnapGridSpacing() { return snapGridSpacing.getValue(); } public static boolean getExitAtPauseCondition() { return exitAtPauseCondition.getValue(); } public void doPauseCondition() { if (pauseConditionInput != null) EventManager.scheduleUntil(pauseModel, pauseCondition, null); } private final PauseModelTarget pauseModel = new PauseModelTarget(); class PauseConditional extends Conditional { @Override public boolean evaluate() { if (pauseConditionInput.getValue() == null) return false; double simTime = Simulation.getInstance().getSimTime(); return pauseConditionInput.getValue().getNextSample(simTime) != 0.0d; } } private final Conditional pauseCondition = new PauseConditional(); /** * Returns the nearest point on the snap grid to the given coordinate. * To avoid dithering, the new position must be at least one grid space * from the old position. * @param newPos - new coordinate for the object * @param oldPos - present coordinate for the object * @return newest snap grid point. */ public static Vec3d getSnapGridPosition(Vec3d newPos, Vec3d oldPos) { double spacing = snapGridSpacing.getValue(); Vec3d ret = new Vec3d(newPos); if (Math.abs(newPos.x - oldPos.x) < spacing) ret.x = oldPos.x; if (Math.abs(newPos.y - oldPos.y) < spacing) ret.y = oldPos.y; if (Math.abs(newPos.z - oldPos.z) < spacing) ret.z = oldPos.z; return Simulation.getSnapGridPosition(ret); } /** * Returns the nearest point on the snap grid to the given coordinate. * @param pos - position to be adjusted * @return nearest snap grid point. 
*/ public static Vec3d getSnapGridPosition(Vec3d pos) { double spacing = snapGridSpacing.getValue(); Vec3d ret = new Vec3d(pos); ret.x = spacing*Math.rint(ret.x/spacing); ret.y = spacing*Math.rint(ret.y/spacing); ret.z = spacing*Math.rint(ret.z/spacing); return ret; } static void updateRealTime() { GUIFrame.instance().updateForRealTime(realTime.getValue(), realTimeFactor.getValue()); } static void updatePauseTime() { GUIFrame.instance().updateForPauseTime(pauseTime.getValueString()); } public static void setModelName(String newModelName) { modelName = newModelName; } public static String getModelName() { return modelName; } public static boolean getExitAtStop() { return exitAtStop.getValue(); } public static boolean getPrintInputReport() { return printInputReport.getValue(); } public static boolean isRealTime() { return realTime.getValue(); } public static void setWindowVisible(JFrame f, boolean visible) { f.setVisible(visible); if (visible) f.toFront(); } /** * Re-open any Tools windows that have been closed temporarily. */ public static void showActiveTools() { setWindowVisible(EntityPallet.getInstance(), showModelBuilder.getValue()); setWindowVisible(ObjectSelector.getInstance(), showObjectSelector.getValue()); setWindowVisible(EditBox.getInstance(), showInputEditor.getValue()); setWindowVisible(OutputBox.getInstance(), showOutputViewer.getValue()); setWindowVisible(PropertyBox.getInstance(), showPropertyViewer.getValue()); setWindowVisible(LogBox.getInstance(), showLogViewer.getValue()); } /** * Closes all the Tools windows temporarily. */ public static void closeAllTools() { setWindowVisible(EntityPallet.getInstance(), false); setWindowVisible(ObjectSelector.getInstance(), false); setWindowVisible(EditBox.getInstance(), false); setWindowVisible(OutputBox.getInstance(), false); setWindowVisible(PropertyBox.getInstance(), false); setWindowVisible(LogBox.getInstance(), false); } @Output(name = "Software Name", description = "The licensed name for the simulation software.", reportable = true, sequence = 0) public String getSoftwareName(double simTime) { return modelName; } @Output(name = "Software Version", description = "The release number for the simulation software.", reportable = true, sequence = 1) public String getSoftwareVersion(double simTime) { return AboutBox.version; } @Output(name = "Configuration File", description = "The configuration file that has been loaded.", reportable = true, sequence = 2) public String getConfigFileName(double simTime) { return InputAgent.getConfigFile().getPath(); } @Output(name = "Present Time and Date", description = "The present local time and date.", reportable = true, sequence = 3) public String getPresentTime(double simTime) { String timeStamp = new SimpleDateFormat("MMM dd, yyyy HH:mm").format(Calendar.getInstance().getTime()); return timeStamp; } @Output(name = "Initialization Duration", description = "The length of time the model was executed prior to the start of statistics " + "collection.", unitType = TimeUnit.class, reportable = true, sequence = 4) public double getInitializationDuration(double simTime) { return initializationTime.getValue(); } @Output(name = "Run Duration", description = "The length of time over which statistics were collected.", unitType = TimeUnit.class, reportable = true, sequence = 5) public double getRunDuration(double simTime) { return runDuration.getValue(); } @Output(name = "Present Simulation Time", description = "The value for the simulation clock at the present time.", unitType = TimeUnit.class, reportable = 
true, sequence = 6) public double getPresentSimulationTime(double simTime) { return simTime; } }
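The substantive change in this record is that GlobalSubstreamSeed moves from an IntegerInput to a SampleExpInput, so getSubstreamNumber() now evaluates a sample provider ((int) globalSeedInput.getValue().getNextSample(0.0)) instead of reading a fixed integer, and the seed can be an expression such as [Simulation].RunNumber. A minimal sketch of that pattern follows, using a plain DoubleSupplier as a hypothetical stand-in for JaamSim's sample-provider types; nothing here is JaamSim API.

import java.util.function.DoubleSupplier;

public class ExpressionSeedDemo {

    // Fixed seed, analogous to the old IntegerInput behaviour.
    static DoubleSupplier constantSeed(int value) {
        return () -> value;
    }

    // Expression-backed seed, analogous to SampleExpInput holding something
    // like [Simulation].RunNumber: re-evaluated each time the seed is read.
    static DoubleSupplier runNumberSeed(int[] runNumberHolder) {
        return () -> runNumberHolder[0];
    }

    public static void main(String[] args) {
        int[] runNumber = {1};
        DoubleSupplier fixed = constantSeed(5);
        DoubleSupplier perRun = runNumberSeed(runNumber);

        // Mirrors Simulation.getSubstreamNumber(): truncate the sample to int.
        System.out.println("fixed seed: " + (int) fixed.getAsDouble());
        System.out.println("run 1 seed: " + (int) perRun.getAsDouble());

        runNumber[0] = 2; // next replication
        System.out.println("run 2 seed: " + (int) perRun.getAsDouble());
    }
}

The payoff is the one the new keyword description spells out: multiple replications can re-seed every probability distribution by pointing this single input at the run number, rather than editing a literal integer between runs.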
src/main/java/com/jaamsim/basicsim/Simulation.java
/* * JaamSim Discrete Event Simulation * Copyright (C) 2002-2011 Ausenco Engineering Canada Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. */ package com.jaamsim.basicsim; import java.io.File; import java.text.SimpleDateFormat; import java.util.Calendar; import javax.swing.JFrame; import com.jaamsim.Samples.SampleExpInput; import com.jaamsim.events.Conditional; import com.jaamsim.events.EventManager; import com.jaamsim.input.BooleanInput; import com.jaamsim.input.DirInput; import com.jaamsim.input.EntityListInput; import com.jaamsim.input.Input; import com.jaamsim.input.InputAgent; import com.jaamsim.input.IntegerInput; import com.jaamsim.input.Keyword; import com.jaamsim.input.Output; import com.jaamsim.input.ValueInput; import com.jaamsim.math.Vec3d; import com.jaamsim.ui.AboutBox; import com.jaamsim.ui.EditBox; import com.jaamsim.ui.EntityPallet; import com.jaamsim.ui.FrameBox; import com.jaamsim.ui.GUIFrame; import com.jaamsim.ui.LogBox; import com.jaamsim.ui.ObjectSelector; import com.jaamsim.ui.OutputBox; import com.jaamsim.ui.PropertyBox; import com.jaamsim.units.DimensionlessUnit; import com.jaamsim.units.DistanceUnit; import com.jaamsim.units.TimeUnit; import com.jaamsim.units.Unit; /** * Simulation provides the basic structure for the Entity model lifetime of earlyInit, * startUp and doEndAt. The initial processtargets required to start the model are * added to the eventmanager here. This class also acts as a bridge to the UI by * providing controls for the various windows. */ public class Simulation extends Entity { // Key Inputs tab @Keyword(description = "The duration of the simulation run in which all statistics will be recorded.", example = "Simulation Duration { 8760 h }") private static final ValueInput runDuration; @Keyword(description = "The initialization interval for the simulation run. The model will run " + "for the InitializationDuration interval and then clear the statistics and execute for the " + "specified RunDuration interval. The total length of the simulation run will be the sum of " + "the InitializationDuration and RunDuration inputs.", example = "Simulation Initialization { 720 h }") private static final ValueInput initializationTime; @Keyword(description = "An optional expression that pauses the run when TRUE is returned.", example = "Simulation PauseCondition { '[Queue1].QueueLength > 20'}") private static final SampleExpInput pauseConditionInput; @Keyword(description = "If TRUE, the simulation run will be terminated when the " + "PauseCondition expression returns TRUE.", example = "Simulation ExitAtPauseCondition { TRUE }") private static final BooleanInput exitAtPauseCondition; @Keyword(description = "Indicates whether to close the program on completion of the simulation run.", example = "Simulation ExitAtStop { TRUE }") private static final BooleanInput exitAtStop; @Keyword(description = "Global seed that sets the substream for each probability distribution. " + "Must be an integer >= 0. 
GlobalSubstreamSeed works together with each probability " + "distribution's RandomSeed keyword to determine its random sequence. It allows the " + "user to change all the random sequences in a model with a single input.", example = "Simulation GlobalSubstreamSeed { 5 }") private static final IntegerInput globalSeedInput; @Keyword(description = "Indicates whether an output report will be printed at the end of the simulation run.", example = "Simulation PrintReport { TRUE }") private static final BooleanInput printReport; @Keyword(description = "The directory in which to place the output report. Defaults to the " + "directory containing the configuration file for the run.", example = "Simulation ReportDirectory { 'c:\reports\' }") private static final DirInput reportDirectory; @Keyword(description = "The length of time represented by one simulation tick.", example = "Simulation TickLength { 1e-6 s }") private static final ValueInput tickLengthInput; // GUI tab @Keyword(description = "An optional list of units to be used for displaying model outputs.", example = "Simulation DisplayedUnits { h kt }") private static final EntityListInput<? extends Unit> displayedUnits; @Keyword(description = "If TRUE, a dragged object will be positioned to the nearest grid point.", example = "Simulation SnapToGrid { TRUE }") private static final BooleanInput snapToGrid; @Keyword(description = "The distance between snap grid points.", example = "Simulation SnapGridSpacing { 1 m }") private static final ValueInput snapGridSpacing; @Keyword(description = "The distance moved by the selected entity when the an arrow key is pressed.", example = "Simulation IncrementSize { 1 cm }") private static final ValueInput incrementSize; @Keyword(description = "A Boolean to turn on or off real time in the simulation run", example = "Simulation RealTime { TRUE }") private static final BooleanInput realTime; @Keyword(description = "The real time speed up factor", example = "Simulation RealTimeFactor { 1200 }") private static final IntegerInput realTimeFactor; public static final int DEFAULT_REAL_TIME_FACTOR = 1; public static final int MIN_REAL_TIME_FACTOR = 1; public static final int MAX_REAL_TIME_FACTOR= 1000000; @Keyword(description = "The time at which the simulation will be paused.", example = "Simulation PauseTime { 200 h }") private static final ValueInput pauseTime; @Keyword(description = "Indicates whether the Model Builder tool should be shown on startup.", example = "Simulation ShowModelBuilder { TRUE }") private static final BooleanInput showModelBuilder; @Keyword(description = "Indicates whether the Object Selector tool should be shown on startup.", example = "Simulation ShowObjectSelector { TRUE }") private static final BooleanInput showObjectSelector; @Keyword(description = "Indicates whether the Input Editor tool should be shown on startup.", example = "Simulation ShowInputEditor { TRUE }") private static final BooleanInput showInputEditor; @Keyword(description = "Indicates whether the Output Viewer tool should be shown on startup.", example = "Simulation ShowOutputViewer { TRUE }") private static final BooleanInput showOutputViewer; @Keyword(description = "Indicates whether the Output Viewer tool should be shown on startup.", example = "Simulation ShowPropertyViewer { TRUE }") private static final BooleanInput showPropertyViewer; @Keyword(description = "Indicates whether the Log Viewer tool should be shown on startup.", example = "Simulation ShowLogViewer { TRUE }") private static final BooleanInput 
showLogViewer; @Keyword(description = "Time at which the simulation run is started (hh:mm).", example = "Simulation StartTime { 2160 h }") private static final ValueInput startTimeInput; // Hidden keywords @Keyword(description = "If the value is TRUE, then the input report file will be printed after " + "loading the configuration file. The input report can always be generated when " + "needed by selecting \"Print Input Report\" under the File menu.", example = "Simulation PrintInputReport { TRUE }") private static final BooleanInput printInputReport; @Keyword(description = "This is placeholder description text", example = "This is placeholder example text") private static final BooleanInput traceEventsInput; @Keyword(description = "This is placeholder description text", example = "This is placeholder example text") private static final BooleanInput verifyEventsInput; private static double timeScale; // the scale from discrete to continuous time private static double startTime; // simulation time (seconds) for the start of the run (not necessarily zero) private static double endTime; // simulation time (seconds) for the end of the run private static Simulation myInstance; private static String modelName = "JaamSim"; static { // Key Inputs tab runDuration = new ValueInput("RunDuration", "Key Inputs", 31536000.0d); runDuration.setUnitType(TimeUnit.class); runDuration.setValidRange(1e-15d, Double.POSITIVE_INFINITY); initializationTime = new ValueInput("InitializationDuration", "Key Inputs", 0.0); initializationTime.setUnitType(TimeUnit.class); initializationTime.setValidRange(0.0d, Double.POSITIVE_INFINITY); pauseConditionInput = new SampleExpInput("PauseCondition", "Key Inputs", null); pauseConditionInput.setUnitType(DimensionlessUnit.class); exitAtPauseCondition = new BooleanInput("ExitAtPauseCondition", "Key Inputs", false); exitAtStop = new BooleanInput("ExitAtStop", "Key Inputs", false); globalSeedInput = new IntegerInput("GlobalSubstreamSeed", "Key Inputs", 0); globalSeedInput.setValidRange(0, Integer.MAX_VALUE); printReport = new BooleanInput("PrintReport", "Key Inputs", false); reportDirectory = new DirInput("ReportDirectory", "Key Inputs", null); reportDirectory.setDefaultText("Configuration File Directory"); tickLengthInput = new ValueInput("TickLength", "Key Inputs", 1e-6d); tickLengthInput.setUnitType(TimeUnit.class); tickLengthInput.setValidRange(1e-9d, 5.0d); // GUI tab displayedUnits = new EntityListInput<>(Unit.class, "DisplayedUnits", "GUI", null); displayedUnits.setDefaultText("SI Units"); displayedUnits.setPromptReqd(false); realTime = new BooleanInput("RealTime", "GUI", false); realTime.setPromptReqd(false); snapToGrid = new BooleanInput("SnapToGrid", "GUI", false); snapToGrid.setPromptReqd(false); snapGridSpacing = new ValueInput("SnapGridSpacing", "GUI", 0.1d); snapGridSpacing.setUnitType(DistanceUnit.class); snapGridSpacing.setValidRange(1.0e-6, Double.POSITIVE_INFINITY); snapGridSpacing.setPromptReqd(false); incrementSize = new ValueInput("IncrementSize", "GUI", 0.1d); incrementSize.setUnitType(DistanceUnit.class); incrementSize.setValidRange(1.0e-6, Double.POSITIVE_INFINITY); incrementSize.setPromptReqd(false); realTimeFactor = new IntegerInput("RealTimeFactor", "GUI", DEFAULT_REAL_TIME_FACTOR); realTimeFactor.setValidRange(MIN_REAL_TIME_FACTOR, MAX_REAL_TIME_FACTOR); realTimeFactor.setPromptReqd(false); pauseTime = new ValueInput("PauseTime", "GUI", Double.POSITIVE_INFINITY); pauseTime.setUnitType(TimeUnit.class); pauseTime.setValidRange(0.0d, 
Double.POSITIVE_INFINITY); pauseTime.setPromptReqd(false); showModelBuilder = new BooleanInput("ShowModelBuilder", "GUI", false); showModelBuilder.setPromptReqd(false); showObjectSelector = new BooleanInput("ShowObjectSelector", "GUI", false); showObjectSelector.setPromptReqd(false); showInputEditor = new BooleanInput("ShowInputEditor", "GUI", false); showInputEditor.setPromptReqd(false); showOutputViewer = new BooleanInput("ShowOutputViewer", "GUI", false); showOutputViewer.setPromptReqd(false); showPropertyViewer = new BooleanInput("ShowPropertyViewer", "GUI", false); showPropertyViewer.setPromptReqd(false); showLogViewer = new BooleanInput("ShowLogViewer", "GUI", false); showLogViewer.setPromptReqd(false); // Hidden keywords startTimeInput = new ValueInput("StartTime", "Key Inputs", 0.0d); startTimeInput.setUnitType(TimeUnit.class); startTimeInput.setValidRange(0.0d, Double.POSITIVE_INFINITY); traceEventsInput = new BooleanInput("TraceEvents", "Key Inputs", false); verifyEventsInput = new BooleanInput("VerifyEvents", "Key Inputs", false); printInputReport = new BooleanInput("PrintInputReport", "Key Inputs", false); // Initialize basic model information startTime = 0.0; endTime = 8760.0*3600.0; } { // Key Inputs tab this.addInput(runDuration); this.addInput(initializationTime); this.addInput(pauseConditionInput); this.addInput(exitAtPauseCondition); this.addInput(exitAtStop); this.addInput(globalSeedInput); this.addInput(printReport); this.addInput(reportDirectory); this.addInput(tickLengthInput); // GUI tab this.addInput(displayedUnits); this.addInput(snapToGrid); this.addInput(snapGridSpacing); this.addInput(incrementSize); this.addInput(realTime); this.addInput(realTimeFactor); this.addInput(pauseTime); this.addInput(showModelBuilder); this.addInput(showObjectSelector); this.addInput(showInputEditor); this.addInput(showOutputViewer); this.addInput(showPropertyViewer); this.addInput(showLogViewer); // Hidden keywords this.addInput(startTimeInput); this.addInput(traceEventsInput); this.addInput(verifyEventsInput); this.addInput(printInputReport); // Hide various keywords startTimeInput.setHidden(true); traceEventsInput.setHidden(true); verifyEventsInput.setHidden(true); printInputReport.setHidden(true); } public Simulation() {} public static Simulation getInstance() { if (myInstance == null) { for (Entity ent : Entity.getAll()) { if (ent instanceof Simulation ) { myInstance = (Simulation) ent; break; } } } return myInstance; } @Override public void updateForInput( Input<?> in ) { super.updateForInput( in ); if(in == realTimeFactor || in == realTime) { updateRealTime(); return; } if (in == pauseTime) { updatePauseTime(); return; } if (in == reportDirectory) { InputAgent.setReportDirectory(reportDirectory.getDir()); return; } if (in == displayedUnits) { if (displayedUnits.getValue() == null) return; for (Unit u : displayedUnits.getValue()) { Unit.setPreferredUnit(u.getClass(), u); } return; } if (in == showModelBuilder) { setWindowVisible(EntityPallet.getInstance(), showModelBuilder.getValue()); return; } if (in == showObjectSelector) { setWindowVisible(ObjectSelector.getInstance(), showObjectSelector.getValue()); return; } if (in == showInputEditor) { setWindowVisible(EditBox.getInstance(), showInputEditor.getValue()); FrameBox.reSelectEntity(); return; } if (in == showOutputViewer) { setWindowVisible(OutputBox.getInstance(), showOutputViewer.getValue()); FrameBox.reSelectEntity(); return; } if (in == showPropertyViewer) { setWindowVisible(PropertyBox.getInstance(), 
showPropertyViewer.getValue()); FrameBox.reSelectEntity(); return; } if (in == showLogViewer) { setWindowVisible(LogBox.getInstance(), showLogViewer.getValue()); FrameBox.reSelectEntity(); return; } } public static void clear() { initializationTime.reset(); runDuration.reset(); pauseTime.reset(); tickLengthInput.reset(); traceEventsInput.reset(); verifyEventsInput.reset(); printInputReport.reset(); realTimeFactor.reset(); realTime.reset(); updateRealTime(); exitAtStop.reset(); startTimeInput.reset(); showModelBuilder.reset(); showObjectSelector.reset(); showInputEditor.reset(); showOutputViewer.reset(); showPropertyViewer.reset(); showLogViewer.reset(); // Initialize basic model information startTime = 0.0; endTime = 8760.0*3600.0; myInstance = null; // close warning/error trace file InputAgent.closeLogFile(); // Kill all entities except simulation while(Entity.getAll().size() > 0) { Entity ent = Entity.getAll().get(Entity.getAll().size()-1); ent.kill(); } } /** * Initializes and starts the model * 1) Initializes EventManager to accept events. * 2) calls startModel() to allow the model to add its starting events to EventManager * 3) start EventManager processing events */ public static void start(EventManager evt) { // Validate each entity based on inputs only for (int i = 0; i < Entity.getAll().size(); i++) { try { Entity.getAll().get(i).validate(); } catch (Throwable e) { LogBox.format("%s: Validation error- %s", Entity.getAll().get(i).getName(), e.getMessage()); GUIFrame.showErrorDialog("Input Error Detected During Validation", "%s: %-70s", Entity.getAll().get(i).getName(), e.getMessage()); GUIFrame.instance().updateForSimulationState(GUIFrame.SIM_STATE_CONFIGURED); return; } } InputAgent.prepareReportDirectory(); evt.clear(); evt.setTraceListener(null); if( Simulation.traceEvents() ) { String evtName = InputAgent.getConfigFile().getParentFile() + File.separator + InputAgent.getRunName() + ".evt"; EventRecorder rec = new EventRecorder(evtName); evt.setTraceListener(rec); } else if( Simulation.verifyEvents() ) { String evtName = InputAgent.getConfigFile().getParentFile() + File.separator + InputAgent.getRunName() + ".evt"; EventTracer trc = new EventTracer(evtName); evt.setTraceListener(trc); } evt.setTickLength(tickLengthInput.getValue()); setSimTimeScale(evt.secondsToNearestTick(3600.0d)); FrameBox.setSecondsPerTick(tickLengthInput.getValue()); startTime = startTimeInput.getValue(); endTime = startTime + Simulation.getInitializationTime() + Simulation.getRunDuration(); evt.scheduleProcessExternal(0, 0, false, new InitModelTarget(), null); evt.resume(evt.secondsToNearestTick(Simulation.getPauseTime())); } public static void end() { for (int i = 0; i < Entity.getAll().size(); i++) { Entity.getAll().get(i).doEnd(); } // Print the output report if (printReport.getValue()) InputAgent.printReport(Simulation.getInstance().getSimTime()); // Close warning/error trace file InputAgent.logMessage("Made it to do end at"); InputAgent.closeLogFile(); // Always terminate the run when in batch mode if (InputAgent.getBatch()) GUIFrame.shutdown(0); EventManager.current().pause(); } public static int getSubstreamNumber() { return globalSeedInput.getValue(); } public static boolean getPrintReport() { return printReport.getValue(); } public static boolean traceEvents() { return traceEventsInput.getValue(); } public static boolean verifyEvents() { return verifyEventsInput.getValue(); } static void setSimTimeScale(double scale) { timeScale = scale; } public static double getSimTimeFactor() { return timeScale; 
}

	public static double getEventTolerance() {
		return (1.0d / getSimTimeFactor());
	}

	public static double getTickLength() {
		return tickLengthInput.getValue();
	}

	public static double getPauseTime() {
		return pauseTime.getValue();
	}

	/**
	 * Returns the start time of the run.
	 * @return - simulation time in seconds for the start of the run.
	 */
	public static double getStartTime() {
		return startTime;
	}

	/**
	 * Returns the end time of the run.
	 * @return - simulation time in seconds when the current run will stop.
	 */
	public static double getEndTime() {
		return endTime;
	}

	/**
	 * Returns the duration of the run (not including initialization)
	 */
	public static double getRunDuration() {
		return runDuration.getValue();
	}

	/**
	 * Returns the duration of the initialization period
	 */
	public static double getInitializationTime() {
		return initializationTime.getValue();
	}

	public static double getIncrementSize() {
		return incrementSize.getValue();
	}

	public static boolean isSnapToGrid() {
		return snapToGrid.getValue();
	}

	public static double getSnapGridSpacing() {
		return snapGridSpacing.getValue();
	}

	public static boolean getExitAtPauseCondition() {
		return exitAtPauseCondition.getValue();
	}

	public void doPauseCondition() {
		if (pauseConditionInput != null)
			EventManager.scheduleUntil(pauseModel, pauseCondition, null);
	}

	private final PauseModelTarget pauseModel = new PauseModelTarget();

	class PauseConditional extends Conditional {
		@Override
		public boolean evaluate() {
			if (pauseConditionInput.getValue() == null)
				return false;
			double simTime = Simulation.getInstance().getSimTime();
			return pauseConditionInput.getValue().getNextSample(simTime) != 0.0d;
		}
	}
	private final Conditional pauseCondition = new PauseConditional();

	/**
	 * Returns the nearest point on the snap grid to the given coordinate.
	 * To avoid dithering, the new position must be at least one grid space
	 * from the old position.
	 * @param newPos - new coordinate for the object
	 * @param oldPos - present coordinate for the object
	 * @return nearest snap grid point.
	 */
	public static Vec3d getSnapGridPosition(Vec3d newPos, Vec3d oldPos) {
		double spacing = snapGridSpacing.getValue();
		Vec3d ret = new Vec3d(newPos);
		if (Math.abs(newPos.x - oldPos.x) < spacing)
			ret.x = oldPos.x;
		if (Math.abs(newPos.y - oldPos.y) < spacing)
			ret.y = oldPos.y;
		if (Math.abs(newPos.z - oldPos.z) < spacing)
			ret.z = oldPos.z;
		return Simulation.getSnapGridPosition(ret);
	}

	/**
	 * Returns the nearest point on the snap grid to the given coordinate.
	 * @param pos - position to be adjusted
	 * @return nearest snap grid point.
*/ public static Vec3d getSnapGridPosition(Vec3d pos) { double spacing = snapGridSpacing.getValue(); Vec3d ret = new Vec3d(pos); ret.x = spacing*Math.rint(ret.x/spacing); ret.y = spacing*Math.rint(ret.y/spacing); ret.z = spacing*Math.rint(ret.z/spacing); return ret; } static void updateRealTime() { GUIFrame.instance().updateForRealTime(realTime.getValue(), realTimeFactor.getValue()); } static void updatePauseTime() { GUIFrame.instance().updateForPauseTime(pauseTime.getValueString()); } public static void setModelName(String newModelName) { modelName = newModelName; } public static String getModelName() { return modelName; } public static boolean getExitAtStop() { return exitAtStop.getValue(); } public static boolean getPrintInputReport() { return printInputReport.getValue(); } public static boolean isRealTime() { return realTime.getValue(); } public static void setWindowVisible(JFrame f, boolean visible) { f.setVisible(visible); if (visible) f.toFront(); } /** * Re-open any Tools windows that have been closed temporarily. */ public static void showActiveTools() { setWindowVisible(EntityPallet.getInstance(), showModelBuilder.getValue()); setWindowVisible(ObjectSelector.getInstance(), showObjectSelector.getValue()); setWindowVisible(EditBox.getInstance(), showInputEditor.getValue()); setWindowVisible(OutputBox.getInstance(), showOutputViewer.getValue()); setWindowVisible(PropertyBox.getInstance(), showPropertyViewer.getValue()); setWindowVisible(LogBox.getInstance(), showLogViewer.getValue()); } /** * Closes all the Tools windows temporarily. */ public static void closeAllTools() { setWindowVisible(EntityPallet.getInstance(), false); setWindowVisible(ObjectSelector.getInstance(), false); setWindowVisible(EditBox.getInstance(), false); setWindowVisible(OutputBox.getInstance(), false); setWindowVisible(PropertyBox.getInstance(), false); setWindowVisible(LogBox.getInstance(), false); } @Output(name = "Software Name", description = "The licensed name for the simulation software.", reportable = true, sequence = 0) public String getSoftwareName(double simTime) { return modelName; } @Output(name = "Software Version", description = "The release number for the simulation software.", reportable = true, sequence = 1) public String getSoftwareVersion(double simTime) { return AboutBox.version; } @Output(name = "Configuration File", description = "The configuration file that has been loaded.", reportable = true, sequence = 2) public String getConfigFileName(double simTime) { return InputAgent.getConfigFile().getPath(); } @Output(name = "Present Time and Date", description = "The present local time and date.", reportable = true, sequence = 3) public String getPresentTime(double simTime) { String timeStamp = new SimpleDateFormat("MMM dd, yyyy HH:mm").format(Calendar.getInstance().getTime()); return timeStamp; } @Output(name = "Initialization Duration", description = "The length of time the model was executed prior to the start of statistics " + "collection.", unitType = TimeUnit.class, reportable = true, sequence = 4) public double getInitializationDuration(double simTime) { return initializationTime.getValue(); } @Output(name = "Run Duration", description = "The length of time over which statistics were collected.", unitType = TimeUnit.class, reportable = true, sequence = 5) public double getRunDuration(double simTime) { return runDuration.getValue(); } @Output(name = "Present Simulation Time", description = "The value for the simulation clock at the present time.", unitType = TimeUnit.class, reportable = 
true, sequence = 6) public double getPresentSimulationTime(double simTime) { return simTime; } }
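The two getSnapGridPosition() overloads above reduce to one rule: round each coordinate to the nearest multiple of the grid spacing, and ignore moves smaller than one spacing to avoid dithering. A minimal, self-contained sketch of that rounding follows; plain doubles stand in for JaamSim's Vec3d, and the 0.5 spacing is an arbitrary demo value, not a value from the source.

public class SnapGridSketch {

	// Rounds a single coordinate to the nearest grid line, as in
	// spacing * Math.rint(pos / spacing) above.
	static double snap(double value, double spacing) {
		return spacing * Math.rint(value / spacing);
	}

	// Ignores moves of less than one grid space, mirroring the
	// anti-dithering check in getSnapGridPosition(newPos, oldPos).
	static double snapWithDeadband(double newValue, double oldValue, double spacing) {
		if (Math.abs(newValue - oldValue) < spacing)
			return snap(oldValue, spacing);
		return snap(newValue, spacing);
	}

	public static void main(String[] args) {
		double spacing = 0.5d; // hypothetical grid spacing
		System.out.println(snap(1.3, spacing));                  // 1.5
		System.out.println(snapWithDeadband(1.3, 1.0, spacing)); // 1.0 (moved less than one spacing)
	}
}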
JS: allow the GlobalSubstreamSeed keyword to accept an expression Signed-off-by: Harry King <[email protected]> Signed-off-by: Stephen Wong <[email protected]>
src/main/java/com/jaamsim/basicsim/Simulation.java
JS: allow the GlobalSubstreamSeed keyword to accept an expression
<ide><path>src/main/java/com/jaamsim/basicsim/Simulation.java
<ide> 
<ide> import javax.swing.JFrame;
<ide> 
<add>import com.jaamsim.Samples.SampleConstant;
<ide> import com.jaamsim.Samples.SampleExpInput;
<ide> import com.jaamsim.events.Conditional;
<ide> import com.jaamsim.events.EventManager;
<ide> example = "Simulation ExitAtStop { TRUE }")
<ide> private static final BooleanInput exitAtStop;
<ide> 
<del> @Keyword(description = "Global seed that sets the substream for each probability distribution. "
<del> + "Must be an integer >= 0. GlobalSubstreamSeed works together with each probability "
<del> + "distribution's RandomSeed keyword to determine its random sequence. It allows the "
<del> + "user to change all the random sequences in a model with a single input.",
<del> example = "Simulation GlobalSubstreamSeed { 5 }")
<del> private static final IntegerInput globalSeedInput;
<add> @Keyword(description = "Global seed that sets the substream for each probability "
<add> + "distribution. Must be an integer >= 0. GlobalSubstreamSeed works "
<add> + "together with each probability distribution's RandomSeed keyword to "
<add> + "determine its random sequence. It allows the user to change all the "
<add> + "random sequences in a model with a single input. To run multiple "
<add> + "replications, set the appropriate inputs under the Multiple Runs tab "
<add> + "and then set the GlobalSubstreamSeed input to the run number or to "
<add> + "one of the run indices.",
<add> example = "Simulation GlobalSubstreamSeed { 5 }\n"
<add> + "Simulation GlobalSubstreamSeed { [Simulation].RunNumber }\n"
<add> + "Simulation GlobalSubstreamSeed { [Simulation].RunIndex(3) }")
<add> private static final SampleExpInput globalSeedInput;
<ide> 
<ide> @Keyword(description = "Indicates whether an output report will be printed at the end of the simulation run.",
<ide> example = "Simulation PrintReport { TRUE }")
<ide> 
<ide> exitAtStop = new BooleanInput("ExitAtStop", "Key Inputs", false);
<ide> 
<del> globalSeedInput = new IntegerInput("GlobalSubstreamSeed", "Key Inputs", 0);
<add> globalSeedInput = new SampleExpInput("GlobalSubstreamSeed", "Key Inputs", new SampleConstant(0));
<add> globalSeedInput.setUnitType(DimensionlessUnit.class);
<ide> globalSeedInput.setValidRange(0, Integer.MAX_VALUE);
<ide> 
<ide> printReport = new BooleanInput("PrintReport", "Key Inputs", false);
<ide> }
<ide> 
<ide> public static int getSubstreamNumber() {
<del> return globalSeedInput.getValue();
<add> return (int)globalSeedInput.getValue().getNextSample(0.0);
<ide> }
<ide> 
<ide> public static boolean getPrintReport() {
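The net effect of this diff: getSubstreamNumber() no longer reads a fixed integer but samples an expression each time it is called. A rough, hypothetical illustration of that pattern, using java.util.function.DoubleSupplier as a stand-in for JaamSim's actual sample-provider type:

import java.util.function.DoubleSupplier;

public class SeedExpressionSketch {

	// Stand-in for globalSeedInput: may hold a constant or an expression.
	private static DoubleSupplier globalSeed = () -> 0.0;

	// Mirrors (int)globalSeedInput.getValue().getNextSample(0.0):
	// the expression is evaluated at call time, then truncated to an int.
	static int getSubstreamNumber() {
		return (int) globalSeed.getAsDouble();
	}

	public static void main(String[] args) {
		System.out.println(getSubstreamNumber()); // 0 (the SampleConstant(0) default)
		globalSeed = () -> 5.0;                   // e.g. an expression like "[Simulation].RunNumber"
		System.out.println(getSubstreamNumber()); // 5
	}
}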
Java
mit
fa906f7335d99ae5f4d361ab425fadd56f677e65
0
Bien-CV/eCoastSimulator.png
// TODO :
// notifyNouvelleEnchere
// notifyNouvelleVente

package client;

import java.rmi.Naming;
import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;
import java.util.UUID;
import java.util.HashMap;
import java.util.List;

import commun.ClientInfo;
import commun.DejaConnecteException;
import commun.IClient;
import commun.IHotelDesVentes;
import commun.Objet;
import commun.PasCreateurException;
import commun.PseudoDejaUtiliseException;
import commun.SalleDeVenteInfo;
import commun.Message;

public class Client extends UnicastRemoteObject implements IClient {

	private static final long serialVersionUID = 1L;
	private String urlEtPortServeur;
	private String adresseServeur;
	private String adresseClient="localhost";

	public String getAdresseClient() {
		return adresseClient;
	}

	private String pseudo;

	public String getPseudo() {
		return pseudo;
	}

	public void setPseudo(String pseudo) {
		this.pseudo = pseudo;
	}

	private IHotelDesVentes hdv;
	private HashMap<UUID, Objet> ventesSuivies;
	//private HashMap<UUID, Objet> ventesExistantes;
	private List<SalleDeVenteInfo> listeInfosSalles;
	// list of the messages posted in the various tracked sale rooms
	private HashMap<UUID, List<Message>> listesMessages;
	private UUID id;
	private ClientInfo myClientInfos;
	private String ipClient;
	private String portClient="8091";
	private UUID idSalleObservee;
	private String nomSalleObservee;
	private UUID idObjetObserve;
	private String nomObjetObserve;

	public void setIdSalleObservee(UUID idSalleObservee) {
		this.idSalleObservee = idSalleObservee;
	}

	public String getPortClient() {
		return portClient;
	}

	public Client(String pseudo, String urlEtPortDuServeur) throws RemoteException {
		super();
		this.pseudo = pseudo;
		this.ventesSuivies = new HashMap<UUID, Objet>();
		this.id = UUID.randomUUID();
		urlEtPortServeur = urlEtPortDuServeur;
		adresseServeur = urlEtPortServeur + "/hoteldesventes";
		// the server address must be set before the lookup in connexionServeur()
		this.hdv = connexionServeur();
		//TODO: retrieve the client's real IP
		ipClient="localhost";
		this.myClientInfos = new ClientInfo(this.id, this.pseudo, ipClient, portClient);
	}

	public IHotelDesVentes connexionServeur() {
		try {
			IHotelDesVentes hotelDesVentes = (IHotelDesVentes) Naming.lookup("//" + this.adresseServeur);
			System.out.println("Connexion au serveur " + this.adresseServeur + " reussi.");
			return hotelDesVentes;
		} catch (Exception e) {
			System.out.println("Connexion au serveur " + this.adresseServeur + " impossible.");
			e.printStackTrace();
			return null;
		}
	}

	// notifies the server that a new client has arrived
	public void connexion () {
		try {
			// login + retrieval of the list of existing sale rooms.
			// TODO : update the GUI with the aforementioned list.
			// ventesExistantes = hdv.login(this.myClientInfos);
			listeInfosSalles = hdv.login(this.myClientInfos);
		} catch (RemoteException | PseudoDejaUtiliseException | DejaConnecteException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
	}

	// notifies the server that a client is leaving
	public void deconnexion () {
		try {
			hdv.logout(myClientInfos);
		} catch (RemoteException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
		// TODO : close the application?
	}

	// notifies the server that a new object for sale has been added to a given room.
	public void nouvelleSoumission(String nom, String description, int prix, UUID idSdv) throws RemoteException {
		Objet nouveau = new Objet(nom, description, prix, pseudo);
		// the auction house adds the object
		// TODO : maybe only allow the room's creator to add objects
		try {
			hdv.ajouterObjet(nouveau, idSdv, this.id);
		} catch (PasCreateurException e) {
			// TODO : show the user a message when adding to a room they did not create?
			e.printStackTrace();
		}
		// print information about the addition
	}

	public static void main(String[] argv) {
		try {
			// start the GUI
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	public IHotelDesVentes getServeur() {
		return hdv;
	}

	public void setServeur(IHotelDesVentes serveur) {
		this.hdv = serveur;
	}

	// notifies the server that the client is closing a sale room
	public void fermetureSalle(UUID idSDV) {
		try {
			hdv.fermerSalle(idSDV, this.id);
		} catch (PasCreateurException e) {
			// the room cannot be closed by anyone but its creator
			e.printStackTrace();
		} catch (RemoteException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
	}

	// notifies the server that the client wants to join a new sale room.
	// on success, the newly tracked object is added.
	public void rejoindreSalle(UUID idSalle) {
		try {
			Objet obj = hdv.rejoindreSalle(idSalle, this.myClientInfos);
			ventesSuivies.put(idSalle, obj);
			// TODO : refresh the GUI to reflect the changes
		} catch (RemoteException e) {
			e.printStackTrace();
			// TODO : have the GUI display an error message
		}
	}

	public HashMap<UUID, Objet> getVentesSuivies() {
		return ventesSuivies;
	}

	@Override
	public void nouveauMessage(UUID idSalle, Message message) {
		listesMessages.get(idSalle).add(message);
		// TODO : refresh the GUI to reflect the changes
		// TODO : possibly drop the oldest messages beyond a certain count.
	}

	@Override
	public void notifModifObjet(UUID idSalle, Objet objet) {
		ventesSuivies.put(idSalle, objet);
		// TODO : refresh the GUI to reflect the changes
	}

	@Override
	public void notifFermetureSalle(UUID idSalle) {
		ventesSuivies.remove(idSalle);
		// TODO : refresh the GUI to reflect the changes
	}

	@Override
	public void notifNouvelleSalle(SalleDeVenteInfo sdvi) {
		//ventesExistantes.put(sdvi.getId(), sdvi.getObjCourrant());
		listeInfosSalles.add(sdvi);
	}

	public UUID getIdSalleObservee() {
		return this.idSalleObservee;
	}

}
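connexionServeur() above is a plain java.rmi.Naming lookup against a registry address of the form host:port/name. A stripped-down sketch of the same handshake; java.rmi.Remote stands in for the project's IHotelDesVentes interface, and the address below is a demo value, not one from the source:

import java.rmi.Naming;
import java.rmi.Remote;

public class RmiLookupSketch {
	public static void main(String[] args) {
		String adresseServeur = "localhost:1099/hoteldesventes"; // hypothetical registry address
		try {
			// Naming.lookup returns the stub registered under this name;
			// the real client casts it to IHotelDesVentes.
			Remote server = Naming.lookup("//" + adresseServeur);
			System.out.println("Connected: " + server);
		} catch (Exception e) {
			// Ends up here when no registry is running at the address.
			System.out.println("Could not reach " + adresseServeur);
		}
	}
}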
src/client/Client.java
// TODO :
// notifyNouvelleEnchere
// notifyNouvelleVente

package client;

import java.rmi.Naming;
import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;
import java.util.UUID;
import java.util.HashMap;
import java.util.List;

import commun.ClientInfo;
import commun.DejaConnecteException;
import commun.IClient;
import commun.IHotelDesVentes;
import commun.Objet;
import commun.PasCreateurException;
import commun.PseudoDejaUtiliseException;
import commun.SalleDeVenteInfo;
import commun.Message;

public class Client extends UnicastRemoteObject implements IClient {

	private static final long serialVersionUID = 1L;
	private String urlEtPortServeur;
	private String adresseServeur;
	private String adresseClient="localhost";

	public String getAdresseClient() {
		return adresseClient;
	}

	private String pseudo;

	public String getPseudo() {
		return pseudo;
	}

	public void setPseudo(String pseudo) {
		this.pseudo = pseudo;
	}

	private IHotelDesVentes hdv;
	private HashMap<UUID, Objet> ventesSuivies;
	//private HashMap<UUID, Objet> ventesExistantes;
	private List<SalleDeVenteInfo> listeInfosSalles;
	// list of the messages posted in the various tracked sale rooms
	private HashMap<UUID, List<Message>> listesMessages;
	private UUID id;
	private ClientInfo myClientInfos;
	private String ipClient;
	private String portClient="8091";
	private UUID idSalleObservee;
	private String nomSalleObservee;
	private UUID idObjetObserve;
	private String nomObjetObserve;

	public void setIdSalleObservee(UUID idSalleObservee) {
		this.idSalleObservee = idSalleObservee;
	}

	public String getPortClient() {
		return portClient;
	}

	public Client(String pseudo, String urlEtPortDuServeur) throws RemoteException {
		super();
		this.pseudo = pseudo;
		this.ventesSuivies = new HashMap<UUID, Objet>();
		this.id = UUID.randomUUID();
		urlEtPortServeur = urlEtPortDuServeur;
		adresseServeur = urlEtPortServeur + "/hoteldesventes";
		// the server address must be set before the lookup in connexionServeur()
		this.hdv = connexionServeur();
		//TODO: retrieve the client's real IP
		ipClient="localhost";
		this.myClientInfos = new ClientInfo(this.id, this.pseudo, ipClient, portClient);
	}

	public IHotelDesVentes connexionServeur() {
		try {
			IHotelDesVentes hotelDesVentes = (IHotelDesVentes) Naming.lookup("//" + this.adresseServeur);
			System.out.println("Connexion au serveur " + this.adresseServeur + " reussi.");
			return hotelDesVentes;
		} catch (Exception e) {
			System.out.println("Connexion au serveur " + this.adresseServeur + " impossible.");
			e.printStackTrace();
			return null;
		}
	}

	// notifies the server that a new client has arrived
	public void connexion () {
		try {
			// login + retrieval of the list of existing sale rooms.
			// TODO : update the GUI with the aforementioned list.
			// ventesExistantes = hdv.login(this.myClientInfos);
			listeInfosSalles = hdv.login(this.myClientInfos);
		} catch (RemoteException | PseudoDejaUtiliseException | DejaConnecteException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
	}

	// notifies the server that a client is leaving
	public void deconnexion () {
		try {
			hdv.logout(myClientInfos);
		} catch (RemoteException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
		// TODO : close the application?
	}

	// notifies the server that a new object for sale has been added to a given room.
	public void nouvelleSoumission(String nom, String description, int prix, UUID idSdv) throws RemoteException {
		Objet nouveau = new Objet(nom, description, prix, pseudo);
		// the auction house adds the object
		// TODO : maybe only allow the room's creator to add objects
		try {
			hdv.ajouterObjet(nouveau, idSdv, this.id);
		} catch (PasCreateurException e) {
			// TODO : show the user a message when adding to a room they did not create?
			e.printStackTrace();
		}
		// print information about the addition
	}

	public static void main(String[] argv) {
		try {
			// start the GUI
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	public IHotelDesVentes getServeur() {
		return hdv;
	}

	public void setServeur(IHotelDesVentes serveur) {
		this.hdv = serveur;
	}

	// notifies the server that the client is closing a sale room
	public void fermetureSalle(UUID idSDV) {
		try {
			hdv.fermerSalle(idSDV, this.id);
		} catch (PasCreateurException e) {
			// the room cannot be closed by anyone but its creator
			e.printStackTrace();
		} catch (RemoteException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
	}

	// notifies the server that the client wants to join a new sale room.
	// on success, the newly tracked object is added.
	public void rejoindreSalle(UUID idSalle) {
		try {
			Objet obj = hdv.rejoindreSalle(idSalle, this.myClientInfos);
			ventesSuivies.put(idSalle, obj);
			// TODO : refresh the GUI to reflect the changes
		} catch (RemoteException e) {
			e.printStackTrace();
			// TODO : have the GUI display an error message
		}
	}

	public HashMap<UUID, Objet> getVentesSuivies() {
		return ventesSuivies;
	}

	@Override
	public void nouveauMessage(UUID idSalle, Message message) {
		listesMessages.get(idSalle).add(message);
		// TODO : refresh the GUI to reflect the changes
		// TODO : possibly drop the oldest messages beyond a certain count.
	}

	@Override
	public void notifModifObjet(UUID idSalle, Objet objet) {
		ventesSuivies.put(idSalle, objet);
		// TODO : refresh the GUI to reflect the changes
	}

	@Override
	public void notifFermetureSalle(UUID idSalle) {
		ventesSuivies.remove(idSalle);
		// TODO : refresh the GUI to reflect the changes
	}

	@Override
	public void notifNouvelleSalle(SalleDeVenteInfo sdvi) {
		//ventesExistantes.put(sdvi.getId(), sdvi.getObjCourrant());
		listeInfosSalles.add(sdvi);
	}

	public UUID getIdSalleObservee() {
		return this.idSalleObservee;
	}
}
bliblu
src/client/Client.java
bliblu
<ide><path>src/client/Client.java
<ide> public UUID getIdSalleObservee() {
<ide> return this.idSalleObservee;
<ide> }
<add> 
<ide> }
JavaScript
mit
23dba67a78a3615b8624772941d13f8c6c3f219c
0
alikhil/sttp,alikhil/sttp
var compresser = require("../src/compress.js"); var aesCrypter = require("../src/aes.js"); var hasher = require("../src/hash.js"); var util = require("../src/util.js"); var base64 = require("../src/Base64.js").Base64(); var DataPacker = function(aesKey) { this.aesKey = aesKey; this.pack = function(data) { if (typeof data !== "string") data = JSON.stringify(data); var compressed = compresser.compress(data); console.log("compressed"+compressed); var crypted = aesCrypter.encryptAES(compressed, aesKey); console.log("crypted"+crypted); var packet = { data: crypted, hash: hasher.hash(crypted) }; var result = JSON.stringify(packet); return base64.encode(result); }; this.unpack = function(rawData) { var decoded = base64.decode(rawData); console.log("rawData" + rawData); console.log("decoded" + decoded); var decodedObject = JSON.parse(decoded); if (hasher.hash(decodedObject.data) !== decodedObject.hash) throw "Hashes are not equal. Data corrupted"; var decrypted = aesCrypter.decryptAES(decodedObject.data, aesKey); console.log("decrypted"+decrypted); var decompressed = compresser.decompress(decrypted); return decompressed; }; return this; }; var AuthKeyPacker = function(key, private) { this.private = private || false; function hasKey(obj, key) { return obj.hasOwnProperty(key); } this.hasPrivateKey = private; this.key = key; this.canEncrypt = function() { return key !== null && hasKey(key, "E") && hasKey(key, "N"); }; this.canDecrypt = function() { return this.hasPrivateKey && this.canEncrypt() && hasKey(key, "P") && hasKey(key, "Q") && hasKey(key, "D") && hasKey(key, "F"); }; }; exports.AuthKeyPacker = AuthKeyPacker; exports.DataPacker = DataPacker;
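pack() above chains four steps: compress, AES-encrypt, hash the ciphertext, then base64-encode a {data, hash} packet; unpack() reverses them and rejects packets whose hash no longer matches. A sketch of the same pipeline in Java (the predominant language of this collection), with standard-library GZIP, AES, and SHA-256 standing in for the project's compress.js, aes.js, and hash.js helpers; the ECB cipher mode and the demo key are assumptions chosen for brevity, not the project's actual choices:

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Base64;
import java.util.zip.GZIPOutputStream;
import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec;

public class PackerSketch {

	// compress -> encrypt -> hash -> base64, mirroring DataPacker.pack().
	public static String pack(String data, byte[] aesKey) throws Exception {
		byte[] compressed = gzip(data.getBytes(StandardCharsets.UTF_8));

		Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5Padding"); // cipher mode is an assumption
		cipher.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(aesKey, "AES"));
		byte[] encrypted = cipher.doFinal(compressed);

		String data64 = Base64.getEncoder().encodeToString(encrypted);
		String packet = "{\"data\":\"" + data64 + "\",\"hash\":\"" + sha256Hex(data64) + "\"}";
		return Base64.getEncoder().encodeToString(packet.getBytes(StandardCharsets.UTF_8));
	}

	// unpack() would reverse the steps, re-computing the hash and comparing it
	// to the stored one before decrypting, so corrupted packets are rejected.

	private static byte[] gzip(byte[] in) throws Exception {
		ByteArrayOutputStream bos = new ByteArrayOutputStream();
		try (GZIPOutputStream gz = new GZIPOutputStream(bos)) {
			gz.write(in);
		}
		return bos.toByteArray();
	}

	private static String sha256Hex(String s) throws Exception {
		byte[] digest = MessageDigest.getInstance("SHA-256").digest(s.getBytes(StandardCharsets.UTF_8));
		StringBuilder sb = new StringBuilder();
		for (byte b : digest) sb.append(String.format("%02x", b));
		return sb.toString();
	}

	public static void main(String[] args) throws Exception {
		byte[] key = "0123456789abcdef".getBytes(StandardCharsets.UTF_8); // 128-bit demo key
		System.out.println(pack("hello", key));
	}
}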
src/packer.js
var compresser = require("../src/compress.js"); var aesCrypter = require("../src/aes.js"); var hasher = require("../src/hash.js"); var util = require("../src/util.js"); var base64 = require("../src/Base64.js").Base64(); var DataPacker = function(aesKey) { this.aesKey = aesKey; this.pack = function(data) { if (typeof data !== "string") data = JSON.stringify(data); var compressed = compresser.compress(data); var crypted = aesCrypter.encryptAES(compressed, aesKey); var packet = { data: crypted, hash: hasher.hash(crypted) }; var result = JSON.stringify(packet); return base64.encode(result); }; this.unpack = function(rawData) { return rawData; }; return this; }; var AuthKeyPacker = function(key, private) { this.private = private || false; function hasKey(obj, key) { return obj.hasOwnProperty(key); } this.hasPrivateKey = private; this.key = key; this.canEncrypt = function() { return key !== null && hasKey(key, "E") && hasKey(key, "N"); }; this.canDecrypt = function() { return this.hasPrivateKey && this.canEncrypt() && hasKey(key, "P") && hasKey(key, "Q") && hasKey(key, "D") && hasKey(key, "F"); }; }; exports.AuthKeyPacker = AuthKeyPacker; exports.DataPacker = DataPacker;
Updates packer.js. * Adds implementation of unpack() function of DataPacker. * Adds log printing lines.
src/packer.js
Updates packer.js.
<ide><path>src/packer.js
<ide> if (typeof data !== "string")
<ide> data = JSON.stringify(data);
<ide> var compressed = compresser.compress(data);
<add> console.log("compressed"+compressed);
<ide> var crypted = aesCrypter.encryptAES(compressed, aesKey);
<add> console.log("crypted"+crypted);
<ide> var packet = { data: crypted, hash: hasher.hash(crypted) };
<ide> var result = JSON.stringify(packet);
<ide> return base64.encode(result);
<ide> };
<ide> 
<ide> this.unpack = function(rawData) {
<del> return rawData;
<add> var decoded = base64.decode(rawData);
<add> console.log("rawData" + rawData);
<add> console.log("decoded" + decoded);
<add> var decodedObject = JSON.parse(decoded);
<add> if (hasher.hash(decodedObject.data) !== decodedObject.hash)
<add> throw "Hashes are not equal. Data corrupted";
<add> var decrypted = aesCrypter.decryptAES(decodedObject.data, aesKey);
<add> console.log("decrypted"+decrypted);
<add> var decompressed = compresser.decompress(decrypted);
<add> return decompressed;
<ide> };
<ide> 
<ide> return this;
Java
apache-2.0
085bb615e4410078b6699c3a932b6ce90fadb5d8
0
vatbub/hangman-solver,vatbub/hangman-solver
package view; import java.io.UnsupportedEncodingException; import java.net.URL; import java.net.URLDecoder; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.ResourceBundle; import java.util.logging.Level; import algorithm.*; import common.Common; import common.Config; import common.UpdateChecker; import common.UpdateInfo; import common.Version; import javafx.animation.RotateTransition; import javafx.application.Application; import javafx.application.Platform; import javafx.beans.value.ChangeListener; import javafx.beans.value.ObservableValue; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.event.ActionEvent; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.fxml.Initializable; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.control.Alert; import javafx.scene.control.Button; import javafx.scene.control.CheckBox; import javafx.scene.control.ComboBox; import javafx.scene.control.Hyperlink; import javafx.scene.control.Label; import javafx.scene.control.ProgressBar; import javafx.scene.control.TextArea; import javafx.scene.control.TextField; import javafx.scene.control.Tooltip; import javafx.scene.image.Image; import javafx.scene.input.MouseButton; import javafx.scene.input.MouseEvent; import javafx.stage.Stage; import javafx.util.Duration; import languages.Language; import languages.TabFile; import logging.FOKLogger; import stats.HangmanStats; import stats.MongoSetup; import view.updateAvailableDialog.UpdateAvailableDialog; /** * The MainWindow controller class. **/ public class MainWindow extends Application implements Initializable { private static double curVersionEasterEggTurnDegrees = 0; private static boolean disableUpdateChecks = false; private static FOKLogger log; public static void main(String[] args) { String path = MainWindow.class.getProtectionDomain().getCodeSource().getLocation().getPath(); String decodedPath; try { decodedPath = URLDecoder.decode(path, "UTF-8"); System.out.println(decodedPath); } catch (UnsupportedEncodingException e) { // TODO Auto-generated catch block e.printStackTrace(); } common.Common.setAppName("hangmanSolver"); log = new FOKLogger(MainWindow.class.getName()); for (String arg : args) { if (arg.toLowerCase().matches("mockappversion=.*")) { // Set the mock version String version = arg.substring(arg.toLowerCase().indexOf('=') + 1); Common.setMockAppVersion(version); } else if (arg.toLowerCase().matches("mockbuildnumber=.*")) { // Set the mock version String buildnumber = arg.substring(arg.toLowerCase().indexOf('=') + 1); Common.setMockBuildNumber(buildnumber); ; } else if (arg.toLowerCase().matches("disableupdatechecks")) { log.getLogger().info("Update checks are disabled as app was launched from launcher."); disableUpdateChecks = true; } } launch(args); } private ResourceBundle bundle = ResourceBundle.getBundle("view.strings.messages"); private ResourceBundle errorMessageBundle = ResourceBundle.getBundle("view.strings.errormessages"); private static String currentSequenceStr; public static Result currentSolution; private boolean shareThoughtsBool; private String lastThought; private static Scene scene; private static int clickCounter = 0; private static int loadedLanguagesCount = 0; public Scene getScene() { return scene; } @FXML // fx:id="loadLanguagesProgressBar" private ProgressBar loadLanguagesProgressBar; // Value injected by FXMLLoader @FXML /** * ResourceBundle that was given to the FXMLLoader */ private 
ResourceBundle resources; @FXML /** * URL location of the FXML file that was given to the FXMLLoader */ private URL location; @FXML /** * fx:id="actionLabel" */ private Label actionLabel; // Value injected by FXMLLoader @FXML /** * fx:id="applyButton" */ private Button applyButton; // Value injected by FXMLLoader @FXML /** * fx:id="creditsButton" */ private Button creditsButton; @FXML // fx:id="currentAppVersionTextLabel" private Label currentAppVersionTextLabel; // Value injected by FXMLLoader @FXML /** * fx:id="currentSequence" */ public TextField currentSequence; // Value injected by FXMLLoader @FXML /** * fx:id="getNextLetter" */ private Button getNextLetter; // Value injected by FXMLLoader @FXML /** * fx:id="languageSelector" * */ private ComboBox<String> languageSelector; // Value injected by FXMLLoader @FXML /** * fx:id="result" */ private TextField result; // Value injected by FXMLLoader @FXML /** * fx:id="shareThoughtsCheckbox" */ private CheckBox shareThoughtsCheckbox; @FXML /** * fx:id="thoughts" */ private Label thoughts; @FXML /** * fx:id="newGameButton" */ private Button newGameButton; @FXML /** * fx:id="proposedSolutions" */ private TextArea proposedSolutions; @FXML /** * fx:id="updateLink" */ private Hyperlink updateLink; // Value injected by FXMLLoader @FXML /** * fx:id="versionLabel" */ private Label versionLabel; // Value injected by FXMLLoader @FXML /** * Handler for Hyperlink[fx:id="updateLink"] onAction * * @param event * The event object that contains information about the event. */ void updateLinkOnAction(ActionEvent event) { // Check for new version ignoring ignored updates Thread updateThread = new Thread() { @Override public void run() { UpdateInfo update = UpdateChecker.isUpdateAvailableCompareAppVersion(Config.getUpdateRepoBaseURL(), Config.groupID, Config.artifactID, Config.updateFileClassifier); Platform.runLater(new Runnable() { @Override public void run() { new UpdateAvailableDialog(update); } }); } }; updateThread.setName("manualUpdateThread"); updateThread.start(); } @FXML /** * Handler for Hyperlink[fx:id="newGameButton"] onAction * */ void newGameButtonOnAction(ActionEvent event) { startNewGame(); } public void startNewGame() { startNewGame(true); } public void startNewGame(boolean submitWord) { if (submitWord) { // Maybe Submit the word to the MongoDB database submitWordOnQuit(); } algorithm.HangmanSolver.proposedSolutions.clear(); applyButton.setDisable(true); languageSelector.setDisable(false); currentSequence.setText(""); proposedSolutions.setText(""); setThought(""); result.setText(""); currentSequence.setDisable(false); currentSequence.requestFocus(); } /** * Handler for Button[fx:id="applyButton"] onAction<br> * <br> * Applies the current guess to the letter sequence using the bestWord. * * @param event * The event object (automatically injected) */ @FXML void applyResult(ActionEvent event) { String newSequence = ""; // Split the pattern up in words ArrayList<String> words = new ArrayList<String>(Arrays.asList(currentSequence.getText().split(" "))); boolean wordReplaced = false; if (currentSolution.result.length() > 1) { // The next guess is a word. 
for (int i = 0; i < words.size(); i++) { if (!words.get(i).contains("_") || wordReplaced) { // Word is already solved or the solution was already // applied if (newSequence.length() != 0) { newSequence = newSequence + " "; } newSequence = newSequence + words.get(i); } else { // Replace word String newWord = ""; String oldWord = words.get(i); for (int t = 0; t < oldWord.length(); t++) { if (oldWord.charAt(t) == '_') { // replace it newWord = newWord + currentSolution.result.charAt(t); } else { // Don't replace it as there is no _ newWord = newWord + oldWord.charAt(t); } } if (newSequence.length() != 0) { newSequence = newSequence + " "; } newSequence = newSequence + newWord; } } } else { // The next guess is a letter for (int i = 0; i < words.size(); i++) { if (!words.get(i).contains("_") || wordReplaced) { // Word is already solved or the solution was already // applied if (newSequence.length() != 0) { newSequence = newSequence + " "; } newSequence = newSequence + words.get(i); } else { // add letters String newWord = ""; String oldWord = words.get(i); for (int t = 0; t < oldWord.length(); t++) { if (oldWord.charAt(t) == '_' && Character.toUpperCase(currentSolution.bestWord.charAt(t)) == Character .toUpperCase(currentSolution.result.charAt(0))) { // replace it newWord = newWord + currentSolution.bestWord.charAt(t); } else { // Don't replace it as there is no _ newWord = newWord + oldWord.charAt(t); } } if (newSequence.length() != 0) { newSequence = newSequence + " "; } newSequence = newSequence + newWord; } } } // Set the new sequence in the gui currentSequence.setText(newSequence); /* * // submit solved words for (int i = 0; i < words.size(); i++) { if * (!words.get(i).contains("_")) { if (!words.get(i).equals("")) { // * Submit the word to the internet db. // Although this method is called * quite often, it keeps // track of // the submissions to avoid * duplicates. 
HangmanStats.addWordToDatabase(words.get(i), * currentSolution.lang); } } } */ // get the next guess launchAlgorithm(); } @FXML void currentAppVersionTextLabelOnMouseClicked(MouseEvent event) { if (event.getButton().equals(MouseButton.PRIMARY)) { // Do the easter egg when clicking with the left mouse button clickCounter++; if (clickCounter >= 3) { // rotate double angle = (Math.random() - 0.5) * 1440; curVersionEasterEggTurnDegrees = curVersionEasterEggTurnDegrees + angle; RotateTransition rt = new RotateTransition(Duration.millis(500), currentAppVersionTextLabel); rt.setByAngle(angle); rt.setAutoReverse(true); rt.play(); clickCounter = 0; currentAppVersionTextLabel.setTooltip(new Tooltip(bundle.getString("resetEasterEgg"))); // remove whole turns while (curVersionEasterEggTurnDegrees > 360.0) { curVersionEasterEggTurnDegrees = curVersionEasterEggTurnDegrees - 360.0; } while (curVersionEasterEggTurnDegrees < -360.0) { curVersionEasterEggTurnDegrees = curVersionEasterEggTurnDegrees + 360.0; } } } else { // Reset the easter egg if (Math.abs(360.0 - curVersionEasterEggTurnDegrees) < Math.abs(curVersionEasterEggTurnDegrees)) { curVersionEasterEggTurnDegrees = -(360.0 - curVersionEasterEggTurnDegrees); } double angle = -curVersionEasterEggTurnDegrees; curVersionEasterEggTurnDegrees = 0; RotateTransition rt = new RotateTransition(Duration.millis(500), currentAppVersionTextLabel); rt.setByAngle(angle); rt.setAutoReverse(true); rt.play(); currentAppVersionTextLabel.setTooltip(null); } } /** * Handler for Button[fx:id="getNextLetter"] onAction<br> * Fires when the user is in the text field and hits the enter key or clicks * the 'get result button' * * @param event * The event object (automatically injected) */ @FXML void getNextLetterAction(ActionEvent event) { launchAlgorithm(); } @FXML /** * Handler for Hyperlink[fx:id="creditsButton"] onAction * * @param event * The event object that contains information about the event. */ void creditsButtonOnAction(ActionEvent event) { LicenseWindow.show(bundle.getString("licenseWindowTitle")); } @FXML /** * Handler for Hyperlink[fx:id="shareThoughtsCheckbox"] onAction * * @param event * The event object that contains information about the event. 
	 */
	void shareThoughtsCheckboxOnAction(ActionEvent event) {
		shareThoughtsBool = shareThoughtsCheckbox.isSelected();

		if (shareThoughtsBool) {
			setThought();
		} else {
			// Clear the thoughts label
			thoughts.setText("");
		}
	}

	@Override
	/**
	 * Method is invoked by JavaFX after the application launch
	 */
	public void start(Stage primaryStage) {
		try {
			if (HangmanStats.uploadThread.isAlive() == false) {
				HangmanStats.uploadThread.start();
			}

			// Don't check for updates if launched from launcher
			if (!disableUpdateChecks) {
				Thread updateThread = new Thread() {
					@Override
					public void run() {
						UpdateInfo update = UpdateChecker.isUpdateAvailable(Config.getUpdateRepoBaseURL(),
								Config.groupID, Config.artifactID, Config.updateFileClassifier);
						if (update.showAlert) {
							Platform.runLater(new Runnable() {
								@Override
								public void run() {
									new UpdateAvailableDialog(update);
								}
							});
						}
					}
				};
				updateThread.setName("updateThread");
				updateThread.start();
			}

			Parent root = FXMLLoader.load(getClass().getResource("MainWindow.fxml"), bundle);
			scene = new Scene(root);
			scene.getStylesheets().add(getClass().getResource("MainWindow.css").toExternalForm());
			primaryStage.setTitle(bundle.getString("windowTitle"));
			primaryStage.setMinWidth(scene.getRoot().minWidth(0) + 70);
			primaryStage.setMinHeight(scene.getRoot().minHeight(0) + 70);
			primaryStage.setScene(scene);

			// Set Icon
			primaryStage.getIcons().add(new Image(MainWindow.class.getResourceAsStream("icon.png")));

			primaryStage.show();
		} catch (Exception e) {
			log.getLogger().log(Level.SEVERE, "An error occurred", e);
		}
	}

	@Override
	public void stop() {
		shutDown();
	}

	/**
	 * Method is invoked by the FXML Loader after all variables have been
	 * injected.
	 */
	public void initialize(URL arg0, ResourceBundle arg1) {
		assert actionLabel != null : "fx:id=\"actionLabel\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert applyButton != null : "fx:id=\"applyButton\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert creditsButton != null : "fx:id=\"creditsButton\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert currentAppVersionTextLabel != null : "fx:id=\"currentAppVersionTextLabel\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert currentSequence != null : "fx:id=\"currentSequence\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert getNextLetter != null : "fx:id=\"getNextLetter\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert languageSelector != null : "fx:id=\"languageSelector\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert loadLanguagesProgressBar != null : "fx:id=\"loadLanguagesProgressBar\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert newGameButton != null : "fx:id=\"newGameButton\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert proposedSolutions != null : "fx:id=\"proposedSolutions\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert result != null : "fx:id=\"result\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert shareThoughtsCheckbox != null : "fx:id=\"shareThoughtsCheckbox\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert thoughts != null : "fx:id=\"thoughts\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert updateLink != null : "fx:id=\"updateLink\" was not injected: check your FXML file 'MainWindow.fxml'.";
		assert versionLabel != null : "fx:id=\"versionLabel\" was not injected: check your FXML file 'MainWindow.fxml'.";

		// Initialize your logic here: all @FXML variables will have been
		// injected
		// Set the height of the apply button to the height of the result text field
		applyButton.setPrefHeight(result.getPrefHeight()+4);

		loadLanguageList();
		shareThoughtsCheckbox.setSelected(true);
		shareThoughtsBool = true;

		try {
			versionLabel.setText(new Version(Common.getAppVersion(), Common.getBuildNumber()).toString(false));
		} catch (IllegalArgumentException e) {
			versionLabel.setText(Common.UNKNOWN_APP_VERSION);
		}

		// Make update link invisible if launched from launcher
		if (disableUpdateChecks) {
			updateLink.setDisable(true);
			updateLink.setVisible(false);
		}

		// Listen for TextField text changes
		currentSequence.textProperty().addListener(new ChangeListener<String>() {
			@Override
			public void changed(ObservableValue<? extends String> observable, String oldValue, String newValue) {
				currentSequenceStr = currentSequence.getText();
				getNextLetter.setText(bundle.getString("computeNextLetterButtonLabel"));
			}
		});
	}

	/**
	 * This method launches the algorithm and writes its results into the gui.
	 */
	void launchAlgorithm() {
		MainWindow window = this;
		Thread algorithmThread = new Thread() {
			@Override
			public void run() {
				try {
					Platform.runLater(new Runnable() {
						@Override
						public void run() {
							languageSelector.setDisable(true);
							getNextLetter.setDisable(true);
							applyButton.setDisable(true);
							newGameButton.setDisable(true);
							currentSequence.setDisable(true);
							getNextLetter.setText(bundle.getString("computeNextLetterButton.waitForAlgorithmText"));
						}
					});

					currentSolution = HangmanSolver.solve(currentSequence.getText(), Language.getSupportedLanguages()
							.get(languageSelector.getSelectionModel().getSelectedIndex()));

					/*
					 * Platform.runLater(new Runnable() {
					 *
					 * @Override public void run() {
					 * System.out.println("Setting resultText...");
					 * result.setText(currentSolution.result); } });
					 */

					String proposedSolutionsString = "";
					for (String solution : HangmanSolver.proposedSolutions) {
						proposedSolutionsString = proposedSolutionsString + solution + ", ";
					}

					// remove last ,
					proposedSolutionsString = proposedSolutionsString.substring(0,
							proposedSolutionsString.length() - 2);
					final String proposedSolutionsStringCopy = proposedSolutionsString;

					/*
					 * Platform.runLater(new Runnable() {
					 *
					 * @Override public void run() {
					 * proposedSolutions.setText(proposedSolutionsStringCopy); }
					 * });
					 */

					if (currentSolution.gameState == GameState.GAME_LOST
							|| currentSolution.gameState == GameState.GAME_WON) {
						Platform.runLater(new Runnable() {
							@Override
							public void run() {
								GameEndDialog.show(bundle.getString("GameEndDialog.windowTitle"),
										currentSolution.gameState, window);
							}
						});
					} else {
						Platform.runLater(new Runnable() {
							@Override
							public void run() {
								// Update gui

								// next guess
								result.setText(currentSolution.result);

								// already proposed solutions
								proposedSolutions.setText(proposedSolutionsStringCopy);

								// thought
								String thoughtText = "";
								if (currentSolution.bestWordScore >= Config.thresholdToShowWord) {
									applyButton.setDisable(false);
									thoughtText = bundle.getString("thinkOfAWord")
											.replace("<percent>",
													Double.toString(Math.round(currentSolution.bestWordScore * 100)))
											.replace("<word>", currentSolution.bestWord);
								} else {
									applyButton.setDisable(true);
									thoughtText = bundle.getString("dontThinkAWord");
								}

								// Add the remaining wrong guesses
								thoughtText = thoughtText + " " + bundle.getString("remeaningWrongGuesses")
										.replace("<number>", Integer.toString(
												Config.maxTurnCountToLoose - HangmanSolver.getWrongGuessCount()));
								setThought(thoughtText);

								// Update buttons etc only if everything else
								// succeeded
getNextLetter.setText(bundle.getString("computeNextLetterButton.letterWrongText")); currentSequence.setDisable(false); // If the apply button is enabled, give it the // focus, else focus the current sequence if (!applyButton.isDisable()) { applyButton.requestFocus(); } else { currentSequence.requestFocus(); } } }); } } catch (ArrayIndexOutOfBoundsException e) { // No language selected Platform.runLater(new Runnable() { @Override public void run() { // NoLanguageSelected.show(); Alert alert = new Alert(Alert.AlertType.ERROR, errorMessageBundle.getString("selectLanguage")); alert.show(); // Replace button text with original string getNextLetter.setText(bundle.getString("computeNextLetterButtonLabel")); languageSelector.setDisable(false); currentSequence.setDisable(false); languageSelector.requestFocus(); } }); } catch (StringIndexOutOfBoundsException e2) { // No sequence entered Platform.runLater(new Runnable() { @Override public void run() { // NoSequenceEntered.show(); Alert alert = new Alert(Alert.AlertType.ERROR, errorMessageBundle.getString("enterWordSequence")); alert.show(); // Replace button text with original string getNextLetter.setText(bundle.getString("computeNextLetterButtonLabel")); currentSequence.setDisable(false); currentSequence.requestFocus(); } }); } finally { Platform.runLater(new Runnable() { @Override public void run() { getNextLetter.setDisable(false); newGameButton.setDisable(false); } }); } } }; algorithmThread.start(); } /** * Writes the last thought into the thoughts-label. */ public void setThought() { setThought(lastThought); } /** * Writes the given thought into the thoughts-label. The last thought is * remembered and can be recalled with {@code setThought()}. * * @param thought * The thought to be written to the gui. */ public void setThought(String thought) { lastThought = thought; if (shareThoughtsBool) { thoughts.setText(thought); } } /** * Loads the available languages into the gui dropdown. */ private void loadLanguageList() { Thread loadLangThread = new Thread() { @Override public void run() { log.getLogger().info("Loading language list..."); Platform.runLater(new Runnable() { @Override public void run() { languageSelector.setDisable(true); languageSelector.setPromptText(bundle.getString("languageSelector.waitText")); currentSequence.setDisable(true); getNextLetter.setDisable(true); result.setDisable(true); newGameButton.setDisable(true); } }); ObservableList<String> items = FXCollections.observableArrayList(); Platform.runLater(new Runnable(){ @Override public void run(){ loadLanguagesProgressBar.setPrefHeight(languageSelector.getHeight());; loadLanguagesProgressBar.setVisible(true); } }); // Load the languages List<Language> langList = Language.getSupportedLanguages(); for (int i=0; i<langList.size(); i++) { items.add(langList.get(i).getHumanReadableName()); loadedLanguagesCount = i; Platform.runLater(new Runnable(){ @Override public void run(){ loadLanguagesProgressBar.setProgress((double)loadedLanguagesCount/(double)langList.size()); } }); } languageSelector.setItems(items); log.getLogger().info("Languages loaded"); Platform.runLater(new Runnable() { @Override public void run() { languageSelector.setDisable(false); languageSelector.setPromptText(bundle.getString("languageSelector.PromptText")); currentSequence.setDisable(false); getNextLetter.setDisable(false); result.setDisable(false); newGameButton.setDisable(false); loadLanguagesProgressBar.setVisible(false); // Initialize the language search field. 
new AutoCompleteComboBoxListener<String>(languageSelector); languageSelector.requestFocus(); } }); } }; loadLangThread.start(); } /** * This method is executed before the app exits and executes several * shutdown commands.<br> * <b>IMPORTANT: This method does not quit the app, it just prepares the app * for shutdown!</b> */ public static void shutDown() { try { log.getLogger().info("Shutting down...."); // Maybe submit the current word submitWordOnQuit(); HangmanStats.uploadThread.interrupt(); HangmanStats.uploadThread.join(); MongoSetup.close(); log.getLogger().info("Good bye"); } catch (InterruptedException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); } } /** * Submits the current words when the user closes the app and the current * sequence and the bestWord have a correlation bigger or equal than * {@code Config.thresholdToSelectWord}. */ private static void submitWordOnQuit() { try { String[] words = currentSequenceStr.split(" "); for (String word : words) { if (word.length() == currentSolution.bestWord.length()) if (TabFile.stringCorrelation(word, currentSolution.bestWord) >= Config .thresholdToSelectWord(word.length())) { HangmanStats.addWordToDatabase(currentSolution.bestWord, currentSolution.lang); } } } catch (NullPointerException e) { // Do nothing, no word entered } } }
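launchAlgorithm() and loadLanguageList() above both follow the standard JavaFX discipline: run the slow work on a background thread, and push every UI mutation back onto the FX application thread via Platform.runLater. A bare-bones sketch of that pattern; slowComputation() is a placeholder standing in for a call like HangmanSolver.solve, not code from the project:

import javafx.application.Platform;
import javafx.scene.control.Label;

public class BackgroundWorkSketch {

	// Runs the slow computation off the FX thread, then updates the label on it.
	static void launch(Label status) {
		Thread worker = new Thread(() -> {
			String result = slowComputation();                // placeholder for the real solver call
			Platform.runLater(() -> status.setText(result));  // UI changes happen only on the FX thread
		});
		worker.setDaemon(true);
		worker.start();
	}

	private static String slowComputation() {
		try { Thread.sleep(500); } catch (InterruptedException ignored) { }
		return "done";
	}
}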
src/main/java/view/MainWindow.java
package view; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.ResourceBundle; import java.util.logging.Level; import algorithm.*; import common.Common; import common.Config; import common.UpdateChecker; import common.UpdateInfo; import common.Version; import javafx.animation.RotateTransition; import javafx.application.Application; import javafx.application.Platform; import javafx.beans.value.ChangeListener; import javafx.beans.value.ObservableValue; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.event.ActionEvent; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.fxml.Initializable; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.control.Alert; import javafx.scene.control.Button; import javafx.scene.control.CheckBox; import javafx.scene.control.ComboBox; import javafx.scene.control.Hyperlink; import javafx.scene.control.Label; import javafx.scene.control.ProgressBar; import javafx.scene.control.TextArea; import javafx.scene.control.TextField; import javafx.scene.control.Tooltip; import javafx.scene.image.Image; import javafx.scene.input.MouseButton; import javafx.scene.input.MouseEvent; import javafx.stage.Stage; import javafx.util.Duration; import languages.Language; import languages.TabFile; import logging.FOKLogger; import stats.HangmanStats; import stats.MongoSetup; import view.updateAvailableDialog.UpdateAvailableDialog; /** * The MainWindow controller class. **/ public class MainWindow extends Application implements Initializable { private static double curVersionEasterEggTurnDegrees = 0; private static boolean disableUpdateChecks = false; private static FOKLogger log; public static void main(String[] args) { common.Common.setAppName("hangmanSolver"); log = new FOKLogger(MainWindow.class.getName()); for (String arg : args) { if (arg.toLowerCase().matches("mockappversion=.*")) { // Set the mock version String version = arg.substring(arg.toLowerCase().indexOf('=') + 1); Common.setMockAppVersion(version); } else if (arg.toLowerCase().matches("mockbuildnumber=.*")) { // Set the mock version String buildnumber = arg.substring(arg.toLowerCase().indexOf('=') + 1); Common.setMockBuildNumber(buildnumber); ; } else if (arg.toLowerCase().matches("disableupdatechecks")) { log.getLogger().info("Update checks are disabled as app was launched from launcher."); disableUpdateChecks = true; } } launch(args); } private ResourceBundle bundle = ResourceBundle.getBundle("view.strings.messages"); private ResourceBundle errorMessageBundle = ResourceBundle.getBundle("view.strings.errormessages"); private static String currentSequenceStr; public static Result currentSolution; private boolean shareThoughtsBool; private String lastThought; private static Scene scene; private static int clickCounter = 0; private static int loadedLanguagesCount = 0; public Scene getScene() { return scene; } @FXML // fx:id="loadLanguagesProgressBar" private ProgressBar loadLanguagesProgressBar; // Value injected by FXMLLoader @FXML /** * ResourceBundle that was given to the FXMLLoader */ private ResourceBundle resources; @FXML /** * URL location of the FXML file that was given to the FXMLLoader */ private URL location; @FXML /** * fx:id="actionLabel" */ private Label actionLabel; // Value injected by FXMLLoader @FXML /** * fx:id="applyButton" */ private Button applyButton; // Value injected by FXMLLoader @FXML /** * fx:id="creditsButton" */ private Button creditsButton; 
@FXML // fx:id="currentAppVersionTextLabel" private Label currentAppVersionTextLabel; // Value injected by FXMLLoader @FXML /** * fx:id="currentSequence" */ public TextField currentSequence; // Value injected by FXMLLoader @FXML /** * fx:id="getNextLetter" */ private Button getNextLetter; // Value injected by FXMLLoader @FXML /** * fx:id="languageSelector" * */ private ComboBox<String> languageSelector; // Value injected by FXMLLoader @FXML /** * fx:id="result" */ private TextField result; // Value injected by FXMLLoader @FXML /** * fx:id="shareThoughtsCheckbox" */ private CheckBox shareThoughtsCheckbox; @FXML /** * fx:id="thoughts" */ private Label thoughts; @FXML /** * fx:id="newGameButton" */ private Button newGameButton; @FXML /** * fx:id="proposedSolutions" */ private TextArea proposedSolutions; @FXML /** * fx:id="updateLink" */ private Hyperlink updateLink; // Value injected by FXMLLoader @FXML /** * fx:id="versionLabel" */ private Label versionLabel; // Value injected by FXMLLoader @FXML /** * Handler for Hyperlink[fx:id="updateLink"] onAction * * @param event * The event object that contains information about the event. */ void updateLinkOnAction(ActionEvent event) { // Check for new version ignoring ignored updates Thread updateThread = new Thread() { @Override public void run() { UpdateInfo update = UpdateChecker.isUpdateAvailableCompareAppVersion(Config.getUpdateRepoBaseURL(), Config.groupID, Config.artifactID, Config.updateFileClassifier); Platform.runLater(new Runnable() { @Override public void run() { new UpdateAvailableDialog(update); } }); } }; updateThread.setName("manualUpdateThread"); updateThread.start(); } @FXML /** * Handler for Hyperlink[fx:id="newGameButton"] onAction * */ void newGameButtonOnAction(ActionEvent event) { startNewGame(); } public void startNewGame() { startNewGame(true); } public void startNewGame(boolean submitWord) { if (submitWord) { // Maybe Submit the word to the MongoDB database submitWordOnQuit(); } algorithm.HangmanSolver.proposedSolutions.clear(); applyButton.setDisable(true); languageSelector.setDisable(false); currentSequence.setText(""); proposedSolutions.setText(""); setThought(""); result.setText(""); currentSequence.setDisable(false); currentSequence.requestFocus(); } /** * Handler for Button[fx:id="applyButton"] onAction<br> * <br> * Applies the current guess to the letter sequence using the bestWord. * * @param event * The event object (automatically injected) */ @FXML void applyResult(ActionEvent event) { String newSequence = ""; // Split the pattern up in words ArrayList<String> words = new ArrayList<String>(Arrays.asList(currentSequence.getText().split(" "))); boolean wordReplaced = false; if (currentSolution.result.length() > 1) { // The next guess is a word. 
for (int i = 0; i < words.size(); i++) { if (!words.get(i).contains("_") || wordReplaced) { // Word is already solved or the solution was already // applied if (newSequence.length() != 0) { newSequence = newSequence + " "; } newSequence = newSequence + words.get(i); } else { // Replace word String newWord = ""; String oldWord = words.get(i); for (int t = 0; t < oldWord.length(); t++) { if (oldWord.charAt(t) == '_') { // replace it newWord = newWord + currentSolution.result.charAt(t); } else { // Don't replace it as there is no _ newWord = newWord + oldWord.charAt(t); } } if (newSequence.length() != 0) { newSequence = newSequence + " "; } newSequence = newSequence + newWord; } } } else { // The next guess is a letter for (int i = 0; i < words.size(); i++) { if (!words.get(i).contains("_") || wordReplaced) { // Word is already solved or the solution was already // applied if (newSequence.length() != 0) { newSequence = newSequence + " "; } newSequence = newSequence + words.get(i); } else { // add letters String newWord = ""; String oldWord = words.get(i); for (int t = 0; t < oldWord.length(); t++) { if (oldWord.charAt(t) == '_' && Character.toUpperCase(currentSolution.bestWord.charAt(t)) == Character .toUpperCase(currentSolution.result.charAt(0))) { // replace it newWord = newWord + currentSolution.bestWord.charAt(t); } else { // Don't replace it as there is no _ newWord = newWord + oldWord.charAt(t); } } if (newSequence.length() != 0) { newSequence = newSequence + " "; } newSequence = newSequence + newWord; } } } // Set the new sequence in the gui currentSequence.setText(newSequence); /* * // submit solved words for (int i = 0; i < words.size(); i++) { if * (!words.get(i).contains("_")) { if (!words.get(i).equals("")) { // * Submit the word to the internet db. // Although this method is called * quite often, it keeps // track of // the submissions to avoid * duplicates. 
HangmanStats.addWordToDatabase(words.get(i), * currentSolution.lang); } } } */ // get the next guess launchAlgorithm(); } @FXML void currentAppVersionTextLabelOnMouseClicked(MouseEvent event) { if (event.getButton().equals(MouseButton.PRIMARY)) { // Do the easter egg when clicking with the left mouse button clickCounter++; if (clickCounter >= 3) { // rotate double angle = (Math.random() - 0.5) * 1440; curVersionEasterEggTurnDegrees = curVersionEasterEggTurnDegrees + angle; RotateTransition rt = new RotateTransition(Duration.millis(500), currentAppVersionTextLabel); rt.setByAngle(angle); rt.setAutoReverse(true); rt.play(); clickCounter = 0; currentAppVersionTextLabel.setTooltip(new Tooltip(bundle.getString("resetEasterEgg"))); // remove whole turns while (curVersionEasterEggTurnDegrees > 360.0) { curVersionEasterEggTurnDegrees = curVersionEasterEggTurnDegrees - 360.0; } while (curVersionEasterEggTurnDegrees < -360.0) { curVersionEasterEggTurnDegrees = curVersionEasterEggTurnDegrees + 360.0; } } } else { // Reset the easter egg if (Math.abs(360.0 - curVersionEasterEggTurnDegrees) < Math.abs(curVersionEasterEggTurnDegrees)) { curVersionEasterEggTurnDegrees = -(360.0 - curVersionEasterEggTurnDegrees); } double angle = -curVersionEasterEggTurnDegrees; curVersionEasterEggTurnDegrees = 0; RotateTransition rt = new RotateTransition(Duration.millis(500), currentAppVersionTextLabel); rt.setByAngle(angle); rt.setAutoReverse(true); rt.play(); currentAppVersionTextLabel.setTooltip(null); } } /** * Handler for Button[fx:id="getNextLetter"] onAction<br> * Fires when the user is in the text field and hits the enter key or clicks * the 'get result button' * * @param event * The event object (automatically injected) */ @FXML void getNextLetterAction(ActionEvent event) { launchAlgorithm(); } @FXML /** * Handler for Hyperlink[fx:id="creditsButton"] onAction * * @param event * The event object that contains information about the event. */ void creditsButtonOnAction(ActionEvent event) { LicenseWindow.show(bundle.getString("licenseWindowTitle")); } @FXML /** * Handler for Hyperlink[fx:id="shareThoughtsCheckbox"] onAction * * @param event * The event object that contains information about the event. 
*/ void shareThoughtsCheckboxOnAction(ActionEvent event) { shareThoughtsBool = shareThoughtsCheckbox.isSelected(); if (shareThoughtsBool) { setThought(); } else { // Clear the thoughts label thoughts.setText(""); } } @Override /** * Method is invoked by JavaFX after the application launch */ public void start(Stage primaryStage) { try { if (!HangmanStats.uploadThread.isAlive()) { HangmanStats.uploadThread.start(); } // Don't check for updates if launched from launcher if (!disableUpdateChecks) { Thread updateThread = new Thread() { @Override public void run() { UpdateInfo update = UpdateChecker.isUpdateAvailable(Config.getUpdateRepoBaseURL(), Config.groupID, Config.artifactID, Config.updateFileClassifier); if (update.showAlert) { Platform.runLater(new Runnable() { @Override public void run() { new UpdateAvailableDialog(update); } }); } } }; updateThread.setName("updateThread"); updateThread.start(); } Parent root = FXMLLoader.load(getClass().getResource("MainWindow.fxml"), bundle); scene = new Scene(root); scene.getStylesheets().add(getClass().getResource("MainWindow.css").toExternalForm()); primaryStage.setTitle(bundle.getString("windowTitle")); primaryStage.setMinWidth(scene.getRoot().minWidth(0) + 70); primaryStage.setMinHeight(scene.getRoot().minHeight(0) + 70); primaryStage.setScene(scene); // Set Icon primaryStage.getIcons().add(new Image(MainWindow.class.getResourceAsStream("icon.png"))); primaryStage.show(); } catch (Exception e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); } } @Override public void stop() { shutDown(); } /** * Method is invoked by the FXML Loader after all variables have been * injected. */ public void initialize(URL arg0, ResourceBundle arg1) { assert actionLabel != null : "fx:id=\"actionLabel\" was not injected: check your FXML file 'MainWindow.fxml'."; assert applyButton != null : "fx:id=\"applyButton\" was not injected: check your FXML file 'MainWindow.fxml'."; assert creditsButton != null : "fx:id=\"creditsButton\" was not injected: check your FXML file 'MainWindow.fxml'."; assert currentAppVersionTextLabel != null : "fx:id=\"currentAppVersionTextLabel\" was not injected: check your FXML file 'MainWindow.fxml'."; assert currentSequence != null : "fx:id=\"currentSequence\" was not injected: check your FXML file 'MainWindow.fxml'."; assert getNextLetter != null : "fx:id=\"getNextLetter\" was not injected: check your FXML file 'MainWindow.fxml'."; assert languageSelector != null : "fx:id=\"languageSelector\" was not injected: check your FXML file 'MainWindow.fxml'."; assert loadLanguagesProgressBar != null : "fx:id=\"loadLanguagesProgressBar\" was not injected: check your FXML file 'MainWindow.fxml'."; assert newGameButton != null : "fx:id=\"newGameButton\" was not injected: check your FXML file 'MainWindow.fxml'."; assert proposedSolutions != null : "fx:id=\"proposedSolutions\" was not injected: check your FXML file 'MainWindow.fxml'."; assert result != null : "fx:id=\"result\" was not injected: check your FXML file 'MainWindow.fxml'."; assert shareThoughtsCheckbox != null : "fx:id=\"shareThoughtsCheckbox\" was not injected: check your FXML file 'MainWindow.fxml'."; assert thoughts != null : "fx:id=\"thoughts\" was not injected: check your FXML file 'MainWindow.fxml'."; assert updateLink != null : "fx:id=\"updateLink\" was not injected: check your FXML file 'MainWindow.fxml'."; assert versionLabel != null : "fx:id=\"versionLabel\" was not injected: check your FXML file 'MainWindow.fxml'."; // Initialize your logic here: all @FXML
variables will have been // injected // Set the height of the apply button to the height of the result text field applyButton.setPrefHeight(result.getPrefHeight()+4); loadLanguageList(); shareThoughtsCheckbox.setSelected(true); shareThoughtsBool = true; try { versionLabel.setText(new Version(Common.getAppVersion(), Common.getBuildNumber()).toString(false)); } catch (IllegalArgumentException e) { versionLabel.setText(Common.UNKNOWN_APP_VERSION); } // Make update link invisible if launched from launcher if (disableUpdateChecks) { updateLink.setDisable(true); updateLink.setVisible(false); } // Listen for TextField text changes currentSequence.textProperty().addListener(new ChangeListener<String>() { @Override public void changed(ObservableValue<? extends String> observable, String oldValue, String newValue) { currentSequenceStr = currentSequence.getText(); getNextLetter.setText(bundle.getString("computeNextLetterButtonLabel")); } }); } /** * This method launches the algorithm and writes its results into the gui. */ void launchAlgorithm() { MainWindow window = this; Thread algorithmThread = new Thread() { @Override public void run() { try { Platform.runLater(new Runnable() { @Override public void run() { languageSelector.setDisable(true); getNextLetter.setDisable(true); applyButton.setDisable(true); newGameButton.setDisable(true); currentSequence.setDisable(true); getNextLetter.setText(bundle.getString("computeNextLetterButton.waitForAlgorithmText")); } }); currentSolution = HangmanSolver.solve(currentSequence.getText(), Language.getSupportedLanguages() .get(languageSelector.getSelectionModel().getSelectedIndex())); /* * Platform.runLater(new Runnable() { * * @Override public void run() { * System.out.println("Setting resultText..."); * result.setText(currentSolution.result); } }); */ String proposedSolutionsString = ""; for (String solution : HangmanSolver.proposedSolutions) { proposedSolutionsString = proposedSolutionsString + solution + ", "; } // remove last , proposedSolutionsString = proposedSolutionsString.substring(0, proposedSolutionsString.length() - 2); final String proposedSolutionsStringCopy = proposedSolutionsString; /* * Platform.runLater(new Runnable() { * * @Override public void run() { * proposedSolutions.setText(proposedSolutionsStringCopy); } * }); */ if (currentSolution.gameState == GameState.GAME_LOST || currentSolution.gameState == GameState.GAME_WON) { Platform.runLater(new Runnable() { @Override public void run() { GameEndDialog.show(bundle.getString("GameEndDialog.windowTitle"), currentSolution.gameState, window); } }); } else { Platform.runLater(new Runnable() { @Override public void run() { // Update gui // next guess result.setText(currentSolution.result); // already proposed solutions proposedSolutions.setText(proposedSolutionsStringCopy); // thought String thoughtText = ""; if (currentSolution.bestWordScore >= Config.thresholdToShowWord) { applyButton.setDisable(false); thoughtText = bundle.getString("thinkOfAWord") .replace("<percent>", Double.toString(Math.round(currentSolution.bestWordScore * 100))) .replace("<word>", currentSolution.bestWord); } else { applyButton.setDisable(true); thoughtText = bundle.getString("dontThinkAWord"); } // Add the remaining wrong guesses thoughtText = thoughtText + " " + bundle.getString("remeaningWrongGuesses") .replace("<number>", Integer.toString( Config.maxTurnCountToLoose - HangmanSolver.getWrongGuessCount())); setThought(thoughtText); // Update buttons etc only if everything else // succeeded
getNextLetter.setText(bundle.getString("computeNextLetterButton.letterWrongText")); currentSequence.setDisable(false); // If the apply button is enabled, give it the // focus, else focus the current sequence if (!applyButton.isDisable()) { applyButton.requestFocus(); } else { currentSequence.requestFocus(); } } }); } } catch (ArrayIndexOutOfBoundsException e) { // No language selected Platform.runLater(new Runnable() { @Override public void run() { // NoLanguageSelected.show(); Alert alert = new Alert(Alert.AlertType.ERROR, errorMessageBundle.getString("selectLanguage")); alert.show(); // Replace button text with original string getNextLetter.setText(bundle.getString("computeNextLetterButtonLabel")); languageSelector.setDisable(false); currentSequence.setDisable(false); languageSelector.requestFocus(); } }); } catch (StringIndexOutOfBoundsException e2) { // No sequence entered Platform.runLater(new Runnable() { @Override public void run() { // NoSequenceEntered.show(); Alert alert = new Alert(Alert.AlertType.ERROR, errorMessageBundle.getString("enterWordSequence")); alert.show(); // Replace button text with original string getNextLetter.setText(bundle.getString("computeNextLetterButtonLabel")); currentSequence.setDisable(false); currentSequence.requestFocus(); } }); } finally { Platform.runLater(new Runnable() { @Override public void run() { getNextLetter.setDisable(false); newGameButton.setDisable(false); } }); } } }; algorithmThread.start(); } /** * Writes the last thought into the thoughts-label. */ public void setThought() { setThought(lastThought); } /** * Writes the given thought into the thoughts-label. The last thought is * remembered and can be recalled with {@code setThought()}. * * @param thought * The thought to be written to the gui. */ public void setThought(String thought) { lastThought = thought; if (shareThoughtsBool) { thoughts.setText(thought); } } /** * Loads the available languages into the gui dropdown. */ private void loadLanguageList() { Thread loadLangThread = new Thread() { @Override public void run() { log.getLogger().info("Loading language list..."); Platform.runLater(new Runnable() { @Override public void run() { languageSelector.setDisable(true); languageSelector.setPromptText(bundle.getString("languageSelector.waitText")); currentSequence.setDisable(true); getNextLetter.setDisable(true); result.setDisable(true); newGameButton.setDisable(true); } }); ObservableList<String> items = FXCollections.observableArrayList(); Platform.runLater(new Runnable(){ @Override public void run(){ loadLanguagesProgressBar.setPrefHeight(languageSelector.getHeight());; loadLanguagesProgressBar.setVisible(true); } }); // Load the languages List<Language> langList = Language.getSupportedLanguages(); for (int i=0; i<langList.size(); i++) { items.add(langList.get(i).getHumanReadableName()); loadedLanguagesCount = i; Platform.runLater(new Runnable(){ @Override public void run(){ loadLanguagesProgressBar.setProgress((double)loadedLanguagesCount/(double)langList.size()); } }); } languageSelector.setItems(items); log.getLogger().info("Languages loaded"); Platform.runLater(new Runnable() { @Override public void run() { languageSelector.setDisable(false); languageSelector.setPromptText(bundle.getString("languageSelector.PromptText")); currentSequence.setDisable(false); getNextLetter.setDisable(false); result.setDisable(false); newGameButton.setDisable(false); loadLanguagesProgressBar.setVisible(false); // Initialize the language search field. 
new AutoCompleteComboBoxListener<String>(languageSelector); languageSelector.requestFocus(); } }); } }; loadLangThread.start(); } /** * This method is executed before the app exits and executes several * shutdown commands.<br> * <b>IMPORTANT: This method does not quit the app, it just prepares the app * for shutdown!</b> */ public static void shutDown() { try { log.getLogger().info("Shutting down...."); // Maybe submit the current word submitWordOnQuit(); HangmanStats.uploadThread.interrupt(); HangmanStats.uploadThread.join(); MongoSetup.close(); log.getLogger().info("Good bye"); } catch (InterruptedException e) { log.getLogger().log(Level.SEVERE, "An error occurred", e); } } /** * Submits the current words when the user closes the app and the current * sequence and the bestWord have a correlation greater than or equal to * {@code Config.thresholdToSelectWord}. */ private static void submitWordOnQuit() { try { String[] words = currentSequenceStr.split(" "); for (String word : words) { if (word.length() == currentSolution.bestWord.length()) if (TabFile.stringCorrelation(word, currentSolution.bestWord) >= Config .thresholdToSelectWord(word.length())) { HangmanStats.addWordToDatabase(currentSolution.bestWord, currentSolution.lang); } } } catch (NullPointerException e) { // Do nothing, no word entered } } }
Added debug output for deleting old app versions [#1]
src/main/java/view/MainWindow.java
Added debug output for deleting old app versions [#1]
<ide><path>src/main/java/view/MainWindow.java <ide> package view; <ide> <add>import java.io.UnsupportedEncodingException; <ide> import java.net.URL; <add>import java.net.URLDecoder; <ide> import java.util.ArrayList; <ide> import java.util.Arrays; <ide> import java.util.List; <ide> private static FOKLogger log; <ide> <ide> public static void main(String[] args) { <add> String path = MainWindow.class.getProtectionDomain().getCodeSource().getLocation().getPath(); <add> String decodedPath; <add> try { <add> decodedPath = URLDecoder.decode(path, "UTF-8"); <add> System.out.println(decodedPath); <add> } catch (UnsupportedEncodingException e) { <add> // TODO Auto-generated catch block <add> e.printStackTrace(); <add> } <add> <ide> common.Common.setAppName("hangmanSolver"); <ide> log = new FOKLogger(MainWindow.class.getName()); <ide> for (String arg : args) {
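The diff above prints the decoded location of the running code source so that old app versions can later be found and deleted. A minimal, self-contained Java sketch of that pattern follows; the class name CodeSourceLocator is hypothetical and not part of the commit. The key point is that getProtectionDomain().getCodeSource() yields a URL whose path is percent-encoded, so it must be URL-decoded before being used as a filesystem path.

```java
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;

// Hypothetical helper illustrating the pattern introduced in the diff above.
public class CodeSourceLocator {

    /**
     * Returns the decoded filesystem path of the jar or classes directory
     * this class was loaded from, or null if decoding fails.
     */
    public static String getCodeSourcePath() {
        // The URL path percent-encodes spaces and other characters
        // (e.g. "%20"), so decode it before treating it as a file path.
        String rawPath = CodeSourceLocator.class.getProtectionDomain()
                .getCodeSource().getLocation().getPath();
        try {
            return URLDecoder.decode(rawPath, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is always supported by the JVM, so this branch is
            // unreachable in practice; fail closed rather than return
            // a half-decoded path.
            return null;
        }
    }

    public static void main(String[] args) {
        System.out.println(getCodeSourcePath());
    }
}
```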
Java
apache-2.0
6b35ddf687e538f54e67f10c092aa0111c849107
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.ui.popup.list; import com.intellij.icons.AllIcons; import com.intellij.openapi.actionSystem.Shortcut; import com.intellij.openapi.actionSystem.ShortcutProvider; import com.intellij.openapi.actionSystem.ShortcutSet; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.ui.popup.ListItemDescriptorAdapter; import com.intellij.openapi.ui.popup.ListPopupStep; import com.intellij.openapi.ui.popup.ListPopupStepEx; import com.intellij.openapi.ui.popup.MnemonicNavigationFilter; import com.intellij.openapi.ui.popup.util.BaseListPopupStep; import com.intellij.openapi.util.Comparing; import com.intellij.ui.scale.JBUIScale; import com.intellij.util.ArrayUtil; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; public class PopupListElementRenderer<E> extends GroupedItemsListRenderer<E> { protected final ListPopupImpl myPopup; private JLabel myShortcutLabel; private @Nullable JLabel myValueLabel; public PopupListElementRenderer(final ListPopupImpl aPopup) { super(new ListItemDescriptorAdapter<E>() { @Override public String getTextFor(E value) { return aPopup.getListStep().getTextFor(value); } @Override public Icon getIconFor(E value) { return aPopup.getListStep().getIconFor(value); } @Override public Icon getSelectedIconFor(E value) { return aPopup.getListStep().getSelectedIconFor(value); } @Override public boolean hasSeparatorAboveOf(E value) { return aPopup.getListModel().isSeparatorAboveOf(value); } @Override public String getCaptionAboveOf(E value) { return aPopup.getListModel().getCaptionAboveOf(value); } @Nullable @Override public String getTooltipFor(E value) { ListPopupStep<Object> listStep = aPopup.getListStep(); if (!(listStep instanceof ListPopupStepEx)) return null; return ((ListPopupStepEx<E>)listStep).getTooltipTextFor(value); } }); myPopup = aPopup; } @Override protected JComponent createItemComponent() { JPanel panel = new JPanel(new BorderLayout()); createLabel(); panel.add(myTextLabel, BorderLayout.WEST); myValueLabel = new JLabel(); myValueLabel.setEnabled(false); myValueLabel.setBorder(JBUI.Borders.empty(0, JBUIScale.scale(8), 1, 0)); myValueLabel.setForeground(UIManager.getColor("MenuItem.acceleratorForeground")); panel.add(myValueLabel, BorderLayout.CENTER); myShortcutLabel = new JLabel(); myShortcutLabel.setBorder(JBUI.Borders.emptyRight(3)); myShortcutLabel.setForeground(UIManager.getColor("MenuItem.acceleratorForeground")); panel.add(myShortcutLabel, BorderLayout.EAST); return layoutComponent(panel); } @Override protected void customizeComponent(JList<? 
extends E> list, E value, boolean isSelected) { ListPopupStep<Object> step = myPopup.getListStep(); boolean isSelectable = step.isSelectable(value); myTextLabel.setEnabled(isSelectable); setSelected(myComponent, isSelected && isSelectable); setSelected(myTextLabel, isSelected && isSelectable); setSelected(myNextStepLabel, isSelected && isSelectable); if (step instanceof BaseListPopupStep) { Color bg = ((BaseListPopupStep<E>)step).getBackgroundFor(value); Color fg = ((BaseListPopupStep<E>)step).getForegroundFor(value); if (!isSelected && fg != null) myTextLabel.setForeground(fg); if (!isSelected && bg != null) UIUtil.setBackgroundRecursively(myComponent, bg); if (bg != null && mySeparatorComponent.isVisible() && myCurrentIndex > 0) { E prevValue = list.getModel().getElementAt(myCurrentIndex - 1); // separator between 2 colored items shall get color too if (Comparing.equal(bg, ((BaseListPopupStep<E>)step).getBackgroundFor(prevValue))) { myRendererComponent.setBackground(bg); } } } if (step.isMnemonicsNavigationEnabled()) { MnemonicNavigationFilter<Object> filter = step.getMnemonicNavigationFilter(); int pos = filter == null ? -1 : filter.getMnemonicPos(value); if (pos != -1) { String text = myTextLabel.getText(); text = text.substring(0, pos) + text.substring(pos + 1); myTextLabel.setText(text); myTextLabel.setDisplayedMnemonicIndex(pos); } } else { myTextLabel.setDisplayedMnemonicIndex(-1); } if (step.hasSubstep(value) && isSelectable) { myNextStepLabel.setVisible(true); myNextStepLabel.setIcon(isSelected ? AllIcons.Icons.Ide.NextStepInverted : AllIcons.Icons.Ide.NextStep); } else { myNextStepLabel.setVisible(false); } if (myShortcutLabel != null) { myShortcutLabel.setEnabled(isSelectable); myShortcutLabel.setText(""); if (value instanceof ShortcutProvider) { ShortcutSet set = ((ShortcutProvider)value).getShortcut(); if (set != null) { Shortcut shortcut = ArrayUtil.getFirstElement(set.getShortcuts()); if (shortcut != null) { myShortcutLabel.setText(" " + KeymapUtil.getShortcutText(shortcut)); } } } setSelected(myShortcutLabel, isSelected && isSelectable); myShortcutLabel.setForeground(isSelected && isSelectable ? UIManager.getColor("MenuItem.acceleratorSelectionForeground") : UIManager.getColor("MenuItem.acceleratorForeground")); } if (myValueLabel != null) { myValueLabel.setText(step instanceof ListPopupStepEx<?> ? ((ListPopupStepEx<E>)step).getValueFor(value) : null); setSelected(myValueLabel, isSelected && isSelectable); } } }
platform/platform-impl/src/com/intellij/ui/popup/list/PopupListElementRenderer.java
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.ui.popup.list; import com.intellij.icons.AllIcons; import com.intellij.openapi.actionSystem.Shortcut; import com.intellij.openapi.actionSystem.ShortcutProvider; import com.intellij.openapi.actionSystem.ShortcutSet; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.ui.popup.ListItemDescriptorAdapter; import com.intellij.openapi.ui.popup.ListPopupStep; import com.intellij.openapi.ui.popup.ListPopupStepEx; import com.intellij.openapi.ui.popup.MnemonicNavigationFilter; import com.intellij.openapi.ui.popup.util.BaseListPopupStep; import com.intellij.openapi.util.Comparing; import com.intellij.ui.scale.JBUIScale; import com.intellij.util.ArrayUtil; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; public class PopupListElementRenderer<E> extends GroupedItemsListRenderer<E> { protected final ListPopupImpl myPopup; private JLabel myShortcutLabel; private JLabel myValueLabel; public PopupListElementRenderer(final ListPopupImpl aPopup) { super(new ListItemDescriptorAdapter<E>() { @Override public String getTextFor(E value) { return aPopup.getListStep().getTextFor(value); } @Override public Icon getIconFor(E value) { return aPopup.getListStep().getIconFor(value); } @Override public Icon getSelectedIconFor(E value) { return aPopup.getListStep().getSelectedIconFor(value); } @Override public boolean hasSeparatorAboveOf(E value) { return aPopup.getListModel().isSeparatorAboveOf(value); } @Override public String getCaptionAboveOf(E value) { return aPopup.getListModel().getCaptionAboveOf(value); } @Nullable @Override public String getTooltipFor(E value) { ListPopupStep<Object> listStep = aPopup.getListStep(); if (!(listStep instanceof ListPopupStepEx)) return null; return ((ListPopupStepEx<E>)listStep).getTooltipTextFor(value); } }); myPopup = aPopup; } @Override protected JComponent createItemComponent() { JPanel panel = new JPanel(new BorderLayout()); createLabel(); panel.add(myTextLabel, BorderLayout.WEST); myValueLabel = new JLabel(); myValueLabel.setEnabled(false); myValueLabel.setBorder(JBUI.Borders.empty(0, JBUIScale.scale(8), 1, 0)); myValueLabel.setForeground(UIManager.getColor("MenuItem.acceleratorForeground")); panel.add(myValueLabel, BorderLayout.CENTER); myShortcutLabel = new JLabel(); myShortcutLabel.setBorder(JBUI.Borders.emptyRight(3)); myShortcutLabel.setForeground(UIManager.getColor("MenuItem.acceleratorForeground")); panel.add(myShortcutLabel, BorderLayout.EAST); return layoutComponent(panel); } @Override protected void customizeComponent(JList<? 
extends E> list, E value, boolean isSelected) { ListPopupStep<Object> step = myPopup.getListStep(); boolean isSelectable = step.isSelectable(value); myTextLabel.setEnabled(isSelectable); setSelected(myComponent, isSelected && isSelectable); setSelected(myTextLabel, isSelected && isSelectable); setSelected(myValueLabel, isSelected && isSelectable); setSelected(myNextStepLabel, isSelected && isSelectable); if (step instanceof BaseListPopupStep) { Color bg = ((BaseListPopupStep<E>)step).getBackgroundFor(value); Color fg = ((BaseListPopupStep<E>)step).getForegroundFor(value); if (!isSelected && fg != null) myTextLabel.setForeground(fg); if (!isSelected && bg != null) UIUtil.setBackgroundRecursively(myComponent, bg); if (bg != null && mySeparatorComponent.isVisible() && myCurrentIndex > 0) { E prevValue = list.getModel().getElementAt(myCurrentIndex - 1); // separator between 2 colored items shall get color too if (Comparing.equal(bg, ((BaseListPopupStep<E>)step).getBackgroundFor(prevValue))) { myRendererComponent.setBackground(bg); } } } if (step.isMnemonicsNavigationEnabled()) { MnemonicNavigationFilter<Object> filter = step.getMnemonicNavigationFilter(); int pos = filter == null ? -1 : filter.getMnemonicPos(value); if (pos != -1) { String text = myTextLabel.getText(); text = text.substring(0, pos) + text.substring(pos + 1); myTextLabel.setText(text); myTextLabel.setDisplayedMnemonicIndex(pos); } } else { myTextLabel.setDisplayedMnemonicIndex(-1); } if (step.hasSubstep(value) && isSelectable) { myNextStepLabel.setVisible(true); myNextStepLabel.setIcon(isSelected ? AllIcons.Icons.Ide.NextStepInverted : AllIcons.Icons.Ide.NextStep); } else { myNextStepLabel.setVisible(false); } if (myShortcutLabel != null) { myShortcutLabel.setEnabled(isSelectable); myShortcutLabel.setText(""); if (value instanceof ShortcutProvider) { ShortcutSet set = ((ShortcutProvider)value).getShortcut(); if (set != null) { Shortcut shortcut = ArrayUtil.getFirstElement(set.getShortcuts()); if (shortcut != null) { myShortcutLabel.setText(" " + KeymapUtil.getShortcutText(shortcut)); } } } setSelected(myShortcutLabel, isSelected && isSelectable); myShortcutLabel.setForeground(isSelected && isSelectable ? UIManager.getColor("MenuItem.acceleratorSelectionForeground") : UIManager.getColor("MenuItem.acceleratorForeground")); } if (myValueLabel != null) { myValueLabel.setText(step instanceof ListPopupStepEx<?> ? ((ListPopupStepEx<E>)step).getValueFor(value) : null); setSelected(myValueLabel, isSelected && isSelectable); } } }
EA-237410 (plugin) - IAE: UIUtil.$$$reportNull$$$ GitOrigin-RevId: 3375404b6d76665f32e1192e4af0b9375262e035
platform/platform-impl/src/com/intellij/ui/popup/list/PopupListElementRenderer.java
EA-237410 (plugin) - IAE: UIUtil.$$$reportNull$$$
<ide><path>platform/platform-impl/src/com/intellij/ui/popup/list/PopupListElementRenderer.java <ide> public class PopupListElementRenderer<E> extends GroupedItemsListRenderer<E> { <ide> protected final ListPopupImpl myPopup; <ide> private JLabel myShortcutLabel; <del> private JLabel myValueLabel; <add> private @Nullable JLabel myValueLabel; <ide> <ide> public PopupListElementRenderer(final ListPopupImpl aPopup) { <ide> super(new ListItemDescriptorAdapter<E>() { <ide> <ide> setSelected(myComponent, isSelected && isSelectable); <ide> setSelected(myTextLabel, isSelected && isSelectable); <del> setSelected(myValueLabel, isSelected && isSelectable); <ide> setSelected(myNextStepLabel, isSelected && isSelectable); <ide> <ide> if (step instanceof BaseListPopupStep) {
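The commit above (EA-237410) fixes an IllegalArgumentException raised by a generated null check: myValueLabel can legitimately be null, so the field gains a @Nullable annotation and the unguarded setSelected(myValueLabel, ...) call is removed, leaving only the call that already sits behind an explicit null check. A hedged, generic Java sketch of the pattern follows; this is not JetBrains code and all names are invented.

```java
import javax.swing.JLabel;

// Illustrative only: a renderer whose optional label may never be created.
class OptionalLabelRenderer {

    // Conceptually @Nullable: a subclass that builds its own item
    // component may leave this field unset.
    private JLabel valueLabel;

    void customize(boolean selected) {
        // Every use of the possibly-null field is guarded, so methods
        // with a non-null parameter contract never receive null.
        if (valueLabel != null) {
            valueLabel.setEnabled(selected);
        }
    }
}
```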
JavaScript
mit
35b3372bad7fb3085c42ea311f72e18ec0c86714
0
Putaitu/hashbrown-cms,Putaitu/hashbrown-cms,Putaitu/hashbrown-cms
'use strict'; const Yaml = require('./lib/yamljs/Yaml'); class JekyllProcessor extends HashBrown.Models.Processor { // Getters static get name() { return 'Jekyll'; } static get alias() { return 'jekyll'; } /** * Compiles content for Jekyll * * @param {Content} content * @param {String} language * * @returns {Promise} Result */ process(content, language) { checkParam(content, 'content', HashBrown.Models.Content); checkParam(language, 'language', String); let properties = content.getLocalizedProperties(language); let meta = content.getMeta(); if(!properties) { return Promise.reject(new Error('No properties for content "' + content.id + '" with language "' + language + '"')); } debug.log('Processing "' + properties.title + '" for Jekyll...', this); let createdBy; let updatedBy; // Get created by user return HashBrown.Helpers.UserHelper.getUserById(meta.createdBy) .then((user) => { createdBy = user; return HashBrown.Helpers.UserHelper.getUserById(meta.updatedBy); }) // Get updated by user .then((user) => { updatedBy = user; // We'll have to allow unknown authors, as they could disappear between backups if(!createdBy) { createdBy = { fullName: 'Unknown', username: 'unknown' }; } if(!updatedBy) { updatedBy = { fullName: 'Unknown', username: 'unknown' }; } // Format date string let dateString = ''; dateString += meta.createDate.getFullYear() + '-'; dateString += (meta.createDate.getMonth() + 1) + '-'; dateString += meta.createDate.getDate(); // Add meta data to the properties properties.meta = { id: meta.id, parentId: meta.parentId, language: language }; // Date and author go in as main properties in Jekyll, not as meta properties.date = dateString; properties.author = updatedBy.fullName || updatedBy.username || createdBy.fullName || createdBy.username; // Remap "url" to "permalink" if(properties.url) { properties.permalink = properties.url; delete properties.url; } // Remap "template" to "layout" if(properties.template) { properties.layout = properties.template; delete properties.template; } let frontMatter = ''; frontMatter += '---\n'; frontMatter += Yaml.stringify(properties, 50); frontMatter += '---'; return Promise.resolve(frontMatter); }); } } module.exports = JekyllProcessor;
plugins/jekyll/server/Processor.js
'use strict'; const Yaml = require('./lib/yamljs/Yaml'); class JekyllProcessor extends HashBrown.Models.Processor { // Getters static get name() { return 'Jekyll'; } static get alias() { return 'jekyll'; } /** * Compiles content for Jekyll * * @param {Content} content * @param {String} language * * @returns {Promise} Result */ process(content, language) { checkParam(content, 'content', HashBrown.Models.Content); checkParam(language, 'language', String); let properties = content.getLocalizedProperties(language); let meta = content.getMeta(); if(!properties) { return Promise.reject(new Error('No properties for content "' + content.id + '" with language "' + language + '"')); } debug.log('Processing "' + properties.title + '" for Jekyll...', this); let createdBy; let updatedBy; // Get created by user return HashBrown.Helpers.UserHelper.getUserById(meta.createdBy) .then((user) => { createdBy = user; return HashBrown.Helpers.UserHelper.getUserById(meta.updatedBy); }) // Get updated by user .then((user) => { updatedBy = user; // We'll have to allow unknown authors, as they could disappear between backups if(!createdBy) { createdBy = { fullName: 'Unknown', username: 'unknown' }; } if(!updatedBy) { updatedBy = { fullName: 'Unknown', username: 'unknown' }; } // Format date string let dateString = ''; dateString += meta.createDate.getFullYear() + '-'; dateString += (meta.createDate.getMonth() + 1) + '-'; dateString += meta.createDate.getDate(); // Add meta data to the properties properties.meta = { id: meta.id, parentId: meta.parentId, language: language }; // Date and author go in as main properties in Jekyll, not as meta properties.date = dateString; properties.author = updatedBy.fullName || upadtedBy.username || createdBy.fullName || createdBy.username; // Remap "url" to "permalink" if(properties.url) { properties.permalink = properties.url; delete properties.url; } // Remap "template" to "layout" if(properties.template) { properties.layout = properties.template; delete properties.template; } let frontMatter = ''; frontMatter += '---\n'; frontMatter += Yaml.stringify(properties, 50); frontMatter += '---'; return Promise.resolve(frontMatter); }); } } module.exports = JekyllProcessor;
Fixed typo in Jekyll plugin
plugins/jekyll/server/Processor.js
Fixed typo in Jekyll plugin
<ide><path>plugins/jekyll/server/Processor.js <ide> <ide> // Date and author go in as main properties in Jekyll, not as meta <ide> properties.date = dateString; <del> properties.author = updatedBy.fullName || upadtedBy.username || createdBy.fullName || createdBy.username; <add> properties.author = updatedBy.fullName || updatedBy.username || createdBy.fullName || createdBy.username; <ide> <ide> // Remap "url" to "permalink" <ide> if(properties.url) {
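The one-character fix above repairs a fallback chain: `updatedBy.fullName || updatedBy.username || createdBy.fullName || createdBy.username` picks the first truthy value, and the misspelled `upadtedBy` made the second link throw a ReferenceError whenever `fullName` was missing. As a sketch only, with a hypothetical `firstNonEmpty` helper that is not part of the plugin, the same idea in Java looks like this:

```java
import java.util.Arrays;

// Hypothetical helper mirroring the JavaScript `a || b || c` idiom, which
// skips null and empty strings before settling on a default.
final class AuthorNames {

    static String firstNonEmpty(String... candidates) {
        return Arrays.stream(candidates)
                .filter(s -> s != null && !s.isEmpty())
                .findFirst()
                .orElse("Unknown");
    }

    public static void main(String[] args) {
        // Falls through to the second candidate, just like the fixed chain.
        System.out.println(firstNonEmpty(null, "jdoe", "John Doe")); // jdoe
    }
}
```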
JavaScript
agpl-3.0
9a8f5935b808191637fef3199728794f7d1d2d27
0
superdesk/Live-Blog,superdesk/Live-Blog,superdesk/Live-Blog,vladnicoara/SDLive-Blog,vladnicoara/SDLive-Blog,superdesk/Live-Blog,vladnicoara/SDLive-Blog
define([ 'providers', 'jquery', 'gizmo/superdesk', config.guiJs('livedesk', 'action'), 'jquery/tmpl', 'jqueryui/draggable', 'providers/comments/adaptor', config.guiJs('livedesk', 'providers-templates'), 'tmpl!livedesk>providers/comments', 'tmpl!livedesk>items/item', 'tmpl!livedesk>items/sources/comments', 'tmpl!livedesk>items/implementors/sources/base', 'tmpl!livedesk>items/implementors/sources/comments', 'tmpl!livedesk>providers/no-results', 'tmpl!livedesk>providers/generic-error', 'tmpl!livedesk>providers/load-more', 'tmpl!livedesk>providers/loading' ], function( providers, $, Gizmo, BlogAction) { $.extend(providers.comments, { blogId: 0, data: [], topIds: 0, minId: Infinity, interval: 20000, keyword: '', extraInfos: 0, total: 0, init: function(blogUrl){ var self = this; this.adaptor.init(); self.data.comments = []; $.ajax({ url: typeof blogUrl === 'string' ? blogUrl : blogUrl[0] }).done(function(data){ self.blogId = data.Id; self.render(); }); }, render: function(){ var self = this; self.el.tmpl('livedesk>providers/comments', {}, function(){ //handle keyword search self.el.on('keyup','.comments-search-query', function( e ){ var keycode = e.keyCode; var keyword = $('.comments-search-query').val(); if ( keycode == 13 ) { self.keyword = keyword; self.getComments({cId: -1, clearResults: true}); } }); //show hidden self.el.on('click','[data-type="hidden-toggle"]', function( e ){ if ( $(this).attr('data-active') == 'false' ) { $(this).attr('data-active', 'true'); $(this).css('background-color', '#DDDDDD'); //show hidden comments self.getComments({cId: -1, clearResults: true}); } else { $(this).attr('data-active', 'false'); $(this).css('background-color', '#f2f2f2'); //hide hidden comments self.getComments({cId: -1, clearResults: true}); } }); //temp remove the autoupdate var refInt = window.setInterval(function(){ self.refreshComments(); },self.interval); self.getComments({}); }); //dynamically get size of header and set top space for list var top_space = $('#comments .sms-header').outerHeight() + 20; $('.comments-results-holder').css({'top': top_space}); }, refreshComments: function() { var self = this; var cId = self.topIds; //skip autoupdate for hidden items if ( $( document ).find('a[data-type="hidden-toggle"]').attr('data-active') != 'true' ) { self.getComments({cId: cId, prepend: true}); } }, getComments: function(paramObject) { var self = this; var dsd = { offset: 0, limit: 5, cId: -1, query: '', forceAppend: false, prepend: false, pagination: false, keyword: '', clearResults: false } var sd = $.extend({}, dsd, paramObject); var url = new Gizmo.Url('LiveDesk/Blog/' + self.blogId + '/Post/Comment/'); var keywordSearch = ''; if ( self.keyword.length > 0 ) { keywordSearch = '&content.ilike=' + encodeURIComponent('%' + self.keyword + '%') } var cIdText = ''; var limitText = '&limit=' + sd.limit; if ( sd.cId != -1 ) { cIdText = '&cId.since=' + sd.cId; limitText = ''; } else { self.topIds = -1; self.maxId = 0; } if ( sd.pagination ) { cIdText = ''; } var deletedText = ''; if ( $( document ).find('a[data-type="hidden-toggle"]').attr('data-active') == 'true' ) { var deletedText = '&isDeleted=true'; } myUrl = url.get() + '?X-Filter=*&offset=' + sd.offset + limitText + cIdText + keywordSearch + deletedText + '&desc=id'; $.ajax({ url: myUrl, dataType: "json" }).done(function(xdata){ var data = xdata; if ( sd.cId == -1 ) { self.total = data.total; self.topIds = data.lastCId; } var comments = data.PostList; //clean the results if ( sd.clearResults) { self.data.comments = []; 
$('.comments-list').html(''); $('.comments-load-more-holder').css('display','none').html(''); } //prepare the data for dragging to timeline posts = []; for ( var i = 0; i < comments.length; i++ ) { var item = comments[i]; item['message'] = item.Content; posts.push({ Meta: item }); self.data.comments[item.Id] = item; //increase the 'cId' if necessary if ( parseInt(self.topIds) < parseInt(item.CId) ) { self.topIds = parseInt(item.CId); } if ( sd.pagination ) { if ( parseInt(self.minId) > parseInt(item.CId) ) { self.minId = parseInt(item.Id); } } } var newPosts = []; //go throught the comments and see if they are updates for what we already have for ( var i = 0; i < posts.length; i++ ) { var cmnt = posts[ i ]; var updated = false; var Id = cmnt.Meta.Id; var unhideTxt = _("Unhide"); var hideTxt = _("Hide"); $('.comments-list').find('li.commentpost').each(function(){ if ( Id == $(this).attr('data-id') ) { //we need to update the item if ( cmnt.Meta.IsPublished == "True" ) { //$( this ).attr('data-hidden', 'true').css('display', 'none'); $( this ).remove(); self.total -- ; self.extraItems -- ; } else { if ( cmnt.Meta.DeletedOn ) { //got deleted $( this ).attr('data-hidden', 'true').css('display', 'none'); $( this ).find('a[href="#toggle-post"]').attr('data-action', 'unhide').text(unhideTxt); self.total -- ; self.extraItems -- ; } else { $( this ).attr('data-hidden', 'false').css('display', 'block'); $( this ).find('a[href="#toggle-post"]').attr('data-action', 'hide').text(hideTxt); self.total ++ ; self.extraItems ++ ; } } updated = true; } }); if ( ( ! updated && ! cmnt.Meta.PublishedOn && ! cmnt.Meta.DeletedOn && self.minId > cmnt.Meta.Id ) || sd.cId == -1 ) { newPosts.push(cmnt); } } posts = newPosts; if ( sd.cId == -1 || sd.forceAppend == true ) { self.extraItems = 0; } else { self.extraItems += newPosts.length; } if ( posts.length > 0 ) { //hide alert with no results message $('.comments-list div.alert').css('display', 'none'); $.tmpl('livedesk>items/item', { Post: posts, Base: 'implementors/sources/comments', Item: 'sources/comments' }, function(e, o) { if ( sd.prepend ) { el = $('.comments-list').prepend(o).find('.commentpost'); } else { el = $('.comments-list').append(o).find('.commentpost'); } el.on('click', 'a[href="#toggle-post"]', function(e){ e.preventDefault(); var cmntId = $(this).attr('data-id'); var action = $(this).attr('data-action'); if ( action == 'hide' ) { self.hideComment(cmntId); } else { self.unhideComment(cmntId); } }); BlogAction.get('modules.livedesk.blog-post-publish').done(function(action) { el.draggable( { revert: 'invalid', //containment:'document', helper: 'clone', appendTo: 'body', zIndex: 2700, clone: true, start: function(evt, ui) { item = $(evt.currentTarget); $(ui.helper).css('width', item.width()); var itemNo = $(this).attr('data-id'); $(this).data('post', itemNo ); } }); }).fail(function(){ el.removeClass('draggable').css('cursor',''); }); if ( ( sd.offset + sd.limit + self.extraItems ) < self.total ) { $('.comments-load-more-holder').css('display','block').tmpl('livedesk>providers/load-more', {name : 'comments-load-more'}, function(){ $(this).find('[name="comments-load-more"]').on('click', function(){ var offset = sd.offset + sd.limit + self.extraItems; self.getComments( $.extend({}, sd, {offset: offset, forceAppend: true, clearResults: false, pagination: true}) ); }); }); } else { $('.comments-load-more-holder').css('display','none').html(''); } }); } else { //autoupdates may return 0 results and then we don't want to show 'no results message' if ( ! 
sd.prepend ) { $.tmpl('livedesk>providers/no-results', {}, function(e,o) { $('.comments-list').html(o); }); } } }); }, toggleHidden: function(aspect) { if ( $( document ).find('a[data-type="hidden-toggle"]').attr('data-active') == 'false' ) { aspect = 'positive'; } else { aspect = 'negative'; } if ( aspect == "negative" ) { $( document ).find('li.commentpost[data-hidden="false"]').css('display', 'none'); $( document ).find('li.commentpost[data-hidden="true"]').css('display', 'block'); } else { $( document ).find('li.commentpost[data-hidden="true"]').css('display', 'none'); $( document ).find('li.commentpost[data-hidden="false"]').css('display', 'block'); } }, hideComment: function(cmntId) { var self = this; var msg = _("Are you sure you want to hide the comment?"); var newText = _("Unhide"); if ( confirm( msg ) ) { var url = new Gizmo.Url('LiveDesk/Blog/' + self.blogId + '/Post/' + cmntId + '/Hide'); $.post( url.get() , function( data ) { self.extraItems -- ; $( document ).find('li.commentpost[data-id="' + cmntId + '"]').remove(); // $( document ).find('li.commentpost[data-id="' + cmntId + '"]').attr('data-hidden', 'true').css('display', 'none'); // $( document ).find('li.commentpost a[href="#toggle-post"][data-id="' + cmntId + '"]').attr('data-action', 'unhide').text(newText); }); } }, unhideComment: function(cmntId) { var self = this; var msg = _("Are you sure you want to un-hide the comment?"); var newText = _("Hide"); if ( confirm( msg ) ) { var url = new Gizmo.Url('LiveDesk/Blog/' + self.blogId + '/Post/' + cmntId + '/Unhide'); $.post( url.get() , function( data ) { $( document ).find('li.commentpost[data-id="' + cmntId + '"]').attr('data-hidden', 'false').css('display', 'none'); $( document ).find('a[href="#toggle-post"][data-id="' + cmntId + '"]').attr('data-action', 'hide').text(newText); }); } } }); return providers; });
plugins/livedesk/gui-resources/scripts/js/providers/comments.js
define([ 'providers', 'jquery', 'gizmo/superdesk', config.guiJs('livedesk', 'action'), 'jquery/tmpl', 'jqueryui/draggable', 'providers/comments/adaptor', config.guiJs('livedesk', 'providers-templates'), 'tmpl!livedesk>providers/comments', 'tmpl!livedesk>items/item', 'tmpl!livedesk>items/sources/comments', 'tmpl!livedesk>items/implementors/sources/base', 'tmpl!livedesk>items/implementors/sources/comments', 'tmpl!livedesk>providers/no-results', 'tmpl!livedesk>providers/generic-error', 'tmpl!livedesk>providers/load-more', 'tmpl!livedesk>providers/loading' ], function( providers, $, Gizmo, BlogAction) { $.extend(providers.comments, { blogId: 0, data: [], topIds: 0, minId: 999999999999, interval: 20000, keyword: '', extraInfos: 0, total: 0, init: function(blogUrl){ var self = this; this.adaptor.init(); self.data.comments = []; $.ajax({ url: typeof blogUrl === 'string' ? blogUrl : blogUrl[0] }).done(function(data){ self.blogId = data.Id; self.render(); }); }, render: function(){ var self = this; self.el.tmpl('livedesk>providers/comments', {}, function(){ //handle keyword search self.el.on('keyup','.comments-search-query', function( e ){ var keycode = e.keyCode; var keyword = $('.comments-search-query').val(); if ( keycode == 13 ) { self.keyword = keyword; self.getComments({cId: -1, clearResults: true}); } }); //show hidden self.el.on('click','[data-type="hidden-toggle"]', function( e ){ if ( $(this).attr('data-active') == 'false' ) { $(this).attr('data-active', 'true'); $(this).css('background-color', '#DDDDDD'); //show hidden comments self.getComments({cId: -1, clearResults: true}); } else { $(this).attr('data-active', 'false'); $(this).css('background-color', '#f2f2f2'); //hide hidden comments self.getComments({cId: -1, clearResults: true}); } }); //temp remove the autoupdate var refInt = window.setInterval(function(){ self.refreshComments(); },self.interval); self.getComments({}); }); //dynamically get size of header and set top space for list var top_space = $('#comments .sms-header').outerHeight() + 20; $('.comments-results-holder').css({'top': top_space}); }, refreshComments: function() { var self = this; var cId = self.topIds; //skip autoupdate for hidden items if ( $( document ).find('a[data-type="hidden-toggle"]').attr('data-active') != 'true' ) { self.getComments({cId: cId, prepend: true}); } }, getComments: function(paramObject) { var self = this; var dsd = { offset: 0, limit: 5, cId: -1, query: '', forceAppend: false, prepend: false, pagination: false, keyword: '', clearResults: false } var sd = $.extend({}, dsd, paramObject); var url = new Gizmo.Url('LiveDesk/Blog/' + self.blogId + '/Post/Comment/'); var keywordSearch = ''; if ( self.keyword.length > 0 ) { keywordSearch = '&content.ilike=' + encodeURIComponent('%' + self.keyword + '%') } var cIdText = ''; var limitText = '&limit=' + sd.limit; if ( sd.cId != -1 ) { cIdText = '&cId.since=' + sd.cId; limitText = ''; } else { self.topIds = -1; self.maxId = 0; } if ( sd.pagination ) { cIdText = ''; } var deletedText = ''; if ( $( document ).find('a[data-type="hidden-toggle"]').attr('data-active') == 'true' ) { var deletedText = '&isDeleted=true'; } myUrl = url.get() + '?X-Filter=*&offset=' + sd.offset + limitText + cIdText + keywordSearch + deletedText + '&desc=id'; $.ajax({ url: myUrl, dataType: "json" }).done(function(xdata){ var data = xdata; if ( sd.cId == -1 ) { self.total = data.total; self.topIds = data.lastCId; } var comments = data.PostList; //clean the results if ( sd.clearResults) { self.data.comments = []; 
$('.comments-list').html(''); $('.comments-load-more-holder').css('display','none').html(''); } //prepare the data for dragging to timeline posts = []; for ( var i = 0; i < comments.length; i++ ) { var item = comments[i]; item['message'] = item.Content; posts.push({ Meta: item }); self.data.comments[item.Id] = item; //increase the 'cId' if necessary if ( parseInt(self.topIds) < parseInt(item.CId) ) { self.topIds = parseInt(item.CId); } if ( sd.pagination ) { if ( parseInt(self.minId) > parseInt(item.CId) ) { self.minId = parseInt(item.Id); } } } var newPosts = []; //go throught the comments and see if they are updates for what we already have for ( var i = 0; i < posts.length; i++ ) { var cmnt = posts[ i ]; var updated = false; var Id = cmnt.Meta.Id; var unhideTxt = _("Unhide"); var hideTxt = _("Hide"); $('.comments-list').find('li.commentpost').each(function(){ if ( Id == $(this).attr('data-id') ) { //we need to update the item if ( cmnt.Meta.IsPublished == "True" ) { //$( this ).attr('data-hidden', 'true').css('display', 'none'); $( this ).remove(); self.total -- ; self.extraItems -- ; } else { if ( cmnt.Meta.DeletedOn ) { //got deleted $( this ).attr('data-hidden', 'true').css('display', 'none'); $( this ).find('a[href="#toggle-post"]').attr('data-action', 'unhide').text(unhideTxt); self.total -- ; self.extraItems -- ; } else { $( this ).attr('data-hidden', 'false').css('display', 'block'); $( this ).find('a[href="#toggle-post"]').attr('data-action', 'hide').text(hideTxt); self.total ++ ; self.extraItems ++ ; } } updated = true; } }); if ( ( ! updated && ! cmnt.Meta.PublishedOn && ! cmnt.Meta.DeletedOn && self.minId > cmnt.Meta.Id ) || sd.cId == -1 ) { newPosts.push(cmnt); } } posts = newPosts; if ( sd.cId == -1 || sd.forceAppend == true ) { self.extraItems = 0; } else { self.extraItems += newPosts.length; } if ( posts.length > 0 ) { //hide alert with no results message $('.comments-list div.alert').css('display', 'none'); $.tmpl('livedesk>items/item', { Post: posts, Base: 'implementors/sources/comments', Item: 'sources/comments' }, function(e, o) { if ( sd.prepend ) { el = $('.comments-list').prepend(o).find('.commentpost'); } else { el = $('.comments-list').append(o).find('.commentpost'); } el.on('click', 'a[href="#toggle-post"]', function(e){ e.preventDefault(); var cmntId = $(this).attr('data-id'); var action = $(this).attr('data-action'); if ( action == 'hide' ) { self.hideComment(cmntId); } else { self.unhideComment(cmntId); } }); BlogAction.get('modules.livedesk.blog-post-publish').done(function(action) { el.draggable( { revert: 'invalid', //containment:'document', helper: 'clone', appendTo: 'body', zIndex: 2700, clone: true, start: function(evt, ui) { item = $(evt.currentTarget); $(ui.helper).css('width', item.width()); var itemNo = $(this).attr('data-id'); $(this).data('post', itemNo ); } }); }).fail(function(){ el.removeClass('draggable').css('cursor',''); }); if ( ( sd.offset + sd.limit + self.extraItems ) < self.total ) { $('.comments-load-more-holder').css('display','block').tmpl('livedesk>providers/load-more', {name : 'comments-load-more'}, function(){ $(this).find('[name="comments-load-more"]').on('click', function(){ var offset = sd.offset + sd.limit + self.extraItems; self.getComments( $.extend({}, sd, {offset: offset, forceAppend: true, clearResults: false, pagination: true}) ); }); }); } else { $('.comments-load-more-holder').css('display','none').html(''); } }); } else { //autoupdates may return 0 results and then we don't want to show 'no results message' if ( ! 
sd.prepend ) { $.tmpl('livedesk>providers/no-results', {}, function(e,o) { $('.comments-list').html(o); }); } } }); }, toggleHidden: function(aspect) { if ( $( document ).find('a[data-type="hidden-toggle"]').attr('data-active') == 'false' ) { aspect = 'positive'; } else { aspect = 'negative'; } if ( aspect == "negative" ) { $( document ).find('li.commentpost[data-hidden="false"]').css('display', 'none'); $( document ).find('li.commentpost[data-hidden="true"]').css('display', 'block'); } else { $( document ).find('li.commentpost[data-hidden="true"]').css('display', 'none'); $( document ).find('li.commentpost[data-hidden="false"]').css('display', 'block'); } }, hideComment: function(cmntId) { var self = this; var msg = _("Are you sure you want to hide the comment?"); var newText = _("Unhide"); if ( confirm( msg ) ) { var url = new Gizmo.Url('LiveDesk/Blog/' + self.blogId + '/Post/' + cmntId + '/Hide'); $.post( url.get() , function( data ) { self.extraItems -- ; $( document ).find('li.commentpost[data-id="' + cmntId + '"]').remove(); // $( document ).find('li.commentpost[data-id="' + cmntId + '"]').attr('data-hidden', 'true').css('display', 'none'); // $( document ).find('li.commentpost a[href="#toggle-post"][data-id="' + cmntId + '"]').attr('data-action', 'unhide').text(newText); }); } }, unhideComment: function(cmntId) { var self = this; var msg = _("Are you sure you want to un-hide the comment?"); var newText = _("Hide"); if ( confirm( msg ) ) { var url = new Gizmo.Url('LiveDesk/Blog/' + self.blogId + '/Post/' + cmntId + '/Unhide'); $.post( url.get() , function( data ) { $( document ).find('li.commentpost[data-id="' + cmntId + '"]').attr('data-hidden', 'false').css('display', 'none'); $( document ).find('a[href="#toggle-post"][data-id="' + cmntId + '"]').attr('data-action', 'hide').text(newText); }); } } }); return providers; });
LB-1160: As an editor I want to hide comments Minor fix
plugins/livedesk/gui-resources/scripts/js/providers/comments.js
LB-1160: As an editor I want to hide comments
<ide><path>plugins/livedesk/gui-resources/scripts/js/providers/comments.js <ide> blogId: 0, <ide> data: [], <ide> topIds: 0, <del> minId: 999999999999, <add> minId: Infinity, <ide> interval: 20000, <ide> keyword: '', <ide> extraInfos: 0,
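Besides the hide/unhide work, the diff above replaces the magic seed `999999999999` with `Infinity` for `minId`. A running minimum must be seeded with a value guaranteed to compare greater than every possible input; with a magic number, any id above it would silently be ignored. A minimal Java analogue of the same idea follows (illustrative only; the class name is invented), using `Long.MAX_VALUE` as the sentinel:

```java
// Illustrative running-minimum tracker; Long.MAX_VALUE plays the role of
// JavaScript's Infinity as a seed that every real id compares below.
final class RunningMin {

    private long min = Long.MAX_VALUE;

    void offer(long id) {
        if (id < min) {
            min = id;
        }
    }

    /** True once at least one id has been offered. */
    boolean seenAny() {
        return min != Long.MAX_VALUE;
    }

    long get() {
        if (!seenAny()) {
            throw new IllegalStateException("no ids offered yet");
        }
        return min;
    }
}
```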
Java
apache-2.0
3da5c3a9b3128c22191d7ea3034a772feb244121
0
APriestman/autopsy,esaunders/autopsy,rcordovano/autopsy,esaunders/autopsy,wschaeferB/autopsy,narfindustries/autopsy,esaunders/autopsy,APriestman/autopsy,dgrove727/autopsy,wschaeferB/autopsy,narfindustries/autopsy,dgrove727/autopsy,millmanorama/autopsy,esaunders/autopsy,millmanorama/autopsy,rcordovano/autopsy,rcordovano/autopsy,wschaeferB/autopsy,rcordovano/autopsy,APriestman/autopsy,millmanorama/autopsy,millmanorama/autopsy,APriestman/autopsy,esaunders/autopsy,APriestman/autopsy,dgrove727/autopsy,narfindustries/autopsy,rcordovano/autopsy,APriestman/autopsy,APriestman/autopsy,wschaeferB/autopsy,rcordovano/autopsy,wschaeferB/autopsy
/* * Autopsy Forensic Browser * * Copyright 2012-16 Basis Technology Corp. * * Copyright 2012 42six Solutions. * Contact: aebadirad <at> 42six <dot> com * Project Contact/Architect: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.coreutils; import com.google.common.collect.ImmutableSortedSet; import com.google.common.io.Files; import java.awt.Image; import java.awt.image.BufferedImage; import java.io.BufferedInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.file.Paths; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import static java.util.Objects.nonNull; import java.util.SortedSet; import java.util.TreeSet; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.logging.Level; import javafx.concurrent.Task; import javafx.embed.swing.SwingFXUtils; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.imageio.IIOException; import javax.imageio.ImageIO; import javax.imageio.ImageReadParam; import javax.imageio.ImageReader; import javax.imageio.event.IIOReadProgressListener; import javax.imageio.stream.ImageInputStream; import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.opencv.core.Core; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.corelibs.ScalrWrapper; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector.FileTypeDetectorInitException; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.ReadContentInputStream; import org.sleuthkit.datamodel.TskCoreException; /** * Utilities for working with image files and creating thumbnails. Re-uses * thumbnails by storing them in the case's cache directory. 
*/ public class ImageUtils { private static final Logger LOGGER = Logger.getLogger(ImageUtils.class.getName()); /** * save thumbnails to disk as this format */ private static final String FORMAT = "png"; //NON-NLS public static final int ICON_SIZE_SMALL = 50; public static final int ICON_SIZE_MEDIUM = 100; public static final int ICON_SIZE_LARGE = 200; private static final BufferedImage DEFAULT_THUMBNAIL; private static final List<String> GIF_EXTENSION_LIST = Arrays.asList("gif"); private static final SortedSet<String> GIF_MIME_SET = ImmutableSortedSet.copyOf(new String[]{"image/gif"}); private static final List<String> SUPPORTED_IMAGE_EXTENSIONS = new ArrayList<>(); private static final SortedSet<String> SUPPORTED_IMAGE_MIME_TYPES; private static final boolean openCVLoaded; static { ImageIO.scanForPlugins(); BufferedImage tempImage; try { tempImage = ImageIO.read(ImageUtils.class.getResourceAsStream("/org/sleuthkit/autopsy/images/file-icon.png"));//NON-NLS } catch (IOException ex) { LOGGER.log(Level.SEVERE, "Failed to load default icon.", ex); //NON-NLS tempImage = null; } DEFAULT_THUMBNAIL = tempImage; //load opencv libraries boolean openCVLoadedTemp; try { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); if (System.getProperty("os.arch").equals("amd64") || System.getProperty("os.arch").equals("x86_64")) { //NON-NLS System.loadLibrary("opencv_ffmpeg248_64"); //NON-NLS } else { System.loadLibrary("opencv_ffmpeg248"); //NON-NLS } openCVLoadedTemp = true; } catch (UnsatisfiedLinkError e) { openCVLoadedTemp = false; LOGGER.log(Level.SEVERE, "OpenCV Native code library failed to load", e); //NON-NLS //TODO: show warning bubble } openCVLoaded = openCVLoadedTemp; SUPPORTED_IMAGE_EXTENSIONS.addAll(Arrays.asList(ImageIO.getReaderFileSuffixes())); SUPPORTED_IMAGE_EXTENSIONS.add("tec"); // Add JFIF .tec files SUPPORTED_IMAGE_MIME_TYPES = new TreeSet<>(Arrays.asList(ImageIO.getReaderMIMETypes())); /* * special cases and variants that we support, but don't get registered * with ImageIO automatically */ SUPPORTED_IMAGE_MIME_TYPES.addAll(Arrays.asList( "image/x-rgb", //NON-NLS "image/x-ms-bmp", //NON-NLS "image/x-portable-graymap", //NON-NLS "image/x-portable-bitmap", //NON-NLS "application/x-123")); //TODO: is this correct? -jm //NON-NLS SUPPORTED_IMAGE_MIME_TYPES.removeIf("application/octet-stream"::equals); //NON-NLS } /** * initialized lazily */ private static FileTypeDetector fileTypeDetector; /** * thread that saves generated thumbnails to disk in the background */ private static final Executor imageSaver = Executors.newSingleThreadExecutor(new BasicThreadFactory.Builder() .namingPattern("thumbnail-saver-%d").build()); //NON-NLS public static List<String> getSupportedImageExtensions() { return Collections.unmodifiableList(SUPPORTED_IMAGE_EXTENSIONS); } public static SortedSet<String> getSupportedImageMimeTypes() { return Collections.unmodifiableSortedSet(SUPPORTED_IMAGE_MIME_TYPES); } /** * Get the default thumbnail, which is the icon for a file. Used when we can * not generate a content based thumbnail. * * @return the default thumbnail */ public static Image getDefaultThumbnail() { return DEFAULT_THUMBNAIL; } /** * Can a thumbnail be generated for the content? * * Although this method accepts Content, it always returns false for objects * that are not instances of AbstractFile. * * @param content A content object to test for thumbnail support. * * @return true if a thumbnail can be generated for the given content. 
*/ public static boolean thumbnailSupported(Content content) { if (!(content instanceof AbstractFile)) { return false; } AbstractFile file = (AbstractFile) content; return VideoUtils.isVideoThumbnailSupported(file) || isImageThumbnailSupported(file); } /** * Is the file an image that we can read and generate a thumbnail for? * * @param file the AbstractFile to test * * @return true if the file is an image we can read and generate thumbnail * for. */ public static boolean isImageThumbnailSupported(AbstractFile file) { return isMediaThumbnailSupported(file, "image/", SUPPORTED_IMAGE_MIME_TYPES, SUPPORTED_IMAGE_EXTENSIONS) || hasImageFileHeader(file);//NON-NLS } /** * Checks the MIME type and/or extension of a file to determine whether it * is a GIF. * * @param file the AbstractFile to test * * @return true if the file is a gif */ public static boolean isGIF(AbstractFile file) { return isMediaThumbnailSupported(file, null, GIF_MIME_SET, GIF_EXTENSION_LIST); } /** * Check if making a thumbnail for the given file is supported by checking * its extension and/or MIME type against the supplied collections. * * //TODO: this should move to a better place. Should ImageUtils and * VideoUtils both implement/extend some base interface/abstract class. That * would be the natural place to put this. * * @param file the AbstractFile to test * @param mimeTypePrefix a MIME 'top-level type name' such as "image/", * including the "/". In addition to the list of * supported MIME types, any type that starts with * this prefix will be regarded as supported * @param supportedMimeTypes a collection of mimetypes that are supported * @param supportedExtension a collection of extensions that are supported * * @return true if a thumbnail can be generated for the given file based on * the given MIME type prefix and lists of supported MIME types and * extensions */ static boolean isMediaThumbnailSupported(AbstractFile file, String mimeTypePrefix, final Collection<String> supportedMimeTypes, final List<String> supportedExtension) { if (false == file.isFile() || file.getSize() <= 0) { return false; } String extension = file.getNameExtension(); if (StringUtils.isNotBlank(extension) && supportedExtension.contains(extension)) { return true; } else { try { String mimeType = getFileTypeDetector().detect(file); if (StringUtils.isNotBlank(mimeTypePrefix) && mimeType.startsWith(mimeTypePrefix)) { return true; } return supportedMimeTypes.contains(mimeType); } catch (FileTypeDetectorInitException | TskCoreException ex) { LOGGER.log(Level.SEVERE, "Error determining MIME type of " + getContentPathSafe(file), ex);//NON-NLS return false; } } } /** * //TODO: AUT-2057 this FileTypeDetector needs to be recreated when the * user adds new user defined file types. * * get a FileTypeDetector * * @return a FileTypeDetector * * @throws FileTypeDetectorInitException if initializing the * FileTypeDetector failed. */ synchronized private static FileTypeDetector getFileTypeDetector() throws FileTypeDetector.FileTypeDetectorInitException { if (fileTypeDetector == null) { fileTypeDetector = new FileTypeDetector(); } return fileTypeDetector; } /** * Get a thumbnail of a specified size for the given image. Generates the * thumbnail if it is not already cached. * * @param content the content to generate a thumbnail for * @param iconSize the size (one side of a square) in pixels to generate * * @return a thumbnail for the given image or a default one if there was a * problem making a thumbnail. 
*/ public static BufferedImage getThumbnail(Content content, int iconSize) { if (content instanceof AbstractFile) { AbstractFile file = (AbstractFile) content; Task<javafx.scene.image.Image> thumbnailTask = newGetThumbnailTask(file, iconSize, true); thumbnailTask.run(); try { return SwingFXUtils.fromFXImage(thumbnailTask.get(), null); } catch (InterruptedException | ExecutionException ex) { LOGGER.log(Level.WARNING, "Failed to get thumbnail for {0}: " + ex.toString(), getContentPathSafe(content)); //NON-NLS return DEFAULT_THUMBNAIL; } } else { return DEFAULT_THUMBNAIL; } } /** * * Get a thumbnail of a specified size for the given image. Generates the * thumbnail if it is not already cached. * * @param content the content to generate a thumbnail for * @param iconSize the size (one side of a square) in pixels to generate * * @return File object for cached image. Is guaranteed to exist, as long as * there was not an error generating or saving the thumbnail. */ @Nullable public static File getCachedThumbnailFile(Content content, int iconSize) { getThumbnail(content, iconSize); return getCachedThumbnailLocation(content.getId()); } /** * Get the location of the cached thumbnail for a file with the given fileID * as a java {@link File}. The returned File may not exist on disk yet. * * @param fileID the fileID to get the cached thumbnail location for * * @return a File object representing the location of the cached thumbnail. * This file may not actually exist(yet). Returns null if there was * any problem getting the file, such as no case was open. */ private static File getCachedThumbnailLocation(long fileID) { try { String cacheDirectory = Case.getCurrentCase().getCacheDirectory(); return Paths.get(cacheDirectory, "thumbnails", fileID + ".png").toFile(); //NON-NLS } catch (IllegalStateException e) { LOGGER.log(Level.WARNING, "Could not get cached thumbnail location. No case is open."); //NON-NLS return null; } } /** * Do a direct check to see if the given file has an image file header. * NOTE: Currently only jpeg and png are supported. * * @param file the AbstractFile to check * * @return true if the given file has one of the supported image headers. */ public static boolean hasImageFileHeader(AbstractFile file) { return isJpegFileHeader(file) || isPngFileHeader(file); } /** * Check if the given file is a jpeg based on header. * * @param file the AbstractFile to check * * @return true if jpeg file, false otherwise */ public static boolean isJpegFileHeader(AbstractFile file) { if (file.getSize() < 100) { return false; } try { byte[] fileHeaderBuffer = readHeader(file, 2); /* * Check for the JPEG header. Since Java bytes are signed, we cast * them to an int first. 
*/ return (((fileHeaderBuffer[0] & 0xff) == 0xff) && ((fileHeaderBuffer[1] & 0xff) == 0xd8)); } catch (TskCoreException ex) { //ignore if can't read the first few bytes, not a JPEG return false; } } /** * Check if the given file is a JFIF based on header, but has a leading End * Of Image marker (0xFFD9) * * @param file the AbstractFile to check * * @return true if JFIF file, false otherwise */ public static boolean isJfifFileHeaderWithLeadingEOIMarker(AbstractFile file) { if (file.getSize() < 100) { return false; } try { byte[] fileHeaderBuffer = readHeader(file, 4); // Check for the JFIF header with leading EOI marker: 0xFFD9 followed by SOI marker: 0xFFD8 return (fileHeaderBuffer[0] == (byte) 0xFF && fileHeaderBuffer[1] == (byte) 0xD9 && fileHeaderBuffer[2] == (byte) 0xFF && fileHeaderBuffer[3] == (byte) 0xD8); } catch (TskCoreException ex) { //ignore if can't read the first few bytes, not a JPEG return false; } } /** * Check if the given file is a png based on header. * * @param file the AbstractFile to check * * @return true if png file, false otherwise */ public static boolean isPngFileHeader(AbstractFile file) { if (file.getSize() < 10) { return false; } try { byte[] fileHeaderBuffer = readHeader(file, 8); /* * Check for the png header. Since Java bytes are signed, we cast * them to an int first. */ return (((fileHeaderBuffer[1] & 0xff) == 0x50) && ((fileHeaderBuffer[2] & 0xff) == 0x4E) && ((fileHeaderBuffer[3] & 0xff) == 0x47) && ((fileHeaderBuffer[4] & 0xff) == 0x0D) && ((fileHeaderBuffer[5] & 0xff) == 0x0A) && ((fileHeaderBuffer[6] & 0xff) == 0x1A) && ((fileHeaderBuffer[7] & 0xff) == 0x0A)); } catch (TskCoreException ex) { //ignore if can't read the first few bytes, not an png return false; } } private static byte[] readHeader(AbstractFile file, int buffLength) throws TskCoreException { byte[] fileHeaderBuffer = new byte[buffLength]; int bytesRead = file.read(fileHeaderBuffer, 0, buffLength); if (bytesRead != buffLength) { //ignore if can't read the first few bytes, not an image throw new TskCoreException("Could not read " + buffLength + " bytes from " + file.getName());//NON-NLS } return fileHeaderBuffer; } /** * Get the width of the given image, in pixels. * * @param file * * @return the width in pixels * * @throws IOException If the file is not a supported image or the width * could not be determined. */ static public int getImageWidth(AbstractFile file) throws IOException { return getImageProperty(file, "ImageIO could not determine width of {0}: ", //NON-NLS imageReader -> imageReader.getWidth(0) ); } /** * Get the height of the given image,in pixels. * * @param file * * @return the height in pixels * * @throws IOException If the file is not a supported image or the height * could not be determined. */ static public int getImageHeight(AbstractFile file) throws IOException { return getImageProperty(file, "ImageIO could not determine height of {0}: ", //NON-NLS imageReader -> imageReader.getHeight(0) ); } /** * Functional interface for methods that extract a property out of an * ImageReader. Initially created to abstract over * {@link #getImageHeight(org.sleuthkit.datamodel.AbstractFile)} and * {@link #getImageWidth(org.sleuthkit.datamodel.AbstractFile)} * * @param <T> The type of the property. */ @FunctionalInterface private static interface PropertyExtractor<T> { public T extract(ImageReader reader) throws IOException; } /** * Private template method designed to be used as the implementation of * public methods that pull particular (usually meta-)data out of a image * file. 
* * @param <T> the type of the property to be retrieved. * @param file the file to extract the data from * @param errorTemplate a message template used to log errors. Should * take one parameter: the file's unique path or * name. * @param propertyExtractor an implementation of {@link PropertyExtractor} * used to retrieve the specific property. * * @return the the value of the property extracted by the given * propertyExtractor * * @throws IOException if there was a problem reading the property from the * file. * * @see PropertyExtractor * @see #getImageHeight(org.sleuthkit.datamodel.AbstractFile) */ private static <T> T getImageProperty(AbstractFile file, final String errorTemplate, PropertyExtractor<T> propertyExtractor) throws IOException { try (InputStream inputStream = new BufferedInputStream(new ReadContentInputStream(file));) { try (ImageInputStream input = ImageIO.createImageInputStream(inputStream)) { if (input == null) { IIOException iioException = new IIOException("Could not create ImageInputStream."); LOGGER.log(Level.WARNING, errorTemplate + iioException.toString(), getContentPathSafe(file)); throw iioException; } Iterator<ImageReader> readers = ImageIO.getImageReaders(input); if (readers.hasNext()) { ImageReader reader = readers.next(); reader.setInput(input); try { return propertyExtractor.extract(reader); } catch (IOException ex) { LOGGER.log(Level.WARNING, errorTemplate + ex.toString(), getContentPathSafe(file)); throw ex; } finally { reader.dispose(); } } else { IIOException iioException = new IIOException("No ImageReader found."); LOGGER.log(Level.WARNING, errorTemplate + iioException.toString(), getContentPathSafe(file)); throw iioException; } } } } /** * Create a new {@link Task} that will get a thumbnail for the given image * of the specified size. If a cached thumbnail is available it will be * returned as the result of the task, otherwise a new thumbnail will be * created and cached. * * Note: the returned task is suitable for running in a background thread, * but is not started automatically. Clients are responsible for running the * task, monitoring its progress, and using its result. * * @param file The file to create a thumbnail for. * @param iconSize The size of the thumbnail. * @param defaultOnFailure Whether or not to default on failure. * * @return a new Task that returns a thumbnail as its result. */ public static Task<javafx.scene.image.Image> newGetThumbnailTask(AbstractFile file, int iconSize, boolean defaultOnFailure) { return new GetThumbnailTask(file, iconSize, defaultOnFailure); } /** * A Task that gets cached thumbnails and makes new ones as needed. 
*/ static private class GetThumbnailTask extends ReadImageTaskBase { private static final String FAILED_TO_READ_IMAGE_FOR_THUMBNAIL_GENERATION = "Failed to read {0} for thumbnail generation."; //NON-NLS private final int iconSize; private final File cacheFile; private final boolean defaultOnFailure; @NbBundle.Messages({"# {0} - file name", "GetOrGenerateThumbnailTask.loadingThumbnailFor=Loading thumbnail for {0}", "# {0} - file name", "GetOrGenerateThumbnailTask.generatingPreviewFor=Generating preview for {0}"}) private GetThumbnailTask(AbstractFile file, int iconSize, boolean defaultOnFailure) { super(file); updateMessage(Bundle.GetOrGenerateThumbnailTask_loadingThumbnailFor(file.getName())); this.iconSize = iconSize; this.defaultOnFailure = defaultOnFailure; this.cacheFile = getCachedThumbnailLocation(file.getId()); } @Override protected javafx.scene.image.Image call() throws Exception { if (isGIF(file)) { return readImage(); } if (isCancelled()) { return null; } // If a thumbnail file is already saved locally, just read that. if (cacheFile != null && cacheFile.exists()) { try { BufferedImage cachedThumbnail = ImageIO.read(cacheFile); if (nonNull(cachedThumbnail) && cachedThumbnail.getWidth() == iconSize) { return SwingFXUtils.toFXImage(cachedThumbnail, null); } } catch (Exception ex) { LOGGER.log(Level.WARNING, "ImageIO had a problem reading the cached thumbnail for {0}: " + ex.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS cacheFile.delete(); //since we can't read the file we might as well delete it. } } if (isCancelled()) { return null; } //There was no correctly-sized cached thumbnail so make one. BufferedImage thumbnail = null; if (VideoUtils.isVideoThumbnailSupported(file)) { if (openCVLoaded) { updateMessage(Bundle.GetOrGenerateThumbnailTask_generatingPreviewFor(file.getName())); thumbnail = VideoUtils.generateVideoThumbnail(file, iconSize); } if (null == thumbnail) { if (defaultOnFailure) { thumbnail = DEFAULT_THUMBNAIL; } else { throw new IIOException("Failed to generate a thumbnail for " + getContentPathSafe(file));//NON-NLS } } } else { //read the image into a buffered image. //TODO: I don't like this, we just converted it from BufferedIamge to fx Image -jm BufferedImage bufferedImage = SwingFXUtils.fromFXImage(readImage(), null); if (null == bufferedImage) { String msg = MessageFormat.format(FAILED_TO_READ_IMAGE_FOR_THUMBNAIL_GENERATION, getContentPathSafe(file)); LOGGER.log(Level.WARNING, msg); throw new IIOException(msg); } updateProgress(-1, 1); //resize, or if that fails, crop it try { thumbnail = ScalrWrapper.resizeFast(bufferedImage, iconSize); } catch (IllegalArgumentException | OutOfMemoryError e) { // if resizing does not work due to extreme aspect ratio or oom, crop the image instead. 
LOGGER.log(Level.WARNING, "Cropping {0}, because it could not be scaled: " + e.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS final int height = bufferedImage.getHeight(); final int width = bufferedImage.getWidth(); if (iconSize < height || iconSize < width) { final int cropHeight = Math.min(iconSize, height); final int cropWidth = Math.min(iconSize, width); try { thumbnail = ScalrWrapper.cropImage(bufferedImage, cropWidth, cropHeight); } catch (Exception cropException) { LOGGER.log(Level.WARNING, "Could not crop {0}: " + cropException.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS } } } catch (Exception e) { LOGGER.log(Level.WARNING, "Could not scale {0}: " + e.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS throw e; } } if (isCancelled()) { return null; } updateProgress(-1, 1); //if we got a valid thumbnail save it if ((cacheFile != null) && thumbnail != null && DEFAULT_THUMBNAIL != thumbnail) { saveThumbnail(thumbnail); } return SwingFXUtils.toFXImage(thumbnail, null); } /** * submit the thumbnail saving to another background thread. * * @param thumbnail */ private void saveThumbnail(BufferedImage thumbnail) { imageSaver.execute(() -> { try { Files.createParentDirs(cacheFile); if (cacheFile.exists()) { cacheFile.delete(); } ImageIO.write(thumbnail, FORMAT, cacheFile); } catch (IllegalArgumentException | IOException ex) { LOGGER.log(Level.WARNING, "Could not write thumbnail for {0}: " + ex.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS } }); } } /** * Create a new {@link Task} that will read the file into memory as an * {@link javafx.scene.image.Image} * * Note: the returned task is suitable for running in a background thread, * but is not started automatically. Clients are responsible for running the * task, monitoring its progress, and using its result(including testing for * null). * * @param file the file to read as an Image * * @return a new Task that returns an Image as its result */ public static Task<javafx.scene.image.Image> newReadImageTask(AbstractFile file) { return new ReadImageTask(file); } /** * A task that reads the content of a AbstractFile as a javafx Image. */ @NbBundle.Messages({ "# {0} - file name", "ReadImageTask.mesageText=Reading image: {0}"}) static private class ReadImageTask extends ReadImageTaskBase { ReadImageTask(AbstractFile file) { super(file); updateMessage(Bundle.ReadImageTask_mesageText(file.getName())); } @Override protected javafx.scene.image.Image call() throws Exception { return readImage(); } } /** * Base class for tasks that need to read AbstractFiles as Images. */ static private abstract class ReadImageTaskBase extends Task<javafx.scene.image.Image> implements IIOReadProgressListener { private static final String IMAGEIO_COULD_NOT_READ_UNSUPPORTED_OR_CORRUPT = "ImageIO could not read {0}. 
It may be unsupported or corrupt"; //NON-NLS final AbstractFile file; // private ImageReader reader; ReadImageTaskBase(AbstractFile file) { this.file = file; } protected javafx.scene.image.Image readImage() throws IOException { if (ImageUtils.isGIF(file)) { //use JavaFX to directly read GIF to preserve potential animation javafx.scene.image.Image image = new javafx.scene.image.Image(new BufferedInputStream(new ReadContentInputStream(file))); if (image.isError() == false) { return image; } } else if (file.getNameExtension().equalsIgnoreCase("tec")) { //NON-NLS ReadContentInputStream readContentInputStream = new ReadContentInputStream(file); if (isJfifFileHeaderWithLeadingEOIMarker(file)) { readContentInputStream.seek(2); // Skip any leading EOI markers } //use JavaFX to directly read .tec files javafx.scene.image.Image image = new javafx.scene.image.Image(new BufferedInputStream(readContentInputStream)); if (image.isError() == false) { return image; } } //fall through to default image reading code if there was an error if (isCancelled()) { return null; } return getImageProperty(file, "ImageIO could not read {0}: ", imageReader -> { imageReader.addIIOReadProgressListener(ReadImageTaskBase.this); /* * This is the important part, get or create a * ImageReadParam, create a destination image to hold * the decoded result, then pass that image with the * param. */ ImageReadParam param = imageReader.getDefaultReadParam(); BufferedImage bufferedImage = imageReader.getImageTypes(0).next().createBufferedImage(imageReader.getWidth(0), imageReader.getHeight(0)); param.setDestination(bufferedImage); try { bufferedImage = imageReader.read(0, param); //should always be same bufferedImage object } catch (IOException iOException) { LOGGER.log(Level.WARNING, IMAGEIO_COULD_NOT_READ_UNSUPPORTED_OR_CORRUPT + ": " + iOException.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS } finally { imageReader.removeIIOReadProgressListener(ReadImageTaskBase.this); } if (isCancelled()) { return null; } return SwingFXUtils.toFXImage(bufferedImage, null); } ); } @Override public void imageProgress(ImageReader reader, float percentageDone) { //update this task with the progress reported by ImageReader.read updateProgress(percentageDone, 100); if (isCancelled()) { reader.removeIIOReadProgressListener(this); reader.abort(); reader.dispose(); } } @Override protected void succeeded() { super.succeeded(); try { javafx.scene.image.Image fxImage = get(); if (fxImage == null) { LOGGER.log(Level.WARNING, IMAGEIO_COULD_NOT_READ_UNSUPPORTED_OR_CORRUPT, ImageUtils.getContentPathSafe(file)); } else if (fxImage.isError()) { //if there was somekind of error, log it LOGGER.log(Level.WARNING, IMAGEIO_COULD_NOT_READ_UNSUPPORTED_OR_CORRUPT + ": " + ObjectUtils.toString(fxImage.getException()), ImageUtils.getContentPathSafe(file)); } } catch (InterruptedException | ExecutionException ex) { failed(); } } @Override protected void failed() { super.failed(); LOGGER.log(Level.WARNING, IMAGEIO_COULD_NOT_READ_UNSUPPORTED_OR_CORRUPT + ": " + ObjectUtils.toString(getException()), ImageUtils.getContentPathSafe(file)); } @Override public void imageComplete(ImageReader source) { updateProgress(100, 100); } @Override public void imageStarted(ImageReader source, int imageIndex) { } @Override public void sequenceStarted(ImageReader source, int minIndex) { } @Override public void sequenceComplete(ImageReader source) { } @Override public void thumbnailStarted(ImageReader source, int imageIndex, int thumbnailIndex) { } @Override public void 
thumbnailProgress(ImageReader source, float percentageDone) { } @Override public void thumbnailComplete(ImageReader source) { } @Override public void readAborted(ImageReader source) { } } /** * Get the unique path for the content, or if that fails, just return the * name. * * @param content * * @return the unique path for the content, or if that fails, just the name. */ static String getContentPathSafe(Content content) { try { return content.getUniquePath(); } catch (TskCoreException tskCoreException) { String contentName = content.getName(); LOGGER.log(Level.SEVERE, "Failed to get unique path for " + contentName, tskCoreException); //NON-NLS return contentName; } } /** * Get the default thumbnail, which is the icon for a file. Used when we can * not generate content based thumbnail. * * @return * * @deprecated use {@link #getDefaultThumbnail() } instead. */ @Deprecated public static Image getDefaultIcon() { return getDefaultThumbnail(); } /** * Get a file object for where the cached icon should exist. The returned * file may not exist. * * @param id * * @return * * @deprecated use {@link #getCachedThumbnailLocation(long) } instead */ @Deprecated public static File getFile(long id) { return getCachedThumbnailLocation(id); } /** * Get a thumbnail of a specified size for the given image. Generates the * thumbnail if it is not already cached. * * @param content * @param iconSize * * @return a thumbnail for the given image or a default one if there was a * problem making a thumbnail. * * @deprecated use {@link #getThumbnail(org.sleuthkit.datamodel.Content, int) * } instead. */ @Nonnull @Deprecated public static BufferedImage getIcon(Content content, int iconSize) { return getThumbnail(content, iconSize); } /** * Get a thumbnail of a specified size for the given image. Generates the * thumbnail if it is not already cached. * * @param content * @param iconSize * * @return File object for cached image. Is guaranteed to exist, as long as * there was not an error generating or saving the thumbnail. * * @deprecated use {@link #getCachedThumbnailFile(org.sleuthkit.datamodel.Content, int) * } instead. * */ @Nullable @Deprecated public static File getIconFile(Content content, int iconSize) { return getCachedThumbnailFile(content, iconSize); } }
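Note: the ImageUtils class above leans on magic-byte sniffing (isJpegFileHeader, isPngFileHeader) to recognize images whose extension or MIME type is missing or wrong. Below is a minimal standalone sketch of the same checks against a plain byte[]. The JPEG SOI marker (0xFF 0xD8) and the 8-byte PNG signature (89 50 4E 47 0D 0A 1A 0A) are standard facts; the class and method names here are invented for illustration and are not part of Autopsy.

import java.util.Arrays;

public class MagicBytes {

    // Full 8-byte PNG file signature.
    private static final byte[] PNG_SIGNATURE = {
        (byte) 0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A
    };

    static boolean looksLikeJpeg(byte[] header) {
        // Mask with 0xFF because Java bytes are signed, mirroring the code above.
        return header.length >= 2
                && (header[0] & 0xFF) == 0xFF
                && (header[1] & 0xFF) == 0xD8;
    }

    static boolean looksLikePng(byte[] header) {
        return header.length >= PNG_SIGNATURE.length
                && Arrays.equals(Arrays.copyOf(header, PNG_SIGNATURE.length), PNG_SIGNATURE);
    }
}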
Core/src/org/sleuthkit/autopsy/coreutils/ImageUtils.java
/* * Autopsy Forensic Browser * * Copyright 2012-16 Basis Technology Corp. * * Copyright 2012 42six Solutions. * Contact: aebadirad <at> 42six <dot> com * Project Contact/Architect: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.coreutils; import com.google.common.collect.ImmutableSortedSet; import com.google.common.io.Files; import java.awt.Image; import java.awt.image.BufferedImage; import java.io.BufferedInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.file.Paths; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import static java.util.Objects.nonNull; import java.util.SortedSet; import java.util.TreeSet; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.logging.Level; import javafx.concurrent.Task; import javafx.embed.swing.SwingFXUtils; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.imageio.IIOException; import javax.imageio.ImageIO; import javax.imageio.ImageReadParam; import javax.imageio.ImageReader; import javax.imageio.event.IIOReadProgressListener; import javax.imageio.stream.ImageInputStream; import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.opencv.core.Core; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.corelibs.ScalrWrapper; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector.FileTypeDetectorInitException; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.ReadContentInputStream; import org.sleuthkit.datamodel.TskCoreException; /** * Utilities for working with image files and creating thumbnails. Re-uses * thumbnails by storing them in the case's cache directory. 
*/ public class ImageUtils { private static final Logger LOGGER = Logger.getLogger(ImageUtils.class.getName()); /** * save thumbnails to disk as this format */ private static final String FORMAT = "png"; //NON-NLS public static final int ICON_SIZE_SMALL = 50; public static final int ICON_SIZE_MEDIUM = 100; public static final int ICON_SIZE_LARGE = 200; private static final BufferedImage DEFAULT_THUMBNAIL; private static final List<String> GIF_EXTENSION_LIST = Arrays.asList("gif"); private static final SortedSet<String> GIF_MIME_SET = ImmutableSortedSet.copyOf(new String[]{"image/gif"}); private static final List<String> SUPPORTED_IMAGE_EXTENSIONS = new ArrayList<>(); private static final SortedSet<String> SUPPORTED_IMAGE_MIME_TYPES; private static final boolean openCVLoaded; static { ImageIO.scanForPlugins(); BufferedImage tempImage; try { tempImage = ImageIO.read(ImageUtils.class.getResourceAsStream("/org/sleuthkit/autopsy/images/file-icon.png"));//NON-NLS } catch (IOException ex) { LOGGER.log(Level.SEVERE, "Failed to load default icon.", ex); //NON-NLS tempImage = null; } DEFAULT_THUMBNAIL = tempImage; //load opencv libraries boolean openCVLoadedTemp; try { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); if (System.getProperty("os.arch").equals("amd64") || System.getProperty("os.arch").equals("x86_64")) { //NON-NLS System.loadLibrary("opencv_ffmpeg248_64"); //NON-NLS } else { System.loadLibrary("opencv_ffmpeg248"); //NON-NLS } openCVLoadedTemp = true; } catch (UnsatisfiedLinkError e) { openCVLoadedTemp = false; LOGGER.log(Level.SEVERE, "OpenCV Native code library failed to load", e); //NON-NLS //TODO: show warning bubble } openCVLoaded = openCVLoadedTemp; SUPPORTED_IMAGE_EXTENSIONS.addAll(Arrays.asList(ImageIO.getReaderFileSuffixes())); SUPPORTED_IMAGE_EXTENSIONS.add("tec"); // Add JFIF .tec files SUPPORTED_IMAGE_MIME_TYPES = new TreeSet<>(Arrays.asList(ImageIO.getReaderMIMETypes())); /* * special cases and variants that we support, but don't get registered * with ImageIO automatically */ SUPPORTED_IMAGE_MIME_TYPES.addAll(Arrays.asList( "image/x-rgb", //NON-NLS "image/x-ms-bmp", //NON-NLS "image/x-portable-graymap", //NON-NLS "image/x-portable-bitmap", //NON-NLS "application/x-123")); //TODO: is this correct? -jm //NON-NLS SUPPORTED_IMAGE_MIME_TYPES.removeIf("application/octet-stream"::equals); //NON-NLS } /** * initialized lazily */ private static FileTypeDetector fileTypeDetector; /** * thread that saves generated thumbnails to disk in the background */ private static final Executor imageSaver = Executors.newSingleThreadExecutor(new BasicThreadFactory.Builder() .namingPattern("thumbnail-saver-%d").build()); //NON-NLS public static List<String> getSupportedImageExtensions() { return Collections.unmodifiableList(SUPPORTED_IMAGE_EXTENSIONS); } public static SortedSet<String> getSupportedImageMimeTypes() { return Collections.unmodifiableSortedSet(SUPPORTED_IMAGE_MIME_TYPES); } /** * Get the default thumbnail, which is the icon for a file. Used when we can * not generate a content based thumbnail. * * @return the default thumbnail */ public static Image getDefaultThumbnail() { return DEFAULT_THUMBNAIL; } /** * Can a thumbnail be generated for the content? * * Although this method accepts Content, it always returns false for objects * that are not instances of AbstractFile. * * @param content A content object to test for thumbnail support. * * @return true if a thumbnail can be generated for the given content. 
*/ public static boolean thumbnailSupported(Content content) { if (!(content instanceof AbstractFile)) { return false; } AbstractFile file = (AbstractFile) content; return VideoUtils.isVideoThumbnailSupported(file) || isImageThumbnailSupported(file); } /** * Is the file an image that we can read and generate a thumbnail for? * * @param file the AbstractFile to test * * @return true if the file is an image we can read and generate thumbnail * for. */ public static boolean isImageThumbnailSupported(AbstractFile file) { return isMediaThumbnailSupported(file, "image/", SUPPORTED_IMAGE_MIME_TYPES, SUPPORTED_IMAGE_EXTENSIONS) || hasImageFileHeader(file);//NON-NLS } /** * Checks the MIME type and/or extension of a file to determine whether it * is a GIF. * * @param file the AbstractFile to test * * @return true if the file is a gif */ public static boolean isGIF(AbstractFile file) { return isMediaThumbnailSupported(file, null, GIF_MIME_SET, GIF_EXTENSION_LIST); } /** * Check if making a thumbnail for the given file is supported by checking * its extension and/or MIME type against the supplied collections. * * //TODO: this should move to a better place. Should ImageUtils and * VideoUtils both implement/extend some base interface/abstract class. That * would be the natural place to put this. * * @param file the AbstractFile to test * @param mimeTypePrefix a MIME 'top-level type name' such as "image/", * including the "/". In addition to the list of * supported MIME types, any type that starts with * this prefix will be regarded as supported * @param supportedMimeTypes a collection of mimetypes that are supported * @param supportedExtension a collection of extensions that are supported * * @return true if a thumbnail can be generated for the given file based on * the given MIME type prefix and lists of supported MIME types and * extensions */ static boolean isMediaThumbnailSupported(AbstractFile file, String mimeTypePrefix, final Collection<String> supportedMimeTypes, final List<String> supportedExtension) { if (false == file.isFile() || file.getSize() <= 0) { return false; } String extension = file.getNameExtension(); if (StringUtils.isNotBlank(extension) && supportedExtension.contains(extension)) { return true; } else { try { String mimeType = getFileTypeDetector().detect(file); if (StringUtils.isNotBlank(mimeTypePrefix) && mimeType.startsWith(mimeTypePrefix)) { return true; } return supportedMimeTypes.contains(mimeType); } catch (FileTypeDetectorInitException | TskCoreException ex) { LOGGER.log(Level.SEVERE, "Error determining MIME type of " + getContentPathSafe(file), ex);//NON-NLS return false; } } } /** * //TODO: AUT-2057 this FileTypeDetector needs to be recreated when the * user adds new user defined file types. * * get a FileTypeDetector * * @return a FileTypeDetector * * @throws FileTypeDetectorInitException if initializing the * FileTypeDetector failed. */ synchronized private static FileTypeDetector getFileTypeDetector() throws FileTypeDetector.FileTypeDetectorInitException { if (fileTypeDetector == null) { fileTypeDetector = new FileTypeDetector(); } return fileTypeDetector; } /** * Get a thumbnail of a specified size for the given image. Generates the * thumbnail if it is not already cached. * * @param content the content to generate a thumbnail for * @param iconSize the size (one side of a square) in pixels to generate * * @return a thumbnail for the given image or a default one if there was a * problem making a thumbnail. 
*/ public static BufferedImage getThumbnail(Content content, int iconSize) { if (content instanceof AbstractFile) { AbstractFile file = (AbstractFile) content; Task<javafx.scene.image.Image> thumbnailTask = newGetThumbnailTask(file, iconSize, true); thumbnailTask.run(); try { return SwingFXUtils.fromFXImage(thumbnailTask.get(), null); } catch (InterruptedException | ExecutionException ex) { LOGGER.log(Level.WARNING, "Failed to get thumbnail for {0}: " + ex.toString(), getContentPathSafe(content)); //NON-NLS return DEFAULT_THUMBNAIL; } } else { return DEFAULT_THUMBNAIL; } } /** * * Get a thumbnail of a specified size for the given image. Generates the * thumbnail if it is not already cached. * * @param content the content to generate a thumbnail for * @param iconSize the size (one side of a square) in pixels to generate * * @return File object for cached image. Is guaranteed to exist, as long as * there was not an error generating or saving the thumbnail. */ @Nullable public static File getCachedThumbnailFile(Content content, int iconSize) { getThumbnail(content, iconSize); return getCachedThumbnailLocation(content.getId()); } /** * Get the location of the cached thumbnail for a file with the given fileID * as a java {@link File}. The returned File may not exist on disk yet. * * @param fileID the fileID to get the cached thumbnail location for * * @return a File object representing the location of the cached thumbnail. * This file may not actually exist(yet). Returns null if there was * any problem getting the file, such as no case was open. */ private static File getCachedThumbnailLocation(long fileID) { try { String cacheDirectory = Case.getCurrentCase().getCacheDirectory(); return Paths.get(cacheDirectory, "thumbnails", fileID + ".png").toFile(); //NON-NLS } catch (IllegalStateException e) { LOGGER.log(Level.WARNING, "Could not get cached thumbnail location. No case is open."); //NON-NLS return null; } } /** * Do a direct check to see if the given file has an image file header. * NOTE: Currently only jpeg and png are supported. * * @param file the AbstractFile to check * * @return true if the given file has one of the supported image headers. */ public static boolean hasImageFileHeader(AbstractFile file) { return isJpegFileHeader(file) || isPngFileHeader(file); } /** * Check if the given file is a jpeg based on header. * * @param file the AbstractFile to check * * @return true if jpeg file, false otherwise */ public static boolean isJpegFileHeader(AbstractFile file) { if (file.getSize() < 100) { return false; } try { byte[] fileHeaderBuffer = readHeader(file, 2); /* * Check for the JPEG header. Since Java bytes are signed, we cast * them to an int first. 
*/ return (((fileHeaderBuffer[0] & 0xff) == 0xff) && ((fileHeaderBuffer[1] & 0xff) == 0xd8)); } catch (TskCoreException ex) { //ignore if can't read the first few bytes, not a JPEG return false; } } /** * Check if the given file is a JFIF based on header, but has a leading End * Of Image marker (0xFFD9) * * @param file the AbstractFile to check * * @return true if JFIF file, false otherwise */ public static boolean isJfifFileHeaderWithLeadingEOIMarker(AbstractFile file) { if (file.getSize() < 100) { return false; } try { byte[] fileHeaderBuffer = readHeader(file, 4); // Check for the JFIF header with leading EOI marker: 0xFFD9 followed by SOI marker: 0xFFD8 return (fileHeaderBuffer[0] == (byte) 0xFF && fileHeaderBuffer[1] == (byte) 0xD9 && fileHeaderBuffer[2] == (byte) 0xFF && fileHeaderBuffer[3] == (byte) 0xD8); } catch (TskCoreException ex) { //ignore if can't read the first few bytes, not a JPEG return false; } } /** * Check if the given file is a png based on header. * * @param file the AbstractFile to check * * @return true if png file, false otherwise */ public static boolean isPngFileHeader(AbstractFile file) { if (file.getSize() < 10) { return false; } try { byte[] fileHeaderBuffer = readHeader(file, 8); /* * Check for the png header. Since Java bytes are signed, we cast * them to an int first. */ return (((fileHeaderBuffer[1] & 0xff) == 0x50) && ((fileHeaderBuffer[2] & 0xff) == 0x4E) && ((fileHeaderBuffer[3] & 0xff) == 0x47) && ((fileHeaderBuffer[4] & 0xff) == 0x0D) && ((fileHeaderBuffer[5] & 0xff) == 0x0A) && ((fileHeaderBuffer[6] & 0xff) == 0x1A) && ((fileHeaderBuffer[7] & 0xff) == 0x0A)); } catch (TskCoreException ex) { //ignore if can't read the first few bytes, not an png return false; } } private static byte[] readHeader(AbstractFile file, int buffLength) throws TskCoreException { byte[] fileHeaderBuffer = new byte[buffLength]; int bytesRead = file.read(fileHeaderBuffer, 0, buffLength); if (bytesRead != buffLength) { //ignore if can't read the first few bytes, not an image throw new TskCoreException("Could not read " + buffLength + " bytes from " + file.getName());//NON-NLS } return fileHeaderBuffer; } /** * Get the width of the given image, in pixels. * * @param file * * @return the width in pixels * * @throws IOException If the file is not a supported image or the width * could not be determined. */ static public int getImageWidth(AbstractFile file) throws IOException { return getImageProperty(file, "ImageIO could not determine width of {0}: ", //NON-NLS imageReader -> imageReader.getWidth(0) ); } /** * Get the height of the given image,in pixels. * * @param file * * @return the height in pixels * * @throws IOException If the file is not a supported image or the height * could not be determined. */ static public int getImageHeight(AbstractFile file) throws IOException { return getImageProperty(file, "ImageIO could not determine height of {0}: ", //NON-NLS imageReader -> imageReader.getHeight(0) ); } /** * Functional interface for methods that extract a property out of an * ImageReader. Initially created to abstract over * {@link #getImageHeight(org.sleuthkit.datamodel.AbstractFile)} and * {@link #getImageWidth(org.sleuthkit.datamodel.AbstractFile)} * * @param <T> The type of the property. */ @FunctionalInterface private static interface PropertyExtractor<T> { public T extract(ImageReader reader) throws IOException; } /** * Private template method designed to be used as the implementation of * public methods that pull particular (usually meta-)data out of a image * file. 
* * @param <T> the type of the property to be retrieved. * @param file the file to extract the data from * @param errorTemplate a message template used to log errors. Should * take one parameter: the file's unique path or * name. * @param propertyExtractor an implementation of {@link PropertyExtractor} * used to retrieve the specific property. * * @return the the value of the property extracted by the given * propertyExtractor * * @throws IOException if there was a problem reading the property from the * file. * * @see PropertyExtractor * @see #getImageHeight(org.sleuthkit.datamodel.AbstractFile) */ private static <T> T getImageProperty(AbstractFile file, final String errorTemplate, PropertyExtractor<T> propertyExtractor) throws IOException { try (InputStream inputStream = new BufferedInputStream(new ReadContentInputStream(file));) { try (ImageInputStream input = ImageIO.createImageInputStream(inputStream)) { if (input == null) { IIOException iioException = new IIOException("Could not create ImageInputStream."); LOGGER.log(Level.WARNING, errorTemplate + iioException.toString(), getContentPathSafe(file)); throw iioException; } Iterator<ImageReader> readers = ImageIO.getImageReaders(input); if (readers.hasNext()) { ImageReader reader = readers.next(); reader.setInput(input); try { return propertyExtractor.extract(reader); } catch (IOException ex) { LOGGER.log(Level.WARNING, errorTemplate + ex.toString(), getContentPathSafe(file)); throw ex; } finally { reader.dispose(); } } else { IIOException iioException = new IIOException("No ImageReader found."); LOGGER.log(Level.WARNING, errorTemplate + iioException.toString(), getContentPathSafe(file)); throw iioException; } } } } /** * Create a new {@link Task} that will get a thumbnail for the given image * of the specified size. If a cached thumbnail is available it will be * returned as the result of the task, otherwise a new thumbnail will be * created and cached. * * Note: the returned task is suitable for running in a background thread, * but is not started automatically. Clients are responsible for running the * task, monitoring its progress, and using its result. * * @param file The file to create a thumbnail for. * @param iconSize The size of the thumbnail. * @param defaultOnFailure Whether or not to default on failure. * * @return a new Task that returns a thumbnail as its result. */ public static Task<javafx.scene.image.Image> newGetThumbnailTask(AbstractFile file, int iconSize, boolean defaultOnFailure) { return new GetThumbnailTask(file, iconSize, defaultOnFailure); } /** * A Task that gets cached thumbnails and makes new ones as needed. 
*/ static private class GetThumbnailTask extends ReadImageTaskBase { private static final String FAILED_TO_READ_IMAGE_FOR_THUMBNAIL_GENERATION = "Failed to read {0} for thumbnail generation."; //NON-NLS private final int iconSize; private final File cacheFile; private final boolean defaultOnFailure; @NbBundle.Messages({"# {0} - file name", "GetOrGenerateThumbnailTask.loadingThumbnailFor=Loading thumbnail for {0}", "# {0} - file name", "GetOrGenerateThumbnailTask.generatingPreviewFor=Generating preview for {0}"}) private GetThumbnailTask(AbstractFile file, int iconSize, boolean defaultOnFailure) { super(file); updateMessage(Bundle.GetOrGenerateThumbnailTask_loadingThumbnailFor(file.getName())); this.iconSize = iconSize; this.defaultOnFailure = defaultOnFailure; this.cacheFile = getCachedThumbnailLocation(file.getId()); } @Override protected javafx.scene.image.Image call() throws Exception { if (isGIF(file)) { return readImage(); } if (isCancelled()) { return null; } // If a thumbnail file is already saved locally, just read that. if (cacheFile != null && cacheFile.exists()) { try { BufferedImage cachedThumbnail = ImageIO.read(cacheFile); if (nonNull(cachedThumbnail) && cachedThumbnail.getWidth() == iconSize) { return SwingFXUtils.toFXImage(cachedThumbnail, null); } } catch (Exception ex) { LOGGER.log(Level.WARNING, "ImageIO had a problem reading the cached thumbnail for {0}: " + ex.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS cacheFile.delete(); //since we can't read the file we might as well delete it. } } if (isCancelled()) { return null; } //There was no correctly-sized cached thumbnail so make one. BufferedImage thumbnail = null; if (VideoUtils.isVideoThumbnailSupported(file)) { if (openCVLoaded) { updateMessage(Bundle.GetOrGenerateThumbnailTask_generatingPreviewFor(file.getName())); thumbnail = VideoUtils.generateVideoThumbnail(file, iconSize); } if (null == thumbnail) { if (defaultOnFailure) { thumbnail = DEFAULT_THUMBNAIL; } else { throw new IIOException("Failed to generate a thumbnail for " + getContentPathSafe(file));//NON-NLS } } } else { //read the image into a buffered image. //TODO: I don't like this, we just converted it from BufferedIamge to fx Image -jm BufferedImage bufferedImage = SwingFXUtils.fromFXImage(readImage(), null); if (null == bufferedImage) { String msg = MessageFormat.format(FAILED_TO_READ_IMAGE_FOR_THUMBNAIL_GENERATION, getContentPathSafe(file)); LOGGER.log(Level.WARNING, msg); throw new IIOException(msg); } updateProgress(-1, 1); //resize, or if that fails, crop it try { thumbnail = ScalrWrapper.resizeFast(bufferedImage, iconSize); } catch (IllegalArgumentException | OutOfMemoryError e) { // if resizing does not work due to extreme aspect ratio or oom, crop the image instead. 
LOGGER.log(Level.WARNING, "Cropping {0}, because it could not be scaled: " + e.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS final int height = bufferedImage.getHeight(); final int width = bufferedImage.getWidth(); if (iconSize < height || iconSize < width) { final int cropHeight = Math.min(iconSize, height); final int cropWidth = Math.min(iconSize, width); try { thumbnail = ScalrWrapper.cropImage(bufferedImage, cropWidth, cropHeight); } catch (Exception cropException) { LOGGER.log(Level.WARNING, "Could not crop {0}: " + cropException.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS } } } catch (Exception e) { LOGGER.log(Level.WARNING, "Could not scale {0}: " + e.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS throw e; } } if (isCancelled()) { return null; } updateProgress(-1, 1); //if we got a valid thumbnail save it if ((cacheFile != null) && thumbnail != null && DEFAULT_THUMBNAIL != thumbnail) { saveThumbnail(thumbnail); } return SwingFXUtils.toFXImage(thumbnail, null); } /** * submit the thumbnail saving to another background thread. * * @param thumbnail */ private void saveThumbnail(BufferedImage thumbnail) { imageSaver.execute(() -> { try { Files.createParentDirs(cacheFile); if (cacheFile.exists()) { cacheFile.delete(); } ImageIO.write(thumbnail, FORMAT, cacheFile); } catch (IllegalArgumentException | IOException ex) { LOGGER.log(Level.WARNING, "Could not write thumbnail for {0}: " + ex.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS } }); } } /** * Create a new {@link Task} that will read the file into memory as an * {@link javafx.scene.image.Image} * * Note: the returned task is suitable for running in a background thread, * but is not started automatically. Clients are responsible for running the * task, monitoring its progress, and using its result(including testing for * null). * * @param file the file to read as an Image * * @return a new Task that returns an Image as its result */ public static Task<javafx.scene.image.Image> newReadImageTask(AbstractFile file) { return new ReadImageTask(file); } /** * A task that reads the content of a AbstractFile as a javafx Image. */ @NbBundle.Messages({ "# {0} - file name", "ReadImageTask.mesageText=Reading image: {0}"}) static private class ReadImageTask extends ReadImageTaskBase { ReadImageTask(AbstractFile file) { super(file); updateMessage(Bundle.ReadImageTask_mesageText(file.getName())); } @Override protected javafx.scene.image.Image call() throws Exception { return readImage(); } } /** * Base class for tasks that need to read AbstractFiles as Images. */ static private abstract class ReadImageTaskBase extends Task<javafx.scene.image.Image> implements IIOReadProgressListener { private static final String IMAGEIO_COULD_NOT_READ_UNSUPPORTED_OR_CORRUPT = "ImageIO could not read {0}. 
It may be unsupported or corrupt"; //NON-NLS final AbstractFile file; // private ImageReader reader; ReadImageTaskBase(AbstractFile file) { this.file = file; } protected javafx.scene.image.Image readImage() throws IOException { if (ImageUtils.isGIF(file)) { //use JavaFX to directly read GIF to preserve potential animation javafx.scene.image.Image image = new javafx.scene.image.Image(new BufferedInputStream(new ReadContentInputStream(file))); if (image.isError() == false) { return image; } } else if (file.getNameExtension().equalsIgnoreCase("tec")) { //NON-NLS int offset = 0; if (isJfifFileHeaderWithLeadingEOIMarker(file)) { offset = 2; } else if (isJpegFileHeader(file)) { offset = 0; } //use JavaFX to directly read .tec files, skipping any leading EOI markers javafx.scene.image.Image image = new javafx.scene.image.Image(new BufferedInputStream(new ReadContentInputStream(file, offset))); if (image.isError() == false) { return image; } } //fall through to default image reading code if there was an error if (isCancelled()) { return null; } return getImageProperty(file, "ImageIO could not read {0}: ", imageReader -> { imageReader.addIIOReadProgressListener(ReadImageTaskBase.this); /* * This is the important part, get or create a * ImageReadParam, create a destination image to hold * the decoded result, then pass that image with the * param. */ ImageReadParam param = imageReader.getDefaultReadParam(); BufferedImage bufferedImage = imageReader.getImageTypes(0).next().createBufferedImage(imageReader.getWidth(0), imageReader.getHeight(0)); param.setDestination(bufferedImage); try { bufferedImage = imageReader.read(0, param); //should always be same bufferedImage object } catch (IOException iOException) { LOGGER.log(Level.WARNING, IMAGEIO_COULD_NOT_READ_UNSUPPORTED_OR_CORRUPT + ": " + iOException.toString(), ImageUtils.getContentPathSafe(file)); //NON-NLS } finally { imageReader.removeIIOReadProgressListener(ReadImageTaskBase.this); } if (isCancelled()) { return null; } return SwingFXUtils.toFXImage(bufferedImage, null); } ); } @Override public void imageProgress(ImageReader reader, float percentageDone) { //update this task with the progress reported by ImageReader.read updateProgress(percentageDone, 100); if (isCancelled()) { reader.removeIIOReadProgressListener(this); reader.abort(); reader.dispose(); } } @Override protected void succeeded() { super.succeeded(); try { javafx.scene.image.Image fxImage = get(); if (fxImage == null) { LOGGER.log(Level.WARNING, IMAGEIO_COULD_NOT_READ_UNSUPPORTED_OR_CORRUPT, ImageUtils.getContentPathSafe(file)); } else if (fxImage.isError()) { //if there was somekind of error, log it LOGGER.log(Level.WARNING, IMAGEIO_COULD_NOT_READ_UNSUPPORTED_OR_CORRUPT + ": " + ObjectUtils.toString(fxImage.getException()), ImageUtils.getContentPathSafe(file)); } } catch (InterruptedException | ExecutionException ex) { failed(); } } @Override protected void failed() { super.failed(); LOGGER.log(Level.WARNING, IMAGEIO_COULD_NOT_READ_UNSUPPORTED_OR_CORRUPT + ": " + ObjectUtils.toString(getException()), ImageUtils.getContentPathSafe(file)); } @Override public void imageComplete(ImageReader source) { updateProgress(100, 100); } @Override public void imageStarted(ImageReader source, int imageIndex) { } @Override public void sequenceStarted(ImageReader source, int minIndex) { } @Override public void sequenceComplete(ImageReader source) { } @Override public void thumbnailStarted(ImageReader source, int imageIndex, int thumbnailIndex) { } @Override public void thumbnailProgress(ImageReader 
source, float percentageDone) { } @Override public void thumbnailComplete(ImageReader source) { } @Override public void readAborted(ImageReader source) { } } /** * Get the unique path for the content, or if that fails, just return the * name. * * @param content * * @return the unique path for the content, or if that fails, just the name. */ static String getContentPathSafe(Content content) { try { return content.getUniquePath(); } catch (TskCoreException tskCoreException) { String contentName = content.getName(); LOGGER.log(Level.SEVERE, "Failed to get unique path for " + contentName, tskCoreException); //NON-NLS return contentName; } } /** * Get the default thumbnail, which is the icon for a file. Used when we can * not generate content based thumbnail. * * @return * * @deprecated use {@link #getDefaultThumbnail() } instead. */ @Deprecated public static Image getDefaultIcon() { return getDefaultThumbnail(); } /** * Get a file object for where the cached icon should exist. The returned * file may not exist. * * @param id * * @return * * @deprecated use {@link #getCachedThumbnailLocation(long) } instead */ @Deprecated public static File getFile(long id) { return getCachedThumbnailLocation(id); } /** * Get a thumbnail of a specified size for the given image. Generates the * thumbnail if it is not already cached. * * @param content * @param iconSize * * @return a thumbnail for the given image or a default one if there was a * problem making a thumbnail. * * @deprecated use {@link #getThumbnail(org.sleuthkit.datamodel.Content, int) * } instead. */ @Nonnull @Deprecated public static BufferedImage getIcon(Content content, int iconSize) { return getThumbnail(content, iconSize); } /** * Get a thumbnail of a specified size for the given image. Generates the * thumbnail if it is not already cached. * * @param content * @param iconSize * * @return File object for cached image. Is guaranteed to exist, as long as * there was not an error generating or saving the thumbnail. * * @deprecated use {@link #getCachedThumbnailFile(org.sleuthkit.datamodel.Content, int) * } instead. * */ @Nullable @Deprecated public static File getIconFile(Content content, int iconSize) { return getCachedThumbnailFile(content, iconSize); } }
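Note: both versions of GetThumbnailTask above follow the same cache-then-generate pattern: look for <cacheDir>/thumbnails/<fileId>.png, accept it only if it has the requested size, otherwise generate a fresh thumbnail and persist it from a single background thread. A condensed sketch of that pattern follows, with the resize/crop logic abstracted behind a hypothetical ThumbnailSource interface (an assumption for illustration, not an Autopsy type).

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import javax.imageio.ImageIO;

class ThumbnailCache {

    private final File cacheDir;                                  // e.g. the case cache directory
    private final ExecutorService saver = Executors.newSingleThreadExecutor();

    ThumbnailCache(File cacheDir) {
        this.cacheDir = cacheDir;
    }

    BufferedImage get(long fileId, int iconSize, ThumbnailSource source) throws IOException {
        File cached = Paths.get(cacheDir.getPath(), "thumbnails", fileId + ".png").toFile();
        if (cached.exists()) {
            BufferedImage img = ImageIO.read(cached);
            if (img != null && img.getWidth() == iconSize) {
                return img;                                       // cache hit at the right size
            }
        }
        BufferedImage fresh = source.generate(fileId, iconSize);
        saver.execute(() -> {                                     // persist off the calling thread
            try {
                cached.getParentFile().mkdirs();
                ImageIO.write(fresh, "png", cached);
            } catch (IOException ignored) {
                // a failed cache write only costs a regeneration later
            }
        });
        return fresh;
    }

    interface ThumbnailSource {
        BufferedImage generate(long fileId, int iconSize) throws IOException;
    }
}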
Use seek() instead of new constructor
Core/src/org/sleuthkit/autopsy/coreutils/ImageUtils.java
Use seek() instead of new constructor
<ide><path>Core/src/org/sleuthkit/autopsy/coreutils/ImageUtils.java
<ide>                 return image;
<ide>             }
<ide>         } else if (file.getNameExtension().equalsIgnoreCase("tec")) { //NON-NLS
<del>            int offset = 0;
<add>            ReadContentInputStream readContentInputStream = new ReadContentInputStream(file);
<ide>             if (isJfifFileHeaderWithLeadingEOIMarker(file)) {
<del>                offset = 2;
<del>            } else if (isJpegFileHeader(file)) {
<del>                offset = 0;
<del>            }
<del>            //use JavaFX to directly read .tec files, skipping any leading EOI markers
<del>            javafx.scene.image.Image image = new javafx.scene.image.Image(new BufferedInputStream(new ReadContentInputStream(file, offset)));
<add>                readContentInputStream.seek(2); // Skip any leading EOI markers
<add>            }
<add>            //use JavaFX to directly read .tec files
<add>            javafx.scene.image.Image image = new javafx.scene.image.Image(new BufferedInputStream(readContentInputStream));
<ide>             if (image.isError() == false) {
<ide>                 return image;
<ide>             }
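Note: the point of this diff is that the stream is now positioned with seek(2) after it is opened, rather than baking the offset into a ReadContentInputStream constructor argument. The same idea in plain java.io, as an analogy only: InputStream has no seek, so skip(2) stands in for ReadContentInputStream.seek(2), and the hasLeadingEoiMarker flag stands in for isJfifFileHeaderWithLeadingEOIMarker.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class SkipLeadingEoi {

    static InputStream openImageStream(String path, boolean hasLeadingEoiMarker) throws IOException {
        InputStream in = new BufferedInputStream(new FileInputStream(path));
        if (hasLeadingEoiMarker) {
            long skipped = in.skip(2);        // step over the stray 0xFF 0xD9 pair
            if (skipped != 2) {
                in.close();
                throw new IOException("Could not skip leading EOI marker in " + path);
            }
        }
        return in;                            // hand the positioned stream to the decoder
    }
}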
Java
apache-2.0
69187d2127b67016ef128f372aa30f4375911768
0
YouCruit/onfido-api
package com.youcruit.onfido.api;

import static java.util.Arrays.asList;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;

import org.apache.log4j.Logger;
import org.junit.Before;
import org.junit.Test;

import com.youcruit.onfido.api.http.FakeHttpClient;
import com.youcruit.onfido.api.http.OnfidoHttpClient;
import com.youcruit.onfido.api.webhook.Event;
import com.youcruit.onfido.api.webhook.SignatureMismatchException;
import com.youcruit.onfido.api.webhook.WebhookClient;

public class WebhookTest extends HttpIT {

    private OnfidoHttpClient client;
    private WebhookClient webhookClient;
    private String onfidoWebhookHost;

    public WebhookTest(final Class<OnfidoHttpClient> httpClientClass) {
        super(httpClientClass);
    }

    @Before
    public void setWebhookHost() {
        onfidoWebhookHost = getPropEnv("ONFIDO_WEBHOOK_URI");
        if (onfidoWebhookHost == null) {
            client = new FakeHttpClient();
            onfidoWebhookHost = "https://example.com/foo/";
        } else {
            Logger.getLogger(getClass()).warn("ONFIDO_WEBHOOK_HOST is defined, so not using FakeClient");
            client = createClient();
        }
        webhookClient = new WebhookClient(client);
    }

    @Test
    public void createWebhook() throws IOException {
        if (client instanceof FakeHttpClient) {
            FakeHttpClient fakeClient = (FakeHttpClient) this.client;
            fakeClient.addResponse(URI.create("https://api.onfido.com/v1/webhooks"),
                    "{\"id\":\"b6c6c111-ffbb-491d-bc03-0a7bae829a53\",\"url\":\"https://example.com/foo/\",\"enabled\":true,\"token\":\"o4TfmcJ-lwEHLmJJDN9LvyIObJlkiM5l\",\"href\":\"/v1/webhooks/b6c6c111-ffbb-491d-bc03-0a7bae829a53\",\"events\":[\"report completion\",\"report withdrawal\",\"check in progress\",\"check completion\"]}");
        }
        webhookClient.register(URI.create(onfidoWebhookHost), true, asList(Event.values()));
    }

    @Test
    public void validateWebhook() throws UnsupportedEncodingException, SignatureMismatchException {
        String token = "4zvPpGGpvcqKcgGOEC7y3Ju8v2WC0btO";

        String signature1 = "130ff94a8d70e734c57b181968efe6ed32c387c6";
        String payload1 = "{\"payload\":{\"resource_type\":\"check\",\"action\":\"in progress\",\"object\":{\"id\":\"c86f46e1-a8ae-47a2-9d48-fd54da7fe3c5\",\"status\":\"in_progress\",\"completed_at\":\"2016-03-09 13:10:44 UTC\",\"href\":\"https://api.onfido.com/v1/applicants/95af7d0d-1c23-4c07-a8cd-8b66108decc6/checks/c86f46e1-a8ae-47a2-9d48-fd54da7fe3c5\"}}}";
        webhookClient.deserializeAndVerify(payload1.getBytes("UTF-8"), token, signature1);

        String signature2 = "fe9920536efcf6a5af4581498a39cedd0993be32";
        String payload2 = "{\"payload\":{\"resource_type\":\"report\",\"action\":\"completed\",\"object\":{\"id\":\"412c7d24-3186-4cc0-b922-d608ee5c8ec3\",\"status\":\"complete\",\"completed_at\":\"2016-03-09 13:10:53 UTC\",\"href\":\"https://api.onfido.com/v1/checks/c86f46e1-a8ae-47a2-9d48-fd54da7fe3c5/reports/412c7d24-3186-4cc0-b922-d608ee5c8ec3\"}}}";
        webhookClient.deserializeAndVerify(payload2.getBytes("UTF-8"), token, signature2);

        String signature3 = "476a3cab6fe7e1ad2ee8e147c44e6a560b1b9e24";
        String payload3 = "{\"payload\":{\"resource_type\":\"report\",\"action\":\"completed\",\"object\":{\"id\":\"412c7d24-3186-4cc0-b922-d608ee5c8ec3\",\"status\":\"complete\",\"occured_at\":\"2016-03-09T13:10:54+00:00\",\"href\":\"/v1/checks/c86f46e1-a8ae-47a2-9d48-fd54da7fe3c5/reports/412c7d24-3186-4cc0-b922-d608ee5c8ec3\"}}}";
        webhookClient.deserializeAndVerify(payload3.getBytes("UTF-8"), token, signature3);
    }
}
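Note: the fixed signatures in validateWebhook are 40 hex characters, which is consistent with a hex-encoded HMAC-SHA1 of the raw payload keyed by the webhook token. A sketch of that verification using only the JDK follows; treating HMAC-SHA1 as the scheme is an inference from the test data rather than something the file above states, and the class and method names are invented for illustration. If the assumption holds, isValid(payload1.getBytes("UTF-8"), token, signature1) would return true for the first test vector.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;

public class WebhookSignature {

    static boolean isValid(byte[] payload, String token, String expectedHex) throws Exception {
        Mac mac = Mac.getInstance("HmacSHA1");
        mac.init(new SecretKeySpec(token.getBytes(StandardCharsets.UTF_8), "HmacSHA1"));
        byte[] computed = mac.doFinal(payload);
        // Constant-time comparison so the check does not leak how many bytes matched.
        return MessageDigest.isEqual(computed, hexToBytes(expectedHex));
    }

    private static byte[] hexToBytes(String hex) {
        byte[] out = new byte[hex.length() / 2];
        for (int i = 0; i < out.length; i++) {
            out[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
        }
        return out;
    }
}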
onfido-test/src/test/java/com/youcruit/onfido/api/WebhookTest.java
package com.youcruit.onfido.api;

import static java.util.Arrays.asList;

import java.io.IOException;
import java.net.URI;

import org.apache.log4j.Logger;
import org.junit.Before;
import org.junit.Test;

import com.youcruit.onfido.api.http.FakeHttpClient;
import com.youcruit.onfido.api.http.OnfidoHttpClient;
import com.youcruit.onfido.api.webhook.Event;
import com.youcruit.onfido.api.webhook.WebhookClient;

public class WebhookTest extends HttpIT {

    private OnfidoHttpClient client;
    private WebhookClient webhookClient;
    private String onfidoWebhookHost;

    public WebhookTest(final Class<OnfidoHttpClient> httpClientClass) {
        super(httpClientClass);
    }

    @Before
    public void setWebhookHost() {
        onfidoWebhookHost = getPropEnv("ONFIDO_WEBHOOK_URI");
        if (onfidoWebhookHost == null) {
            client = new FakeHttpClient();
            onfidoWebhookHost = "https://example.com/foo/";
        } else {
            Logger.getLogger(getClass()).warn("ONFIDO_WEBHOOK_HOST is defined, so not using FakeClient");
            client = createClient();
        }
        webhookClient = new WebhookClient(client);
    }

    @Test
    public void createWebhook() throws IOException {
        if (client instanceof FakeHttpClient) {
            FakeHttpClient fakeClient = (FakeHttpClient) this.client;
            fakeClient.addResponse(URI.create("https://api.onfido.com/v1/webhooks"),
                    "{\"id\":\"b6c6c111-ffbb-491d-bc03-0a7bae829a53\",\"url\":\"https://example.com/foo/\",\"enabled\":true,\"token\":\"o4TfmcJ-lwEHLmJJDN9LvyIObJlkiM5l\",\"href\":\"/v1/webhooks/b6c6c111-ffbb-491d-bc03-0a7bae829a53\",\"events\":[\"report completion\",\"report withdrawal\",\"check in progress\",\"check completion\"]}");
        }
        webhookClient.register(URI.create(onfidoWebhookHost), true, asList(Event.values()));
    }
}
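Note: the older test above already shows the stubbing pattern the suite depends on: when no real webhook endpoint is configured, a FakeHttpClient is primed with a canned JSON response keyed by request URI. A hypothetical bare-bones version of that lookup follows; the real FakeHttpClient implements OnfidoHttpClient and does more than this.

import java.net.URI;
import java.util.HashMap;
import java.util.Map;

class CannedResponses {

    private final Map<URI, String> responses = new HashMap<>();

    void addResponse(URI uri, String jsonBody) {
        responses.put(uri, jsonBody);
    }

    String respond(URI uri) {
        String body = responses.get(uri);
        if (body == null) {
            throw new IllegalStateException("No canned response for " + uri);
        }
        return body;
    }
}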
Add testcase for webhook callbacks
onfido-test/src/test/java/com/youcruit/onfido/api/WebhookTest.java
Add testcase for webhook callbacks
<ide><path>onfido-test/src/test/java/com/youcruit/onfido/api/WebhookTest.java
<ide> import static java.util.Arrays.asList;
<ide>
<ide> import java.io.IOException;
<add>import java.io.UnsupportedEncodingException;
<ide> import java.net.URI;
<ide>
<ide> import org.apache.log4j.Logger;
<ide> import com.youcruit.onfido.api.http.FakeHttpClient;
<ide> import com.youcruit.onfido.api.http.OnfidoHttpClient;
<ide> import com.youcruit.onfido.api.webhook.Event;
<add>import com.youcruit.onfido.api.webhook.SignatureMismatchException;
<ide> import com.youcruit.onfido.api.webhook.WebhookClient;
<ide>
<ide> public class WebhookTest extends HttpIT {
<ide>     }
<ide>     webhookClient.register(URI.create(onfidoWebhookHost), true, asList(Event.values()));
<ide>     }
<add>
<add>    @Test
<add>    public void validateWebhook() throws UnsupportedEncodingException, SignatureMismatchException {
<add>        String token = "4zvPpGGpvcqKcgGOEC7y3Ju8v2WC0btO";
<add>
<add>        String signature1 = "130ff94a8d70e734c57b181968efe6ed32c387c6";
<add>        String payload1 = "{\"payload\":{\"resource_type\":\"check\",\"action\":\"in progress\",\"object\":{\"id\":\"c86f46e1-a8ae-47a2-9d48-fd54da7fe3c5\",\"status\":\"in_progress\",\"completed_at\":\"2016-03-09 13:10:44 UTC\",\"href\":\"https://api.onfido.com/v1/applicants/95af7d0d-1c23-4c07-a8cd-8b66108decc6/checks/c86f46e1-a8ae-47a2-9d48-fd54da7fe3c5\"}}}";
<add>        webhookClient.deserializeAndVerify(payload1.getBytes("UTF-8"), token, signature1);
<add>
<add>        String signature2 = "fe9920536efcf6a5af4581498a39cedd0993be32";
<add>        String payload2 = "{\"payload\":{\"resource_type\":\"report\",\"action\":\"completed\",\"object\":{\"id\":\"412c7d24-3186-4cc0-b922-d608ee5c8ec3\",\"status\":\"complete\",\"completed_at\":\"2016-03-09 13:10:53 UTC\",\"href\":\"https://api.onfido.com/v1/checks/c86f46e1-a8ae-47a2-9d48-fd54da7fe3c5/reports/412c7d24-3186-4cc0-b922-d608ee5c8ec3\"}}}";
<add>        webhookClient.deserializeAndVerify(payload2.getBytes("UTF-8"), token, signature2);
<add>
<add>        String signature3 = "476a3cab6fe7e1ad2ee8e147c44e6a560b1b9e24";
<add>        String payload3 = "{\"payload\":{\"resource_type\":\"report\",\"action\":\"completed\",\"object\":{\"id\":\"412c7d24-3186-4cc0-b922-d608ee5c8ec3\",\"status\":\"complete\",\"occured_at\":\"2016-03-09T13:10:54+00:00\",\"href\":\"/v1/checks/c86f46e1-a8ae-47a2-9d48-fd54da7fe3c5/reports/412c7d24-3186-4cc0-b922-d608ee5c8ec3\"}}}";
<add>        webhookClient.deserializeAndVerify(payload3.getBytes("UTF-8"), token, signature3);
<add>    }
<ide> }
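Note: the three payloads added by this diff deliberately vary in shape: check vs report resources, completed_at vs occured_at timestamps, and absolute vs relative href values. Below is a sketch of POJOs that would accommodate all three variants; the field names come straight from the test JSON, but using Gson here is an illustrative assumption, not something the diff shows WebhookClient doing.

import com.google.gson.Gson;

class WebhookEnvelope {

    Payload payload;

    static class Payload {
        String resource_type;   // "check" or "report"
        String action;          // e.g. "in progress", "completed"
        Resource object;
    }

    static class Resource {
        String id;
        String status;
        String completed_at;    // some payloads use occured_at instead
        String occured_at;
        String href;            // may be absolute or relative
    }

    static WebhookEnvelope parse(String json) {
        return new Gson().fromJson(json, WebhookEnvelope.class);
    }
}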
Java
mit
67fa3f95693682fb4cfbf500f80d18a9df064d51
0
SamuelKlein/xpresso,WantedTechnologies/xpresso
src/test/java/Cleanup.java
import com.wantedtech.common.xpresso.x; import com.wantedtech.common.xpresso.functional.Function; import com.wantedtech.common.xpresso.regex.Match; import com.wantedtech.common.xpresso.regex.Regex; import com.wantedtech.common.xpresso.types.HappyFile; import com.wantedtech.common.xpresso.types.dict; import com.wantedtech.common.xpresso.types.list; import com.wantedtech.common.xpresso.types.set; import com.wantedtech.common.xpresso.types.str; import com.wantedtech.common.xpresso.types.tuple.tuple; import com.wantedtech.common.xpresso.types.tuple.tuple2; public class Cleanup { String ref_file_dir = "/Users/andriy.burkov/p/workspace/python/InternationalTitleCleanup/ref"; dict<String> acceptedSpecialCharactersDict = x.dictOf( x.tupleOf("RU","йцукеёнгшщзфывапролдячсмитьъбюэжх«»€$"), x.tupleOf("FR","àâèéêëîïôœùûç«»€$"), x.tupleOf("DE","àâèéêéäöü߀$«»‹›„‚“‘”’–—"), x.tupleOf("NL","àèìòùáéíóúýäëïöüÿâêîôû«»€$„‚”’‹›“‘–—"), x.tupleOf("PT","àáâãéêíóôõúü纪«»€$"), x.tupleOf("ES","áéíóúñü¿¡ºª«»€$"), x.tupleOf("SE","åäö«»€$„‚”’‹›“‘–—"), x.tupleOf("TR","çğİıöüş«»€$„‚”’‹›“‘–—") ); list<tuple> analogsTupleList = x.listOf( x.tupleOf("«","\""), x.tupleOf("»","\""), x.tupleOf("‹","<"), x.tupleOf("›",">"), x.tupleOf("„","\""), x.tupleOf("‚",","), x.tupleOf("‘","\""), x.tupleOf("“","\""), x.tupleOf("”","\""), x.tupleOf("’","'"), x.tupleOf("–"," - "), x.tupleOf("—"," - ") ); set<String> countriesSet = x.setOf(); set<String> citiesSet = x.setOf(); set<String> unsupportedCitiesSet = x.setOf(); set<String> geoExceptionsSet = x.setOf(); set<String> cityPartsSet = x.setOf(); set<String> advertisersSet = x.setOf(); set<String> monthsSet = x.setOf(); set<String> stopWordsSet = x.setOf(); set<String> beginningWordsSet = x.setOf(); set<String> metroSet = x.setOf(); set<String> statesSet = x.setOf(); list<list<String>> periodExpressionsList = x.listOf(); dict<String> replaceRulesDict = x.dictOf(); Regex replaceRulesRegex; Regex beginningWordsRegex; Regex stopWordsRegex; Regex placesRegex; Regex placesInBeginningRegex; Regex advertisersRegex; Regex specialCharsRegex; Regex shortWordsRegex; Regex oneWordsRegex; Regex referenceNumberAndDatesRegex; Regex betweenParenthesesRegex; Regex fixRegex; Regex forRegex; String periodExpressionsRegexPattern; dict<set<String>> surroundingWordsDict = x.dictOf(); dict<Regex> surroundingWordsRegexDict = x.dictOf(); public static Function<Object, Object> fGeneralizeRegex = new Function<Object, Object>() { public String apply(Object string) { return generalizeRegex((String)string); } }; private static String generalizeRegex(String pattern){ Regex saintRegex = x.RegexNoCase("\\b(?:st|saint)e?\\b[\\\\\\.\\s-]{0,3}"); Regex wordsWithDashRegex = x.RegexNoCase("(\\w)\\\\?-(\\w)"); Regex wordsWithApostropheRegex = x.RegexNoCase("(\\w)\\\\?'(\\w)"); pattern = saintRegex.sub("(?:st|saint)e?[.\\s-]{0,3}",pattern); pattern = wordsWithDashRegex.sub("$1[\\s.-]$2",pattern); pattern = wordsWithApostropheRegex.sub("$1[\\s']$2",pattern); return pattern; } list<String> annotate(String strng,list<tuple> regexesTypesList, list<String> annotations){ for(tuple regex__regexType : regexesTypesList){ Regex regex = (Regex)regex__regexType.get(0); String regexType = (String)regex__regexType.get(1); for(Match match : regex.searchIter(x.stripAccents(strng))){ if(x.String(regexType).notEquals("sho")){ for(int i : x.count(match.start(0),match.end(0))){ annotations.setAt(i).value(regexType); } }else{ if( x.String("pla").in(annotations.slice(match.start(0)-4,match.start(0))) || 
x.String("pla").in(annotations.slice(match.end(0),match.end(0)+4)) || x.String("sur").in(annotations.slice(match.start(0)-4,match.start(0))) || x.String("sur").in(annotations.slice(match.end(0),match.end(0)+4)) || x.String("one").in(annotations.slice(match.start(0)-4,match.start(0))) || x.String("one").in(annotations.slice(match.end(0),match.end(0)+4)) || x.String("ref").in(annotations.slice(match.start(0)-4,match.start(0))) || x.String("ref").in(annotations.slice(match.end(0),match.end(0)+4)) || x.String("sto").in(annotations.slice(match.start(0)-4,match.start(0))) || x.String("sto").in(annotations.slice(match.end(0),match.end(0)+4)) || x.String("adv").in(annotations.slice(match.start(0)-4,match.start(0))) || x.String("adv").in(annotations.slice(match.end(0),match.end(0)+4)) ){ for(int i : x.count(match.start(0),match.end(0))){ annotations.setAt(i).value(regexType); } } } } } return annotations; } list<String> annotate(String strng,list<tuple> regexesTypesList){ list<String> annotations = x.listOf("not").times(x.len(strng)); return annotate(strng,regexesTypesList,annotations); } list<String> annotate(str strng,list<tuple> regexesTypesList, list<String> annotations){ return annotate(strng.toString(),regexesTypesList,annotations); } list<String> annotate(str strng,list<tuple> regexesTypesList){ list<String> annotations = x.listOf("not").times(x.len(strng)); return annotate(strng,regexesTypesList,annotations); } public float get_annotation_score(list<String> annotation){ return (float)(annotation.count("not") + annotation.count("sho") + annotation.count("sto"))/(float)x.len(annotation); } //mark as 'pla' every word that sticks to a 'pla' (with a '-', or '/') public void contaminate(list<String> annotationsList,int idx, String strng){ contaminate(annotationsList,idx, strng,"pla",x.<String>listOf(),x.str(" ")); } public void contaminate(list<String> annotations_lst,int idx, String strng,list<String> nonContaminableLabelsList){ contaminate(annotations_lst,idx, strng,"pla",nonContaminableLabelsList,x.str(" ")); } public void contaminate(list<String> annotationsLst,int idx,String strng,String contaminationLabel,list<String> nonContaminableLabelsList,str boundaryCharactersList){ annotationsLst.setAt(idx).value(contaminationLabel); if( ( idx-1 >= 0 && x.String(annotationsLst.get(idx-1)).notEquals(contaminationLabel) && x.String(x.String(strng).get(idx-1)).notIn(boundaryCharactersList) && x.String(annotationsLst.get(idx-1)).notIn(nonContaminableLabelsList) ) ){ contaminate(annotationsLst,idx-1,strng,contaminationLabel,nonContaminableLabelsList,boundaryCharactersList); } if(idx+1 < x.len(annotationsLst) && x.String(annotationsLst.get(idx+1)).notEquals(contaminationLabel) && x.String(x.String(strng).get(idx+1)).notIn(boundaryCharactersList) && x.String(annotationsLst.get(idx+1)).notIn(nonContaminableLabelsList)){ contaminate(annotationsLst,idx+1,strng,contaminationLabel,nonContaminableLabelsList,boundaryCharactersList); } } public String cleanTitle(String title) throws Exception{ return cleanTitle(title,"FR"); } public String cleanTitle(String title,String language) throws Exception{ str originalTitle = x.str(title); str currentTitle = x.str(title); list<tuple> annotationRegexList; annotationRegexList = x.listOf( x.tupleOf(advertisersRegex,"adv"), x.tupleOf(placesRegex,"pla"), x.tupleOf(x.dict(surroundingWordsRegexDict).get(language),"sur"), x.tupleOf(oneWordsRegex,"one"), x.tupleOf(referenceNumberAndDatesRegex,"ref"), x.tupleOf(stopWordsRegex,"sto"), x.tupleOf(specialCharsRegex,"spe"), 
x.tupleOf(shortWordsRegex,"sho") ); //replace analog chars currentTitle = currentTitle.translated(analogsTupleList); String accepted_special_characters = ""; if(x.String(language).in(acceptedSpecialCharactersDict)){ accepted_special_characters = acceptedSpecialCharactersDict.get(language); } if(x.String(accepted_special_characters).notEquals("")){ //remove all non-accepted characters Regex nonAcceptedCharsRegex = x.RegexNoCase("[^a-zA-Z0-9"+accepted_special_characters+",.;!|~?\\s&/()':+\\#-]"); currentTitle = x.str(nonAcceptedCharsRegex.sub(" ",currentTitle)); } boolean all_caps = false; if(currentTitle.equals(currentTitle.upper())){ all_caps = true; } currentTitle = replaceRulesRegex.sub(currentTitle); Regex multipleSlashesRegex = x.Regex("/+"); currentTitle = multipleSlashesRegex.sub("/",currentTitle); //things like this ")(blahblah" becomes like this ") (blahblah": Regex twoParenthesesThatStick = x.Regex("([\\w\\))])([\\((][^))]{6,}[\\))])"); currentTitle = x.str(twoParenthesesThatStick.sub("$1 $2",currentTitle)); //remove (h/f) Regex hfRegex = x.RegexNoCase("\\(\\s?\\b\\w\\s?[\\\\/-]\\s?\\w\\b\\s?\\)|\\b\\w\\s?[\\\\/]\\s?\\w\\b[\\s-]|\\b\\w\\s?[\\\\/-]\\s?\\w\\b\\s|\\b\\w\\s?[\\\\/-]\\s?\\w\\b$"); currentTitle = hfRegex.sub(" ",currentTitle); currentTitle = currentTitle.strip().strip(",").strip().strip(","); currentTitle = betweenParenthesesRegex.sub(" ",currentTitle); currentTitle = currentTitle.strip(); currentTitle = placesInBeginningRegex.sub(" ",currentTitle); //things like this " - " become like this " - " Regex longDashRegex = x.RegexNoCase("[\\s-]*\\s-\\s[\\s-]*"); currentTitle = longDashRegex.sub(" - ",currentTitle); currentTitle = currentTitle.strip(); //split by the " - ", "~" and "|" and keep the left part only (if the remaining title is still long enough) Regex specialCharacterSplitter = x.Regex(" - |~|\\|"); Regex wordChars = x.Regex("[\\w'-]+"); list<str> long_dash_split = specialCharacterSplitter.split(currentTitle); while(x.len(long_dash_split) > 1){ //the remaining part (has more dashes, or it's longer than 15 chars, or it contains 2 or more words) and it's at least a half of the title and contains more than 65% of non-special chars if ( ( x.len(long_dash_split.sliceTo(-1)) > 1 || x.len(x.str(" - ").join(long_dash_split.sliceTo(-1))) > 15 || x.len(x.list(wordChars.searchAll(x.str(" - ").join(long_dash_split.sliceTo(-1))))) > 1 ) && x.len(long_dash_split.get(-1)) <= x.len(currentTitle)/2 && get_annotation_score(annotate(x.str(" - ").join(long_dash_split.sliceTo(-1)),annotationRegexList.append(x.tupleOf(fixRegex,"not")))) > 0.65 ){ currentTitle = x.str(x.str(" - ").join(long_dash_split.sliceTo(-1))); long_dash_split = x.str(currentTitle).split(" - "); }else{ break; } } //if there's more than two "/" in the title then split by the third "/" and keep the left part only (if the remaining title is still long enough) //an exception is made if the first word right part looks like a part of an acronym, so we try to split by "/" after it list<str> slash_split = currentTitle.split("/"); Regex wordInTheBeginningRegex = x.Regex("\\s*[A-Z0-9]{1,5}\\b|^\\s*\\w{1,2}\\b"); Regex periodExpressionsRegex = x.RegexNoCase("^\\s*("+periodExpressionsRegexPattern+")"); if( x.len(slash_split) > 2 && ( all_caps && ( wordInTheBeginningRegex.search(slash_split.get(2)) != null && periodExpressionsRegex.search(slash_split.get(2)) != null ) ) ){ currentTitle = x.str("/").join(slash_split.sliceTo(2)); }else if( x.len(slash_split) > 3 && wordInTheBeginningRegex.search(slash_split.get(3)) != null && 
periodExpressionsRegex.search(slash_split.get(3)) != null ){ currentTitle = x.str("/").join(slash_split.sliceTo(3)); } if(currentTitle.equals("")){ return ""; } list<String> annotations = annotate(currentTitle,annotationRegexList); //fix annotations for some special cases Regex regex = fixRegex; String regex_type = "not"; for(Match match : regex.searchIter(currentTitle)){ for(int i : x.count(match.start(0),match.end(0))){ annotations.setAt(i).value(regex_type); } } //mark as 'pla' everything between two 'pla' or between a 'sur' and a 'pla' int idx1 = 0; while(idx1 < x.len(annotations)){ if(annotations.get(idx1).equals("sur") || annotations.get(idx1).equals("pla")){ String contaminationTag = annotations.get(idx1); int idx2 = idx1+1; while(idx2 < x.len(annotations)){ if(annotations.get(idx2).equals("pla") || (annotations.get(idx2).equals("sur") && contaminationTag.equals("pla"))){ break; } idx2 += 1; } if(idx2 != x.len(annotations)){ for(int contamination_idx : x.count(idx1,idx2)){ annotations.setAt(contamination_idx).value(contaminationTag); } } idx1 = idx2; } idx1 += 1; } //mark as 'pla' everything to the right of a surrounding word or a 'pla', if at least half of the title is remaining and it's of a descent quality for(tuple2<Integer,String> idx__annotation : x.reversed(x.list(x.enumerate(annotations)))){ int idx = idx__annotation.value0; String annotation = idx__annotation.value1; if( x.String(annotation).in(x.listOf("sur","pla")) && ( idx+1 >= x.len(annotations) || x.String(annotations.get(idx+1)).notIn(x.listOf("pla","sur")) && ( (idx > x.len(annotations)/2 && get_annotation_score(annotations.sliceTo(idx)) > 0.5) || (idx > x.len(annotations)/3 && get_annotation_score(annotations.sliceTo(idx)) > 0.7) ) ) ){ annotations = annotations.sliceTo(idx+1).append("pla").times(x.len(annotations)-1-idx); } } //contamainate with place label list<String> old_annotations = annotations.copy(); for(tuple2<Integer,String> idx__annotation : x.enumerate(annotations)){ int idx = idx__annotation.value0; String annotation = idx__annotation.value1; if(annotation.equals("pla") && x.String(currentTitle.get(idx)).notEquals(" ")){ contaminate(annotations,idx,title); } } if(get_annotation_score(annotations) < .25){ annotations = old_annotations.copy(); } //contaminate with ref_num label for(tuple2<Integer,String> idx__annotation : x.enumerate(annotations)){ int idx = idx__annotation.value0; String annotation = idx__annotation.value1; if(annotation.equals("ref") && x.String(currentTitle.get(idx)).notEquals(" ")){ contaminate(annotations,idx,title,"ref",x.listOf("not"),x.str()); } } //fix for the left-most char annotation when it's a parenthesis but the interior of parentheses is "not" if(x.String(currentTitle.get(0)).in(x.listOf("(","(")) && annotations.get(1).equals("not")){ annotations.setAt(0).value("not"); } //remove places and reference numbers from anywhere str cleanTitle = x.str(""); list<String> cleanAnnotations = x.listOf(); for(tuple2<Integer,String> idx__label : x.enumerate(annotations)){ int idx = idx__label.value0; String label = idx__label.value1; if(x.String(label).notIn(x.listOf("pla","ref")) || currentTitle.get(idx).equals(" ")){ cleanAnnotations.append(label); cleanTitle=cleanTitle.plus(x.str(currentTitle.get(idx))); } } annotations = cleanAnnotations.copy(); currentTitle = cleanTitle.copy(); //remove non-'not' from the beginning and from the end int start = 0; while(x.String(annotations.get(start)).notEquals("not")){ start += 1; if(x.len(annotations) <= start){ break; } } int end = 
x.len(annotations)-1; //if 'pla' in annotations and annotations.index('pla') > len(annotations)/3: while(x.String(annotations.get(end)).notEquals("not")){ end -= 1; if(end < 0){ end = 0; break; } } currentTitle = currentTitle.slice(start,end+1); Regex multipleSpacesRegex = x.Regex("\\s+"); currentTitle = multipleSpacesRegex.sub(" ",currentTitle); Regex spaceCommaRegex = x.Regex("\\s*,+"); currentTitle = spaceCommaRegex.sub(",",currentTitle); Regex multiDotsRegex = x.Regex("\\.+"); currentTitle = multiDotsRegex.sub(".",currentTitle); Regex wordEndsByDashRegex = x.Regex("(\\w)-\\s"); currentTitle = wordEndsByDashRegex.sub("$1 ",currentTitle); Regex wordStartsByDashRegex = x.Regex("(?:\\s|^)-(\\w)"); currentTitle = wordStartsByDashRegex.sub(" $1",currentTitle); currentTitle = currentTitle.strip().strip(",").strip().strip(","); Regex stringEndsByDotsRegex = x.Regex("\\.*$"); currentTitle = stringEndsByDotsRegex.sub("",currentTitle); //if we cleaned the title, and it's so clean that nothing remains, we return the original title if(x.len(currentTitle) == 0){ currentTitle = originalTitle.strip(); } Regex slashRegex = x.Regex("\\s*/\\s*"); currentTitle = slashRegex.sub(" / ",currentTitle); return currentTitle.toString(); } public Cleanup() throws Exception{ unsupportedCitiesSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.strip,x.lower)).forElementIn("zürich","bern","schwyz","wien","vienna","aarga")); countriesSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.strip,x.lower)).forElementIn("switzerland","schweiz","suisse","britain","british","uk","united kingdom","ireland","scotland","scottish","irish","us","united states","america","american","australia","australian","canada","canadian","china","中国","chinese","germany","deutschlandweit","deutschland","dtl","dtl. 
weit.","bundesrepublik deutschland","deutschland","japan","日本","india","indian","france","italy","italia","mexico","mexican","netherland","holland","belgium","belgië","belgique","belgien","portugal","república portuguesa","argentina","república argentina","sweden","sverige","russia","russian federation","rf","turkey","arab emirates","united arab emirates","emirates","uae","brazil","brazilian","saudi arabia","benelux","netherlands","nederland","рф","снг","россия","российская федерация","türkiye","السعودية","الإمارات العربية المتحدة")); //these locations cant be removed as other cities even if such cities exist: geoExceptionsSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.strip,x.lower)).forElementIn("sta","it","cns","lot","centrale","marché","glacière","plaisance","art","la","tri","écoles","nationale","poissonniers","poissonnière","car","sur","se","foreman","mason","secretary","mobile","lead","cv","home","house","java","driver","low","field","branch","english","golf","campus","portage","trainer","teller","commerce")); HappyFile f; for (String place : x.open(ref_file_dir+"/cities_en_and_original.txt","r","utf-8")){ citiesSet.absorb(x.set(x.<String>element().transformWith(x.chainOf(x.lower,x.stripAccents)).forElementIn(x.String(place).split(",")).ifNotElement(x.empty))); } citiesSet = citiesSet.difference(geoExceptionsSet); f = x.open(ref_file_dir+"/common_city_parts.txt","r","utf-8"); cityPartsSet = x.set(x.<String>element().transformWith(x.strip).forElementIn(f)); set<String> addToSet; for (String place : x.open(ref_file_dir+"/states_en_and_original.txt","r","utf-8")){ addToSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(place).split(",")).ifNotElement(x.empty)); statesSet.absorb(addToSet); } statesSet = statesSet.difference(geoExceptionsSet); for(String place : x.open(ref_file_dir+"/metro_en_and_original.txt","r","utf-8")){ addToSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(place).split(",")).ifNotElement(x.empty)); metroSet.absorb(addToSet); } metroSet.reject(geoExceptionsSet); list<String> langList = x.listOf(); for(tuple2<Integer,String> idx__words : x.enumerate(x.open(ref_file_dir+"/surrounding_words.txt","r","utf-8"))){ int idx = idx__words.value0; String words = idx__words.value1; if(idx == 0){ langList = x.list(x.<String>element().transformWith(x.upper).forElementIn(x.String(words).split("\t"))); for (String lang : langList){ surroundingWordsDict.setAt(lang.toUpperCase()).value(x.<String>setOf()); } }else{ for (tuple2<Integer,String> idx_word : x.enumerate(x.String(words).split("\t"))){ idx = idx_word.value0; String word = idx_word.value1; if (x.String(word.trim()).notEquals("")){ String old_key = x.list(langList).get(idx); set<String> old_value = surroundingWordsDict.get(langList.get(idx)); set<String> new_value = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(word).split(","))); surroundingWordsDict.setAt(old_key).value(old_value.union(new_value)); } } } } for (String word : x.open(ref_file_dir+"/drop_in_the_beginning_words.txt","r","utf-8")){ addToSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(word).split(",")).ifNotElement(x.empty)); beginningWordsSet.absorb(addToSet); } for (String word : x.open(ref_file_dir+"/stop_words.txt","r","utf-8")){ addToSet = 
x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(word).split(",")).ifNotElement(x.empty)); stopWordsSet.absorb(addToSet); } for (String word : x.open(ref_file_dir+"/months_of_the_year.txt","r","utf-8")){ addToSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(word).split(",")).ifNotElement(x.empty)); monthsSet.absorb(addToSet); } f = x.open(ref_file_dir+"/top_advertisers.txt","r","utf-8"); advertisersSet = x.set(x.<String>element().transformWith(x.chainOf(x.lower,x.stripAccents)).forElementIn(f)); periodExpressionsList = x.listOf(x.listOf("AR","شهر"),x.listOf("ZH","月"),x.listOf("NL","maand"),x.listOf("FR","mois"),x.listOf("DE","monat"),x.listOf("DE","mon"),x.listOf("DE","mtl"),x.listOf("IT","mese"),x.listOf("JA","月"),x.listOf("PT","mês"),x.listOf("RU","месяц"),x.listOf("RU","мес"),x.listOf("ES","mes"),x.listOf("SE","månad"),x.listOf("TR","ay")); periodExpressionsRegexPattern = x.String("|").join(x.set(x.element(1).forElementIn(periodExpressionsList))); for(String lang : surroundingWordsDict){ surroundingWordsRegexDict.setAt(lang).value(x.RegexNoCase("(?:\\W|_)?\\b(?:"+x.str("|").join(x.sorted(surroundingWordsDict.get(lang), x.len, true))+")\\b(?:\\W|_)?")); } beginningWordsRegex = x.RegexNoCase("^.*\\b(?:"+x.str("|").join(x.sorted(beginningWordsSet,x.len,true))+")"); stopWordsRegex = x.RegexNoCase("\\d+\\sans(?: ou plus)?\\b|\\d+\\sye?a?rs?(?: old)?"+"|"+ "\\d+\\s?(h|ч)(?:ours?|eures?|асо?в?а?я?)?(?:\\swe?e?k|\\ssem(?:aine)?)?\\b"+"|"+ "(?:\\W|_)?\\b(?:"+x.str("|").join(x.sorted(stopWordsSet,x.len,true))+")\\b(?:\\W|_)?|\\[[^\\]]+\\]|\\(\\w[/\\\\]\\w\\)"); placesRegex = x.RegexNoCase("(?:\\W|_)?\\b(?:" + x.String("|").join(x.list(x.<String>element().transformWith(x.chainOf(x.escape,fGeneralizeRegex)).forElementIn(x.sorted(x.union(citiesSet,statesSet,metroSet,countriesSet,unsupportedCitiesSet),x.len,true)))) + "|"+ x.str("|").join(cityPartsSet) + ")\\b(?:\\W|_)?"); placesInBeginningRegex = x.RegexNoCase("^(?:\\W|_)*\\b(?:" + x.str("|").join(x.list(x.<String>element().transformWith(x.chainOf(x.escape,fGeneralizeRegex)).forElementIn(x.sorted(x.union(citiesSet,statesSet,metroSet,countriesSet,unsupportedCitiesSet),x.len,true)))) + "|" + x.str("|").join(cityPartsSet) + ")(-\\w+)?\\b(?:\\W|_)?"); advertisersRegex = x.RegexNoCase("(?:\\W|_)?\\b(?:"+x.str("|").join(x.sorted(advertisersSet,x.len,true))+")\\b(?:\\W|_)?(?:\\s?(?:group|groupe|express|bank|holdings?|company|corp|inc|corporation|incorporated|limited|& ?co)\\b){0,2}"); specialCharsRegex = x.Regex("(?:\\W|_)"); shortWordsRegex = x.RegexNoCase("(?<![/\\()])\\b[a-zA-Zа-яА-Я0-9']{1,2}\\b"); oneWordsRegex = x.RegexNoCase("\\b[a-zA-Zа-яА-Я6-9']{1,1}\\b(?!-)"); referenceNumberAndDatesRegex = x.RegexNoCase("(?<=[\\s)])[\\#№][^\\s]+\\b|"+"\\b(?:20\\d\\d\\b)?\\W*(?:\\b\\d{1,2}\\b)?\\W*(?:\\b20\\d\\d\\b)?\\W*(?:\\b\\d{1,2})?\\b("+x.str("|").join(monthsSet)+")\\b\\W*(?:\\d{1,2}\\b)?\\W*(?:20\\d\\d\\b)?\\W*(?:\\d{1,2}\\b)?\\W*(?:20\\d\\d\\b)?|"+"\\b\\d{1,2}\\.\\d{1,2}\\.\\d{2,4}\\b|\\b\\d{1,2}[\\\\/]\\d{1,2}(?:[\\\\/]\\d{2,4})?\\b|\\b([a-z]+[0-9]|[0-9]+[a-z])(?:[a-z0-9]{2,}|-[0-9][a-z0-9-]{2,})\\b"); betweenParenthesesRegex = x.RegexNoCase("\\s[((][^))]+[\\))]|^[((][^))]+[))][\\s,]|\\s-\\w+-\\s|\\s-\\w+-$|[((][^))]+\\s[^))]+[\\))]"); fixRegex = x.RegexNoCase("\\bjr\\b|\\bTI\\b|\\bSOLNA 164\\b|\\b\\d{1,3}\\s?%|\\bnationale?\\b|\\bblanche\\b|\\b[123](?:st|rd|nd)\\b|\\b[123][eè][rm]e\\b|\\bbras 
de\\b|\\b(?:dry|mini)[\\s-]van\\b|\\b[\\d.,-]+\\s?T?(?:EUR|CHF)\\b|\\bsharepoint\\b|\\bax\\b|\\bit\\b|\\.net\\b|\\bрп\\b|\\bофис-|\\b(?:d'|de\\sl'|des\\s)usines?\\b|\\brh\\b|\\bde secteur\\b|\\bd'[ée]tat\\b|\\bit\\b|\\bzone d'attraction\\b|\\bd'établissements?\\b|\\b(?:à la|de) direction\\b|\\w{3,}\\(-?\\w{1,4}\\)|\\b\\w&\\w\\b|(?<!\\()\\b(a\\b|b\\b|c(?:(?:\\+\\+|\\#)(?=$|[^+])|\\b)|d\\b|e\\b|r\\b|(?<=гории |ласса |гория |класс |.кат\\. |. кат |. кат\\.|..\\bкл\\. |...\\bкл |...\\bкл\\.)а\\b|(?<=гории |ласса |гория |класс |.кат\\. |. кат |. кат\\.|..\\bкл\\. |...\\bкл |...\\bкл\\.)в\\b|с(?:\\+\\+|\\#)(?=$|[^\\+])|(?<=гории |ласса |гория |класс |.кат\\. |. кат |. кат\\.|..\\bкл\\. |...\\bкл |...\\bкл\\.)с\\b|(?<=гории |ласса |гория |класс |.кат\\. |. кат |. кат\\.|..\\bкл\\. |...\\bкл |...\\bкл\\.)д\\b|(?<=гории |ласса |гория |класс |.кат\\. |. кат |. кат\\.|..\\bкл\\. |...\\bкл |...\\bкл\\.)е\\b)|[\\\\/]in\\b|\\boffice 365\\b|\\b(?:en|du|de|des|\\w+rice|\\w+eure?|\\w+euse|\\w+ien|\\w+ienne|\\w+ste|\\w+tre) b.timents?\\b|\\b(?:en|du|de|des|sur) (centres?|directions?|station)\\b|\\bstation[\\s-]service\\b|\\b(?:en|de) bureau\\b|\\bbureau des\\b|\\bbureau d'\\w+s\\b|\\b(?:des?(?: la)?|en) recherches?\\b|\\bcentre(?: d'?e?s?)?\\s?(?:appels?|loisirs?|vacances?|sports?|services?)\\b|\\bbureau (?:\\w+(?:que|lle|al))\\b|\\b(i|ii|iii|iv|v|vi)\\b|\\bd. d.partement\\b|\\bthé\\b|\\bde ville\\b|\\bj2ee\\b|\\bétat des\\b"); forRegex = x.RegexNoCase("\\b(voor|pour|für|per|per(\\sil|\\sla)?|ための|para|для|в|para|para(\\sla|\\slos)?|för|için)\\b"); replaceRulesDict = x.dictOf( x.tupleOf("(?<=\\w{5,5})a\\s?\\(o\\)","o"), x.tupleOf("kauffrau\\s?/\\s?-?kaufmann\\b","kaufmann"), x.tupleOf("frau\\s?/\\s?-?mann\\b","mann"), x.tupleOf("\\bzur/zum|zum/zur\\b","zur"), x.tupleOf("\\band/or|or/and\\b","and"), x.tupleOf("\\bet/ou|ou/et\\b","et"), x.tupleOf("\\be/ou|ou/e\\b","e") ); replaceRulesRegex = x.RegexNoCase(replaceRulesDict); } /** * @param args */ public static void main(String[] args) throws Exception { try{ Cleanup cleaner = new Cleanup(); String title = "‹„La Défense - Chef de Projet H/F"; x.print(cleaner.cleanTitle(title)); }catch(Exception e){ throw e; } } }
Delete Cleanup.java
src/test/java/Cleanup.java
Delete Cleanup.java
<ide><path>rc/test/java/Cleanup.java <del>import com.wantedtech.common.xpresso.x; <del>import com.wantedtech.common.xpresso.functional.Function; <del>import com.wantedtech.common.xpresso.regex.Match; <del>import com.wantedtech.common.xpresso.regex.Regex; <del>import com.wantedtech.common.xpresso.types.HappyFile; <del>import com.wantedtech.common.xpresso.types.dict; <del>import com.wantedtech.common.xpresso.types.list; <del>import com.wantedtech.common.xpresso.types.set; <del>import com.wantedtech.common.xpresso.types.str; <del>import com.wantedtech.common.xpresso.types.tuple.tuple; <del>import com.wantedtech.common.xpresso.types.tuple.tuple2; <del> <del>public class Cleanup { <del> <del> String ref_file_dir = "/Users/andriy.burkov/p/workspace/python/InternationalTitleCleanup/ref"; <del> <del> dict<String> acceptedSpecialCharactersDict = x.dictOf( <del> x.tupleOf("RU","йцукеёнгшщзфывапролдячсмитьъбюэжх«»€$"), <del> x.tupleOf("FR","àâèéêëîïôœùûç«»€$"), <del> x.tupleOf("DE","àâèéêéäöü߀$«»‹›„‚“‘”’–—"), <del> x.tupleOf("NL","àèìòùáéíóúýäëïöüÿâêîôû«»€$„‚”’‹›“‘–—"), <del> x.tupleOf("PT","àáâãéêíóôõúü纪«»€$"), <del> x.tupleOf("ES","áéíóúñü¿¡ºª«»€$"), <del> x.tupleOf("SE","åäö«»€$„‚”’‹›“‘–—"), <del> x.tupleOf("TR","çğİıöüş«»€$„‚”’‹›“‘–—") <del> ); <del> <del> list<tuple> analogsTupleList = x.listOf( <del> x.tupleOf("«","\""), <del> x.tupleOf("»","\""), <del> x.tupleOf("‹","<"), <del> x.tupleOf("›",">"), <del> x.tupleOf("„","\""), <del> x.tupleOf("‚",","), <del> x.tupleOf("‘","\""), <del> x.tupleOf("“","\""), <del> x.tupleOf("”","\""), <del> x.tupleOf("’","'"), <del> x.tupleOf("–"," - "), <del> x.tupleOf("—"," - ") <del> ); <del> <del> set<String> countriesSet = x.setOf(); <del> set<String> citiesSet = x.setOf(); <del> set<String> unsupportedCitiesSet = x.setOf(); <del> set<String> geoExceptionsSet = x.setOf(); <del> set<String> cityPartsSet = x.setOf(); <del> set<String> advertisersSet = x.setOf(); <del> set<String> monthsSet = x.setOf(); <del> set<String> stopWordsSet = x.setOf(); <del> set<String> beginningWordsSet = x.setOf(); <del> set<String> metroSet = x.setOf(); <del> set<String> statesSet = x.setOf(); <del> list<list<String>> periodExpressionsList = x.listOf(); <del> dict<String> replaceRulesDict = x.dictOf(); <del> Regex replaceRulesRegex; <del> <del> Regex beginningWordsRegex; <del> Regex stopWordsRegex; <del> Regex placesRegex; <del> Regex placesInBeginningRegex; <del> Regex advertisersRegex; <del> Regex specialCharsRegex; <del> Regex shortWordsRegex; <del> Regex oneWordsRegex; <del> Regex referenceNumberAndDatesRegex; <del> Regex betweenParenthesesRegex; <del> Regex fixRegex; <del> Regex forRegex; <del> <del> String periodExpressionsRegexPattern; <del> <del> dict<set<String>> surroundingWordsDict = x.dictOf(); <del> dict<Regex> surroundingWordsRegexDict = x.dictOf(); <del> <del> public static Function<Object, Object> fGeneralizeRegex = new Function<Object, Object>() { <del> public String apply(Object string) { <del> return generalizeRegex((String)string); <del> } <del> }; <del> private static String generalizeRegex(String pattern){ <del> Regex saintRegex = x.RegexNoCase("\\b(?:st|saint)e?\\b[\\\\\\.\\s-]{0,3}"); <del> Regex wordsWithDashRegex = x.RegexNoCase("(\\w)\\\\?-(\\w)"); <del> Regex wordsWithApostropheRegex = x.RegexNoCase("(\\w)\\\\?'(\\w)"); <del> pattern = saintRegex.sub("(?:st|saint)e?[.\\s-]{0,3}",pattern); <del> pattern = wordsWithDashRegex.sub("$1[\\s.-]$2",pattern); <del> pattern = wordsWithApostropheRegex.sub("$1[\\s']$2",pattern); <del> return pattern; <del> } <del> 
<del> list<String> annotate(String strng,list<tuple> regexesTypesList, list<String> annotations){ <del> for(tuple regex__regexType : regexesTypesList){ <del> Regex regex = (Regex)regex__regexType.get(0); <del> String regexType = (String)regex__regexType.get(1); <del> for(Match match : regex.searchIter(x.stripAccents(strng))){ <del> if(x.String(regexType).notEquals("sho")){ <del> for(int i : x.count(match.start(0),match.end(0))){ <del> annotations.setAt(i).value(regexType); <del> } <del> }else{ <del> if( <del> x.String("pla").in(annotations.slice(match.start(0)-4,match.start(0))) <del> || <del> x.String("pla").in(annotations.slice(match.end(0),match.end(0)+4)) <del> || <del> x.String("sur").in(annotations.slice(match.start(0)-4,match.start(0))) <del> || <del> x.String("sur").in(annotations.slice(match.end(0),match.end(0)+4)) <del> || <del> x.String("one").in(annotations.slice(match.start(0)-4,match.start(0))) <del> || <del> x.String("one").in(annotations.slice(match.end(0),match.end(0)+4)) <del> || <del> x.String("ref").in(annotations.slice(match.start(0)-4,match.start(0))) <del> || <del> x.String("ref").in(annotations.slice(match.end(0),match.end(0)+4)) <del> || <del> x.String("sto").in(annotations.slice(match.start(0)-4,match.start(0))) <del> || <del> x.String("sto").in(annotations.slice(match.end(0),match.end(0)+4)) <del> || <del> x.String("adv").in(annotations.slice(match.start(0)-4,match.start(0))) <del> || <del> x.String("adv").in(annotations.slice(match.end(0),match.end(0)+4)) <del> ){ <del> for(int i : x.count(match.start(0),match.end(0))){ <del> annotations.setAt(i).value(regexType); <del> } <del> } <del> } <del> } <del> } <del> return annotations; <del> } <del> list<String> annotate(String strng,list<tuple> regexesTypesList){ <del> list<String> annotations = x.listOf("not").times(x.len(strng)); <del> return annotate(strng,regexesTypesList,annotations); <del> } <del> list<String> annotate(str strng,list<tuple> regexesTypesList, list<String> annotations){ <del> return annotate(strng.toString(),regexesTypesList,annotations); <del> } <del> list<String> annotate(str strng,list<tuple> regexesTypesList){ <del> list<String> annotations = x.listOf("not").times(x.len(strng)); <del> return annotate(strng,regexesTypesList,annotations); <del> } <del> <del> public float get_annotation_score(list<String> annotation){ <del> return (float)(annotation.count("not") + annotation.count("sho") + annotation.count("sto"))/(float)x.len(annotation); <del> } <del> <del> //mark as 'pla' every word that sticks to a 'pla' (with a '-', or '/') <del> public void contaminate(list<String> annotationsList,int idx, String strng){ <del> contaminate(annotationsList,idx, strng,"pla",x.<String>listOf(),x.str(" ")); <del> } <del> public void contaminate(list<String> annotations_lst,int idx, String strng,list<String> nonContaminableLabelsList){ <del> contaminate(annotations_lst,idx, strng,"pla",nonContaminableLabelsList,x.str(" ")); <del> } <del> public void contaminate(list<String> annotationsLst,int idx,String strng,String contaminationLabel,list<String> nonContaminableLabelsList,str boundaryCharactersList){ <del> annotationsLst.setAt(idx).value(contaminationLabel); <del> if( <del> ( <del> idx-1 >= 0 && x.String(annotationsLst.get(idx-1)).notEquals(contaminationLabel) <del> && <del> x.String(x.String(strng).get(idx-1)).notIn(boundaryCharactersList) <del> && x.String(annotationsLst.get(idx-1)).notIn(nonContaminableLabelsList) <del> ) <del> ){ <del> 
contaminate(annotationsLst,idx-1,strng,contaminationLabel,nonContaminableLabelsList,boundaryCharactersList); <del> } <del> if(idx+1 < x.len(annotationsLst) && x.String(annotationsLst.get(idx+1)).notEquals(contaminationLabel) && x.String(x.String(strng).get(idx+1)).notIn(boundaryCharactersList) && x.String(annotationsLst.get(idx+1)).notIn(nonContaminableLabelsList)){ <del> contaminate(annotationsLst,idx+1,strng,contaminationLabel,nonContaminableLabelsList,boundaryCharactersList); <del> } <del> } <del> <del> public String cleanTitle(String title) throws Exception{ <del> return cleanTitle(title,"FR"); <del> } <del> <del> public String cleanTitle(String title,String language) throws Exception{ <del> <del> str originalTitle = x.str(title); <del> str currentTitle = x.str(title); <del> <del> list<tuple> annotationRegexList; <del> annotationRegexList = x.listOf( <del> x.tupleOf(advertisersRegex,"adv"), <del> x.tupleOf(placesRegex,"pla"), <del> x.tupleOf(x.dict(surroundingWordsRegexDict).get(language),"sur"), <del> x.tupleOf(oneWordsRegex,"one"), <del> x.tupleOf(referenceNumberAndDatesRegex,"ref"), <del> x.tupleOf(stopWordsRegex,"sto"), <del> x.tupleOf(specialCharsRegex,"spe"), <del> x.tupleOf(shortWordsRegex,"sho") <del> ); <del> <del> //replace analog chars <del> currentTitle = currentTitle.translated(analogsTupleList); <del> <del> String accepted_special_characters = ""; <del> if(x.String(language).in(acceptedSpecialCharactersDict)){ <del> accepted_special_characters = acceptedSpecialCharactersDict.get(language); <del> } <del> <del> <del> if(x.String(accepted_special_characters).notEquals("")){ <del> //remove all non-accepted characters <del> Regex nonAcceptedCharsRegex = x.RegexNoCase("[^a-zA-Z0-9"+accepted_special_characters+",.;!|~?\\s&/()':+\\#-]"); <del> currentTitle = x.str(nonAcceptedCharsRegex.sub(" ",currentTitle)); <del> } <del> <del> boolean all_caps = false; <del> if(currentTitle.equals(currentTitle.upper())){ <del> all_caps = true; <del> } <del> <del> currentTitle = replaceRulesRegex.sub(currentTitle); <del> <del> Regex multipleSlashesRegex = x.Regex("/+"); <del> currentTitle = multipleSlashesRegex.sub("/",currentTitle); <del> <del> //things like this ")(blahblah" becomes like this ") (blahblah": <del> Regex twoParenthesesThatStick = x.Regex("([\\w\\))])([\\((][^))]{6,}[\\))])"); <del> currentTitle = x.str(twoParenthesesThatStick.sub("$1 $2",currentTitle)); <del> <del> //remove (h/f) <del> Regex hfRegex = x.RegexNoCase("\\(\\s?\\b\\w\\s?[\\\\/-]\\s?\\w\\b\\s?\\)|\\b\\w\\s?[\\\\/]\\s?\\w\\b[\\s-]|\\b\\w\\s?[\\\\/-]\\s?\\w\\b\\s|\\b\\w\\s?[\\\\/-]\\s?\\w\\b$"); <del> currentTitle = hfRegex.sub(" ",currentTitle); <del> <del> currentTitle = currentTitle.strip().strip(",").strip().strip(","); <del> <del> currentTitle = betweenParenthesesRegex.sub(" ",currentTitle); <del> <del> currentTitle = currentTitle.strip(); <del> <del> currentTitle = placesInBeginningRegex.sub(" ",currentTitle); <del> <del> //things like this " - " become like this " - " <del> Regex longDashRegex = x.RegexNoCase("[\\s-]*\\s-\\s[\\s-]*"); <del> currentTitle = longDashRegex.sub(" - ",currentTitle); <del> <del> currentTitle = currentTitle.strip(); <del> <del> //split by the " - ", "~" and "|" and keep the left part only (if the remaining title is still long enough) <del> Regex specialCharacterSplitter = x.Regex(" - |~|\\|"); <del> Regex wordChars = x.Regex("[\\w'-]+"); <del> list<str> long_dash_split = specialCharacterSplitter.split(currentTitle); <del> while(x.len(long_dash_split) > 1){ <del> //the remaining part 
(has more dashes, or it's longer than 15 chars, or it contains 2 or more words) and it's at least a half of the title and contains more than 65% of non-special chars <del> if ( <del> ( <del> x.len(long_dash_split.sliceTo(-1)) > 1 <del> || <del> x.len(x.str(" - ").join(long_dash_split.sliceTo(-1))) > 15 <del> || <del> x.len(x.list(wordChars.searchAll(x.str(" - ").join(long_dash_split.sliceTo(-1))))) > 1 <del> ) <del> && <del> x.len(long_dash_split.get(-1)) <= x.len(currentTitle)/2 <del> && <del> get_annotation_score(annotate(x.str(" - ").join(long_dash_split.sliceTo(-1)),annotationRegexList.append(x.tupleOf(fixRegex,"not")))) > 0.65 <del> ){ <del> currentTitle = x.str(x.str(" - ").join(long_dash_split.sliceTo(-1))); <del> long_dash_split = x.str(currentTitle).split(" - "); <del> }else{ <del> break; <del> } <del> } <del> <del> //if there's more than two "/" in the title then split by the third "/" and keep the left part only (if the remaining title is still long enough) <del> //an exception is made if the first word right part looks like a part of an acronym, so we try to split by "/" after it <del> list<str> slash_split = currentTitle.split("/"); <del> Regex wordInTheBeginningRegex = x.Regex("\\s*[A-Z0-9]{1,5}\\b|^\\s*\\w{1,2}\\b"); <del> Regex periodExpressionsRegex = x.RegexNoCase("^\\s*("+periodExpressionsRegexPattern+")"); <del> if( <del> x.len(slash_split) > 2 <del> && <del> ( <del> all_caps <del> && <del> ( <del> wordInTheBeginningRegex.search(slash_split.get(2)) != null <del> && <del> periodExpressionsRegex.search(slash_split.get(2)) != null <del> ) <del> ) <del> ){ <del> currentTitle = x.str("/").join(slash_split.sliceTo(2)); <del> }else if( <del> x.len(slash_split) > 3 <del> && <del> wordInTheBeginningRegex.search(slash_split.get(3)) != null <del> && <del> periodExpressionsRegex.search(slash_split.get(3)) != null <del> ){ <del> currentTitle = x.str("/").join(slash_split.sliceTo(3)); <del> } <del> if(currentTitle.equals("")){ <del> return ""; <del> } <del> <del> list<String> annotations = annotate(currentTitle,annotationRegexList); <del> <del> //fix annotations for some special cases <del> Regex regex = fixRegex; <del> String regex_type = "not"; <del> for(Match match : regex.searchIter(currentTitle)){ <del> for(int i : x.count(match.start(0),match.end(0))){ <del> annotations.setAt(i).value(regex_type); <del> } <del> } <del> <del> //mark as 'pla' everything between two 'pla' or between a 'sur' and a 'pla' <del> int idx1 = 0; <del> while(idx1 < x.len(annotations)){ <del> if(annotations.get(idx1).equals("sur") || annotations.get(idx1).equals("pla")){ <del> String contaminationTag = annotations.get(idx1); <del> int idx2 = idx1+1; <del> while(idx2 < x.len(annotations)){ <del> if(annotations.get(idx2).equals("pla") || (annotations.get(idx2).equals("sur") && contaminationTag.equals("pla"))){ <del> break; <del> } <del> idx2 += 1; <del> } <del> if(idx2 != x.len(annotations)){ <del> for(int contamination_idx : x.count(idx1,idx2)){ <del> annotations.setAt(contamination_idx).value(contaminationTag); <del> } <del> } <del> idx1 = idx2; <del> } <del> idx1 += 1; <del> } <del> <del> //mark as 'pla' everything to the right of a surrounding word or a 'pla', if at least half of the title is remaining and it's of a descent quality <del> for(tuple2<Integer,String> idx__annotation : x.reversed(x.list(x.enumerate(annotations)))){ <del> int idx = idx__annotation.value0; <del> String annotation = idx__annotation.value1; <del> if( <del> x.String(annotation).in(x.listOf("sur","pla")) <del> && <del> ( <del> 
idx+1 >= x.len(annotations) <del> || <del> x.String(annotations.get(idx+1)).notIn(x.listOf("pla","sur")) <del> && <del> ( <del> (idx > x.len(annotations)/2 && get_annotation_score(annotations.sliceTo(idx)) > 0.5) <del> || <del> (idx > x.len(annotations)/3 && get_annotation_score(annotations.sliceTo(idx)) > 0.7) <del> ) <del> ) <del> ){ <del> annotations = annotations.sliceTo(idx+1).append("pla").times(x.len(annotations)-1-idx); <del> } <del> } <del> <del> //contamainate with place label <del> list<String> old_annotations = annotations.copy(); <del> for(tuple2<Integer,String> idx__annotation : x.enumerate(annotations)){ <del> int idx = idx__annotation.value0; <del> String annotation = idx__annotation.value1; <del> if(annotation.equals("pla") && x.String(currentTitle.get(idx)).notEquals(" ")){ <del> contaminate(annotations,idx,title); <del> } <del> } <del> if(get_annotation_score(annotations) < .25){ <del> annotations = old_annotations.copy(); <del> } <del> <del> //contaminate with ref_num label <del> for(tuple2<Integer,String> idx__annotation : x.enumerate(annotations)){ <del> int idx = idx__annotation.value0; <del> String annotation = idx__annotation.value1; <del> if(annotation.equals("ref") && x.String(currentTitle.get(idx)).notEquals(" ")){ <del> contaminate(annotations,idx,title,"ref",x.listOf("not"),x.str()); <del> } <del> } <del> <del> //fix for the left-most char annotation when it's a parenthesis but the interior of parentheses is "not" <del> if(x.String(currentTitle.get(0)).in(x.listOf("(","(")) && annotations.get(1).equals("not")){ <del> annotations.setAt(0).value("not"); <del> } <del> <del> //remove places and reference numbers from anywhere <del> str cleanTitle = x.str(""); <del> list<String> cleanAnnotations = x.listOf(); <del> for(tuple2<Integer,String> idx__label : x.enumerate(annotations)){ <del> int idx = idx__label.value0; <del> String label = idx__label.value1; <del> if(x.String(label).notIn(x.listOf("pla","ref")) || currentTitle.get(idx).equals(" ")){ <del> cleanAnnotations.append(label); <del> cleanTitle=cleanTitle.plus(x.str(currentTitle.get(idx))); <del> } <del> } <del> annotations = cleanAnnotations.copy(); <del> currentTitle = cleanTitle.copy(); <del> <del> //remove non-'not' from the beginning and from the end <del> int start = 0; <del> while(x.String(annotations.get(start)).notEquals("not")){ <del> start += 1; <del> if(x.len(annotations) <= start){ <del> break; <del> } <del> } <del> <del> int end = x.len(annotations)-1; <del> <del> //if 'pla' in annotations and annotations.index('pla') > len(annotations)/3: <del> <del> while(x.String(annotations.get(end)).notEquals("not")){ <del> end -= 1; <del> if(end < 0){ <del> end = 0; <del> break; <del> } <del> } <del> currentTitle = currentTitle.slice(start,end+1); <del> Regex multipleSpacesRegex = x.Regex("\\s+"); <del> currentTitle = multipleSpacesRegex.sub(" ",currentTitle); <del> Regex spaceCommaRegex = x.Regex("\\s*,+"); <del> currentTitle = spaceCommaRegex.sub(",",currentTitle); <del> Regex multiDotsRegex = x.Regex("\\.+"); <del> currentTitle = multiDotsRegex.sub(".",currentTitle); <del> Regex wordEndsByDashRegex = x.Regex("(\\w)-\\s"); <del> currentTitle = wordEndsByDashRegex.sub("$1 ",currentTitle); <del> Regex wordStartsByDashRegex = x.Regex("(?:\\s|^)-(\\w)"); <del> currentTitle = wordStartsByDashRegex.sub(" $1",currentTitle); <del> currentTitle = currentTitle.strip().strip(",").strip().strip(","); <del> Regex stringEndsByDotsRegex = x.Regex("\\.*$"); <del> currentTitle = stringEndsByDotsRegex.sub("",currentTitle); 
<del> <del> //if we cleaned the title, and it's so clean that nothing remains, we return the original title <del> if(x.len(currentTitle) == 0){ <del> currentTitle = originalTitle.strip(); <del> } <del> <del> Regex slashRegex = x.Regex("\\s*/\\s*"); <del> currentTitle = slashRegex.sub(" / ",currentTitle); <del> <del> return currentTitle.toString(); <del> <del> } <del> <del> public Cleanup() throws Exception{ <del> <del> unsupportedCitiesSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.strip,x.lower)).forElementIn("zürich","bern","schwyz","wien","vienna","aarga")); <del> <del> countriesSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.strip,x.lower)).forElementIn("switzerland","schweiz","suisse","britain","british","uk","united kingdom","ireland","scotland","scottish","irish","us","united states","america","american","australia","australian","canada","canadian","china","中国","chinese","germany","deutschlandweit","deutschland","dtl","dtl. weit.","bundesrepublik deutschland","deutschland","japan","日本","india","indian","france","italy","italia","mexico","mexican","netherland","holland","belgium","belgië","belgique","belgien","portugal","república portuguesa","argentina","república argentina","sweden","sverige","russia","russian federation","rf","turkey","arab emirates","united arab emirates","emirates","uae","brazil","brazilian","saudi arabia","benelux","netherlands","nederland","рф","снг","россия","российская федерация","türkiye","السعودية","الإمارات العربية المتحدة")); <del> <del> //these locations cant be removed as other cities even if such cities exist: <del> geoExceptionsSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.strip,x.lower)).forElementIn("sta","it","cns","lot","centrale","marché","glacière","plaisance","art","la","tri","écoles","nationale","poissonniers","poissonnière","car","sur","se","foreman","mason","secretary","mobile","lead","cv","home","house","java","driver","low","field","branch","english","golf","campus","portage","trainer","teller","commerce")); <del> <del> HappyFile f; <del> <del> for (String place : x.open(ref_file_dir+"/cities_en_and_original.txt","r","utf-8")){ <del> citiesSet.absorb(x.set(x.<String>element().transformWith(x.chainOf(x.lower,x.stripAccents)).forElementIn(x.String(place).split(",")).ifNotElement(x.empty))); <del> } <del> citiesSet = citiesSet.difference(geoExceptionsSet); <del> <del> f = x.open(ref_file_dir+"/common_city_parts.txt","r","utf-8"); <del> cityPartsSet = x.set(x.<String>element().transformWith(x.strip).forElementIn(f)); <del> <del> set<String> addToSet; <del> for (String place : x.open(ref_file_dir+"/states_en_and_original.txt","r","utf-8")){ <del> addToSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(place).split(",")).ifNotElement(x.empty)); <del> statesSet.absorb(addToSet); <del> } <del> statesSet = statesSet.difference(geoExceptionsSet); <del> <del> for(String place : x.open(ref_file_dir+"/metro_en_and_original.txt","r","utf-8")){ <del> addToSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(place).split(",")).ifNotElement(x.empty)); <del> metroSet.absorb(addToSet); <del> } <del> metroSet.reject(geoExceptionsSet); <del> <del> list<String> langList = x.listOf(); <del> for(tuple2<Integer,String> idx__words : x.enumerate(x.open(ref_file_dir+"/surrounding_words.txt","r","utf-8"))){ <del> int idx = idx__words.value0; <del> String words = idx__words.value1; <del> 
if(idx == 0){ <del> langList = x.list(x.<String>element().transformWith(x.upper).forElementIn(x.String(words).split("\t"))); <del> for (String lang : langList){ <del> surroundingWordsDict.setAt(lang.toUpperCase()).value(x.<String>setOf()); <del> } <del> }else{ <del> for (tuple2<Integer,String> idx_word : x.enumerate(x.String(words).split("\t"))){ <del> idx = idx_word.value0; <del> String word = idx_word.value1; <del> if (x.String(word.trim()).notEquals("")){ <del> String old_key = x.list(langList).get(idx); <del> set<String> old_value = surroundingWordsDict.get(langList.get(idx)); <del> set<String> new_value = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(word).split(","))); <del> surroundingWordsDict.setAt(old_key).value(old_value.union(new_value)); <del> } <del> } <del> } <del> } <del> <del> for (String word : x.open(ref_file_dir+"/drop_in_the_beginning_words.txt","r","utf-8")){ <del> addToSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(word).split(",")).ifNotElement(x.empty)); <del> beginningWordsSet.absorb(addToSet); <del> } <del> <del> for (String word : x.open(ref_file_dir+"/stop_words.txt","r","utf-8")){ <del> addToSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(word).split(",")).ifNotElement(x.empty)); <del> stopWordsSet.absorb(addToSet); <del> } <del> <del> for (String word : x.open(ref_file_dir+"/months_of_the_year.txt","r","utf-8")){ <del> addToSet = x.set(x.<String>element().transformWith(x.chainOf(x.stripAccents,x.lower)).forElementIn(x.String(word).split(",")).ifNotElement(x.empty)); <del> monthsSet.absorb(addToSet); <del> } <del> <del> f = x.open(ref_file_dir+"/top_advertisers.txt","r","utf-8"); <del> advertisersSet = x.set(x.<String>element().transformWith(x.chainOf(x.lower,x.stripAccents)).forElementIn(f)); <del> <del> periodExpressionsList = x.listOf(x.listOf("AR","شهر"),x.listOf("ZH","月"),x.listOf("NL","maand"),x.listOf("FR","mois"),x.listOf("DE","monat"),x.listOf("DE","mon"),x.listOf("DE","mtl"),x.listOf("IT","mese"),x.listOf("JA","月"),x.listOf("PT","mês"),x.listOf("RU","месяц"),x.listOf("RU","мес"),x.listOf("ES","mes"),x.listOf("SE","månad"),x.listOf("TR","ay")); <del> periodExpressionsRegexPattern = x.String("|").join(x.set(x.element(1).forElementIn(periodExpressionsList))); <del> <del> for(String lang : surroundingWordsDict){ <del> surroundingWordsRegexDict.setAt(lang).value(x.RegexNoCase("(?:\\W|_)?\\b(?:"+x.str("|").join(x.sorted(surroundingWordsDict.get(lang), x.len, true))+")\\b(?:\\W|_)?")); <del> } <del> <del> beginningWordsRegex = x.RegexNoCase("^.*\\b(?:"+x.str("|").join(x.sorted(beginningWordsSet,x.len,true))+")"); <del> <del> stopWordsRegex = x.RegexNoCase("\\d+\\sans(?: ou plus)?\\b|\\d+\\sye?a?rs?(?: old)?"+"|"+ <del> "\\d+\\s?(h|ч)(?:ours?|eures?|асо?в?а?я?)?(?:\\swe?e?k|\\ssem(?:aine)?)?\\b"+"|"+ <del> "(?:\\W|_)?\\b(?:"+x.str("|").join(x.sorted(stopWordsSet,x.len,true))+")\\b(?:\\W|_)?|\\[[^\\]]+\\]|\\(\\w[/\\\\]\\w\\)"); <del> <del> placesRegex = x.RegexNoCase("(?:\\W|_)?\\b(?:" + x.String("|").join(x.list(x.<String>element().transformWith(x.chainOf(x.escape,fGeneralizeRegex)).forElementIn(x.sorted(x.union(citiesSet,statesSet,metroSet,countriesSet,unsupportedCitiesSet),x.len,true)))) + "|"+ x.str("|").join(cityPartsSet) + ")\\b(?:\\W|_)?"); <del> <del> placesInBeginningRegex = x.RegexNoCase("^(?:\\W|_)*\\b(?:" + 
x.str("|").join(x.list(x.<String>element().transformWith(x.chainOf(x.escape,fGeneralizeRegex)).forElementIn(x.sorted(x.union(citiesSet,statesSet,metroSet,countriesSet,unsupportedCitiesSet),x.len,true)))) + "|" + x.str("|").join(cityPartsSet) + ")(-\\w+)?\\b(?:\\W|_)?"); <del> <del> advertisersRegex = x.RegexNoCase("(?:\\W|_)?\\b(?:"+x.str("|").join(x.sorted(advertisersSet,x.len,true))+")\\b(?:\\W|_)?(?:\\s?(?:group|groupe|express|bank|holdings?|company|corp|inc|corporation|incorporated|limited|& ?co)\\b){0,2}"); <del> <del> specialCharsRegex = x.Regex("(?:\\W|_)"); <del> <del> shortWordsRegex = x.RegexNoCase("(?<![/\\()])\\b[a-zA-Zа-яА-Я0-9']{1,2}\\b"); <del> <del> oneWordsRegex = x.RegexNoCase("\\b[a-zA-Zа-яА-Я6-9']{1,1}\\b(?!-)"); <del> <del> referenceNumberAndDatesRegex = x.RegexNoCase("(?<=[\\s)])[\\#№][^\\s]+\\b|"+"\\b(?:20\\d\\d\\b)?\\W*(?:\\b\\d{1,2}\\b)?\\W*(?:\\b20\\d\\d\\b)?\\W*(?:\\b\\d{1,2})?\\b("+x.str("|").join(monthsSet)+")\\b\\W*(?:\\d{1,2}\\b)?\\W*(?:20\\d\\d\\b)?\\W*(?:\\d{1,2}\\b)?\\W*(?:20\\d\\d\\b)?|"+"\\b\\d{1,2}\\.\\d{1,2}\\.\\d{2,4}\\b|\\b\\d{1,2}[\\\\/]\\d{1,2}(?:[\\\\/]\\d{2,4})?\\b|\\b([a-z]+[0-9]|[0-9]+[a-z])(?:[a-z0-9]{2,}|-[0-9][a-z0-9-]{2,})\\b"); <del> <del> betweenParenthesesRegex = x.RegexNoCase("\\s[((][^))]+[\\))]|^[((][^))]+[))][\\s,]|\\s-\\w+-\\s|\\s-\\w+-$|[((][^))]+\\s[^))]+[\\))]"); <del> <del> fixRegex = x.RegexNoCase("\\bjr\\b|\\bTI\\b|\\bSOLNA 164\\b|\\b\\d{1,3}\\s?%|\\bnationale?\\b|\\bblanche\\b|\\b[123](?:st|rd|nd)\\b|\\b[123][eè][rm]e\\b|\\bbras de\\b|\\b(?:dry|mini)[\\s-]van\\b|\\b[\\d.,-]+\\s?T?(?:EUR|CHF)\\b|\\bsharepoint\\b|\\bax\\b|\\bit\\b|\\.net\\b|\\bрп\\b|\\bофис-|\\b(?:d'|de\\sl'|des\\s)usines?\\b|\\brh\\b|\\bde secteur\\b|\\bd'[ée]tat\\b|\\bit\\b|\\bzone d'attraction\\b|\\bd'établissements?\\b|\\b(?:à la|de) direction\\b|\\w{3,}\\(-?\\w{1,4}\\)|\\b\\w&\\w\\b|(?<!\\()\\b(a\\b|b\\b|c(?:(?:\\+\\+|\\#)(?=$|[^+])|\\b)|d\\b|e\\b|r\\b|(?<=гории |ласса |гория |класс |.кат\\. |. кат |. кат\\.|..\\bкл\\. |...\\bкл |...\\bкл\\.)а\\b|(?<=гории |ласса |гория |класс |.кат\\. |. кат |. кат\\.|..\\bкл\\. |...\\bкл |...\\bкл\\.)в\\b|с(?:\\+\\+|\\#)(?=$|[^\\+])|(?<=гории |ласса |гория |класс |.кат\\. |. кат |. кат\\.|..\\bкл\\. |...\\bкл |...\\bкл\\.)с\\b|(?<=гории |ласса |гория |класс |.кат\\. |. кат |. кат\\.|..\\bкл\\. |...\\bкл |...\\bкл\\.)д\\b|(?<=гории |ласса |гория |класс |.кат\\. |. кат |. кат\\.|..\\bкл\\. |...\\bкл |...\\bкл\\.)е\\b)|[\\\\/]in\\b|\\boffice 365\\b|\\b(?:en|du|de|des|\\w+rice|\\w+eure?|\\w+euse|\\w+ien|\\w+ienne|\\w+ste|\\w+tre) b.timents?\\b|\\b(?:en|du|de|des|sur) (centres?|directions?|station)\\b|\\bstation[\\s-]service\\b|\\b(?:en|de) bureau\\b|\\bbureau des\\b|\\bbureau d'\\w+s\\b|\\b(?:des?(?: la)?|en) recherches?\\b|\\bcentre(?: d'?e?s?)?\\s?(?:appels?|loisirs?|vacances?|sports?|services?)\\b|\\bbureau (?:\\w+(?:que|lle|al))\\b|\\b(i|ii|iii|iv|v|vi)\\b|\\bd. 
d.partement\\b|\\bthé\\b|\\bde ville\\b|\\bj2ee\\b|\\bétat des\\b"); <del> <del> forRegex = x.RegexNoCase("\\b(voor|pour|für|per|per(\\sil|\\sla)?|ための|para|для|в|para|para(\\sla|\\slos)?|för|için)\\b"); <del> <del> replaceRulesDict = x.dictOf( <del> x.tupleOf("(?<=\\w{5,5})a\\s?\\(o\\)","o"), <del> x.tupleOf("kauffrau\\s?/\\s?-?kaufmann\\b","kaufmann"), <del> x.tupleOf("frau\\s?/\\s?-?mann\\b","mann"), <del> x.tupleOf("\\bzur/zum|zum/zur\\b","zur"), <del> x.tupleOf("\\band/or|or/and\\b","and"), <del> x.tupleOf("\\bet/ou|ou/et\\b","et"), <del> x.tupleOf("\\be/ou|ou/e\\b","e") <del> ); <del> replaceRulesRegex = x.RegexNoCase(replaceRulesDict); <del> } <del> <del> /** <del> * @param args <del> */ <del> public static void main(String[] args) throws Exception { <del> try{ <del> Cleanup cleaner = new Cleanup(); <del> String title = "‹„La Défense - Chef de Projet H/F"; <del> x.print(cleaner.cleanTitle(title)); <del> }catch(Exception e){ <del> throw e; <del> } <del> } <del>}
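The deleted Cleanup class above is built around one core idea: annotate every character of a job title with a label (place, stop word, reference number, and so on) by running a battery of regexes in order, then keep or drop candidate spans based on the share of "neutral" characters. Below is a deliberately simplified, plain-java.util.regex sketch of that per-character annotation and scoring; the class name, labels, and patterns here are illustrative, not taken from the original xpresso-based code.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class TitleAnnotatorSketch {

    // Label every character of the title; later patterns overwrite earlier ones,
    // mirroring how Cleanup applies its regex list in order.
    static String[] annotate(String title, Map<String, Pattern> patternsByLabel) {
        String[] labels = new String[title.length()];
        Arrays.fill(labels, "not");
        for (Map.Entry<String, Pattern> e : patternsByLabel.entrySet()) {
            Matcher m = e.getValue().matcher(title);
            while (m.find()) {
                for (int i = m.start(); i < m.end(); i++) {
                    labels[i] = e.getKey();
                }
            }
        }
        return labels;
    }

    // Fraction of characters still carrying harmless labels; Cleanup keeps a
    // candidate span only when this score clears a threshold (e.g. 0.65).
    static double annotationScore(String[] labels) {
        if (labels.length == 0) {
            return 1.0;
        }
        long kept = Arrays.stream(labels)
                .filter(l -> l.equals("not") || l.equals("sho") || l.equals("sto"))
                .count();
        return (double) kept / labels.length;
    }

    public static void main(String[] args) {
        Map<String, Pattern> patterns = new LinkedHashMap<>();
        patterns.put("pla", Pattern.compile("\\bParis\\b", Pattern.CASE_INSENSITIVE)); // illustrative place
        patterns.put("ref", Pattern.compile("#\\S+"));                                 // illustrative reference number
        String title = "Chef de Projet - Paris #12345";
        System.out.printf("score=%.2f%n", annotationScore(annotate(title, patterns)));
    }
}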
JavaScript
mit
f97b7d35527cdb30bfb96f76f18ee09d8ff05167
0
dabapps/betta,dabapps/betta,dabapps/betta
'use strict';

var React = require('react');

var ModalStore = require('../stores/modal-store');
var ExportModal = require('./modal/export-modal');
var ImportModal = require('./modal/import-modal');
var SearchStore = require('../stores/search-store');
var VariableStore = require('../stores/variable-store');

var SidebarMenu = React.createClass({
  preview: function () {
    VariableStore.action('requestPreview');
  },
  export: function () {
    ModalStore.action('open', ExportModal);
  },
  import: function () {
    ModalStore.action('open', ImportModal);
  },
  toggleDropdownFile: function () {
    this.setState({
      dropdownFileActive: !this.state.dropdownFileActive
    });
  },
  toggleDropdownSizes: function () {
    this.setState({
      dropdownSizesActive: !this.state.dropdownSizesActive
    });
  },
  setSearchTerm: function (event) {
    SearchStore.action('setSearchTerm', event.target.value);
  },
  clearSearchTerm: function () {
    SearchStore.action('setSearchTerm', undefined);
  },
  getInitialState: function () {
    return {
      dropdownFileActive: false,
      dropdownSizesActive: false
    };
  },
  render: function () {
    var self = this;
    var dropdownFile, dropdownSizes;

    var frameSizes = this.props.frameSizes.map(function (size) {
      return (
        <li key={size.name}>
          <a onClick={self.props.setFrameSize.bind(null, size)}>{size.name}</a>
        </li>
      );
    });

    if (this.state.dropdownSizesActive) {
      dropdownSizes = (
        <ul className='dropdown-menu'>
          {frameSizes}
        </ul>
      );
    }

    if (this.state.dropdownFileActive) {
      dropdownFile = (
        <ul className='dropdown-menu'>
          <li><a onClick={self.import}>Import</a></li>
          <li><a onClick={self.export}>Export</a></li>
          <li className='divider' />
          <li><a onClick={self.props.reset}>Reset</a></li>
        </ul>
      );
    }

    return (
      <div className='sidebar-menu'>
        <div className='form-group'>
          <div
            className={'dropdown pull-left' + (self.state.dropdownSizesActive ? ' open' : '')}
            onClick={self.toggleDropdownSizes}>
            <button className='btn btn-small btn-default'>
              {self.props.currentFrameSize.name} <span className='caret' />
            </button>
            {dropdownSizes}
          </div>
          <button className='btn btn-small btn-default' onClick={self.preview}>Preview</button>
          <div
            className={'dropdown pull-right' + (self.state.dropdownFileActive ? ' open' : '')}
            onClick={self.toggleDropdownFile}>
            <button className='btn btn-small btn-default'>File <span className='caret' /></button>
            {dropdownFile}
          </div>
        </div>
        <div className='form-group'>
          <div className='input-wrapper search-wrapper'>
            <input
              type='text'
              className='form-control'
              placeholder='Search variables'
              onChange={this.setSearchTerm}
              value={this.props.searchTerm}>
            </input>
            <span className='glyphicon glyphicon-remove' onClick={this.clearSearchTerm} />
          </div>
        </div>
      </div>
    );
  }
});

module.exports = SidebarMenu;
static/js/components/sidebar-menu.js
'use strict';

var React = require('react');

var ModalStore = require('../stores/modal-store');
var ExportModal = require('./modal/export-modal');
var ImportModal = require('./modal/import-modal');
var SearchStore = require('../stores/search-store');
var VariableStore = require('../stores/variable-store');

var SidebarMenu = React.createClass({
  preview: function () {
    VariableStore.action('requestPreview');
  },
  export: function () {
    ModalStore.action('open', ExportModal);
  },
  import: function () {
    ModalStore.action('open', ImportModal);
  },
  toggleDropdownFile: function () {
    this.setState({
      dropdownFileActive: !this.state.dropdownFileActive
    });
  },
  toggleDropdownSizes: function () {
    this.setState({
      dropdownSizesActive: !this.state.dropdownSizesActive
    });
  },
  setSearchTerm: function (event) {
    SearchStore.action('setSearchTerm', event.target.value);
  },
  clearSearchTerm: function () {
    SearchStore.action('setSearchTerm', undefined);
  },
  getInitialState: function () {
    return {
      dropdownFileActive: false,
      dropdownSizesActive: false
    };
  },
  render: function () {
    var self = this;
    var dropdownFile, dropdownSizes;

    var frameSizes = this.props.frameSizes.map(function (size) {
      return (
        <li key={size.name}>
          <a onClick={self.props.setFrameSize.bind(null, size)}>{size.name}</a>
        </li>
      );
    });

    if (this.state.dropdownSizesActive) {
      dropdownSizes = (
        <ul className='dropdown-menu'>
          {frameSizes}
        </ul>
      );
    }

    if (this.state.dropdownFileActive) {
      dropdownFile = (
        <ul className='dropdown-menu'>
          <li><a onClick={self.import}>Import</a></li>
          <li><a onClick={self.export}>Export</a></li>
          <li className='divider' />
          <li><a onClick={self.props.reset}>Reset</a></li>
        </ul>
      );
    }

    return (
      <div className='sidebar-menu'>
        <div className='form-group'>
          <div
            className={'dropdown pull-left' + (self.state.dropdownSizesActive ? ' open' : '')}
            onClick={self.toggleDropdownSizes}>
            <button className='btn btn-small btn-default'>{self.props.currentFrameSize.name} <span className='caret' /></button>
            {dropdownSizes}
          </div>
          <button className='btn btn-small btn-default' onClick={self.preview}>Preview</button>
          <div
            className={'dropdown pull-right' + (self.state.dropdownFileActive ? ' open' : '')}
            onClick={self.toggleDropdownFile}>
            <button className='btn btn-small btn-default'>File <span className='caret' /></button>
            {dropdownFile}
          </div>
        </div>
        <div className='form-group'>
          <div className='input-wrapper search-wrapper'>
            <input
              type='text'
              className='form-control'
              placeholder='Search variables'
              onChange={this.setSearchTerm}
              value={this.props.searchTerm}>
            </input>
            <span className='glyphicon glyphicon-remove' onClick={this.clearSearchTerm} />
          </div>
        </div>
      </div>
    );
  }
});

module.exports = SidebarMenu;
Fix tests
static/js/components/sidebar-menu.js
Fix tests
<ide><path>static/js/components/sidebar-menu.js
<ide>           <div
<ide>             className={'dropdown pull-left' + (self.state.dropdownSizesActive ? ' open' : '')}
<ide>             onClick={self.toggleDropdownSizes}>
<del>             <button className='btn btn-small btn-default'>{self.props.currentFrameSize.name} <span className='caret' /></button>
<add>             <button className='btn btn-small btn-default'>
<add>               {self.props.currentFrameSize.name} <span className='caret' />
<add>             </button>
<ide>             {dropdownSizes}
<ide>           </div>
<ide>
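For context on the component in this record: the following is a hypothetical mounting sketch, not part of the commit, showing the prop contract SidebarMenu reads (frameSizes, currentFrameSize, setFrameSize, reset, searchTerm). The mount point id and the handler bodies are invented for illustration.

'use strict';

var React = require('react');
var SidebarMenu = require('./components/sidebar-menu');

// Each frame size only needs a `name` as far as SidebarMenu is concerned.
var sizes = [
  { name: 'Desktop' },
  { name: 'Tablet' }
];

React.render(
  <SidebarMenu
    frameSizes={sizes}
    currentFrameSize={sizes[0]}
    setFrameSize={function (size) { console.log('frame size:', size.name); }}
    reset={function () { console.log('reset'); }}
    searchTerm={''} />,
  document.getElementById('sidebar')   // hypothetical mount point
);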
JavaScript
mit
error: pathspec 'test/test_chain.js' did not match any file(s) known to git
f8358e05502a06e53b971d9892549116a7a71ce3
1
pipesjs/core
import assert from "assert"; import chain from "../src/chain"; import connect from "../src/connect"; import pipe from "../src/pipe"; import { createTestWritable, createTestReadable, broker } from "./utils"; suite("chain"); test("check chaining", done => { let readable, writable, transform, testChain; // Create test streams readable = createTestReadable( [1,2,3] ); transform = pipe( k=>k ); writable = createTestWritable( assert ); // End case broker.on(writable.signals.close, done); // Connect the streams assert.doesNotThrow( () => { testChain = chain( new transform, new transform, new transform ); connect( readable, testChain, writable ); }); });
test/test_chain.js
Added test for chain

Signed-off-by: Diwank Singh <[email protected]>
test/test_chain.js
Added test for chain
<ide><path>test/test_chain.js
<add>import assert from "assert";
<add>import chain from "../src/chain";
<add>import connect from "../src/connect";
<add>import pipe from "../src/pipe";
<add>import {
<add>  createTestWritable,
<add>  createTestReadable,
<add>  broker
<add>} from "./utils";
<add>
<add>suite("chain");
<add>
<add>test("check chaining", done => {
<add>  let readable, writable, transform, testChain;
<add>
<add>  // Create test streams
<add>  readable = createTestReadable( [1,2,3] );
<add>  transform = pipe( k=>k );
<add>  writable = createTestWritable( assert );
<add>
<add>  // End case
<add>  broker.on(writable.signals.close, done);
<add>
<add>  // Connect the streams
<add>  assert.doesNotThrow( () => {
<add>    testChain = chain(
<add>      new transform,
<add>      new transform,
<add>      new transform
<add>    );
<add>
<add>    connect( readable, testChain, writable );
<add>  });
<add>});
<add>
<add>
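As a companion to the record above: a hedged sketch, not part of the commit, reusing the same test utilities to show what chain() composes. Per the test's own usage, pipe() wraps a function into a constructable transform, chain() strings the instances together, and connect() wires a readable, the chain, and a writable. The doubling and incrementing transforms are invented for illustration.

import assert from "assert";
import chain from "../src/chain";
import connect from "../src/connect";
import pipe from "../src/pipe";
import { createTestWritable, createTestReadable, broker } from "./utils";

suite("chain sketch");

test("chained transforms run in sequence", done => {
  // Hypothetical transforms: values pass through double, then increment
  let readable = createTestReadable([1, 2, 3]);
  let double = pipe(k => k * 2);
  let increment = pipe(k => k + 1);
  let writable = createTestWritable(assert);

  // Finish the test once the writable end closes, as in the test above
  broker.on(writable.signals.close, done);

  connect(readable, chain(new double, new increment), writable);
});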
Java
agpl-3.0
f3e35fe5ba2229afa4005d4e697ae17c4d740a30
0
CompilerWorks/spliceengine,splicemachine/spliceengine
/* Derby - Class org.apache.derby.impl.sql.compile.AlterTableNode Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.splicemachine.db.impl.sql.compile; import com.splicemachine.db.iapi.reference.SQLState; import com.splicemachine.db.iapi.reference.Limits; import com.splicemachine.db.iapi.services.io.FormatableBitSet; import com.splicemachine.db.iapi.services.sanity.SanityManager; import com.splicemachine.db.iapi.error.StandardException; import com.splicemachine.db.iapi.sql.compile.Visitor; import com.splicemachine.db.iapi.sql.dictionary.ConglomerateDescriptor; import com.splicemachine.db.iapi.sql.dictionary.ConstraintDescriptorList; import com.splicemachine.db.iapi.sql.dictionary.DataDictionary; import com.splicemachine.db.iapi.sql.dictionary.SchemaDescriptor; import com.splicemachine.db.iapi.sql.dictionary.TableDescriptor; import com.splicemachine.db.iapi.sql.execute.ConstantAction; import com.splicemachine.db.impl.sql.execute.ColumnInfo; /** * A AlterTableNode represents a DDL statement that alters a table. * It contains the name of the object to be created. * */ public class AlterTableNode extends DDLStatementNode { // The alter table action public TableElementList tableElementList = null; public char lockGranularity; /** * updateStatistics will indicate that we are here for updating the * statistics. It could be statistics of just one index or all the * indexes on a given table. */ private boolean updateStatistics = false; /** * The flag updateStatisticsAll will tell if we are going to update the * statistics of all indexes or just one index on a table. */ private boolean updateStatisticsAll = false; /** * dropStatistics will indicate that we are here for dropping the * statistics. It could be statistics of just one index or all the * indexes on a given table. */ private boolean dropStatistics; /** * The flag dropStatisticsAll will tell if we are going to drop the * statistics of all indexes or just one index on a table. */ private boolean dropStatisticsAll; /** * If statistic is getting updated/dropped for just one index, then * indexNameForStatistics will tell the name of the specific index * whose statistics need to be updated/dropped. 
*/ private String indexNameForStatistics; public boolean compressTable = false; public boolean sequential = false; //The following three (purge, defragment and truncateEndOfTable) apply for //inplace compress public boolean purge = false; public boolean defragment = false; public boolean truncateEndOfTable = false; public int behavior; // currently for drop column public TableDescriptor baseTable; private int changeType = UNKNOWN_TYPE; private boolean truncateTable = false; // constant action arguments protected SchemaDescriptor schemaDescriptor = null; /** * Initializer for a TRUNCATE TABLE * * @param objectName The name of the table being truncated * @exception StandardException Thrown on error */ public void init(Object objectName) throws StandardException { initAndCheck(objectName); /* For now, this init() only called for truncate table */ truncateTable = true; schemaDescriptor = getSchemaDescriptor(); } /** * Initializer for a AlterTableNode for COMPRESS using temporary tables * rather than inplace compress * * @param objectName The name of the table being altered * @param sequential Whether or not the COMPRESS is SEQUENTIAL * * @exception StandardException Thrown on error */ public void init(Object objectName, Object sequential) throws StandardException { initAndCheck(objectName); this.sequential = (Boolean) sequential; /* For now, this init() only called for compress table */ compressTable = true; schemaDescriptor = getSchemaDescriptor(); } /** * Initializer for a AlterTableNode for INPLACE COMPRESS * * @param objectName The name of the table being altered * @param purge PURGE during INPLACE COMPRESS? * @param defragment DEFRAGMENT during INPLACE COMPRESS? * @param truncateEndOfTable TRUNCATE END during INPLACE COMPRESS? * * @exception StandardException Thrown on error */ public void init(Object objectName, Object purge, Object defragment, Object truncateEndOfTable) throws StandardException { initAndCheck(objectName); this.purge = (Boolean) purge; this.defragment = (Boolean) defragment; this.truncateEndOfTable = (Boolean) truncateEndOfTable; compressTable = true; schemaDescriptor = getSchemaDescriptor(true, false); } /** * Initializer for a AlterTableNode. The parameter values have different * meanings based on what kind of ALTER TABLE is taking place. * * @param objectName The name of the table being altered * @param changeType ADD_TYPE or DROP_TYPE or UPDATE_STATISTICS or * or DROP_STATISTICS * @param param1 For ADD_TYPE or DROP_TYPE, param1 gives the * elements impacted by ALTER TABLE. * For UPDATE_STATISTICS or or DROP_STATISTICS, * param1 is boolean - true means update or drop * the statistics of all the indexes on the table. * False means, update or drop the statistics of * only the index name provided by next parameter. * @param param2 For ADD_TYPE or DROP_TYPE, param2 gives the * new lock granularity, if any * For UPDATE_STATISTICS or DROP_STATISTICS, * param2 can be the name of the specific index * whose statistics will be dropped/updated. This * param is used only if param1 is set to false * @param param3 For DROP_TYPE, param3 can indicate if the drop * column is CASCADE or RESTRICTED. This param is * ignored for all the other changeType. 
* * @exception StandardException Thrown on error */ public void init( Object objectName, Object changeType, Object param1, Object param2, Object param3 ) throws StandardException { initAndCheck(objectName); int[] ct = (int[]) changeType; this.changeType = ct[0]; switch ( this.changeType ) { case ADD_TYPE: case DROP_TYPE: case MODIFY_TYPE: case LOCKING_TYPE: this.tableElementList = (TableElementList) param1; this.lockGranularity = (Character) param2; int[] bh = (int[]) param3; this.behavior = bh[0]; break; case UPDATE_STATISTICS: this.updateStatisticsAll = (Boolean) param1; this.indexNameForStatistics = (String)param2; updateStatistics = true; break; case DROP_STATISTICS: this.dropStatisticsAll = (Boolean) param1; this.indexNameForStatistics = (String)param2; dropStatistics = true; break; default: throw StandardException.newException(SQLState.NOT_IMPLEMENTED); } schemaDescriptor = getSchemaDescriptor(); } /** * Convert this object to a String. See comments in QueryTreeNode.java * for how this should be done for tree printing. * * @return This object as a String */ public String toString() { if (SanityManager.DEBUG) { return super.toString() + "objectName: " + getObjectName() + "\n" + "lockGranularity: " + lockGranularity + "\n" + "compressTable: " + compressTable + "\n" + "sequential: " + sequential + "\n" + "truncateTable: " + truncateTable + "\n" + "purge: " + purge + "\n" + "defragment: " + defragment + "\n" + "truncateEndOfTable: " + truncateEndOfTable + "\n" + "updateStatistics: " + updateStatistics + "\n" + "updateStatisticsAll: " + updateStatisticsAll + "\n" + "dropStatistics: " + dropStatistics + "\n" + "dropStatisticsAll: " + dropStatisticsAll + "\n" + "indexNameForStatistics: " + indexNameForStatistics + "\n"; } else { return ""; } } /** * Prints the sub-nodes of this object. See QueryTreeNode.java for * how tree printing is supposed to work. * @param depth The depth to indent the sub-nodes */ public void printSubNodes(int depth) { if (SanityManager.DEBUG) { if (tableElementList != null) { printLabel(depth, "tableElementList: "); tableElementList.treePrint(depth + 1); } } } public String statementToString() { if(truncateTable) return "TRUNCATE TABLE"; else return "ALTER TABLE"; } public int getChangeType() { return changeType; } // We inherit the generate() method from DDLStatementNode. /** * Bind this AlterTableNode. This means doing any static error * checking that can be done before actually creating the table. * For example, verifying that the user is not trying to add a * non-nullable column. * * * @exception StandardException Thrown on error */ public void bindStatement() throws StandardException { DataDictionary dd = getDataDictionary(); int numCheckConstraints = 0; int numReferenceConstraints = 0; int numGenerationClauses = 0; int numBackingIndexes = 0; /* ** Get the table descriptor. Checks the schema ** and the table. */ if(compressTable && (purge || defragment || truncateEndOfTable)) { //We are dealing with inplace compress here and inplace compress is //allowed on system schemas. In order to support inplace compress //on user as well as system tables, we need to use special //getTableDescriptor(boolean) call to get TableDescriptor. This //getTableDescriptor(boolean) allows getting TableDescriptor for //system tables without throwing an exception. 
baseTable = getTableDescriptor(false); } else baseTable = getTableDescriptor(); //throw an exception if user is attempting to alter a temporary table if (baseTable.getTableType() == TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE) { throw StandardException.newException(SQLState.LANG_NOT_ALLOWED_FOR_DECLARED_GLOBAL_TEMP_TABLE); } /* Statement is dependent on the TableDescriptor */ getCompilerContext().createDependency(baseTable); //If we are dealing with add column character type, then set that //column's collation type to be the collation type of the schema. //The collation derivation of such a column would be "implicit". if (changeType == ADD_TYPE) {//the action is of type add. if (tableElementList != null) {//check if is is add column for (int i=0; i<tableElementList.size();i++) { if (tableElementList.elementAt(i) instanceof ColumnDefinitionNode) { ColumnDefinitionNode cdn = (ColumnDefinitionNode) tableElementList.elementAt(i); //check if we are dealing with add character column // // For generated columns which omit an explicit // datatype, we have to defer this work until we bind // the generation clause // if ( cdn.hasGenerationClause() && ( cdn.getType() == null ) ) { continue; } if ( cdn.getType() == null ) { throw StandardException.newException ( SQLState.LANG_NEEDS_DATATYPE, cdn.getColumnName() ); } if (cdn.getType().getTypeId().isStringTypeId()) { //we found what we are looking for. Set the //collation type of this column to be the same as //schema descriptor's collation. Set the collation //derivation as implicit cdn.setCollationType(schemaDescriptor.getCollationType()); } } } } } if (tableElementList != null) { tableElementList.validate(this, dd, baseTable); /* Only 1012 columns allowed per table */ if ((tableElementList.countNumberOfColumns() + baseTable.getNumberOfColumns()) > Limits.DB2_MAX_COLUMNS_IN_TABLE) { throw StandardException.newException(SQLState.LANG_TOO_MANY_COLUMNS_IN_TABLE_OR_VIEW, String.valueOf(tableElementList.countNumberOfColumns() + baseTable.getNumberOfColumns()), getRelativeName(), String.valueOf(Limits.DB2_MAX_COLUMNS_IN_TABLE)); } /* Number of backing indexes in the alter table statment */ numBackingIndexes = tableElementList.countConstraints(DataDictionary.PRIMARYKEY_CONSTRAINT) + tableElementList.countConstraints(DataDictionary.FOREIGNKEY_CONSTRAINT) + tableElementList.countConstraints(DataDictionary.UNIQUE_CONSTRAINT); /* Check the validity of all check constraints */ numCheckConstraints = tableElementList.countConstraints( DataDictionary.CHECK_CONSTRAINT); numReferenceConstraints = tableElementList.countConstraints( DataDictionary.FOREIGNKEY_CONSTRAINT); numGenerationClauses = tableElementList.countGenerationClauses(); } //If the sum of backing indexes for constraints in alter table statement and total number of indexes on the table //so far is more than 32767, then we need to throw an exception if ((numBackingIndexes + baseTable.getTotalNumberOfIndexes()) > Limits.DB2_MAX_INDEXES_ON_TABLE) { throw StandardException.newException(SQLState.LANG_TOO_MANY_INDEXES_ON_TABLE, String.valueOf(numBackingIndexes + baseTable.getTotalNumberOfIndexes()), getRelativeName(), String.valueOf(Limits.DB2_MAX_INDEXES_ON_TABLE)); } if ( (numCheckConstraints > 0) || (numGenerationClauses > 0) || (numReferenceConstraints > 0)) { /* In order to check the validity of the check constraints and * generation clauses * we must goober up a FromList containing a single table, * the table being alter, with an RCL containing the existing and * new columns and their types. 
This will allow us to * bind the constraint definition trees against that * FromList. When doing this, we verify that there are * no nodes which can return non-deterministic results. */ FromList fromList = makeFromList( dd, tableElementList, false ); FormatableBitSet generatedColumns = baseTable.makeColumnMap( baseTable.getGeneratedColumns() ); /* Now that we've finally goobered stuff up, bind and validate * the check constraints and generation clauses. */ if (numGenerationClauses > 0) { tableElementList.bindAndValidateGenerationClauses( schemaDescriptor, fromList, generatedColumns, baseTable ); } if (numCheckConstraints > 0) { tableElementList.bindAndValidateCheckConstraints(fromList); } if ( numReferenceConstraints > 0) { tableElementList.validateForeignKeysOnGenerationClauses( fromList, generatedColumns ); } } // must be done after resolving the datatypes of the generation clauses if (tableElementList != null) { tableElementList.validatePrimaryKeyNullability(); } //Check if we are in alter table to update/drop the statistics. If yes, // then check if we are here to update/drop the statistics of a specific // index. If yes, then verify that the indexname provided is a valid one. if ((updateStatistics && !updateStatisticsAll) || (dropStatistics && !dropStatisticsAll)) { ConglomerateDescriptor cd = null; if (schemaDescriptor.getUUID() != null) cd = dd.getConglomerateDescriptor(indexNameForStatistics, schemaDescriptor, false); if (cd == null) { throw StandardException.newException( SQLState.LANG_INDEX_NOT_FOUND, schemaDescriptor.getSchemaName() + "." + indexNameForStatistics); } } /* Unlike most other DDL, we will make this ALTER TABLE statement * dependent on the table being altered. In general, we try to * avoid this for DDL, but we are already requiring the table to * exist at bind time (not required for create index) and we don't * want the column ids to change out from under us before * execution. */ getCompilerContext().createDependency(baseTable); } /** * Return true if the node references SESSION schema tables (temporary or permanent) * * @return true if references SESSION schema tables, else false * * @exception StandardException Thrown on error */ public boolean referencesSessionSchema() throws StandardException { //If alter table is on a SESSION schema table, then return true. return isSessionSchema(baseTable.getSchemaName()); } /** * Create the Constant information that will drive the guts of Execution. * * @exception StandardException Thrown on failure */ public ConstantAction makeConstantAction() throws StandardException { ColumnInfo[] colInfos = new ColumnInfo[0]; int numConstraints = 0; if (tableElementList != null) { // Generate the ColumnInfo argument for the constant action. Keep the number of constraints. 
colInfos = new ColumnInfo[tableElementList.countNumberOfColumns()]; numConstraints = tableElementList.genColumnInfos(colInfos); } /* If we've seen a constraint, then build a constraint list */ ConstantAction[] conActions = new ConstantAction[0]; if (numConstraints > 0) { conActions = getGenericConstantActionFactory().createConstraintConstantActionArray(numConstraints); tableElementList.genConstraintActions(false, conActions, getRelativeName(), schemaDescriptor, getDataDictionary()); for (ConstantAction cca : conActions) { if (getGenericConstantActionFactory().primaryKeyConstantActionCheck(cca)) { DataDictionary dd = getDataDictionary(); // Check to see if a constraint of the same type // already exists ConstraintDescriptorList cdl = dd.getConstraintDescriptors(baseTable); if (cdl.getPrimaryKey() != null) { throw StandardException.newException( SQLState.LANG_ADD_PRIMARY_KEY_FAILED1, baseTable.getQualifiedName()); } } } } return getGenericConstantActionFactory().getAlterTableConstantAction(schemaDescriptor, getRelativeName(), baseTable.getUUID(), baseTable.getHeapConglomerateId(), TableDescriptor.BASE_TABLE_TYPE, colInfos, conActions, lockGranularity, compressTable, behavior, sequential, truncateTable, purge, defragment, truncateEndOfTable, updateStatistics, updateStatisticsAll, dropStatistics, dropStatisticsAll, indexNameForStatistics); } /** * Accept the visitor for all visitable children of this node. * * @param v the visitor * * @exception StandardException on error */ public void acceptChildren(Visitor v) throws StandardException { super.acceptChildren(v); if (tableElementList != null) { tableElementList.accept(v); } } /* * class interface */ }
java/engine/com/splicemachine/db/impl/sql/compile/AlterTableNode.java
/* Derby - Class org.apache.derby.impl.sql.compile.AlterTableNode Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.splicemachine.db.impl.sql.compile; import com.splicemachine.db.iapi.reference.SQLState; import com.splicemachine.db.iapi.reference.Limits; import com.splicemachine.db.iapi.services.io.FormatableBitSet; import com.splicemachine.db.iapi.services.sanity.SanityManager; import com.splicemachine.db.iapi.error.StandardException; import com.splicemachine.db.iapi.sql.compile.Visitor; import com.splicemachine.db.iapi.sql.dictionary.ConglomerateDescriptor; import com.splicemachine.db.iapi.sql.dictionary.ConstraintDescriptorList; import com.splicemachine.db.iapi.sql.dictionary.DataDictionary; import com.splicemachine.db.iapi.sql.dictionary.SchemaDescriptor; import com.splicemachine.db.iapi.sql.dictionary.TableDescriptor; import com.splicemachine.db.iapi.sql.execute.ConstantAction; import com.splicemachine.db.impl.sql.execute.ColumnInfo; /** * A AlterTableNode represents a DDL statement that alters a table. * It contains the name of the object to be created. * */ public class AlterTableNode extends DDLStatementNode { // The alter table action public TableElementList tableElementList = null; public char lockGranularity; /** * updateStatistics will indicate that we are here for updating the * statistics. It could be statistics of just one index or all the * indexes on a given table. */ private boolean updateStatistics = false; /** * The flag updateStatisticsAll will tell if we are going to update the * statistics of all indexes or just one index on a table. */ private boolean updateStatisticsAll = false; /** * dropStatistics will indicate that we are here for dropping the * statistics. It could be statistics of just one index or all the * indexes on a given table. */ private boolean dropStatistics; /** * The flag dropStatisticsAll will tell if we are going to drop the * statistics of all indexes or just one index on a table. */ private boolean dropStatisticsAll; /** * If statistic is getting updated/dropped for just one index, then * indexNameForStatistics will tell the name of the specific index * whose statistics need to be updated/dropped. 
*/ private String indexNameForStatistics; public boolean compressTable = false; public boolean sequential = false; //The following three (purge, defragment and truncateEndOfTable) apply for //inplace compress public boolean purge = false; public boolean defragment = false; public boolean truncateEndOfTable = false; public int behavior; // currently for drop column public TableDescriptor baseTable; protected int numConstraints; private int changeType = UNKNOWN_TYPE; private boolean truncateTable = false; // constant action arguments protected SchemaDescriptor schemaDescriptor = null; protected ColumnInfo[] colInfos = null; protected ConstantAction[] conActions = null; /** * Initializer for a TRUNCATE TABLE * * @param objectName The name of the table being truncated * @exception StandardException Thrown on error */ public void init(Object objectName) throws StandardException { initAndCheck(objectName); /* For now, this init() only called for truncate table */ truncateTable = true; schemaDescriptor = getSchemaDescriptor(); } /** * Initializer for a AlterTableNode for COMPRESS using temporary tables * rather than inplace compress * * @param objectName The name of the table being altered * @param sequential Whether or not the COMPRESS is SEQUENTIAL * * @exception StandardException Thrown on error */ public void init(Object objectName, Object sequential) throws StandardException { initAndCheck(objectName); this.sequential = ((Boolean) sequential).booleanValue(); /* For now, this init() only called for compress table */ compressTable = true; schemaDescriptor = getSchemaDescriptor(); } /** * Initializer for a AlterTableNode for INPLACE COMPRESS * * @param objectName The name of the table being altered * @param purge PURGE during INPLACE COMPRESS? * @param defragment DEFRAGMENT during INPLACE COMPRESS? * @param truncateEndOfTable TRUNCATE END during INPLACE COMPRESS? * * @exception StandardException Thrown on error */ public void init(Object objectName, Object purge, Object defragment, Object truncateEndOfTable) throws StandardException { initAndCheck(objectName); this.purge = ((Boolean) purge).booleanValue(); this.defragment = ((Boolean) defragment).booleanValue(); this.truncateEndOfTable = ((Boolean) truncateEndOfTable).booleanValue(); compressTable = true; schemaDescriptor = getSchemaDescriptor(true, false); } /** * Initializer for a AlterTableNode. The parameter values have different * meanings based on what kind of ALTER TABLE is taking place. * * @param objectName The name of the table being altered * @param changeType ADD_TYPE or DROP_TYPE or UPDATE_STATISTICS or * or DROP_STATISTICS * @param param1 For ADD_TYPE or DROP_TYPE, param1 gives the * elements impacted by ALTER TABLE. * For UPDATE_STATISTICS or or DROP_STATISTICS, * param1 is boolean - true means update or drop * the statistics of all the indexes on the table. * False means, update or drop the statistics of * only the index name provided by next parameter. * @param param2 For ADD_TYPE or DROP_TYPE, param2 gives the * new lock granularity, if any * For UPDATE_STATISTICS or DROP_STATISTICS, * param2 can be the name of the specific index * whose statistics will be dropped/updated. This * param is used only if param1 is set to false * @param param3 For DROP_TYPE, param3 can indicate if the drop * column is CASCADE or RESTRICTED. This param is * ignored for all the other changeType. 
* * @exception StandardException Thrown on error */ public void init( Object objectName, Object changeType, Object param1, Object param2, Object param3 ) throws StandardException { initAndCheck(objectName); int[] ct = (int[]) changeType; this.changeType = ct[0]; switch ( this.changeType ) { case ADD_TYPE: case DROP_TYPE: case MODIFY_TYPE: case LOCKING_TYPE: this.tableElementList = (TableElementList) param1; this.lockGranularity = ((Character) param2).charValue(); int[] bh = (int[]) param3; this.behavior = bh[0]; break; case UPDATE_STATISTICS: this.updateStatisticsAll = ((Boolean) param1).booleanValue(); this.indexNameForStatistics = (String)param2; updateStatistics = true; break; case DROP_STATISTICS: this.dropStatisticsAll = ((Boolean) param1).booleanValue(); this.indexNameForStatistics = (String)param2; dropStatistics = true; break; default: throw StandardException.newException(SQLState.NOT_IMPLEMENTED); } schemaDescriptor = getSchemaDescriptor(); } /** * Convert this object to a String. See comments in QueryTreeNode.java * for how this should be done for tree printing. * * @return This object as a String */ public String toString() { if (SanityManager.DEBUG) { return super.toString() + "objectName: " + getObjectName() + "\n" + "lockGranularity: " + lockGranularity + "\n" + "compressTable: " + compressTable + "\n" + "sequential: " + sequential + "\n" + "truncateTable: " + truncateTable + "\n" + "purge: " + purge + "\n" + "defragment: " + defragment + "\n" + "truncateEndOfTable: " + truncateEndOfTable + "\n" + "updateStatistics: " + updateStatistics + "\n" + "updateStatisticsAll: " + updateStatisticsAll + "\n" + "dropStatistics: " + dropStatistics + "\n" + "dropStatisticsAll: " + dropStatisticsAll + "\n" + "indexNameForStatistics: " + indexNameForStatistics + "\n"; } else { return ""; } } /** * Prints the sub-nodes of this object. See QueryTreeNode.java for * how tree printing is supposed to work. * @param depth The depth to indent the sub-nodes */ public void printSubNodes(int depth) { if (SanityManager.DEBUG) { if (tableElementList != null) { printLabel(depth, "tableElementList: "); tableElementList.treePrint(depth + 1); } } } public String statementToString() { if(truncateTable) return "TRUNCATE TABLE"; else return "ALTER TABLE"; } public int getChangeType() { return changeType; } // We inherit the generate() method from DDLStatementNode. /** * Bind this AlterTableNode. This means doing any static error * checking that can be done before actually creating the table. * For example, verifying that the user is not trying to add a * non-nullable column. * * * @exception StandardException Thrown on error */ public void bindStatement() throws StandardException { DataDictionary dd = getDataDictionary(); int numCheckConstraints = 0; int numReferenceConstraints = 0; int numGenerationClauses = 0; int numBackingIndexes = 0; /* ** Get the table descriptor. Checks the schema ** and the table. */ if(compressTable && (purge || defragment || truncateEndOfTable)) { //We are dealing with inplace compress here and inplace compress is //allowed on system schemas. In order to support inplace compress //on user as well as system tables, we need to use special //getTableDescriptor(boolean) call to get TableDescriptor. This //getTableDescriptor(boolean) allows getting TableDescriptor for //system tables without throwing an exception. 
baseTable = getTableDescriptor(false); } else baseTable = getTableDescriptor(); //throw an exception if user is attempting to alter a temporary table if (baseTable.getTableType() == TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE) { throw StandardException.newException(SQLState.LANG_NOT_ALLOWED_FOR_DECLARED_GLOBAL_TEMP_TABLE); } /* Statement is dependent on the TableDescriptor */ getCompilerContext().createDependency(baseTable); //If we are dealing with add column character type, then set that //column's collation type to be the collation type of the schema. //The collation derivation of such a column would be "implicit". if (changeType == ADD_TYPE) {//the action is of type add. if (tableElementList != null) {//check if is is add column for (int i=0; i<tableElementList.size();i++) { if (tableElementList.elementAt(i) instanceof ColumnDefinitionNode) { ColumnDefinitionNode cdn = (ColumnDefinitionNode) tableElementList.elementAt(i); //check if we are dealing with add character column // // For generated columns which omit an explicit // datatype, we have to defer this work until we bind // the generation clause // if ( cdn.hasGenerationClause() && ( cdn.getType() == null ) ) { continue; } if ( cdn.getType() == null ) { throw StandardException.newException ( SQLState.LANG_NEEDS_DATATYPE, cdn.getColumnName() ); } if (cdn.getType().getTypeId().isStringTypeId()) { //we found what we are looking for. Set the //collation type of this column to be the same as //schema descriptor's collation. Set the collation //derivation as implicit cdn.setCollationType(schemaDescriptor.getCollationType()); } } } } } if (tableElementList != null) { tableElementList.validate(this, dd, baseTable); /* Only 1012 columns allowed per table */ if ((tableElementList.countNumberOfColumns() + baseTable.getNumberOfColumns()) > Limits.DB2_MAX_COLUMNS_IN_TABLE) { throw StandardException.newException(SQLState.LANG_TOO_MANY_COLUMNS_IN_TABLE_OR_VIEW, String.valueOf(tableElementList.countNumberOfColumns() + baseTable.getNumberOfColumns()), getRelativeName(), String.valueOf(Limits.DB2_MAX_COLUMNS_IN_TABLE)); } /* Number of backing indexes in the alter table statment */ numBackingIndexes = tableElementList.countConstraints(DataDictionary.PRIMARYKEY_CONSTRAINT) + tableElementList.countConstraints(DataDictionary.FOREIGNKEY_CONSTRAINT) + tableElementList.countConstraints(DataDictionary.UNIQUE_CONSTRAINT); /* Check the validity of all check constraints */ numCheckConstraints = tableElementList.countConstraints( DataDictionary.CHECK_CONSTRAINT); numReferenceConstraints = tableElementList.countConstraints( DataDictionary.FOREIGNKEY_CONSTRAINT); numGenerationClauses = tableElementList.countGenerationClauses(); } //If the sum of backing indexes for constraints in alter table statement and total number of indexes on the table //so far is more than 32767, then we need to throw an exception if ((numBackingIndexes + baseTable.getTotalNumberOfIndexes()) > Limits.DB2_MAX_INDEXES_ON_TABLE) { throw StandardException.newException(SQLState.LANG_TOO_MANY_INDEXES_ON_TABLE, String.valueOf(numBackingIndexes + baseTable.getTotalNumberOfIndexes()), getRelativeName(), String.valueOf(Limits.DB2_MAX_INDEXES_ON_TABLE)); } if ( (numCheckConstraints > 0) || (numGenerationClauses > 0) || (numReferenceConstraints > 0)) { /* In order to check the validity of the check constraints and * generation clauses * we must goober up a FromList containing a single table, * the table being alter, with an RCL containing the existing and * new columns and their types. 
This will allow us to * bind the constraint definition trees against that * FromList. When doing this, we verify that there are * no nodes which can return non-deterministic results. */ FromList fromList = makeFromList( dd, tableElementList, false ); FormatableBitSet generatedColumns = baseTable.makeColumnMap( baseTable.getGeneratedColumns() ); /* Now that we've finally goobered stuff up, bind and validate * the check constraints and generation clauses. */ if (numGenerationClauses > 0) { tableElementList.bindAndValidateGenerationClauses( schemaDescriptor, fromList, generatedColumns, baseTable ); } if (numCheckConstraints > 0) { tableElementList.bindAndValidateCheckConstraints(fromList); } if ( numReferenceConstraints > 0) { tableElementList.validateForeignKeysOnGenerationClauses( fromList, generatedColumns ); } } // must be done after resolving the datatypes of the generation clauses if (tableElementList != null) { tableElementList.validatePrimaryKeyNullability(); } //Check if we are in alter table to update/drop the statistics. If yes, // then check if we are here to update/drop the statistics of a specific // index. If yes, then verify that the indexname provided is a valid one. if ((updateStatistics && !updateStatisticsAll) || (dropStatistics && !dropStatisticsAll)) { ConglomerateDescriptor cd = null; if (schemaDescriptor.getUUID() != null) cd = dd.getConglomerateDescriptor(indexNameForStatistics, schemaDescriptor, false); if (cd == null) { throw StandardException.newException( SQLState.LANG_INDEX_NOT_FOUND, schemaDescriptor.getSchemaName() + "." + indexNameForStatistics); } } /* Unlike most other DDL, we will make this ALTER TABLE statement * dependent on the table being altered. In general, we try to * avoid this for DDL, but we are already requiring the table to * exist at bind time (not required for create index) and we don't * want the column ids to change out from under us before * execution. */ getCompilerContext().createDependency(baseTable); } /** * Return true if the node references SESSION schema tables (temporary or permanent) * * @return true if references SESSION schema tables, else false * * @exception StandardException Thrown on error */ public boolean referencesSessionSchema() throws StandardException { //If alter table is on a SESSION schema table, then return true. return isSessionSchema(baseTable.getSchemaName()); } /** * Create the Constant information that will drive the guts of Execution. * * @exception StandardException Thrown on failure */ public ConstantAction makeConstantAction() throws StandardException { prepConstantAction(); return getGenericConstantActionFactory().getAlterTableConstantAction(schemaDescriptor, getRelativeName(), baseTable.getUUID(), baseTable.getHeapConglomerateId(), TableDescriptor.BASE_TABLE_TYPE, colInfos, conActions, lockGranularity, compressTable, behavior, sequential, truncateTable, purge, defragment, truncateEndOfTable, updateStatistics, updateStatisticsAll, dropStatistics, dropStatisticsAll, indexNameForStatistics); } /** * Generate arguments to constant action. Called by makeConstantAction() in this class and in * our subclass RepAlterTableNode. 
* * * @exception StandardException Thrown on failure */ private void prepConstantAction() throws StandardException { if (tableElementList != null) { genColumnInfo(); } /* If we've seen a constraint, then build a constraint list */ if (numConstraints > 0) { conActions = getGenericConstantActionFactory().createConstraintConstantActionArray(numConstraints); tableElementList.genConstraintActions(false, conActions, getRelativeName(), schemaDescriptor, getDataDictionary()); for (int conIndex = 0; conIndex < conActions.length; conIndex++) { ConstantAction cca = conActions[conIndex]; if (getGenericConstantActionFactory().primaryKeyConstantActionCheck(cca)) { DataDictionary dd = getDataDictionary(); // Check to see if a constraint of the same type // already exists ConstraintDescriptorList cdl = dd.getConstraintDescriptors(baseTable); if (cdl.getPrimaryKey() != null) { throw StandardException.newException( SQLState.LANG_ADD_PRIMARY_KEY_FAILED1, baseTable.getQualifiedName()); } } } } } /** * Generate the ColumnInfo argument for the constant action. Return the number of constraints. */ public void genColumnInfo() throws StandardException { // for each column, stuff system.column colInfos = new ColumnInfo[tableElementList.countNumberOfColumns()]; numConstraints = tableElementList.genColumnInfos(colInfos); } /** * Accept the visitor for all visitable children of this node. * * @param v the visitor * * @exception StandardException on error */ public void acceptChildren(Visitor v) throws StandardException { super.acceptChildren(v); if (tableElementList != null) { tableElementList.accept(v); } } /* * class interface */ }
DB-1755: add constraint. Clean up multi-method calling.
java/engine/com/splicemachine/db/impl/sql/compile/AlterTableNode.java
DB-1755: add constraint. Clean up multi-method calling.
<ide><path>java/engine/com/splicemachine/db/impl/sql/compile/AlterTableNode.java
<ide>
<ide>     public TableDescriptor baseTable;
<ide>
<del>     protected int numConstraints;
<del>
<ide>     private int changeType = UNKNOWN_TYPE;
<ide>
<ide>     private boolean truncateTable = false;
<ide>     // constant action arguments
<ide>
<ide>     protected SchemaDescriptor schemaDescriptor = null;
<del>     protected ColumnInfo[] colInfos = null;
<del>     protected ConstantAction[] conActions = null;
<ide>
<ide>
<ide>     /**
<ide>     {
<ide>         initAndCheck(objectName);
<ide>
<del>         this.sequential = ((Boolean) sequential).booleanValue();
<add>         this.sequential = (Boolean) sequential;
<ide>         /* For now, this init() only called for compress table */
<ide>         compressTable = true;
<ide>
<ide>     {
<ide>         initAndCheck(objectName);
<ide>
<del>         this.purge = ((Boolean) purge).booleanValue();
<del>         this.defragment = ((Boolean) defragment).booleanValue();
<del>         this.truncateEndOfTable = ((Boolean) truncateEndOfTable).booleanValue();
<add>         this.purge = (Boolean) purge;
<add>         this.defragment = (Boolean) defragment;
<add>         this.truncateEndOfTable = (Boolean) truncateEndOfTable;
<ide>         compressTable = true;
<ide>         schemaDescriptor = getSchemaDescriptor(true, false);
<ide>     }
<ide>         case MODIFY_TYPE:
<ide>         case LOCKING_TYPE:
<ide>             this.tableElementList = (TableElementList) param1;
<del>             this.lockGranularity = ((Character) param2).charValue();
<add>             this.lockGranularity = (Character) param2;
<ide>             int[] bh = (int[]) param3;
<ide>             this.behavior = bh[0];
<ide>             break;
<ide>
<ide>         case UPDATE_STATISTICS:
<del>             this.updateStatisticsAll = ((Boolean) param1).booleanValue();
<add>             this.updateStatisticsAll = (Boolean) param1;
<ide>             this.indexNameForStatistics = (String)param2;
<ide>             updateStatistics = true;
<ide>             break;
<ide>
<ide>         case DROP_STATISTICS:
<del>             this.dropStatisticsAll = ((Boolean) param1).booleanValue();
<add>             this.dropStatisticsAll = (Boolean) param1;
<ide>             this.indexNameForStatistics = (String)param2;
<ide>             dropStatistics = true;
<ide>             break;
<ide>     */
<ide>     public ConstantAction makeConstantAction() throws StandardException
<ide>     {
<del>         prepConstantAction();
<add>         ColumnInfo[] colInfos = new ColumnInfo[0];
<add>         int numConstraints = 0;
<add>
<add>         if (tableElementList != null) {
<add>             // Generate the ColumnInfo argument for the constant action. Keep the number of constraints.
<add>             colInfos = new ColumnInfo[tableElementList.countNumberOfColumns()];
<add>
<add>             numConstraints = tableElementList.genColumnInfos(colInfos);
<add>         }
<add>
<add>         /* If we've seen a constraint, then build a constraint list */
<add>         ConstantAction[] conActions = new ConstantAction[0];
<add>         if (numConstraints > 0) {
<add>             conActions = getGenericConstantActionFactory().createConstraintConstantActionArray(numConstraints);
<add>
<add>             tableElementList.genConstraintActions(false, conActions, getRelativeName(), schemaDescriptor,
<add>                 getDataDictionary());
<add>
<add>             for (ConstantAction cca : conActions) {
<add>                 if (getGenericConstantActionFactory().primaryKeyConstantActionCheck(cca)) {
<add>                     DataDictionary dd = getDataDictionary();
<add>                     // Check to see if a constraint of the same type
<add>                     // already exists
<add>                     ConstraintDescriptorList cdl =
<add>                         dd.getConstraintDescriptors(baseTable);
<add>
<add>                     if (cdl.getPrimaryKey() != null) {
<add>                         throw StandardException.newException(
<add>                             SQLState.LANG_ADD_PRIMARY_KEY_FAILED1,
<add>                             baseTable.getQualifiedName());
<add>                     }
<add>                 }
<add>             }
<add>         }
<ide>
<ide>         return getGenericConstantActionFactory().getAlterTableConstantAction(schemaDescriptor,
<ide>             getRelativeName(),
<ide>     }
<ide>
<ide>     /**
<del>     * Generate arguments to constant action. Called by makeConstantAction() in this class and in
<del>     * our subclass RepAlterTableNode.
<del>     *
<del>     *
<del>     * @exception StandardException Thrown on failure
<del>     */
<del>     private void prepConstantAction() throws StandardException
<del>     {
<del>         if (tableElementList != null)
<del>         {
<del>             genColumnInfo();
<del>         }
<del>
<del>         /* If we've seen a constraint, then build a constraint list */
<del>
<del>         if (numConstraints > 0)
<del>         {
<del>             conActions = getGenericConstantActionFactory().createConstraintConstantActionArray(numConstraints);
<del>
<del>             tableElementList.genConstraintActions(false, conActions, getRelativeName(), schemaDescriptor,
<del>                 getDataDictionary());
<del>
<del>             for (int conIndex = 0; conIndex < conActions.length; conIndex++) {
<del>                 ConstantAction cca = conActions[conIndex];
<del>                 if (getGenericConstantActionFactory().primaryKeyConstantActionCheck(cca)) {
<del>                     DataDictionary dd = getDataDictionary();
<del>                     // Check to see if a constraint of the same type
<del>                     // already exists
<del>                     ConstraintDescriptorList cdl =
<del>                         dd.getConstraintDescriptors(baseTable);
<del>
<del>                     if (cdl.getPrimaryKey() != null)
<del>                     {
<del>                         throw StandardException.newException(
<del>                             SQLState.LANG_ADD_PRIMARY_KEY_FAILED1,
<del>                             baseTable.getQualifiedName());
<del>                     }
<del>                 }
<del>             }
<del>         }
<del>     }
<del>
<del>     /**
<del>     * Generate the ColumnInfo argument for the constant action. Return the number of constraints.
<del>     */
<del>     public void genColumnInfo()
<del>         throws StandardException
<del>     {
<del>         // for each column, stuff system.column
<del>         colInfos = new ColumnInfo[tableElementList.countNumberOfColumns()];
<del>
<del>         numConstraints = tableElementList.genColumnInfos(colInfos);
<del>     }
<del>
<del>
<del>     /**
<ide>     * Accept the visitor for all visitable children of this node.
<ide>     *
<ide>     * @param v the visitor
Java
apache-2.0
7aa4a0ec6c4650bb7681466feebb1cc3121fbfee
0
smartnews/presto
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.plugin.iceberg; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.airlift.units.DataSize; import io.trino.Session; import io.trino.metadata.Metadata; import io.trino.metadata.QualifiedObjectName; import io.trino.metadata.TableHandle; import io.trino.operator.OperatorStats; import io.trino.plugin.hive.HdfsEnvironment; import io.trino.spi.QueryId; import io.trino.spi.connector.ColumnHandle; import io.trino.spi.connector.Constraint; import io.trino.spi.connector.ConstraintApplicationResult; import io.trino.spi.connector.TableNotFoundException; import io.trino.spi.predicate.Domain; import io.trino.spi.predicate.TupleDomain; import io.trino.testing.BaseConnectorTest; import io.trino.testing.DataProviders; import io.trino.testing.MaterializedResult; import io.trino.testing.MaterializedRow; import io.trino.testing.QueryRunner; import io.trino.testing.ResultWithQueryId; import io.trino.testing.TestingConnectorBehavior; import io.trino.testing.sql.TestTable; import io.trino.tpch.TpchTable; import org.apache.avro.Schema; import org.apache.avro.file.DataFileReader; import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericData; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericDatumWriter; import org.apache.hadoop.fs.FileSystem; import org.intellij.lang.annotations.Language; import org.testng.SkipException; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.NoSuchElementException; import java.util.Optional; import java.util.Set; import java.util.function.Consumer; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.LongStream; import java.util.stream.Stream; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Verify.verify; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableMap.toImmutableMap; import static com.google.common.collect.Iterables.concat; import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.common.collect.MoreCollectors.onlyElement; import static io.trino.SystemSessionProperties.JOIN_DISTRIBUTION_TYPE; import static io.trino.SystemSessionProperties.PREFERRED_WRITE_PARTITIONING_MIN_NUMBER_OF_PARTITIONS; import static io.trino.SystemSessionProperties.SCALE_WRITERS; import static io.trino.SystemSessionProperties.TASK_WRITER_COUNT; import static io.trino.plugin.hive.HdfsEnvironment.HdfsContext; import static 
io.trino.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT; import static io.trino.plugin.iceberg.IcebergFileFormat.ORC; import static io.trino.plugin.iceberg.IcebergFileFormat.PARQUET; import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG; import static io.trino.plugin.iceberg.IcebergSplitManager.ICEBERG_DOMAIN_COMPACTION_THRESHOLD; import static io.trino.spi.predicate.Domain.multipleValues; import static io.trino.spi.predicate.Domain.singleValue; import static io.trino.spi.type.BigintType.BIGINT; import static io.trino.spi.type.DoubleType.DOUBLE; import static io.trino.spi.type.VarcharType.VARCHAR; import static io.trino.sql.planner.OptimizerConfig.JoinDistributionType.BROADCAST; import static io.trino.testing.MaterializedResult.resultBuilder; import static io.trino.testing.QueryAssertions.assertEqualsIgnoreOrder; import static io.trino.testing.TestingSession.testSessionBuilder; import static io.trino.testing.assertions.Assert.assertEquals; import static io.trino.testing.assertions.Assert.assertEventually; import static io.trino.testing.sql.TestTable.randomTableSuffix; import static io.trino.tpch.TpchTable.LINE_ITEM; import static io.trino.transaction.TransactionBuilder.transaction; import static java.lang.String.format; import static java.lang.String.join; import static java.util.Collections.nCopies; import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toUnmodifiableList; import static java.util.stream.IntStream.range; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotEquals; import static org.testng.Assert.assertTrue; public abstract class BaseIcebergConnectorTest extends BaseConnectorTest { private static final Pattern WITH_CLAUSE_EXTRACTOR = Pattern.compile(".*(WITH\\s*\\([^)]*\\))\\s*$", Pattern.DOTALL); private final IcebergFileFormat format; protected BaseIcebergConnectorTest(IcebergFileFormat format) { this.format = requireNonNull(format, "format is null"); } @Override protected QueryRunner createQueryRunner() throws Exception { return IcebergQueryRunner.builder() .setIcebergProperties(Map.of("iceberg.file-format", format.name())) .setInitialTables(ImmutableList.<TpchTable<?>>builder() .addAll(REQUIRED_TPCH_TABLES) .add(LINE_ITEM) .build()) .build(); } @Override protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior) { switch (connectorBehavior) { case SUPPORTS_TOPN_PUSHDOWN: return false; case SUPPORTS_CREATE_VIEW: return true; case SUPPORTS_CREATE_MATERIALIZED_VIEW: case SUPPORTS_RENAME_MATERIALIZED_VIEW: return true; case SUPPORTS_RENAME_MATERIALIZED_VIEW_ACROSS_SCHEMAS: return false; case SUPPORTS_DELETE: case SUPPORTS_UPDATE: return true; default: return super.hasBehavior(connectorBehavior); } } @Override protected void verifyVersionedQueryFailurePermissible(Exception e) { assertThat(e) .hasMessageMatching("Version pointer type is not supported: .*|" + "Unsupported type for temporal table version: .*|" + "Unsupported type for table version: .*|" + "No version history table tpch.nation at or before .*|" + "Iceberg snapshot ID does not exists: .*"); } @Override protected void verifyConcurrentUpdateFailurePermissible(Exception e) { assertThat(e).hasMessageContaining("Failed to commit Iceberg update to table"); } @Override protected void verifyConcurrentAddColumnFailurePermissible(Exception e) { 
assertThat(e) .hasMessageContaining("Cannot update Iceberg table: supplied previous location does not match current location"); } @Test public void testDeleteOnV1Table() { try (TestTable table = new TestTable(getQueryRunner()::execute, "test_delete_", "WITH (format_version = 1) AS SELECT * FROM orders")) { assertQueryFails("DELETE FROM " + table.getName() + " WHERE custkey <= 100", "Iceberg table updates require at least format version 2"); } } @Override public void testCharVarcharComparison() { assertThatThrownBy(super::testCharVarcharComparison) .hasMessage("Type not supported for Iceberg: char(3)"); } @Test @Override public void testShowCreateSchema() { assertThat(computeActual("SHOW CREATE SCHEMA tpch").getOnlyValue().toString()) .matches("CREATE SCHEMA iceberg.tpch\n" + "AUTHORIZATION USER user\n" + "WITH \\(\n" + "\\s+location = '.*/iceberg_data/tpch'\n" + "\\)"); } @Override @Test public void testDescribeTable() { MaterializedResult expectedColumns = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR) .row("orderkey", "bigint", "", "") .row("custkey", "bigint", "", "") .row("orderstatus", "varchar", "", "") .row("totalprice", "double", "", "") .row("orderdate", "date", "", "") .row("orderpriority", "varchar", "", "") .row("clerk", "varchar", "", "") .row("shippriority", "integer", "", "") .row("comment", "varchar", "", "") .build(); MaterializedResult actualColumns = computeActual("DESCRIBE orders"); assertEquals(actualColumns, expectedColumns); } @Override @Test public void testShowCreateTable() { File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile(); assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue()) .isEqualTo("CREATE TABLE iceberg.tpch.orders (\n" + " orderkey bigint,\n" + " custkey bigint,\n" + " orderstatus varchar,\n" + " totalprice double,\n" + " orderdate date,\n" + " orderpriority varchar,\n" + " clerk varchar,\n" + " shippriority integer,\n" + " comment varchar\n" + ")\n" + "WITH (\n" + " format = '" + format.name() + "',\n" + " format_version = 2,\n" + " location = '" + tempDir + "/iceberg_data/tpch/orders'\n" + ")"); } @Override protected void checkInformationSchemaViewsForMaterializedView(String schemaName, String viewName) { // TODO should probably return materialized view, as it's also a view -- to be double checked assertThatThrownBy(() -> super.checkInformationSchemaViewsForMaterializedView(schemaName, viewName)) .hasMessageFindingMatch("(?s)Expecting.*to contain:.*\\Q[(" + viewName + ")]"); } @Test public void testDecimal() { testDecimalWithPrecisionAndScale(1, 0); testDecimalWithPrecisionAndScale(8, 6); testDecimalWithPrecisionAndScale(9, 8); testDecimalWithPrecisionAndScale(10, 8); testDecimalWithPrecisionAndScale(18, 1); testDecimalWithPrecisionAndScale(18, 8); testDecimalWithPrecisionAndScale(18, 17); testDecimalWithPrecisionAndScale(17, 16); testDecimalWithPrecisionAndScale(18, 17); testDecimalWithPrecisionAndScale(24, 10); testDecimalWithPrecisionAndScale(30, 10); testDecimalWithPrecisionAndScale(37, 26); testDecimalWithPrecisionAndScale(38, 37); testDecimalWithPrecisionAndScale(38, 17); testDecimalWithPrecisionAndScale(38, 37); } private void testDecimalWithPrecisionAndScale(int precision, int scale) { checkArgument(precision >= 1 && precision <= 38, "Decimal precision (%s) must be between 1 and 38 inclusive", precision); checkArgument(scale < precision && scale >= 0, "Decimal scale (%s) must be less than the precision (%s) and non-negative", scale, precision); String decimalType = 
format("DECIMAL(%d,%d)", precision, scale); String beforeTheDecimalPoint = "12345678901234567890123456789012345678".substring(0, precision - scale); String afterTheDecimalPoint = "09876543210987654321098765432109876543".substring(0, scale); String decimalValue = format("%s.%s", beforeTheDecimalPoint, afterTheDecimalPoint); assertUpdate(format("CREATE TABLE test_iceberg_decimal (x %s)", decimalType)); assertUpdate(format("INSERT INTO test_iceberg_decimal (x) VALUES (CAST('%s' AS %s))", decimalValue, decimalType), 1); assertQuery("SELECT * FROM test_iceberg_decimal", format("SELECT CAST('%s' AS %s)", decimalValue, decimalType)); dropTable("test_iceberg_decimal"); } @Test public void testTime() { testSelectOrPartitionedByTime(false); } @Test public void testPartitionedByTime() { testSelectOrPartitionedByTime(true); } private void testSelectOrPartitionedByTime(boolean partitioned) { String tableName = format("test_%s_by_time", partitioned ? "partitioned" : "selected"); String partitioning = partitioned ? "WITH(partitioning = ARRAY['x'])" : ""; assertUpdate(format("CREATE TABLE %s (x TIME(6), y BIGINT) %s", tableName, partitioning)); assertUpdate(format("INSERT INTO %s VALUES (TIME '10:12:34', 12345)", tableName), 1); assertQuery(format("SELECT COUNT(*) FROM %s", tableName), "SELECT 1"); assertQuery(format("SELECT x FROM %s", tableName), "SELECT CAST('10:12:34' AS TIME)"); assertUpdate(format("INSERT INTO %s VALUES (TIME '9:00:00', 67890)", tableName), 1); assertQuery(format("SELECT COUNT(*) FROM %s", tableName), "SELECT 2"); assertQuery(format("SELECT x FROM %s WHERE x = TIME '10:12:34'", tableName), "SELECT CAST('10:12:34' AS TIME)"); assertQuery(format("SELECT x FROM %s WHERE x = TIME '9:00:00'", tableName), "SELECT CAST('9:00:00' AS TIME)"); assertQuery(format("SELECT x FROM %s WHERE y = 12345", tableName), "SELECT CAST('10:12:34' AS TIME)"); assertQuery(format("SELECT x FROM %s WHERE y = 67890", tableName), "SELECT CAST('9:00:00' AS TIME)"); dropTable(tableName); } @Test public void testPartitionByTimestamp() { testSelectOrPartitionedByTimestamp(true); } @Test public void testSelectByTimestamp() { testSelectOrPartitionedByTimestamp(false); } private void testSelectOrPartitionedByTimestamp(boolean partitioned) { String tableName = format("test_%s_by_timestamp", partitioned ? "partitioned" : "selected"); assertUpdate(format("CREATE TABLE %s (_timestamp timestamp(6)) %s", tableName, partitioned ? 
"WITH (partitioning = ARRAY['_timestamp'])" : "")); @Language("SQL") String select1 = "SELECT TIMESTAMP '2017-05-01 10:12:34' _timestamp"; @Language("SQL") String select2 = "SELECT TIMESTAMP '2017-10-01 10:12:34' _timestamp"; @Language("SQL") String select3 = "SELECT TIMESTAMP '2018-05-01 10:12:34' _timestamp"; assertUpdate(format("INSERT INTO %s %s", tableName, select1), 1); assertUpdate(format("INSERT INTO %s %s", tableName, select2), 1); assertUpdate(format("INSERT INTO %s %s", tableName, select3), 1); assertQuery(format("SELECT COUNT(*) from %s", tableName), "SELECT 3"); assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2017-05-01 10:12:34'", tableName), select1); assertQuery(format("SELECT * from %s WHERE _timestamp < TIMESTAMP '2017-06-01 10:12:34'", tableName), select1); assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2017-10-01 10:12:34'", tableName), select2); assertQuery(format("SELECT * from %s WHERE _timestamp > TIMESTAMP '2017-06-01 10:12:34' AND _timestamp < TIMESTAMP '2018-05-01 10:12:34'", tableName), select2); assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2018-05-01 10:12:34'", tableName), select3); assertQuery(format("SELECT * from %s WHERE _timestamp > TIMESTAMP '2018-01-01 10:12:34'", tableName), select3); dropTable(tableName); } @Test public void testPartitionByTimestampWithTimeZone() { testSelectOrPartitionedByTimestampWithTimeZone(true); } @Test public void testSelectByTimestampWithTimeZone() { testSelectOrPartitionedByTimestampWithTimeZone(false); } private void testSelectOrPartitionedByTimestampWithTimeZone(boolean partitioned) { String tableName = format("test_%s_by_timestamptz", partitioned ? "partitioned" : "selected"); assertUpdate(format( "CREATE TABLE %s (_timestamptz timestamp(6) with time zone) %s", tableName, partitioned ? 
"WITH (partitioning = ARRAY['_timestamptz'])" : "")); String instant1Utc = "TIMESTAMP '2021-10-31 00:30:00.005000 UTC'"; String instant1La = "TIMESTAMP '2021-10-30 17:30:00.005000 America/Los_Angeles'"; String instant2Utc = "TIMESTAMP '2021-10-31 00:30:00.006000 UTC'"; String instant2La = "TIMESTAMP '2021-10-30 17:30:00.006000 America/Los_Angeles'"; String instant3Utc = "TIMESTAMP '2021-10-31 00:30:00.007000 UTC'"; String instant3La = "TIMESTAMP '2021-10-30 17:30:00.007000 America/Los_Angeles'"; assertUpdate(format("INSERT INTO %s VALUES %s", tableName, instant1Utc), 1); assertUpdate(format("INSERT INTO %s VALUES %s", tableName, instant2La /* non-UTC for this one */), 1); assertUpdate(format("INSERT INTO %s VALUES %s", tableName, instant3Utc), 1); assertQuery(format("SELECT COUNT(*) from %s", tableName), "SELECT 3"); // = assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant1Utc))) .matches("VALUES " + instant1Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant1La))) .matches("VALUES " + instant1Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant2Utc))) .matches("VALUES " + instant2Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant2La))) .matches("VALUES " + instant2Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant3Utc))) .matches("VALUES " + instant3Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant3La))) .matches("VALUES " + instant3Utc); // < assertThat(query(format("SELECT * from %s WHERE _timestamptz < %s", tableName, instant2Utc))) .matches("VALUES " + instant1Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz < %s", tableName, instant2La))) .matches("VALUES " + instant1Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz < %s", tableName, instant3Utc))) .matches(format("VALUES %s, %s", instant1Utc, instant2Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz < %s", tableName, instant3La))) .matches(format("VALUES %s, %s", instant1Utc, instant2Utc)); // <= assertThat(query(format("SELECT * from %s WHERE _timestamptz <= %s", tableName, instant2Utc))) .matches(format("VALUES %s, %s", instant1Utc, instant2Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz <= %s", tableName, instant2La))) .matches(format("VALUES %s, %s", instant1Utc, instant2Utc)); // > assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s", tableName, instant2Utc))) .matches("VALUES " + instant3Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s", tableName, instant2La))) .matches("VALUES " + instant3Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s", tableName, instant1Utc))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s", tableName, instant1La))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); // >= assertThat(query(format("SELECT * from %s WHERE _timestamptz >= %s", tableName, instant2Utc))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz >= %s", tableName, instant2La))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); // open range assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s AND _timestamptz < %s", tableName, instant1Utc, instant3Utc))) .matches("VALUES " + 
instant2Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s AND _timestamptz < %s", tableName, instant1La, instant3La))) .matches("VALUES " + instant2Utc); // closed range assertThat(query(format("SELECT * from %s WHERE _timestamptz BETWEEN %s AND %s", tableName, instant1Utc, instant2Utc))) .matches(format("VALUES %s, %s", instant1Utc, instant2Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz BETWEEN %s AND %s", tableName, instant1La, instant2La))) .matches(format("VALUES %s, %s", instant1Utc, instant2Utc)); // != assertThat(query(format("SELECT * from %s WHERE _timestamptz != %s", tableName, instant1Utc))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz != %s", tableName, instant1La))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz != %s", tableName, instant2Utc))) .matches(format("VALUES %s, %s", instant1Utc, instant3Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz != %s", tableName, instant2La))) .matches(format("VALUES %s, %s", instant1Utc, instant3Utc)); // IS DISTINCT FROM assertThat(query(format("SELECT * from %s WHERE _timestamptz IS DISTINCT FROM %s", tableName, instant1Utc))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz IS DISTINCT FROM %s", tableName, instant1La))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz IS DISTINCT FROM %s", tableName, instant2Utc))) .matches(format("VALUES %s, %s", instant1Utc, instant3Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz IS DISTINCT FROM %s", tableName, instant2La))) .matches(format("VALUES %s, %s", instant1Utc, instant3Utc)); // IS NOT DISTINCT FROM assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant1Utc))) .matches("VALUES " + instant1Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant1La))) .matches("VALUES " + instant1Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant2Utc))) .matches("VALUES " + instant2Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant2La))) .matches("VALUES " + instant2Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant3Utc))) .matches("VALUES " + instant3Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant3La))) .matches("VALUES " + instant3Utc); if (partitioned) { assertThat(query(format("SELECT record_count, file_count, partition._timestamptz FROM \"%s$partitions\"", tableName))) .matches(format("VALUES (BIGINT '1', BIGINT '1', %s), (BIGINT '1', BIGINT '1', %s), (BIGINT '1', BIGINT '1', %s)", instant1Utc, instant2Utc, instant3Utc)); } else { assertThat(query(format("SELECT record_count, file_count, data._timestamptz FROM \"%s$partitions\"", tableName))) .matches(format( "VALUES (BIGINT '3', BIGINT '3', CAST(ROW(%s, %s, 0, NULL) AS row(min timestamp(6) with time zone, max timestamp(6) with time zone, null_count bigint, nan_count bigint)))", instant1Utc, format == ORC ? 
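                        // ORC file statistics are kept at millisecond precision (see the other ORC-specific expected values in this class), hence the .007999 upper bound below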
"TIMESTAMP '2021-10-31 00:30:00.007999 UTC'" : instant3Utc)); } // show stats assertThat(query("SHOW STATS FOR " + tableName)) .skippingTypesCheck() .matches("VALUES " + "('_timestamptz', NULL, NULL, 0e0, NULL, '2021-10-31 00:30:00.005 UTC', '2021-10-31 00:30:00.007 UTC'), " + "(NULL, NULL, NULL, NULL, 3e0, NULL, NULL)"); if (partitioned) { // show stats with predicate assertThat(query("SHOW STATS FOR (SELECT * FROM " + tableName + " WHERE _timestamptz = " + instant1La + ")")) .skippingTypesCheck() .matches("VALUES " + // TODO (https://github.com/trinodb/trino/issues/9716) the min/max values are off by 1 millisecond "('_timestamptz', NULL, NULL, 0e0, NULL, '2021-10-31 00:30:00.005 UTC', '2021-10-31 00:30:00.005 UTC'), " + "(NULL, NULL, NULL, NULL, 1e0, NULL, NULL)"); } else { // show stats with predicate assertThat(query("SHOW STATS FOR (SELECT * FROM " + tableName + " WHERE _timestamptz = " + instant1La + ")")) .skippingTypesCheck() .matches("VALUES " + "('_timestamptz', NULL, NULL, NULL, NULL, NULL, NULL), " + "(NULL, NULL, NULL, NULL, NULL, NULL, NULL)"); } assertUpdate("DROP TABLE " + tableName); } @Test public void testUuid() { testSelectOrPartitionedByUuid(false); } @Test public void testPartitionedByUuid() { testSelectOrPartitionedByUuid(true); } private void testSelectOrPartitionedByUuid(boolean partitioned) { String tableName = format("test_%s_by_uuid", partitioned ? "partitioned" : "selected"); String partitioning = partitioned ? "WITH (partitioning = ARRAY['x'])" : ""; assertUpdate(format("DROP TABLE IF EXISTS %s", tableName)); assertUpdate(format("CREATE TABLE %s (x uuid, y bigint) %s", tableName, partitioning)); assertUpdate(format("INSERT INTO %s VALUES (UUID '406caec7-68b9-4778-81b2-a12ece70c8b1', 12345)", tableName), 1); assertQuery(format("SELECT count(*) FROM %s", tableName), "SELECT 1"); assertQuery(format("SELECT x FROM %s", tableName), "SELECT CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID)"); assertUpdate(format("INSERT INTO %s VALUES (UUID 'f79c3e09-677c-4bbd-a479-3f349cb785e7', 67890)", tableName), 1); assertUpdate(format("INSERT INTO %s VALUES (NULL, 7531)", tableName), 1); assertQuery(format("SELECT count(*) FROM %s", tableName), "SELECT 3"); assertQuery(format("SELECT * FROM %s WHERE x = UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'", tableName), "SELECT CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID), 12345"); assertQuery(format("SELECT * FROM %s WHERE x = UUID 'f79c3e09-677c-4bbd-a479-3f349cb785e7'", tableName), "SELECT CAST('f79c3e09-677c-4bbd-a479-3f349cb785e7' AS UUID), 67890"); assertQuery( format("SELECT * FROM %s WHERE x >= UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'", tableName), (format == ORC && partitioned || format == PARQUET) // TODO (https://github.com/trinodb/trino/issues/12834): reading Parquet, or partitioned ORC, with UUID filter yields incorrect results ? "VALUES (CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID), 12345)" : "VALUES (CAST('f79c3e09-677c-4bbd-a479-3f349cb785e7' AS UUID), 67890), (CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID), 12345)"); assertQuery( format("SELECT * FROM %s WHERE x >= UUID 'f79c3e09-677c-4bbd-a479-3f349cb785e7'", tableName), partitioned ? 
"VALUES (CAST('f79c3e09-677c-4bbd-a479-3f349cb785e7' AS UUID), 67890), (CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID), 12345)" : "SELECT CAST('f79c3e09-677c-4bbd-a479-3f349cb785e7' AS UUID), 67890"); assertQuery(format("SELECT * FROM %s WHERE x IS NULL", tableName), "SELECT NULL, 7531"); assertQuery(format("SELECT x FROM %s WHERE y = 12345", tableName), "SELECT CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID)"); assertQuery(format("SELECT x FROM %s WHERE y = 67890", tableName), "SELECT CAST('f79c3e09-677c-4bbd-a479-3f349cb785e7' AS UUID)"); assertQuery(format("SELECT x FROM %s WHERE y = 7531", tableName), "SELECT NULL"); assertUpdate(format("INSERT INTO %s VALUES (UUID '206caec7-68b9-4778-81b2-a12ece70c8b1', 313), (UUID '906caec7-68b9-4778-81b2-a12ece70c8b1', 314)", tableName), 2); assertThat(query("SELECT y FROM " + tableName + " WHERE x >= UUID '206caec7-68b9-4778-81b2-a12ece70c8b1'")) .matches( (partitioned) // TODO (https://github.com/trinodb/trino/issues/12834): reading Parquet with UUID filter yields incorrect results ? "VALUES BIGINT '12345', 313" : ((format == PARQUET) // TODO (https://github.com/trinodb/trino/issues/12834): reading Parquet with UUID filter yields incorrect results ? "VALUES BIGINT '12345'" // this one is correct : "VALUES BIGINT '12345', 67890, 313, 314")); assertUpdate("DROP TABLE " + tableName); } @Test public void testNestedUuid() { assertUpdate("CREATE TABLE test_nested_uuid (int_t int, row_t row(uuid_t uuid, int_t int), map_t map(int, uuid), array_t array(uuid))"); String uuid = "UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'"; String value = format("VALUES (2, row(%1$s, 1), map(array[1], array[%1$s]), array[%1$s, %1$s])", uuid); assertUpdate("INSERT INTO test_nested_uuid " + value, 1); assertThat(query("SELECT row_t.int_t, row_t.uuid_t FROM test_nested_uuid")) .matches("VALUES (1, UUID '406caec7-68b9-4778-81b2-a12ece70c8b1')"); assertThat(query("SELECT map_t[1] FROM test_nested_uuid")) .matches("VALUES UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'"); assertThat(query("SELECT array_t FROM test_nested_uuid")) .matches("VALUES ARRAY[UUID '406caec7-68b9-4778-81b2-a12ece70c8b1', UUID '406caec7-68b9-4778-81b2-a12ece70c8b1']"); assertQuery("SELECT row_t.int_t FROM test_nested_uuid WHERE row_t.uuid_t = UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'", "VALUES 1"); assertQuery("SELECT int_t FROM test_nested_uuid WHERE row_t.uuid_t = UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'", "VALUES 2"); } @Test public void testCreatePartitionedTable() { assertUpdate("" + "CREATE TABLE test_partitioned_table (" + " a_boolean boolean, " + " an_integer integer, " + " a_bigint bigint, " + " a_real real, " + " a_double double, " + " a_short_decimal decimal(5,2), " + " a_long_decimal decimal(38,20), " + " a_varchar varchar, " + " a_varbinary varbinary, " + " a_date date, " + " a_time time(6), " + " a_timestamp timestamp(6), " + " a_timestamptz timestamp(6) with time zone, " + " a_uuid uuid, " + " a_row row(id integer , vc varchar), " + " an_array array(varchar), " + " a_map map(integer, varchar) " + ") " + "WITH (" + "partitioning = ARRAY[" + " 'a_boolean', " + " 'an_integer', " + " 'a_bigint', " + " 'a_real', " + " 'a_double', " + " 'a_short_decimal', " + " 'a_long_decimal', " + " 'a_varchar', " + " 'a_varbinary', " + " 'a_date', " + " 'a_time', " + " 'a_timestamp', " + " 'a_timestamptz', " + " 'a_uuid' " + // Note: partitioning on non-primitive columns is not allowed in Iceberg " ]" + ")"); assertQueryReturnsEmptyResult("SELECT * FROM test_partitioned_table"); String values = 
"VALUES (" + "true, " + "1, " + "BIGINT '1', " + "REAL '1.0', " + "DOUBLE '1.0', " + "CAST(1.0 AS decimal(5,2)), " + "CAST(11.0 AS decimal(38,20)), " + "VARCHAR 'onefsadfdsf', " + "X'000102f0feff', " + "DATE '2021-07-24'," + "TIME '02:43:57.987654', " + "TIMESTAMP '2021-07-24 03:43:57.987654'," + "TIMESTAMP '2021-07-24 04:43:57.987654 UTC', " + "UUID '20050910-1330-11e9-ffff-2a86e4085a59', " + "CAST(ROW(42, 'this is a random value') AS ROW(id int, vc varchar)), " + "ARRAY[VARCHAR 'uno', 'dos', 'tres'], " + "map(ARRAY[1,2], ARRAY['ek', VARCHAR 'one'])) "; String nullValues = nCopies(17, "NULL").stream() .collect(joining(", ", "VALUES (", ")")); assertUpdate("INSERT INTO test_partitioned_table " + values, 1); assertUpdate("INSERT INTO test_partitioned_table " + nullValues, 1); // SELECT assertThat(query("SELECT * FROM test_partitioned_table")) .matches(values + " UNION ALL " + nullValues); // SELECT with predicates assertThat(query("SELECT * FROM test_partitioned_table WHERE " + " a_boolean = true " + "AND an_integer = 1 " + "AND a_bigint = BIGINT '1' " + "AND a_real = REAL '1.0' " + "AND a_double = DOUBLE '1.0' " + "AND a_short_decimal = CAST(1.0 AS decimal(5,2)) " + "AND a_long_decimal = CAST(11.0 AS decimal(38,20)) " + "AND a_varchar = VARCHAR 'onefsadfdsf' " + "AND a_varbinary = X'000102f0feff' " + "AND a_date = DATE '2021-07-24' " + "AND a_time = TIME '02:43:57.987654' " + "AND a_timestamp = TIMESTAMP '2021-07-24 03:43:57.987654' " + "AND a_timestamptz = TIMESTAMP '2021-07-24 04:43:57.987654 UTC' " + "AND a_uuid = UUID '20050910-1330-11e9-ffff-2a86e4085a59' " + "AND a_row = CAST(ROW(42, 'this is a random value') AS ROW(id int, vc varchar)) " + "AND an_array = ARRAY[VARCHAR 'uno', 'dos', 'tres'] " + "AND a_map = map(ARRAY[1,2], ARRAY['ek', VARCHAR 'one']) " + "")) .matches(values); assertThat(query("SELECT * FROM test_partitioned_table WHERE " + " a_boolean IS NULL " + "AND an_integer IS NULL " + "AND a_bigint IS NULL " + "AND a_real IS NULL " + "AND a_double IS NULL " + "AND a_short_decimal IS NULL " + "AND a_long_decimal IS NULL " + "AND a_varchar IS NULL " + "AND a_varbinary IS NULL " + "AND a_date IS NULL " + "AND a_time IS NULL " + "AND a_timestamp IS NULL " + "AND a_timestamptz IS NULL " + "AND a_uuid IS NULL " + "AND a_row IS NULL " + "AND an_array IS NULL " + "AND a_map IS NULL " + "")) .skippingTypesCheck() .matches(nullValues); // SHOW STATS if (format == ORC) { assertQuery("SHOW STATS FOR test_partitioned_table", "VALUES " + " ('a_boolean', NULL, NULL, 0.5, NULL, 'true', 'true'), " + " ('an_integer', NULL, NULL, 0.5, NULL, '1', '1'), " + " ('a_bigint', NULL, NULL, 0.5, NULL, '1', '1'), " + " ('a_real', NULL, NULL, 0.5, NULL, '1.0', '1.0'), " + " ('a_double', NULL, NULL, 0.5, NULL, '1.0', '1.0'), " + " ('a_short_decimal', NULL, NULL, 0.5, NULL, '1.0', '1.0'), " + " ('a_long_decimal', NULL, NULL, 0.5, NULL, '11.0', '11.0'), " + " ('a_varchar', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('a_varbinary', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('a_date', NULL, NULL, 0.5, NULL, '2021-07-24', '2021-07-24'), " + " ('a_time', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('a_timestamp', NULL, NULL, 0.5, NULL, '2021-07-24 03:43:57.987654', '2021-07-24 03:43:57.987654'), " + " ('a_timestamptz', NULL, NULL, 0.5, NULL, '2021-07-24 04:43:57.987 UTC', '2021-07-24 04:43:57.987 UTC'), " + " ('a_uuid', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('a_row', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('an_array', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('a_map', NULL, NULL, 0.5, NULL, NULL, NULL), " + 
" (NULL, NULL, NULL, NULL, 2e0, NULL, NULL)"); } else { assertThat(query("SHOW STATS FOR test_partitioned_table")) .skippingTypesCheck() .matches("VALUES " + " ('a_boolean', NULL, NULL, 0.5e0, NULL, 'true', 'true'), " + " ('an_integer', NULL, NULL, 0.5e0, NULL, '1', '1'), " + " ('a_bigint', NULL, NULL, 0.5e0, NULL, '1', '1'), " + " ('a_real', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_double', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_short_decimal', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_long_decimal', NULL, NULL, 0.5e0, NULL, '11.0', '11.0'), " + " ('a_varchar', 87e0, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_varbinary', 82e0, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_date', NULL, NULL, 0.5e0, NULL, '2021-07-24', '2021-07-24'), " + " ('a_time', NULL, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_timestamp', NULL, NULL, 0.5e0, NULL, '2021-07-24 03:43:57.987654', '2021-07-24 03:43:57.987654'), " + " ('a_timestamptz', NULL, NULL, 0.5e0, NULL, '2021-07-24 04:43:57.987 UTC', '2021-07-24 04:43:57.987 UTC'), " + " ('a_uuid', NULL, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_row', NULL, NULL, NULL, NULL, NULL, NULL), " + " ('an_array', NULL, NULL, NULL, NULL, NULL, NULL), " + " ('a_map', NULL, NULL, NULL, NULL, NULL, NULL), " + " (NULL, NULL, NULL, NULL, 2e0, NULL, NULL)"); } // $partitions String schema = getSession().getSchema().orElseThrow(); assertThat(query("SELECT column_name FROM information_schema.columns WHERE table_schema = '" + schema + "' AND table_name = 'test_partitioned_table$partitions' ")) .skippingTypesCheck() .matches("VALUES 'partition', 'record_count', 'file_count', 'total_size'"); assertThat(query("SELECT " + " record_count," + " file_count, " + " partition.a_boolean, " + " partition.an_integer, " + " partition.a_bigint, " + " partition.a_real, " + " partition.a_double, " + " partition.a_short_decimal, " + " partition.a_long_decimal, " + " partition.a_varchar, " + " partition.a_varbinary, " + " partition.a_date, " + " partition.a_time, " + " partition.a_timestamp, " + " partition.a_timestamptz, " + " partition.a_uuid " + // Note: partitioning on non-primitive columns is not allowed in Iceberg " FROM \"test_partitioned_table$partitions\" ")) .matches("" + "VALUES (" + " BIGINT '1', " + " BIGINT '1', " + " true, " + " 1, " + " BIGINT '1', " + " REAL '1.0', " + " DOUBLE '1.0', " + " CAST(1.0 AS decimal(5,2)), " + " CAST(11.0 AS decimal(38,20)), " + " VARCHAR 'onefsadfdsf', " + " X'000102f0feff', " + " DATE '2021-07-24'," + " TIME '02:43:57.987654', " + " TIMESTAMP '2021-07-24 03:43:57.987654'," + " TIMESTAMP '2021-07-24 04:43:57.987654 UTC', " + " UUID '20050910-1330-11e9-ffff-2a86e4085a59' " + ")" + "UNION ALL " + "VALUES (" + " BIGINT '1', " + " BIGINT '1', " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL " + ")"); assertUpdate("DROP TABLE test_partitioned_table"); } @Test public void testCreatePartitionedTableWithNestedTypes() { assertUpdate("" + "CREATE TABLE test_partitioned_table_nested_type (" + " _string VARCHAR" + ", _struct ROW(_field1 INT, _field2 VARCHAR)" + ", _date DATE" + ") " + "WITH (" + " partitioning = ARRAY['_date']" + ")"); dropTable("test_partitioned_table_nested_type"); } @Test public void testCreatePartitionedTableAs() { File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile(); String tempDirPath = tempDir.toURI().toASCIIString() + randomTableSuffix(); assertUpdate( "CREATE TABLE 
test_create_partitioned_table_as " + "WITH (" + "format_version = 2," + "location = '" + tempDirPath + "', " + "partitioning = ARRAY['ORDER_STATUS', 'Ship_Priority', 'Bucket(order_key,9)']" + ") " + "AS " + "SELECT orderkey AS order_key, shippriority AS ship_priority, orderstatus AS order_status " + "FROM tpch.tiny.orders", "SELECT count(*) from orders"); assertEquals( computeScalar("SHOW CREATE TABLE test_create_partitioned_table_as"), format( "CREATE TABLE %s.%s.%s (\n" + " order_key bigint,\n" + " ship_priority integer,\n" + " order_status varchar\n" + ")\n" + "WITH (\n" + " format = '%s',\n" + " format_version = 2,\n" + " location = '%s',\n" + " partitioning = ARRAY['order_status','ship_priority','bucket(order_key, 9)']\n" + ")", getSession().getCatalog().orElseThrow(), getSession().getSchema().orElseThrow(), "test_create_partitioned_table_as", format, tempDirPath)); assertQuery("SELECT * from test_create_partitioned_table_as", "SELECT orderkey, shippriority, orderstatus FROM orders"); dropTable("test_create_partitioned_table_as"); } @Test public void testTableComments() { File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile(); String tempDirPath = tempDir.toURI().toASCIIString() + randomTableSuffix(); String createTableTemplate = "" + "CREATE TABLE iceberg.tpch.test_table_comments (\n" + " _x bigint\n" + ")\n" + "COMMENT '%s'\n" + "WITH (\n" + format(" format = '%s',\n", format) + " format_version = 2,\n" + format(" location = '%s'\n", tempDirPath) + ")"; String createTableWithoutComment = "" + "CREATE TABLE iceberg.tpch.test_table_comments (\n" + " _x bigint\n" + ")\n" + "WITH (\n" + " format = '" + format + "',\n" + " format_version = 2,\n" + " location = '" + tempDirPath + "'\n" + ")"; String createTableSql = format(createTableTemplate, "test table comment", format); assertUpdate(createTableSql); assertEquals(computeScalar("SHOW CREATE TABLE test_table_comments"), createTableSql); assertUpdate("COMMENT ON TABLE test_table_comments IS 'different test table comment'"); assertEquals(computeScalar("SHOW CREATE TABLE test_table_comments"), format(createTableTemplate, "different test table comment", format)); assertUpdate("COMMENT ON TABLE test_table_comments IS NULL"); assertEquals(computeScalar("SHOW CREATE TABLE test_table_comments"), createTableWithoutComment); dropTable("iceberg.tpch.test_table_comments"); assertUpdate(createTableWithoutComment); assertEquals(computeScalar("SHOW CREATE TABLE test_table_comments"), createTableWithoutComment); dropTable("iceberg.tpch.test_table_comments"); } @Test public void testRollbackSnapshot() { assertUpdate("CREATE TABLE test_rollback (col0 INTEGER, col1 BIGINT)"); long afterCreateTableId = getLatestSnapshotId("test_rollback"); assertUpdate("INSERT INTO test_rollback (col0, col1) VALUES (123, CAST(987 AS BIGINT))", 1); long afterFirstInsertId = getLatestSnapshotId("test_rollback"); assertUpdate("INSERT INTO test_rollback (col0, col1) VALUES (456, CAST(654 AS BIGINT))", 1); assertQuery("SELECT * FROM test_rollback ORDER BY col0", "VALUES (123, CAST(987 AS BIGINT)), (456, CAST(654 AS BIGINT))"); assertUpdate(format("CALL system.rollback_to_snapshot('tpch', 'test_rollback', %s)", afterFirstInsertId)); assertQuery("SELECT * FROM test_rollback ORDER BY col0", "VALUES (123, CAST(987 AS BIGINT))"); assertUpdate(format("CALL system.rollback_to_snapshot('tpch', 'test_rollback', %s)", afterCreateTableId)); assertEquals((long) computeActual("SELECT COUNT(*) FROM test_rollback").getOnlyValue(), 0); assertUpdate("INSERT INTO 
test_rollback (col0, col1) VALUES (789, CAST(987 AS BIGINT))", 1); long afterSecondInsertId = getLatestSnapshotId("test_rollback"); // extra insert which should be dropped on rollback assertUpdate("INSERT INTO test_rollback (col0, col1) VALUES (999, CAST(999 AS BIGINT))", 1); assertUpdate(format("CALL system.rollback_to_snapshot('tpch', 'test_rollback', %s)", afterSecondInsertId)); assertQuery("SELECT * FROM test_rollback ORDER BY col0", "VALUES (789, CAST(987 AS BIGINT))"); dropTable("test_rollback"); } private long getLatestSnapshotId(String tableName) { return (long) computeActual(format("SELECT snapshot_id FROM \"%s$snapshots\" ORDER BY committed_at DESC LIMIT 1", tableName)) .getOnlyValue(); } @Override protected String errorMessageForInsertIntoNotNullColumn(String columnName) { return "NULL value not allowed for NOT NULL column: " + columnName; } @Test public void testSchemaEvolution() { assertUpdate("CREATE TABLE test_schema_evolution_drop_end (col0 INTEGER, col1 INTEGER, col2 INTEGER)"); assertUpdate("INSERT INTO test_schema_evolution_drop_end VALUES (0, 1, 2)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_end DROP COLUMN col2"); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_end ADD COLUMN col2 INTEGER"); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, NULL)"); assertUpdate("INSERT INTO test_schema_evolution_drop_end VALUES (3, 4, 5)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, NULL), (3, 4, 5)"); dropTable("test_schema_evolution_drop_end"); assertUpdate("CREATE TABLE test_schema_evolution_drop_middle (col0 INTEGER, col1 INTEGER, col2 INTEGER)"); assertUpdate("INSERT INTO test_schema_evolution_drop_middle VALUES (0, 1, 2)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 1, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_middle DROP COLUMN col1"); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_middle ADD COLUMN col1 INTEGER"); assertUpdate("INSERT INTO test_schema_evolution_drop_middle VALUES (3, 4, 5)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 2, NULL), (3, 4, 5)"); dropTable("test_schema_evolution_drop_middle"); } @Test public void testShowStatsAfterAddColumn() { assertUpdate("CREATE TABLE test_show_stats_after_add_column (col0 INTEGER, col1 INTEGER, col2 INTEGER)"); // Insert separately to ensure the table has multiple data files assertUpdate("INSERT INTO test_show_stats_after_add_column VALUES (1, 2, 3)", 1); assertUpdate("INSERT INTO test_show_stats_after_add_column VALUES (4, 5, 6)", 1); assertUpdate("INSERT INTO test_show_stats_after_add_column VALUES (NULL, NULL, NULL)", 1); assertUpdate("INSERT INTO test_show_stats_after_add_column VALUES (7, 8, 9)", 1); assertThat(query("SHOW STATS FOR test_show_stats_after_add_column")) .skippingTypesCheck() .matches("VALUES " + " ('col0', NULL, NULL, 25e-2, NULL, '1', '7')," + " ('col1', NULL, NULL, 25e-2, NULL, '2', '8'), " + " ('col2', NULL, NULL, 25e-2, NULL, '3', '9'), " + " (NULL, NULL, NULL, NULL, 4e0, NULL, NULL)"); // Columns added after some data files exist will not have valid statistics because not all files have min/max/null count statistics for the new column assertUpdate("ALTER TABLE test_show_stats_after_add_column ADD COLUMN 
col3 INTEGER"); assertUpdate("INSERT INTO test_show_stats_after_add_column VALUES (10, 11, 12, 13)", 1); assertThat(query("SHOW STATS FOR test_show_stats_after_add_column")) .skippingTypesCheck() .matches("VALUES " + " ('col0', NULL, NULL, 2e-1, NULL, '1', '10')," + " ('col1', NULL, NULL, 2e-1, NULL, '2', '11'), " + " ('col2', NULL, NULL, 2e-1, NULL, '3', '12'), " + " ('col3', NULL, NULL, NULL, NULL, NULL, NULL), " + " (NULL, NULL, NULL, NULL, 5e0, NULL, NULL)"); } @Test public void testLargeInOnPartitionedColumns() { assertUpdate("CREATE TABLE test_in_predicate_large_set (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col2'])"); assertUpdate("INSERT INTO test_in_predicate_large_set VALUES (1, 10)", 1L); assertUpdate("INSERT INTO test_in_predicate_large_set VALUES (2, 20)", 1L); List<String> predicates = IntStream.range(0, 25_000).boxed() .map(Object::toString) .collect(toImmutableList()); String filter = format("col2 IN (%s)", join(",", predicates)); assertThat(query("SELECT * FROM test_in_predicate_large_set WHERE " + filter)) .matches("TABLE test_in_predicate_large_set"); dropTable("test_in_predicate_large_set"); } @Test public void testCreateTableFailsOnNonEmptyPath() { String tableName = "test_rename_table_" + randomTableSuffix(); String tmpName = "test_rename_table_tmp_" + randomTableSuffix(); try { assertUpdate("CREATE TABLE " + tmpName + " AS SELECT 1 as a", 1); assertUpdate("ALTER TABLE " + tmpName + " RENAME TO " + tableName); assertQueryFails("CREATE TABLE " + tmpName + " AS SELECT 1 as a", "Cannot create a table on a non-empty location.*"); } finally { assertUpdate("DROP TABLE IF EXISTS " + tableName); assertUpdate("DROP TABLE IF EXISTS " + tmpName); } } @Test public void testCreateTableSucceedsOnEmptyDirectory() { File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile(); String tmpName = "test_rename_table_tmp_" + randomTableSuffix(); Path newPath = tempDir.toPath().resolve(tmpName); File directory = newPath.toFile(); verify(directory.mkdirs(), "Could not make directory on filesystem"); try { assertUpdate("CREATE TABLE " + tmpName + " WITH (location='" + directory + "') AS SELECT 1 as a", 1); } finally { assertUpdate("DROP TABLE IF EXISTS " + tmpName); } } @Test public void testCreateTableLike() { IcebergFileFormat otherFormat = (format == PARQUET) ? ORC : PARQUET; testCreateTableLikeForFormat(otherFormat); } private void testCreateTableLikeForFormat(IcebergFileFormat otherFormat) { File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile(); String tempDirPath = tempDir.toURI().toASCIIString() + randomTableSuffix(); // LIKE source INCLUDING PROPERTIES copies all the properties of the source table, including the `location`. // For this reason the source and the copied table will share the same directory. // This test does not drop intentionally the created tables to avoid affecting the source table or the information_schema. 
assertUpdate(format("CREATE TABLE test_create_table_like_original (col1 INTEGER, aDate DATE) WITH(format = '%s', location = '%s', partitioning = ARRAY['aDate'])", format, tempDirPath)); assertEquals(getTablePropertiesString("test_create_table_like_original"), "WITH (\n" + format(" format = '%s',\n", format) + " format_version = 2,\n" + format(" location = '%s',\n", tempDirPath) + " partitioning = ARRAY['adate']\n" + ")"); assertUpdate("CREATE TABLE test_create_table_like_copy0 (LIKE test_create_table_like_original, col2 INTEGER)"); assertUpdate("INSERT INTO test_create_table_like_copy0 (col1, aDate, col2) VALUES (1, CAST('1950-06-28' AS DATE), 3)", 1); assertQuery("SELECT * from test_create_table_like_copy0", "VALUES(1, CAST('1950-06-28' AS DATE), 3)"); assertUpdate("CREATE TABLE test_create_table_like_copy1 (LIKE test_create_table_like_original)"); assertEquals(getTablePropertiesString("test_create_table_like_copy1"), "WITH (\n" + format(" format = '%s',\n format_version = 2,\n location = '%s'\n)", format, tempDir + "/iceberg_data/tpch/test_create_table_like_copy1")); assertUpdate("CREATE TABLE test_create_table_like_copy2 (LIKE test_create_table_like_original EXCLUDING PROPERTIES)"); assertEquals(getTablePropertiesString("test_create_table_like_copy2"), "WITH (\n" + format(" format = '%s',\n format_version = 2,\n location = '%s'\n)", format, tempDir + "/iceberg_data/tpch/test_create_table_like_copy2")); dropTable("test_create_table_like_copy2"); assertQueryFails("CREATE TABLE test_create_table_like_copy3 (LIKE test_create_table_like_original INCLUDING PROPERTIES)", "Cannot create a table on a non-empty location.*"); assertQueryFails(format("CREATE TABLE test_create_table_like_copy4 (LIKE test_create_table_like_original INCLUDING PROPERTIES) WITH (format = '%s')", otherFormat), "Cannot create a table on a non-empty location.*"); } private String getTablePropertiesString(String tableName) { MaterializedResult showCreateTable = computeActual("SHOW CREATE TABLE " + tableName); String createTable = (String) getOnlyElement(showCreateTable.getOnlyColumnAsSet()); Matcher matcher = WITH_CLAUSE_EXTRACTOR.matcher(createTable); return matcher.matches() ? 
matcher.group(1) : null; } @Test public void testPredicating() { assertUpdate("CREATE TABLE test_predicating_on_real (col REAL)"); assertUpdate("INSERT INTO test_predicating_on_real VALUES 1.2", 1); assertQuery("SELECT * FROM test_predicating_on_real WHERE col = 1.2", "VALUES 1.2"); dropTable("test_predicating_on_real"); } @Test public void testHourTransform() { assertUpdate("CREATE TABLE test_hour_transform (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['hour(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1969-12-31 22:22:22.222222', 8)," + "(TIMESTAMP '1969-12-31 23:33:11.456789', 9)," + "(TIMESTAMP '1969-12-31 23:44:55.567890', 10)," + "(TIMESTAMP '1970-01-01 00:55:44.765432', 11)," + "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," + "(TIMESTAMP '2015-01-01 10:10:02.987654', 2)," + "(TIMESTAMP '2015-01-01 10:55:00.456789', 3)," + "(TIMESTAMP '2015-05-15 12:05:01.234567', 4)," + "(TIMESTAMP '2015-05-15 12:21:02.345678', 5)," + "(TIMESTAMP '2020-02-21 13:11:11.876543', 6)," + "(TIMESTAMP '2020-02-21 13:12:12.654321', 7)"; assertUpdate("INSERT INTO test_hour_transform " + values, 11); assertQuery("SELECT * FROM test_hour_transform", values); @Language("SQL") String expected = "VALUES " + "(-2, 1, TIMESTAMP '1969-12-31 22:22:22.222222', TIMESTAMP '1969-12-31 22:22:22.222222', 8, 8), " + "(-1, 2, TIMESTAMP '1969-12-31 23:33:11.456789', TIMESTAMP '1969-12-31 23:44:55.567890', 9, 10), " + "(0, 1, TIMESTAMP '1970-01-01 00:55:44.765432', TIMESTAMP '1970-01-01 00:55:44.765432', 11, 11), " + "(394474, 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 10:55:00.456789', 1, 3), " + "(397692, 2, TIMESTAMP '2015-05-15 12:05:01.234567', TIMESTAMP '2015-05-15 12:21:02.345678', 4, 5), " + "(439525, 2, TIMESTAMP '2020-02-21 13:11:11.876543', TIMESTAMP '2020-02-21 13:12:12.654321', 6, 7)"; String expectedTimestampStats = "'1969-12-31 22:22:22.222222', '2020-02-21 13:12:12.654321'"; if (format == ORC) { expected = "VALUES " + "(-2, 1, TIMESTAMP '1969-12-31 22:22:22.222000', TIMESTAMP '1969-12-31 22:22:22.222999', 8, 8), " + "(-1, 2, TIMESTAMP '1969-12-31 23:33:11.456000', TIMESTAMP '1969-12-31 23:44:55.567999', 9, 10), " + "(0, 1, TIMESTAMP '1970-01-01 00:55:44.765000', TIMESTAMP '1970-01-01 00:55:44.765999', 11, 11), " + "(394474, 3, TIMESTAMP '2015-01-01 10:01:23.123000', TIMESTAMP '2015-01-01 10:55:00.456999', 1, 3), " + "(397692, 2, TIMESTAMP '2015-05-15 12:05:01.234000', TIMESTAMP '2015-05-15 12:21:02.345999', 4, 5), " + "(439525, 2, TIMESTAMP '2020-02-21 13:11:11.876000', TIMESTAMP '2020-02-21 13:12:12.654999', 6, 7)"; expectedTimestampStats = "'1969-12-31 22:22:22.222000', '2020-02-21 13:12:12.654999'"; } assertQuery("SELECT partition.d_hour, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_hour_transform$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_hour_transform WHERE day_of_week(d) = 3 AND b % 7 = 3", "VALUES (TIMESTAMP '1969-12-31 23:44:55.567890', 10)"); assertThat(query("SHOW STATS FOR test_hour_transform")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, " + expectedTimestampStats + "), " + " ('b', NULL, NULL, 0e0, NULL, '1', '11'), " + " (NULL, NULL, NULL, NULL, 11e0, NULL, NULL)"); dropTable("test_hour_transform"); } @Test public void testDayTransformDate() { assertUpdate("CREATE TABLE test_day_transform_date (d DATE, b BIGINT) WITH (partitioning = ARRAY['day(d)'])"); 
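        // values deliberately span the 1970-01-01 epoch so the day() transform is exercised on both sides of it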
@Language("SQL") String values = "VALUES " + "(DATE '1969-01-01', 10), " + "(DATE '1969-12-31', 11), " + "(DATE '1970-01-01', 1), " + "(DATE '1970-03-04', 2), " + "(DATE '2015-01-01', 3), " + "(DATE '2015-01-13', 4), " + "(DATE '2015-01-13', 5), " + "(DATE '2015-05-15', 6), " + "(DATE '2015-05-15', 7), " + "(DATE '2020-02-21', 8), " + "(DATE '2020-02-21', 9)"; assertUpdate("INSERT INTO test_day_transform_date " + values, 11); assertQuery("SELECT * FROM test_day_transform_date", values); assertQuery( "SELECT partition.d_day, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_day_transform_date$partitions\"", "VALUES " + "(DATE '1969-01-01', 1, DATE '1969-01-01', DATE '1969-01-01', 10, 10), " + "(DATE '1969-12-31', 1, DATE '1969-12-31', DATE '1969-12-31', 11, 11), " + "(DATE '1970-01-01', 1, DATE '1970-01-01', DATE '1970-01-01', 1, 1), " + "(DATE '1970-03-04', 1, DATE '1970-03-04', DATE '1970-03-04', 2, 2), " + "(DATE '2015-01-01', 1, DATE '2015-01-01', DATE '2015-01-01', 3, 3), " + "(DATE '2015-01-13', 2, DATE '2015-01-13', DATE '2015-01-13', 4, 5), " + "(DATE '2015-05-15', 2, DATE '2015-05-15', DATE '2015-05-15', 6, 7), " + "(DATE '2020-02-21', 2, DATE '2020-02-21', DATE '2020-02-21', 8, 9)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_day_transform_date WHERE day_of_week(d) = 3 AND b % 7 = 3", "VALUES (DATE '1969-01-01', 10)"); assertThat(query("SHOW STATS FOR test_day_transform_date")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, '1969-01-01', '2020-02-21'), " + " ('b', NULL, NULL, 0e0, NULL, '1', '11'), " + " (NULL, NULL, NULL, NULL, 11e0, NULL, NULL)"); dropTable("test_day_transform_date"); } @Test public void testDayTransformTimestamp() { assertUpdate("CREATE TABLE test_day_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['day(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1969-12-25 15:13:12.876543', 8)," + "(TIMESTAMP '1969-12-30 18:47:33.345678', 9)," + "(TIMESTAMP '1969-12-31 00:00:00.000000', 10)," + "(TIMESTAMP '1969-12-31 05:06:07.234567', 11)," + "(TIMESTAMP '1970-01-01 12:03:08.456789', 12)," + "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," + "(TIMESTAMP '2015-01-01 11:10:02.987654', 2)," + "(TIMESTAMP '2015-01-01 12:55:00.456789', 3)," + "(TIMESTAMP '2015-05-15 13:05:01.234567', 4)," + "(TIMESTAMP '2015-05-15 14:21:02.345678', 5)," + "(TIMESTAMP '2020-02-21 15:11:11.876543', 6)," + "(TIMESTAMP '2020-02-21 16:12:12.654321', 7)"; assertUpdate("INSERT INTO test_day_transform_timestamp " + values, 12); assertQuery("SELECT * FROM test_day_transform_timestamp", values); @Language("SQL") String expected = "VALUES " + "(DATE '1969-12-25', 1, TIMESTAMP '1969-12-25 15:13:12.876543', TIMESTAMP '1969-12-25 15:13:12.876543', 8, 8), " + "(DATE '1969-12-30', 1, TIMESTAMP '1969-12-30 18:47:33.345678', TIMESTAMP '1969-12-30 18:47:33.345678', 9, 9), " + "(DATE '1969-12-31', 2, TIMESTAMP '1969-12-31 00:00:00.000000', TIMESTAMP '1969-12-31 05:06:07.234567', 10, 11), " + "(DATE '1970-01-01', 1, TIMESTAMP '1970-01-01 12:03:08.456789', TIMESTAMP '1970-01-01 12:03:08.456789', 12, 12), " + "(DATE '2015-01-01', 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 12:55:00.456789', 1, 3), " + "(DATE '2015-05-15', 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-05-15 14:21:02.345678', 4, 5), " + "(DATE '2020-02-21', 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-02-21 
16:12:12.654321', 6, 7)"; String expectedTimestampStats = "'1969-12-25 15:13:12.876543', '2020-02-21 16:12:12.654321'"; if (format == ORC) { expected = "VALUES " + "(DATE '1969-12-25', 1, TIMESTAMP '1969-12-25 15:13:12.876000', TIMESTAMP '1969-12-25 15:13:12.876999', 8, 8), " + "(DATE '1969-12-30', 1, TIMESTAMP '1969-12-30 18:47:33.345000', TIMESTAMP '1969-12-30 18:47:33.345999', 9, 9), " + "(DATE '1969-12-31', 2, TIMESTAMP '1969-12-31 00:00:00.000000', TIMESTAMP '1969-12-31 05:06:07.234999', 10, 11), " + "(DATE '1970-01-01', 1, TIMESTAMP '1970-01-01 12:03:08.456000', TIMESTAMP '1970-01-01 12:03:08.456999', 12, 12), " + "(DATE '2015-01-01', 3, TIMESTAMP '2015-01-01 10:01:23.123000', TIMESTAMP '2015-01-01 12:55:00.456999', 1, 3), " + "(DATE '2015-05-15', 2, TIMESTAMP '2015-05-15 13:05:01.234000', TIMESTAMP '2015-05-15 14:21:02.345999', 4, 5), " + "(DATE '2020-02-21', 2, TIMESTAMP '2020-02-21 15:11:11.876000', TIMESTAMP '2020-02-21 16:12:12.654999', 6, 7)"; expectedTimestampStats = "'1969-12-25 15:13:12.876000', '2020-02-21 16:12:12.654999'"; } assertQuery("SELECT partition.d_day, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_day_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_day_transform_timestamp WHERE day_of_week(d) = 3 AND b % 7 = 3", "VALUES (TIMESTAMP '1969-12-31 00:00:00.000000', 10)"); assertThat(query("SHOW STATS FOR test_day_transform_timestamp")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, " + expectedTimestampStats + "), " + " ('b', NULL, NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_day_transform_timestamp"); } @Test public void testMonthTransformDate() { assertUpdate("CREATE TABLE test_month_transform_date (d DATE, b BIGINT) WITH (partitioning = ARRAY['month(d)'])"); @Language("SQL") String values = "VALUES " + "(DATE '1969-11-13', 1)," + "(DATE '1969-12-01', 2)," + "(DATE '1969-12-02', 3)," + "(DATE '1969-12-31', 4)," + "(DATE '1970-01-01', 5), " + "(DATE '1970-05-13', 6), " + "(DATE '1970-12-31', 7), " + "(DATE '2020-01-01', 8), " + "(DATE '2020-06-16', 9), " + "(DATE '2020-06-28', 10), " + "(DATE '2020-06-06', 11), " + "(DATE '2020-07-18', 12), " + "(DATE '2020-07-28', 13), " + "(DATE '2020-12-31', 14)"; assertUpdate("INSERT INTO test_month_transform_date " + values, 14); assertQuery("SELECT * FROM test_month_transform_date", values); assertQuery( "SELECT partition.d_month, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_month_transform_date$partitions\"", "VALUES " + "(-2, 1, DATE '1969-11-13', DATE '1969-11-13', 1, 1), " + "(-1, 3, DATE '1969-12-01', DATE '1969-12-31', 2, 4), " + "(0, 1, DATE '1970-01-01', DATE '1970-01-01', 5, 5), " + "(4, 1, DATE '1970-05-13', DATE '1970-05-13', 6, 6), " + "(11, 1, DATE '1970-12-31', DATE '1970-12-31', 7, 7), " + "(600, 1, DATE '2020-01-01', DATE '2020-01-01', 8, 8), " + "(605, 3, DATE '2020-06-06', DATE '2020-06-28', 9, 11), " + "(606, 2, DATE '2020-07-18', DATE '2020-07-28', 12, 13), " + "(611, 1, DATE '2020-12-31', DATE '2020-12-31', 14, 14)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_month_transform_date WHERE day_of_week(d) = 7 AND b % 7 = 3", "VALUES (DATE '2020-06-28', 10)"); assertThat(query("SHOW STATS FOR test_month_transform_date")) .skippingTypesCheck() 
.matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, '1969-11-13', '2020-12-31'), " + " ('b', NULL, NULL, 0e0, NULL, '1', '14'), " + " (NULL, NULL, NULL, NULL, 14e0, NULL, NULL)"); dropTable("test_month_transform_date"); } @Test public void testMonthTransformTimestamp() { assertUpdate("CREATE TABLE test_month_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['month(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1969-11-15 15:13:12.876543', 8)," + "(TIMESTAMP '1969-11-19 18:47:33.345678', 9)," + "(TIMESTAMP '1969-12-01 00:00:00.000000', 10)," + "(TIMESTAMP '1969-12-01 05:06:07.234567', 11)," + "(TIMESTAMP '1970-01-01 12:03:08.456789', 12)," + "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," + "(TIMESTAMP '2015-01-01 11:10:02.987654', 2)," + "(TIMESTAMP '2015-01-01 12:55:00.456789', 3)," + "(TIMESTAMP '2015-05-15 13:05:01.234567', 4)," + "(TIMESTAMP '2015-05-15 14:21:02.345678', 5)," + "(TIMESTAMP '2020-02-21 15:11:11.876543', 6)," + "(TIMESTAMP '2020-02-21 16:12:12.654321', 7)"; assertUpdate("INSERT INTO test_month_transform_timestamp " + values, 12); assertQuery("SELECT * FROM test_month_transform_timestamp", values); @Language("SQL") String expected = "VALUES " + "(-2, 2, TIMESTAMP '1969-11-15 15:13:12.876543', TIMESTAMP '1969-11-19 18:47:33.345678', 8, 9), " + "(-1, 2, TIMESTAMP '1969-12-01 00:00:00.000000', TIMESTAMP '1969-12-01 05:06:07.234567', 10, 11), " + "(0, 1, TIMESTAMP '1970-01-01 12:03:08.456789', TIMESTAMP '1970-01-01 12:03:08.456789', 12, 12), " + "(540, 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 12:55:00.456789', 1, 3), " + "(544, 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-05-15 14:21:02.345678', 4, 5), " + "(601, 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-02-21 16:12:12.654321', 6, 7)"; String expectedTimestampStats = "'1969-11-15 15:13:12.876543', '2020-02-21 16:12:12.654321'"; if (format == ORC) { expected = "VALUES " + "(-2, 2, TIMESTAMP '1969-11-15 15:13:12.876000', TIMESTAMP '1969-11-19 18:47:33.345999', 8, 9), " + "(-1, 2, TIMESTAMP '1969-12-01 00:00:00.000000', TIMESTAMP '1969-12-01 05:06:07.234999', 10, 11), " + "(0, 1, TIMESTAMP '1970-01-01 12:03:08.456000', TIMESTAMP '1970-01-01 12:03:08.456999', 12, 12), " + "(540, 3, TIMESTAMP '2015-01-01 10:01:23.123000', TIMESTAMP '2015-01-01 12:55:00.456999', 1, 3), " + "(544, 2, TIMESTAMP '2015-05-15 13:05:01.234000', TIMESTAMP '2015-05-15 14:21:02.345999', 4, 5), " + "(601, 2, TIMESTAMP '2020-02-21 15:11:11.876000', TIMESTAMP '2020-02-21 16:12:12.654999', 6, 7)"; expectedTimestampStats = "'1969-11-15 15:13:12.876000', '2020-02-21 16:12:12.654999'"; } assertQuery("SELECT partition.d_month, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_month_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_month_transform_timestamp WHERE day_of_week(d) = 1 AND b % 7 = 3", "VALUES (TIMESTAMP '1969-12-01 00:00:00.000000', 10)"); assertThat(query("SHOW STATS FOR test_month_transform_timestamp")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, " + expectedTimestampStats + "), " + " ('b', NULL, NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_month_transform_timestamp"); } @Test public void testYearTransformDate() { assertUpdate("CREATE TABLE test_year_transform_date (d DATE, b BIGINT) WITH (partitioning = 
ARRAY['year(d)'])"); @Language("SQL") String values = "VALUES " + "(DATE '1968-10-13', 1), " + "(DATE '1969-01-01', 2), " + "(DATE '1969-03-15', 3), " + "(DATE '1970-01-01', 4), " + "(DATE '1970-03-05', 5), " + "(DATE '2015-01-01', 6), " + "(DATE '2015-06-16', 7), " + "(DATE '2015-07-28', 8), " + "(DATE '2016-05-15', 9), " + "(DATE '2016-06-06', 10), " + "(DATE '2020-02-21', 11), " + "(DATE '2020-11-10', 12)"; assertUpdate("INSERT INTO test_year_transform_date " + values, 12); assertQuery("SELECT * FROM test_year_transform_date", values); assertQuery( "SELECT partition.d_year, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_year_transform_date$partitions\"", "VALUES " + "(-2, 1, DATE '1968-10-13', DATE '1968-10-13', 1, 1), " + "(-1, 2, DATE '1969-01-01', DATE '1969-03-15', 2, 3), " + "(0, 2, DATE '1970-01-01', DATE '1970-03-05', 4, 5), " + "(45, 3, DATE '2015-01-01', DATE '2015-07-28', 6, 8), " + "(46, 2, DATE '2016-05-15', DATE '2016-06-06', 9, 10), " + "(50, 2, DATE '2020-02-21', DATE '2020-11-10', 11, 12)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_year_transform_date WHERE day_of_week(d) = 1 AND b % 7 = 3", "VALUES (DATE '2016-06-06', 10)"); assertThat(query("SHOW STATS FOR test_year_transform_date")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, '1968-10-13', '2020-11-10'), " + " ('b', NULL, NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_year_transform_date"); } @Test public void testYearTransformTimestamp() { assertUpdate("CREATE TABLE test_year_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['year(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1968-03-15 15:13:12.876543', 1)," + "(TIMESTAMP '1968-11-19 18:47:33.345678', 2)," + "(TIMESTAMP '1969-01-01 00:00:00.000000', 3)," + "(TIMESTAMP '1969-01-01 05:06:07.234567', 4)," + "(TIMESTAMP '1970-01-18 12:03:08.456789', 5)," + "(TIMESTAMP '1970-03-14 10:01:23.123456', 6)," + "(TIMESTAMP '1970-08-19 11:10:02.987654', 7)," + "(TIMESTAMP '1970-12-31 12:55:00.456789', 8)," + "(TIMESTAMP '2015-05-15 13:05:01.234567', 9)," + "(TIMESTAMP '2015-09-15 14:21:02.345678', 10)," + "(TIMESTAMP '2020-02-21 15:11:11.876543', 11)," + "(TIMESTAMP '2020-08-21 16:12:12.654321', 12)"; assertUpdate("INSERT INTO test_year_transform_timestamp " + values, 12); assertQuery("SELECT * FROM test_year_transform_timestamp", values); @Language("SQL") String expected = "VALUES " + "(-2, 2, TIMESTAMP '1968-03-15 15:13:12.876543', TIMESTAMP '1968-11-19 18:47:33.345678', 1, 2), " + "(-1, 2, TIMESTAMP '1969-01-01 00:00:00.000000', TIMESTAMP '1969-01-01 05:06:07.234567', 3, 4), " + "(0, 4, TIMESTAMP '1970-01-18 12:03:08.456789', TIMESTAMP '1970-12-31 12:55:00.456789', 5, 8), " + "(45, 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-09-15 14:21:02.345678', 9, 10), " + "(50, 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-08-21 16:12:12.654321', 11, 12)"; String expectedTimestampStats = "'1968-03-15 15:13:12.876543', '2020-08-21 16:12:12.654321'"; if (format == ORC) { expected = "VALUES " + "(-2, 2, TIMESTAMP '1968-03-15 15:13:12.876000', TIMESTAMP '1968-11-19 18:47:33.345999', 1, 2), " + "(-1, 2, TIMESTAMP '1969-01-01 00:00:00.000000', TIMESTAMP '1969-01-01 05:06:07.234999', 3, 4), " + "(0, 4, TIMESTAMP '1970-01-18 12:03:08.456000', TIMESTAMP '1970-12-31 12:55:00.456999', 5, 8), " + "(45, 2, TIMESTAMP 
'2015-05-15 13:05:01.234000', TIMESTAMP '2015-09-15 14:21:02.345999', 9, 10), " + "(50, 2, TIMESTAMP '2020-02-21 15:11:11.876000', TIMESTAMP '2020-08-21 16:12:12.654999', 11, 12)"; expectedTimestampStats = "'1968-03-15 15:13:12.876000', '2020-08-21 16:12:12.654999'"; } assertQuery("SELECT partition.d_year, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_year_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_year_transform_timestamp WHERE day_of_week(d) = 2 AND b % 7 = 3", "VALUES (TIMESTAMP '2015-09-15 14:21:02.345678', 10)"); assertThat(query("SHOW STATS FOR test_year_transform_timestamp")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, " + expectedTimestampStats + "), " + " ('b', NULL, NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_year_transform_timestamp"); } @Test public void testTruncateTextTransform() { assertUpdate("CREATE TABLE test_truncate_text_transform (d VARCHAR, b BIGINT) WITH (partitioning = ARRAY['truncate(d, 2)'])"); String select = "SELECT partition.d_trunc, record_count, data.d.min AS d_min, data.d.max AS d_max, data.b.min AS b_min, data.b.max AS b_max FROM \"test_truncate_text_transform$partitions\""; assertUpdate("INSERT INTO test_truncate_text_transform VALUES" + "('abcd', 1)," + "('abxy', 2)," + "('ab598', 3)," + "('mommy', 4)," + "('moscow', 5)," + "('Greece', 6)," + "('Grozny', 7)", 7); assertQuery("SELECT partition.d_trunc FROM \"test_truncate_text_transform$partitions\"", "VALUES 'ab', 'mo', 'Gr'"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'ab'", "VALUES 1, 2, 3"); assertQuery(select + " WHERE partition.d_trunc = 'ab'", "VALUES ('ab', 3, 'ab598', 'abxy', 1, 3)"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'mo'", "VALUES 4, 5"); assertQuery(select + " WHERE partition.d_trunc = 'mo'", "VALUES ('mo', 2, 'mommy', 'moscow', 4, 5)"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'Gr'", "VALUES 6, 7"); assertQuery(select + " WHERE partition.d_trunc = 'Gr'", "VALUES ('Gr', 2, 'Greece', 'Grozny', 6, 7)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_truncate_text_transform WHERE length(d) = 4 AND b % 7 = 2", "VALUES ('abxy', 2)"); assertThat(query("SHOW STATS FOR test_truncate_text_transform")) .skippingTypesCheck() .matches("VALUES " + " ('d', " + (format == PARQUET ? 
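                        // only Parquet reports a column data size in stats; other formats return NULL (same pattern as in testVoidTransform)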
"169e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " + " ('b', NULL, NULL, 0e0, NULL, '1', '7'), " + " (NULL, NULL, NULL, NULL, 7e0, NULL, NULL)"); dropTable("test_truncate_text_transform"); } @Test(dataProvider = "truncateNumberTypesProvider") public void testTruncateIntegerTransform(String dataType) { String table = format("test_truncate_%s_transform", dataType); assertUpdate(format("CREATE TABLE " + table + " (d %s, b BIGINT) WITH (partitioning = ARRAY['truncate(d, 10)'])", dataType)); String select = "SELECT partition.d_trunc, record_count, data.d.min AS d_min, data.d.max AS d_max, data.b.min AS b_min, data.b.max AS b_max FROM \"" + table + "$partitions\""; assertUpdate("INSERT INTO " + table + " VALUES" + "(0, 1)," + "(1, 2)," + "(5, 3)," + "(9, 4)," + "(10, 5)," + "(11, 6)," + "(120, 7)," + "(121, 8)," + "(123, 9)," + "(-1, 10)," + "(-5, 11)," + "(-10, 12)," + "(-11, 13)," + "(-123, 14)," + "(-130, 15)", 15); assertQuery("SELECT partition.d_trunc FROM \"" + table + "$partitions\"", "VALUES 0, 10, 120, -10, -20, -130"); assertQuery("SELECT b FROM " + table + " WHERE d IN (0, 1, 5, 9)", "VALUES 1, 2, 3, 4"); assertQuery(select + " WHERE partition.d_trunc = 0", "VALUES (0, 4, 0, 9, 1, 4)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (10, 11)", "VALUES 5, 6"); assertQuery(select + " WHERE partition.d_trunc = 10", "VALUES (10, 2, 10, 11, 5, 6)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (120, 121, 123)", "VALUES 7, 8, 9"); assertQuery(select + " WHERE partition.d_trunc = 120", "VALUES (120, 3, 120, 123, 7, 9)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (-1, -5, -10)", "VALUES 10, 11, 12"); assertQuery(select + " WHERE partition.d_trunc = -10", "VALUES (-10, 3, -10, -1, 10, 12)"); assertQuery("SELECT b FROM " + table + " WHERE d = -11", "VALUES 13"); assertQuery(select + " WHERE partition.d_trunc = -20", "VALUES (-20, 1, -11, -11, 13, 13)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (-123, -130)", "VALUES 14, 15"); assertQuery(select + " WHERE partition.d_trunc = -130", "VALUES (-130, 2, -130, -123, 14, 15)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM " + table + " WHERE d % 10 = -1 AND b % 7 = 3", "VALUES (-1, 10)"); assertThat(query("SHOW STATS FOR " + table)) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, '-130', '123'), " + " ('b', NULL, NULL, 0e0, NULL, '1', '15'), " + " (NULL, NULL, NULL, NULL, 15e0, NULL, NULL)"); dropTable(table); } @DataProvider public Object[][] truncateNumberTypesProvider() { return new Object[][] { {"integer"}, {"bigint"}, }; } @Test public void testTruncateDecimalTransform() { assertUpdate("CREATE TABLE test_truncate_decimal_transform (d DECIMAL(9, 2), b BIGINT) WITH (partitioning = ARRAY['truncate(d, 10)'])"); String select = "SELECT partition.d_trunc, record_count, data.d.min AS d_min, data.d.max AS d_max, data.b.min AS b_min, data.b.max AS b_max FROM \"test_truncate_decimal_transform$partitions\""; assertUpdate("INSERT INTO test_truncate_decimal_transform VALUES" + "(12.34, 1)," + "(12.30, 2)," + "(12.29, 3)," + "(0.05, 4)," + "(-0.05, 5)", 5); assertQuery("SELECT partition.d_trunc FROM \"test_truncate_decimal_transform$partitions\"", "VALUES 12.30, 12.20, 0.00, -0.10"); assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d IN (12.34, 12.30)", "VALUES 1, 2"); assertQuery(select + " WHERE partition.d_trunc = 12.30", "VALUES (12.30, 2, 12.30, 12.34, 1, 2)"); assertQuery("SELECT b FROM 
    @Test
    public void testTruncateDecimalTransform()
    {
        assertUpdate("CREATE TABLE test_truncate_decimal_transform (d DECIMAL(9, 2), b BIGINT) WITH (partitioning = ARRAY['truncate(d, 10)'])");
        String select = "SELECT partition.d_trunc, record_count, data.d.min AS d_min, data.d.max AS d_max, data.b.min AS b_min, data.b.max AS b_max FROM \"test_truncate_decimal_transform$partitions\"";

        assertUpdate("INSERT INTO test_truncate_decimal_transform VALUES" +
                "(12.34, 1)," +
                "(12.30, 2)," +
                "(12.29, 3)," +
                "(0.05, 4)," +
                "(-0.05, 5)", 5);

        assertQuery("SELECT partition.d_trunc FROM \"test_truncate_decimal_transform$partitions\"", "VALUES 12.30, 12.20, 0.00, -0.10");

        assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d IN (12.34, 12.30)", "VALUES 1, 2");
        assertQuery(select + " WHERE partition.d_trunc = 12.30", "VALUES (12.30, 2, 12.30, 12.34, 1, 2)");

        assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d = 12.29", "VALUES 3");
        assertQuery(select + " WHERE partition.d_trunc = 12.20", "VALUES (12.20, 1, 12.29, 12.29, 3, 3)");

        assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d = 0.05", "VALUES 4");
        assertQuery(select + " WHERE partition.d_trunc = 0.00", "VALUES (0.00, 1, 0.05, 0.05, 4, 4)");

        assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d = -0.05", "VALUES 5");
        assertQuery(select + " WHERE partition.d_trunc = -0.10", "VALUES (-0.10, 1, -0.05, -0.05, 5, 5)");

        // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates
        assertQuery(
                "SELECT * FROM test_truncate_decimal_transform WHERE d * 100 % 10 = 9 AND b % 7 = 3",
                "VALUES (12.29, 3)");

        assertThat(query("SHOW STATS FOR test_truncate_decimal_transform"))
                .skippingTypesCheck()
                .matches("VALUES " +
                        " ('d', NULL, NULL, 0e0, NULL, '-0.05', '12.34'), " +
                        " ('b', NULL, NULL, 0e0, NULL, '1', '5'), " +
                        " (NULL, NULL, NULL, NULL, 5e0, NULL, NULL)");

        dropTable("test_truncate_decimal_transform");
    }
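    // The bucket transform assigns rows to partitions by hashing the value. Per the Iceberg spec this
    // is a 32-bit Murmur3 hash of the value's single-value binary encoding, taken positive and reduced
    // modulo the bucket count, so equal values land in the same bucket regardless of engine or file
    // format. The bucket assignments asserted below follow from that hash, not from value ordering.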
3)," + "('mommy', 4)," + "('moscow', 5)," + "('Greece', 6)," + "('Grozny', 7)", 7); assertQuery( "SELECT * FROM test_apply_functional_constraint WHERE length(d) = 4 AND b % 7 = 2", "VALUES ('abxy', 2)"); assertUpdate("DROP TABLE test_apply_functional_constraint"); } @Test public void testVoidTransform() { assertUpdate("CREATE TABLE test_void_transform (d VARCHAR, b BIGINT) WITH (partitioning = ARRAY['void(d)'])"); String values = "VALUES " + "('abcd', 1)," + "('abxy', 2)," + "('ab598', 3)," + "('mommy', 4)," + "('Warsaw', 5)," + "(NULL, 6)," + "(NULL, 7)"; assertUpdate("INSERT INTO test_void_transform " + values, 7); assertQuery("SELECT * FROM test_void_transform", values); assertQuery("SELECT COUNT(*) FROM \"test_void_transform$partitions\"", "SELECT 1"); assertQuery( "SELECT partition.d_null, record_count, file_count, data.d.min, data.d.max, data.d.null_count, data.d.nan_count, data.b.min, data.b.max, data.b.null_count, data.b.nan_count FROM \"test_void_transform$partitions\"", "VALUES (NULL, 7, 1, 'Warsaw', 'mommy', 2, NULL, 1, 7, 0, NULL)"); assertQuery( "SELECT d, b FROM test_void_transform WHERE d IS NOT NULL", "VALUES " + "('abcd', 1)," + "('abxy', 2)," + "('ab598', 3)," + "('mommy', 4)," + "('Warsaw', 5)"); assertQuery("SELECT b FROM test_void_transform WHERE d IS NULL", "VALUES 6, 7"); assertThat(query("SHOW STATS FOR test_void_transform")) .skippingTypesCheck() .matches("VALUES " + " ('d', " + (format == PARQUET ? "76e0" : "NULL") + ", NULL, 0.2857142857142857, NULL, NULL, NULL), " + " ('b', NULL, NULL, 0e0, NULL, '1', '7'), " + " (NULL, NULL, NULL, NULL, 7e0, NULL, NULL)"); assertUpdate("DROP TABLE " + "test_void_transform"); } @Test public void testMetadataDeleteSimple() { assertUpdate("CREATE TABLE test_metadata_delete_simple (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col1'])"); assertUpdate("INSERT INTO test_metadata_delete_simple VALUES(1, 100), (1, 101), (1, 102), (2, 200), (2, 201), (3, 300)", 6); assertQuery("SELECT sum(col2) FROM test_metadata_delete_simple", "SELECT 1004"); assertQuery("SELECT count(*) FROM \"test_metadata_delete_simple$partitions\"", "SELECT 3"); assertUpdate("DELETE FROM test_metadata_delete_simple WHERE col1 = 1", 3); assertQuery("SELECT sum(col2) FROM test_metadata_delete_simple", "SELECT 701"); assertQuery("SELECT count(*) FROM \"test_metadata_delete_simple$partitions\"", "SELECT 2"); dropTable("test_metadata_delete_simple"); } @Test public void testMetadataDelete() { assertUpdate("CREATE TABLE test_metadata_delete (" + " orderkey BIGINT," + " linenumber INTEGER," + " linestatus VARCHAR" + ") " + "WITH (" + " partitioning = ARRAY[ 'linenumber', 'linestatus' ]" + ")"); assertUpdate( "" + "INSERT INTO test_metadata_delete " + "SELECT orderkey, linenumber, linestatus " + "FROM tpch.tiny.lineitem", "SELECT count(*) FROM lineitem"); assertQuery("SELECT COUNT(*) FROM \"test_metadata_delete$partitions\"", "SELECT 14"); assertUpdate("DELETE FROM test_metadata_delete WHERE linestatus = 'F' AND linenumber = 3", 5378); assertQuery("SELECT * FROM test_metadata_delete", "SELECT orderkey, linenumber, linestatus FROM lineitem WHERE linestatus <> 'F' or linenumber <> 3"); assertQuery("SELECT count(*) FROM \"test_metadata_delete$partitions\"", "SELECT 13"); assertUpdate("DELETE FROM test_metadata_delete WHERE linestatus='O'", 30049); assertQuery("SELECT count(*) FROM \"test_metadata_delete$partitions\"", "SELECT 6"); assertQuery("SELECT * FROM test_metadata_delete", "SELECT orderkey, linenumber, linestatus FROM lineitem WHERE linestatus <> 'O' AND 
linenumber <> 3"); dropTable("test_metadata_delete"); } @Test public void testInSet() { testInSet(31); testInSet(35); } private void testInSet(int inCount) { String values = range(1, inCount + 1) .mapToObj(n -> format("(%s, %s)", n, n + 10)) .collect(joining(", ")); String inList = range(1, inCount + 1) .mapToObj(Integer::toString) .collect(joining(", ")); assertUpdate("CREATE TABLE test_in_set (col1 INTEGER, col2 BIGINT)"); assertUpdate(format("INSERT INTO test_in_set VALUES %s", values), inCount); // This proves that SELECTs with large IN phrases work correctly computeActual(format("SELECT col1 FROM test_in_set WHERE col1 IN (%s)", inList)); dropTable("test_in_set"); } @Test public void testBasicTableStatistics() { String tableName = "test_basic_table_statistics"; assertUpdate(format("CREATE TABLE %s (col REAL)", tableName)); assertThat(query("SHOW STATS FOR " + tableName)) .skippingTypesCheck() .matches("VALUES " + " ('col', 0e0, 0e0, 1e0, NULL, NULL, NULL), " + " (NULL, NULL, NULL, NULL, 0e0, NULL, NULL)"); assertUpdate("INSERT INTO " + tableName + " VALUES -10", 1); assertUpdate("INSERT INTO " + tableName + " VALUES 100", 1); assertThat(query("SHOW STATS FOR " + tableName)) .skippingTypesCheck() .matches("VALUES " + " ('col', NULL, NULL, 0e0, NULL, '-10.0', '100.0'), " + " (NULL, NULL, NULL, NULL, 2e0, NULL, NULL)"); assertUpdate("INSERT INTO " + tableName + " VALUES 200", 1); assertThat(query("SHOW STATS FOR " + tableName)) .skippingTypesCheck() .matches("VALUES " + " ('col', NULL, NULL, 0e0, NULL, '-10.0', '200.0'), " + " (NULL, NULL, NULL, NULL, 3e0, NULL, NULL)"); dropTable(tableName); } @Test public void testMultipleColumnTableStatistics() { String tableName = "test_multiple_table_statistics"; assertUpdate(format("CREATE TABLE %s (col1 REAL, col2 INTEGER, col3 DATE)", tableName)); assertUpdate("INSERT INTO " + tableName + " VALUES (-10, -1, DATE '2019-06-28')", 1); assertUpdate("INSERT INTO " + tableName + " VALUES (100, 10, DATE '2020-01-01')", 1); MaterializedResult result = computeActual("SHOW STATS FOR " + tableName); MaterializedResult expectedStatistics = resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR) .row("col1", null, null, 0.0, null, "-10.0", "100.0") .row("col2", null, null, 0.0, null, "-1", "10") .row("col3", null, null, 0.0, null, "2019-06-28", "2020-01-01") .row(null, null, null, null, 2.0, null, null) .build(); assertEquals(result, expectedStatistics); assertUpdate("INSERT INTO " + tableName + " VALUES (200, 20, DATE '2020-06-28')", 1); result = computeActual("SHOW STATS FOR " + tableName); expectedStatistics = resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR) .row("col1", null, null, 0.0, null, "-10.0", "200.0") .row("col2", null, null, 0.0, null, "-1", "20") .row("col3", null, null, 0.0, null, "2019-06-28", "2020-06-28") .row(null, null, null, null, 3.0, null, null) .build(); assertEquals(result, expectedStatistics); assertUpdate("INSERT INTO " + tableName + " VALUES " + IntStream.rangeClosed(21, 25) .mapToObj(i -> format("(200, %d, DATE '2020-07-%d')", i, i)) .collect(joining(", ")), 5); assertUpdate("INSERT INTO " + tableName + " VALUES " + IntStream.rangeClosed(26, 30) .mapToObj(i -> format("(NULL, %d, DATE '2020-06-%d')", i, i)) .collect(joining(", ")), 5); result = computeActual("SHOW STATS FOR " + tableName); expectedStatistics = resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR) .row("col1", null, null, 5.0 / 13.0, null, "-10.0", "200.0") 
.row("col2", null, null, 0.0, null, "-1", "30") .row("col3", null, null, 0.0, null, "2019-06-28", "2020-07-25") .row(null, null, null, null, 13.0, null, null) .build(); assertEquals(result, expectedStatistics); dropTable(tableName); } @Test public void testPartitionedTableStatistics() { assertUpdate("CREATE TABLE iceberg.tpch.test_partitioned_table_statistics (col1 REAL, col2 BIGINT) WITH (partitioning = ARRAY['col2'])"); assertUpdate("INSERT INTO test_partitioned_table_statistics VALUES (-10, -1)", 1); assertUpdate("INSERT INTO test_partitioned_table_statistics VALUES (100, 10)", 1); MaterializedResult result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics"); assertEquals(result.getRowCount(), 3); MaterializedRow row0 = result.getMaterializedRows().get(0); assertEquals(row0.getField(0), "col1"); assertEquals(row0.getField(3), 0.0); assertEquals(row0.getField(5), "-10.0"); assertEquals(row0.getField(6), "100.0"); MaterializedRow row1 = result.getMaterializedRows().get(1); assertEquals(row1.getField(0), "col2"); assertEquals(row1.getField(3), 0.0); assertEquals(row1.getField(5), "-1"); assertEquals(row1.getField(6), "10"); MaterializedRow row2 = result.getMaterializedRows().get(2); assertEquals(row2.getField(4), 2.0); assertUpdate("INSERT INTO test_partitioned_table_statistics VALUES " + IntStream.rangeClosed(1, 5) .mapToObj(i -> format("(%d, 10)", i + 100)) .collect(joining(", ")), 5); assertUpdate("INSERT INTO test_partitioned_table_statistics VALUES " + IntStream.rangeClosed(6, 10) .mapToObj(i -> "(NULL, 10)") .collect(joining(", ")), 5); result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics"); assertEquals(result.getRowCount(), 3); row0 = result.getMaterializedRows().get(0); assertEquals(row0.getField(0), "col1"); assertEquals(row0.getField(3), 5.0 / 12.0); assertEquals(row0.getField(5), "-10.0"); assertEquals(row0.getField(6), "105.0"); row1 = result.getMaterializedRows().get(1); assertEquals(row1.getField(0), "col2"); assertEquals(row1.getField(3), 0.0); assertEquals(row1.getField(5), "-1"); assertEquals(row1.getField(6), "10"); row2 = result.getMaterializedRows().get(2); assertEquals(row2.getField(4), 12.0); assertUpdate("INSERT INTO test_partitioned_table_statistics VALUES " + IntStream.rangeClosed(6, 10) .mapToObj(i -> "(100, NULL)") .collect(joining(", ")), 5); result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics"); row0 = result.getMaterializedRows().get(0); assertEquals(row0.getField(0), "col1"); assertEquals(row0.getField(3), 5.0 / 17.0); assertEquals(row0.getField(5), "-10.0"); assertEquals(row0.getField(6), "105.0"); row1 = result.getMaterializedRows().get(1); assertEquals(row1.getField(0), "col2"); assertEquals(row1.getField(3), 5.0 / 17.0); assertEquals(row1.getField(5), "-1"); assertEquals(row1.getField(6), "10"); row2 = result.getMaterializedRows().get(2); assertEquals(row2.getField(4), 17.0); dropTable("iceberg.tpch.test_partitioned_table_statistics"); } @Test public void testPredicatePushdown() { QualifiedObjectName tableName = new QualifiedObjectName("iceberg", "tpch", "test_predicate"); assertUpdate(format("CREATE TABLE %s (col1 BIGINT, col2 BIGINT, col3 BIGINT) WITH (partitioning = ARRAY['col2', 'col3'])", tableName)); assertUpdate(format("INSERT INTO %s VALUES (1, 10, 100)", tableName), 1L); assertUpdate(format("INSERT INTO %s VALUES (2, 20, 200)", tableName), 1L); assertQuery(format("SELECT * FROM %s WHERE col1 = 1", tableName), "VALUES (1, 10, 100)"); 
assertFilterPushdown( tableName, ImmutableMap.of("col1", singleValue(BIGINT, 1L)), ImmutableMap.of(), ImmutableMap.of("col1", singleValue(BIGINT, 1L))); assertQuery(format("SELECT * FROM %s WHERE col2 = 10", tableName), "VALUES (1, 10, 100)"); assertFilterPushdown( tableName, ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of()); assertQuery(format("SELECT * FROM %s WHERE col1 = 1 AND col2 = 10", tableName), "VALUES (1, 10, 100)"); assertFilterPushdown( tableName, ImmutableMap.of("col1", singleValue(BIGINT, 1L), "col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col1", singleValue(BIGINT, 1L))); // Assert pushdown for an IN predicate with value count above the default compaction threshold List<Long> values = LongStream.range(1L, 1010L).boxed() .filter(index -> index != 20L) .collect(toImmutableList()); assertThat(values).hasSizeGreaterThan(ICEBERG_DOMAIN_COMPACTION_THRESHOLD); String valuesString = join(",", values.stream().map(Object::toString).collect(toImmutableList())); String inPredicate = "%s IN (" + valuesString + ")"; assertQuery( format("SELECT * FROM %s WHERE %s AND %s", tableName, format(inPredicate, "col1"), format(inPredicate, "col2")), "VALUES (1, 10, 100)"); assertFilterPushdown( tableName, ImmutableMap.of("col1", multipleValues(BIGINT, values), "col2", multipleValues(BIGINT, values)), ImmutableMap.of("col2", multipleValues(BIGINT, values)), // Unenforced predicate is simplified during split generation, but not reflected here ImmutableMap.of("col1", multipleValues(BIGINT, values))); dropTable(tableName.getObjectName()); } @Test public void testPredicatesWithStructuralTypes() { String tableName = "test_predicate_with_structural_types"; assertUpdate("CREATE TABLE " + tableName + " (id INT, array_t ARRAY(BIGINT), map_t MAP(BIGINT, BIGINT), struct_t ROW(f1 BIGINT, f2 BIGINT))"); assertUpdate("INSERT INTO " + tableName + " VALUES " + "(1, ARRAY[1, 2, 3], MAP(ARRAY[1,3], ARRAY[2,4]), ROW(1, 2)), " + "(11, ARRAY[11, 12, 13], MAP(ARRAY[11, 13], ARRAY[12, 14]), ROW(11, 12)), " + "(11, ARRAY[111, 112, 113], MAP(ARRAY[111, 13], ARRAY[112, 114]), ROW(111, 112)), " + "(21, ARRAY[21, 22, 23], MAP(ARRAY[21, 23], ARRAY[22, 24]), ROW(21, 22))", 4); assertQuery("SELECT id FROM " + tableName + " WHERE array_t = ARRAY[1, 2, 3]", "VALUES 1"); assertQuery("SELECT id FROM " + tableName + " WHERE map_t = MAP(ARRAY[11, 13], ARRAY[12, 14])", "VALUES 11"); assertQuery("SELECT id FROM " + tableName + " WHERE struct_t = ROW(21, 22)", "VALUES 21"); assertQuery("SELECT struct_t.f1 FROM " + tableName + " WHERE id = 11 AND map_t = MAP(ARRAY[11, 13], ARRAY[12, 14])", "VALUES 11"); dropTable(tableName); } @Test(dataProviderClass = DataProviders.class, dataProvider = "trueFalse") public void testPartitionsTableWithColumnNameConflict(boolean partitioned) { assertUpdate("DROP TABLE IF EXISTS test_partitions_with_conflict"); assertUpdate("CREATE TABLE test_partitions_with_conflict (" + " p integer, " + " row_count integer, " + " record_count integer, " + " file_count integer, " + " total_size integer " + ") " + (partitioned ? 
"WITH(partitioning = ARRAY['p'])" : "")); assertUpdate("INSERT INTO test_partitions_with_conflict VALUES (11, 12, 13, 14, 15)", 1); // sanity check assertThat(query("SELECT * FROM test_partitions_with_conflict")) .matches("VALUES (11, 12, 13, 14, 15)"); // test $partitions assertThat(query("SELECT * FROM \"test_partitions_with_conflict$partitions\"")) .matches("SELECT " + (partitioned ? "CAST(ROW(11) AS row(p integer)), " : "") + "BIGINT '1', " + "BIGINT '1', " + // total_size is not exactly deterministic, so grab whatever value there is "(SELECT total_size FROM \"test_partitions_with_conflict$partitions\"), " + "CAST(" + " ROW (" + (partitioned ? "" : " ROW(11, 11, 0, NULL), ") + " ROW(12, 12, 0, NULL), " + " ROW(13, 13, 0, NULL), " + " ROW(14, 14, 0, NULL), " + " ROW(15, 15, 0, NULL) " + " ) " + " AS row(" + (partitioned ? "" : " p row(min integer, max integer, null_count bigint, nan_count bigint), ") + " row_count row(min integer, max integer, null_count bigint, nan_count bigint), " + " record_count row(min integer, max integer, null_count bigint, nan_count bigint), " + " file_count row(min integer, max integer, null_count bigint, nan_count bigint), " + " total_size row(min integer, max integer, null_count bigint, nan_count bigint) " + " )" + ")"); assertUpdate("DROP TABLE test_partitions_with_conflict"); } private void assertFilterPushdown( QualifiedObjectName tableName, Map<String, Domain> filter, Map<String, Domain> expectedEnforcedPredicate, Map<String, Domain> expectedUnenforcedPredicate) { Metadata metadata = getQueryRunner().getMetadata(); newTransaction().execute(getSession(), session -> { TableHandle table = metadata.getTableHandle(session, tableName) .orElseThrow(() -> new TableNotFoundException(tableName.asSchemaTableName())); Map<String, ColumnHandle> columns = metadata.getColumnHandles(session, table); TupleDomain<ColumnHandle> domains = TupleDomain.withColumnDomains( filter.entrySet().stream() .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue))); Optional<ConstraintApplicationResult<TableHandle>> result = metadata.applyFilter(session, table, new Constraint(domains)); assertTrue(result.isEmpty() == (expectedUnenforcedPredicate == null && expectedEnforcedPredicate == null)); if (result.isPresent()) { IcebergTableHandle newTable = (IcebergTableHandle) result.get().getHandle().getConnectorHandle(); assertEquals( newTable.getEnforcedPredicate(), TupleDomain.withColumnDomains(expectedEnforcedPredicate.entrySet().stream() .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue)))); assertEquals( newTable.getUnenforcedPredicate(), TupleDomain.withColumnDomains(expectedUnenforcedPredicate.entrySet().stream() .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue)))); } }); } @Test public void testCreateNestedPartitionedTable() { assertUpdate("CREATE TABLE test_nested_table_1 (" + " bool BOOLEAN" + ", int INTEGER" + ", arr ARRAY(VARCHAR)" + ", big BIGINT" + ", rl REAL" + ", dbl DOUBLE" + ", mp MAP(INTEGER, VARCHAR)" + ", dec DECIMAL(5,2)" + ", vc VARCHAR" + ", vb VARBINARY" + ", ts TIMESTAMP(6)" + ", tstz TIMESTAMP(6) WITH TIME ZONE" + ", str ROW(id INTEGER , vc VARCHAR)" + ", dt DATE)" + " WITH (partitioning = ARRAY['int'])"); assertUpdate( "INSERT INTO test_nested_table_1 " + " select true, 1, array['uno', 'dos', 'tres'], BIGINT '1', REAL '1.0', DOUBLE '1.0', map(array[1,2,3,4], array['ek','don','teen','char'])," + " CAST(1.0 as DECIMAL(5,2))," + " 'one', VARBINARY 'binary0/1values',\n" + " 
TIMESTAMP '2021-07-24 02:43:57.348000'," + " TIMESTAMP '2021-07-24 02:43:57.348000 UTC'," + " (CAST(ROW(null, 'this is a random value') AS ROW(int, varchar))), " + " DATE '2021-07-24'", 1); assertEquals(computeActual("SELECT * from test_nested_table_1").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_1")) .skippingTypesCheck() .matches("VALUES " + " ('bool', NULL, NULL, 0e0, NULL, 'true', 'true'), " + " ('int', NULL, NULL, 0e0, NULL, '1', '1'), " + " ('arr', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('big', NULL, NULL, 0e0, NULL, '1', '1'), " + " ('rl', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('dbl', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('mp', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dec', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('vc', " + (format == PARQUET ? "43e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " + " ('vb', " + (format == PARQUET ? "55e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " + " ('ts', NULL, NULL, 0e0, NULL, '2021-07-24 02:43:57.348000', " + (format == ORC ? "'2021-07-24 02:43:57.348999'" : "'2021-07-24 02:43:57.348000'") + "), " + " ('tstz', NULL, NULL, 0e0, NULL, '2021-07-24 02:43:57.348 UTC', '2021-07-24 02:43:57.348 UTC'), " + " ('str', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dt', NULL, NULL, 0e0, NULL, '2021-07-24', '2021-07-24'), " + " (NULL, NULL, NULL, NULL, 1e0, NULL, NULL)"); dropTable("test_nested_table_1"); assertUpdate("" + "CREATE TABLE test_nested_table_2 (" + " int INTEGER" + ", arr ARRAY(ROW(id INTEGER, vc VARCHAR))" + ", big BIGINT" + ", rl REAL" + ", dbl DOUBLE" + ", mp MAP(INTEGER, ARRAY(VARCHAR))" + ", dec DECIMAL(5,2)" + ", str ROW(id INTEGER, vc VARCHAR, arr ARRAY(INTEGER))" + ", vc VARCHAR)" + " WITH (partitioning = ARRAY['int'])"); assertUpdate( "INSERT INTO test_nested_table_2 " + " select 1, array[cast(row(1, null) as row(int, varchar)), cast(row(2, 'dos') as row(int, varchar))], BIGINT '1', REAL '1.0', DOUBLE '1.0', " + "map(array[1,2], array[array['ek', 'one'], array['don', 'do', 'two']]), CAST(1.0 as DECIMAL(5,2)), " + "CAST(ROW(1, 'this is a random value', null) AS ROW(int, varchar, array(int))), 'one'", 1); assertEquals(computeActual("SELECT * from test_nested_table_2").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_2")) .skippingTypesCheck() .matches("VALUES " + " ('int', NULL, NULL, 0e0, NULL, '1', '1'), " + " ('arr', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('big', NULL, NULL, 0e0, NULL, '1', '1'), " + " ('rl', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('dbl', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('mp', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dec', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('vc', " + (format == PARQUET ? "43e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " + " ('str', NULL, NULL, " + (format == ORC ? 
"0e0" : "NULL") + ", NULL, NULL, NULL), " + " (NULL, NULL, NULL, NULL, 1e0, NULL, NULL)"); assertUpdate("CREATE TABLE test_nested_table_3 WITH (partitioning = ARRAY['int']) AS SELECT * FROM test_nested_table_2", 1); assertEquals(computeActual("SELECT * FROM test_nested_table_3").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_3")) .matches("SHOW STATS FOR test_nested_table_2"); dropTable("test_nested_table_2"); dropTable("test_nested_table_3"); } @Test public void testSerializableReadIsolation() { assertUpdate("CREATE TABLE test_read_isolation (x int)"); assertUpdate("INSERT INTO test_read_isolation VALUES 123, 456", 2); withTransaction(session -> { assertQuery(session, "SELECT * FROM test_read_isolation", "VALUES 123, 456"); assertUpdate("INSERT INTO test_read_isolation VALUES 789", 1); assertQuery("SELECT * FROM test_read_isolation", "VALUES 123, 456, 789"); assertQuery(session, "SELECT * FROM test_read_isolation", "VALUES 123, 456"); }); assertQuery("SELECT * FROM test_read_isolation", "VALUES 123, 456, 789"); dropTable("test_read_isolation"); } private void withTransaction(Consumer<Session> consumer) { transaction(getQueryRunner().getTransactionManager(), getQueryRunner().getAccessControl()) .readCommitted() .execute(getSession(), consumer); } private void dropTable(String table) { Session session = getSession(); assertUpdate(session, "DROP TABLE " + table); assertFalse(getQueryRunner().tableExists(session, table)); } @Test public void testOptimizedMetadataQueries() { Session session = Session.builder(getSession()) .setSystemProperty("optimize_metadata_queries", "true") .build(); assertUpdate("CREATE TABLE test_metadata_optimization (a BIGINT, b BIGINT, c BIGINT) WITH (PARTITIONING = ARRAY['b', 'c'])"); assertUpdate("INSERT INTO test_metadata_optimization VALUES (5, 6, 7), (8, 9, 10)", 2); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization", "VALUES (6), (9)"); assertQuery(session, "SELECT DISTINCT b, c FROM test_metadata_optimization", "VALUES (6, 7), (9, 10)"); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization WHERE b < 7", "VALUES (6)"); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization WHERE c > 8", "VALUES (9)"); // Assert behavior after metadata delete assertUpdate("DELETE FROM test_metadata_optimization WHERE b = 6", 1); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization", "VALUES (9)"); // TODO: assert behavior after deleting the last row of a partition, once row-level deletes are supported. // i.e. 
a query like 'DELETE FROM test_metadata_optimization WHERE b = 6 AND a = 5' dropTable("test_metadata_optimization"); } @Test public void testFileSizeInManifest() throws Exception { assertUpdate("CREATE TABLE test_file_size_in_manifest (" + "a_bigint bigint, " + "a_varchar varchar, " + "a_long_decimal decimal(38,20), " + "a_map map(varchar, integer))"); assertUpdate( "INSERT INTO test_file_size_in_manifest VALUES " + "(NULL, NULL, NULL, NULL), " + "(42, 'some varchar value', DECIMAL '123456789123456789.123456789123456789', map(ARRAY['abc', 'def'], ARRAY[113, -237843832]))", 2); MaterializedResult files = computeActual("SELECT file_path, record_count, file_size_in_bytes FROM \"test_file_size_in_manifest$files\""); long totalRecordCount = 0; for (MaterializedRow row : files.getMaterializedRows()) { String path = (String) row.getField(0); Long recordCount = (Long) row.getField(1); Long fileSizeInBytes = (Long) row.getField(2); totalRecordCount += recordCount; assertThat(fileSizeInBytes).isEqualTo(Files.size(Paths.get(path))); } // Verify sum(record_count) to make sure we have all the files. assertThat(totalRecordCount).isEqualTo(2); } @Test public void testIncorrectIcebergFileSizes() throws Exception { // Create a table with a single insert assertUpdate("CREATE TABLE test_iceberg_file_size (x BIGINT)"); assertUpdate("INSERT INTO test_iceberg_file_size VALUES (123), (456), (758)", 3); // Get manifest file MaterializedResult result = computeActual("SELECT path FROM \"test_iceberg_file_size$manifests\""); assertEquals(result.getRowCount(), 1); String manifestFile = (String) result.getOnlyValue(); // Read manifest file Schema schema; GenericData.Record entry = null; try (DataFileReader<GenericData.Record> dataFileReader = new DataFileReader<>(new File(manifestFile), new GenericDatumReader<>())) { schema = dataFileReader.getSchema(); int recordCount = 0; while (dataFileReader.hasNext()) { entry = dataFileReader.next(); recordCount++; } assertEquals(recordCount, 1); } // Alter data file entry to store incorrect file size GenericData.Record dataFile = (GenericData.Record) entry.get("data_file"); long alteredValue = 50L; assertNotEquals((long) dataFile.get("file_size_in_bytes"), alteredValue); dataFile.put("file_size_in_bytes", alteredValue); // Replace the file through HDFS client. This is required for correct checksums. HdfsEnvironment.HdfsContext context = new HdfsContext(getSession().toConnectorSession()); org.apache.hadoop.fs.Path manifestFilePath = new org.apache.hadoop.fs.Path(manifestFile); FileSystem fs = HDFS_ENVIRONMENT.getFileSystem(context, manifestFilePath); // Write altered metadata try (OutputStream out = fs.create(manifestFilePath); DataFileWriter<GenericData.Record> dataFileWriter = new DataFileWriter<>(new GenericDatumWriter<>(schema))) { dataFileWriter.create(schema, out); dataFileWriter.append(entry); } // Ignoring Iceberg provided file size makes the query succeed Session session = Session.builder(getSession()) .setCatalogSessionProperty("iceberg", "use_file_size_from_metadata", "false") .build(); assertQuery(session, "SELECT * FROM test_iceberg_file_size", "VALUES (123), (456), (758)"); // Using Iceberg provided file size fails the query assertQueryFails("SELECT * FROM test_iceberg_file_size", format == ORC ? 
format(".*Error opening Iceberg split.*\\QIncorrect file size (%s) for file (end of stream not reached)\\E.*", alteredValue) : format("Error reading tail from .* with length %d", alteredValue)); dropTable("test_iceberg_file_size"); } @Test public void testSplitPruningForFilterOnPartitionColumn() { String tableName = "nation_partitioned_pruning"; assertUpdate("DROP TABLE IF EXISTS " + tableName); // disable writes redistribution to have predictable number of files written per partition (one). Session noRedistributeWrites = Session.builder(getSession()) .setSystemProperty("redistribute_writes", "false") .build(); assertUpdate(noRedistributeWrites, "CREATE TABLE " + tableName + " WITH (partitioning = ARRAY['regionkey']) AS SELECT * FROM nation", 25); // sanity check that table contains exactly 5 files assertThat(query("SELECT count(*) FROM \"" + tableName + "$files\"")).matches("VALUES CAST(5 AS BIGINT)"); verifySplitCount("SELECT * FROM " + tableName, 5); verifySplitCount("SELECT * FROM " + tableName + " WHERE regionkey = 3", 1); verifySplitCount("SELECT * FROM " + tableName + " WHERE regionkey < 2", 2); verifySplitCount("SELECT * FROM " + tableName + " WHERE regionkey < 0", 0); verifySplitCount("SELECT * FROM " + tableName + " WHERE regionkey > 1 AND regionkey < 4", 2); verifySplitCount("SELECT * FROM " + tableName + " WHERE regionkey % 5 = 3", 1); assertUpdate("DROP TABLE " + tableName); } @Test public void testAllAvailableTypes() { assertUpdate("CREATE TABLE test_all_types (" + " a_boolean boolean, " + " an_integer integer, " + " a_bigint bigint, " + " a_real real, " + " a_double double, " + " a_short_decimal decimal(5,2), " + " a_long_decimal decimal(38,20), " + " a_varchar varchar, " + " a_varbinary varbinary, " + " a_date date, " + " a_time time(6), " + " a_timestamp timestamp(6), " + " a_timestamptz timestamp(6) with time zone, " + " a_uuid uuid, " + " a_row row(id integer , vc varchar), " + " an_array array(varchar), " + " a_map map(integer, varchar) " + ")"); String values = "VALUES (" + "true, " + "1, " + "BIGINT '1', " + "REAL '1.0', " + "DOUBLE '1.0', " + "CAST(1.0 AS decimal(5,2)), " + "CAST(11.0 AS decimal(38,20)), " + "VARCHAR 'onefsadfdsf', " + "X'000102f0feff', " + "DATE '2021-07-24'," + "TIME '02:43:57.987654', " + "TIMESTAMP '2021-07-24 03:43:57.987654'," + "TIMESTAMP '2021-07-24 04:43:57.987654 UTC', " + "UUID '20050910-1330-11e9-ffff-2a86e4085a59', " + "CAST(ROW(42, 'this is a random value') AS ROW(id int, vc varchar)), " + "ARRAY[VARCHAR 'uno', 'dos', 'tres'], " + "map(ARRAY[1,2], ARRAY['ek', VARCHAR 'one'])) "; String nullValues = nCopies(17, "NULL").stream() .collect(joining(", ", "VALUES (", ")")); assertUpdate("INSERT INTO test_all_types " + values, 1); assertUpdate("INSERT INTO test_all_types " + nullValues, 1); // SELECT assertThat(query("SELECT * FROM test_all_types")) .matches(values + " UNION ALL " + nullValues); // SELECT with predicates assertThat(query("SELECT * FROM test_all_types WHERE " + " a_boolean = true " + "AND an_integer = 1 " + "AND a_bigint = BIGINT '1' " + "AND a_real = REAL '1.0' " + "AND a_double = DOUBLE '1.0' " + "AND a_short_decimal = CAST(1.0 AS decimal(5,2)) " + "AND a_long_decimal = CAST(11.0 AS decimal(38,20)) " + "AND a_varchar = VARCHAR 'onefsadfdsf' " + "AND a_varbinary = X'000102f0feff' " + "AND a_date = DATE '2021-07-24' " + "AND a_time = TIME '02:43:57.987654' " + "AND a_timestamp = TIMESTAMP '2021-07-24 03:43:57.987654' " + "AND a_timestamptz = TIMESTAMP '2021-07-24 04:43:57.987654 UTC' " + "AND a_uuid = UUID 
'20050910-1330-11e9-ffff-2a86e4085a59' " + "AND a_row = CAST(ROW(42, 'this is a random value') AS ROW(id int, vc varchar)) " + "AND an_array = ARRAY[VARCHAR 'uno', 'dos', 'tres'] " + "AND a_map = map(ARRAY[1,2], ARRAY['ek', VARCHAR 'one']) " + "")) .matches(values); assertThat(query("SELECT * FROM test_all_types WHERE " + " a_boolean IS NULL " + "AND an_integer IS NULL " + "AND a_bigint IS NULL " + "AND a_real IS NULL " + "AND a_double IS NULL " + "AND a_short_decimal IS NULL " + "AND a_long_decimal IS NULL " + "AND a_varchar IS NULL " + "AND a_varbinary IS NULL " + "AND a_date IS NULL " + "AND a_time IS NULL " + "AND a_timestamp IS NULL " + "AND a_timestamptz IS NULL " + "AND a_uuid IS NULL " + "AND a_row IS NULL " + "AND an_array IS NULL " + "AND a_map IS NULL " + "")) .skippingTypesCheck() .matches(nullValues); // SHOW STATS assertThat(query("SHOW STATS FOR test_all_types")) .skippingTypesCheck() .matches("VALUES " + " ('a_boolean', NULL, NULL, 0.5e0, NULL, 'true', 'true'), " + " ('an_integer', NULL, NULL, 0.5e0, NULL, '1', '1'), " + " ('a_bigint', NULL, NULL, 0.5e0, NULL, '1', '1'), " + " ('a_real', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_double', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_short_decimal', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_long_decimal', NULL, NULL, 0.5e0, NULL, '11.0', '11.0'), " + " ('a_varchar', " + (format == PARQUET ? "87e0" : "NULL") + ", NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_varbinary', " + (format == PARQUET ? "82e0" : "NULL") + ", NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_date', NULL, NULL, 0.5e0, NULL, '2021-07-24', '2021-07-24'), " + " ('a_time', NULL, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_timestamp', NULL, NULL, 0.5e0, NULL, " + (format == ORC ? "'2021-07-24 03:43:57.987000', '2021-07-24 03:43:57.987999'" : "'2021-07-24 03:43:57.987654', '2021-07-24 03:43:57.987654'") + "), " + " ('a_timestamptz', NULL, NULL, 0.5e0, NULL, '2021-07-24 04:43:57.987 UTC', '2021-07-24 04:43:57.987 UTC'), " + " ('a_uuid', NULL, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_row', NULL, NULL, " + (format == ORC ? "0.5" : "NULL") + ", NULL, NULL, NULL), " + " ('an_array', NULL, NULL, " + (format == ORC ? "0.5" : "NULL") + ", NULL, NULL, NULL), " + " ('a_map', NULL, NULL, " + (format == ORC ? 
"0.5" : "NULL") + ", NULL, NULL, NULL), " + " (NULL, NULL, NULL, NULL, 2e0, NULL, NULL)"); // $partitions String schema = getSession().getSchema().orElseThrow(); assertThat(query("SELECT column_name FROM information_schema.columns WHERE table_schema = '" + schema + "' AND table_name = 'test_all_types$partitions' ")) .skippingTypesCheck() .matches("VALUES 'record_count', 'file_count', 'total_size', 'data'"); assertThat(query("SELECT " + " record_count," + " file_count, " + " data.a_boolean, " + " data.an_integer, " + " data.a_bigint, " + " data.a_real, " + " data.a_double, " + " data.a_short_decimal, " + " data.a_long_decimal, " + " data.a_varchar, " + " data.a_varbinary, " + " data.a_date, " + " data.a_time, " + " data.a_timestamp, " + " data.a_timestamptz, " + " data.a_uuid " + " FROM \"test_all_types$partitions\" ")) .matches( "VALUES (" + " BIGINT '2', " + " BIGINT '2', " + " CAST(ROW(true, true, 1, NULL) AS ROW(min boolean, max boolean, null_count bigint, nan_count bigint)), " + " CAST(ROW(1, 1, 1, NULL) AS ROW(min integer, max integer, null_count bigint, nan_count bigint)), " + " CAST(ROW(1, 1, 1, NULL) AS ROW(min bigint, max bigint, null_count bigint, nan_count bigint)), " + " CAST(ROW(1, 1, 1, NULL) AS ROW(min real, max real, null_count bigint, nan_count bigint)), " + " CAST(ROW(1, 1, 1, NULL) AS ROW(min double, max double, null_count bigint, nan_count bigint)), " + " CAST(ROW(1, 1, 1, NULL) AS ROW(min decimal(5,2), max decimal(5,2), null_count bigint, nan_count bigint)), " + " CAST(ROW(11, 11, 1, NULL) AS ROW(min decimal(38,20), max decimal(38,20), null_count bigint, nan_count bigint)), " + " CAST(ROW('onefsadfdsf', 'onefsadfdsf', 1, NULL) AS ROW(min varchar, max varchar, null_count bigint, nan_count bigint)), " + (format == ORC ? " CAST(ROW(NULL, NULL, 1, NULL) AS ROW(min varbinary, max varbinary, null_count bigint, nan_count bigint)), " : " CAST(ROW(X'000102f0feff', X'000102f0feff', 1, NULL) AS ROW(min varbinary, max varbinary, null_count bigint, nan_count bigint)), ") + " CAST(ROW(DATE '2021-07-24', DATE '2021-07-24', 1, NULL) AS ROW(min date, max date, null_count bigint, nan_count bigint)), " + " CAST(ROW(TIME '02:43:57.987654', TIME '02:43:57.987654', 1, NULL) AS ROW(min time(6), max time(6), null_count bigint, nan_count bigint)), " + (format == ORC ? " CAST(ROW(TIMESTAMP '2021-07-24 03:43:57.987000', TIMESTAMP '2021-07-24 03:43:57.987999', 1, NULL) AS ROW(min timestamp(6), max timestamp(6), null_count bigint, nan_count bigint)), " : " CAST(ROW(TIMESTAMP '2021-07-24 03:43:57.987654', TIMESTAMP '2021-07-24 03:43:57.987654', 1, NULL) AS ROW(min timestamp(6), max timestamp(6), null_count bigint, nan_count bigint)), ") + (format == ORC ? " CAST(ROW(TIMESTAMP '2021-07-24 04:43:57.987000 UTC', TIMESTAMP '2021-07-24 04:43:57.987999 UTC', 1, NULL) AS ROW(min timestamp(6) with time zone, max timestamp(6) with time zone, null_count bigint, nan_count bigint)), " : " CAST(ROW(TIMESTAMP '2021-07-24 04:43:57.987654 UTC', TIMESTAMP '2021-07-24 04:43:57.987654 UTC', 1, NULL) AS ROW(min timestamp(6) with time zone, max timestamp(6) with time zone, null_count bigint, nan_count bigint)), ") + (format == ORC ? 
" CAST(ROW(NULL, NULL, 1, NULL) AS ROW(min uuid, max uuid, null_count bigint, nan_count bigint)) " : " CAST(ROW(UUID '20050910-1330-11e9-ffff-2a86e4085a59', UUID '20050910-1330-11e9-ffff-2a86e4085a59', 1, NULL) AS ROW(min uuid, max uuid, null_count bigint, nan_count bigint)) " ) + ")"); assertUpdate("DROP TABLE test_all_types"); } @Test public void testLocalDynamicFilteringWithSelectiveBuildSizeJoin() { // We need to prepare tables for this test. The test is required to use tables that are backed by at lest two files Session session = Session.builder(getSession()) .setSystemProperty(TASK_WRITER_COUNT, "2") .build(); getQueryRunner().execute(session, format("CREATE TABLE IF NOT EXISTS %s AS SELECT * FROM %s", "linetime_multiple_file_backed", "tpch.tiny.lineitem")).getMaterializedRows(); getQueryRunner().execute(session, format("CREATE TABLE IF NOT EXISTS %s AS SELECT * FROM %s", "orders_multiple_file_backed", "tpch.tiny.orders")).getMaterializedRows(); long fullTableScan = (Long) computeActual("SELECT count(*) FROM linetime_multiple_file_backed").getOnlyValue(); // Pick a value for totalprice where file level stats will not be able to filter out any data // This assumes the totalprice ranges in every file have some overlap, otherwise this test will fail. MaterializedRow range = getOnlyElement(computeActual("SELECT max(lower_bounds[4]), min(upper_bounds[4]) FROM \"orders_multiple_file_backed$files\"").getMaterializedRows()); double totalPrice = (Double) computeActual(format( "SELECT totalprice FROM orders_multiple_file_backed WHERE totalprice > %s AND totalprice < %s LIMIT 1", range.getField(0), range.getField(1))) .getOnlyValue(); session = Session.builder(getSession()) .setSystemProperty(JOIN_DISTRIBUTION_TYPE, BROADCAST.name()) .build(); ResultWithQueryId<MaterializedResult> result = getDistributedQueryRunner().executeWithQueryId( session, "SELECT * FROM linetime_multiple_file_backed JOIN orders_multiple_file_backed ON linetime_multiple_file_backed.orderkey = orders_multiple_file_backed.orderkey AND orders_multiple_file_backed.totalprice = " + totalPrice); OperatorStats probeStats = searchScanFilterAndProjectOperatorStats( result.getQueryId(), new QualifiedObjectName(ICEBERG_CATALOG, "tpch", "linetime_multiple_file_backed")); // Assert some lineitem rows were filtered out on file level assertThat(probeStats.getInputPositions()).isLessThan(fullTableScan); } @Test(dataProvider = "repartitioningDataProvider") public void testRepartitionDataOnCtas(Session session, String partitioning, int expectedFiles) { testRepartitionData(session, "tpch.tiny.orders", true, partitioning, expectedFiles); } @Test(dataProvider = "repartitioningDataProvider") public void testRepartitionDataOnInsert(Session session, String partitioning, int expectedFiles) { testRepartitionData(session, "tpch.tiny.orders", false, partitioning, expectedFiles); } @DataProvider public Object[][] repartitioningDataProvider() { Session defaultSession = getSession(); // For identity-only partitioning, Iceberg connector returns ConnectorTableLayout with partitionColumns set, but without partitioning. // This is treated by engine as "preferred", but not mandatory partitioning, and gets ignored if stats suggest number of partitions // written is low. Without partitioning, number of files created is nondeterministic, as a writer (worker node) may or may not receive data. 
        Session obeyConnectorPartitioning = Session.builder(defaultSession)
                .setSystemProperty(PREFERRED_WRITE_PARTITIONING_MIN_NUMBER_OF_PARTITIONS, "1")
                .build();

        return new Object[][] {
                // identity partitioning column
                {obeyConnectorPartitioning, "'orderstatus'", 3},
                // bucketing
                {defaultSession, "'bucket(custkey, 13)'", 13},
                // varchar-based
                {defaultSession, "'truncate(comment, 1)'", 35},
                // complex; would exceed 100 open writers limit in IcebergPageSink without write repartitioning
                {defaultSession, "'bucket(custkey, 4)', 'truncate(comment, 1)'", 131},
                // same column multiple times
                {defaultSession, "'truncate(comment, 1)', 'orderstatus', 'bucket(comment, 2)'", 180},
        };
    }

    @Test
    public void testStatsBasedRepartitionDataOnCtas()
    {
        testStatsBasedRepartitionData(true);
    }

    @Test
    public void testStatsBasedRepartitionDataOnInsert()
    {
        testStatsBasedRepartitionData(false);
    }

    private void testStatsBasedRepartitionData(boolean ctas)
    {
        Session sessionRepartitionSmall = Session.builder(getSession())
                .setSystemProperty(PREFERRED_WRITE_PARTITIONING_MIN_NUMBER_OF_PARTITIONS, "2")
                .build();
        Session sessionRepartitionMany = Session.builder(getSession())
                .setSystemProperty(PREFERRED_WRITE_PARTITIONING_MIN_NUMBER_OF_PARTITIONS, "5")
                .setSystemProperty(SCALE_WRITERS, "false")
                .build();
        // Use DISTINCT to add data redistribution between source table and the writer. This makes it more likely that all writers get some data.
        String sourceRelation = "(SELECT DISTINCT orderkey, custkey, orderstatus FROM tpch.tiny.orders)";
        testRepartitionData(
                sessionRepartitionSmall,
                sourceRelation,
                ctas,
                "'orderstatus'",
                3);
        // Test uses a relatively small table (tpch.tiny.orders has 15K rows). When the engine doesn't redistribute data for writes,
        // occasionally a worker node doesn't get any data and fewer files get created.
        assertEventually(() -> {
            testRepartitionData(
                    sessionRepartitionMany,
                    sourceRelation,
                    ctas,
                    "'orderstatus'",
                    9);
        });
    }

    private void testRepartitionData(Session session, String sourceRelation, boolean ctas, String partitioning, int expectedFiles)
    {
        String tableName = "repartition" +
                "_" + sourceRelation.replaceAll("[^a-zA-Z0-9]", "") +
                (ctas ? "ctas" : "insert") +
                "_" + partitioning.replaceAll("[^a-zA-Z0-9]", "") +
                "_" + randomTableSuffix();

        long rowCount = (long) computeScalar(session, "SELECT count(*) FROM " + sourceRelation);

        if (ctas) {
            assertUpdate(
                    session,
                    "CREATE TABLE " + tableName + " WITH (partitioning = ARRAY[" + partitioning + "]) " +
                            "AS SELECT * FROM " + sourceRelation,
                    rowCount);
        }
        else {
            assertUpdate(
                    session,
                    "CREATE TABLE " + tableName + " WITH (partitioning = ARRAY[" + partitioning + "]) " +
                            "AS SELECT * FROM " + sourceRelation + " WITH NO DATA",
                    0);
            // Use source table big enough so that there will be multiple pages being written.
            assertUpdate(session, "INSERT INTO " + tableName + " SELECT * FROM " + sourceRelation, rowCount);
        }

        // verify written data
        assertThat(query(session, "TABLE " + tableName))
                .skippingTypesCheck()
                .matches("SELECT * FROM " + sourceRelation);

        // verify data files, i.e. repartitioning took place
        assertThat(query(session, "SELECT count(*) FROM \"" + tableName + "$files\""))
                .matches("VALUES BIGINT '" + expectedFiles + "'");

        assertUpdate(session, "DROP TABLE " + tableName);
    }
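    // Iceberg keeps per-file min/max column statistics in manifests, so filters on non-partition
    // columns can still prune whole files at split-generation time when the requested value falls
    // outside a file's recorded range, as the next test exercises.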
    @Test(dataProvider = "testDataMappingSmokeTestDataProvider")
    public void testSplitPruningForFilterOnNonPartitionColumn(DataMappingTestSetup testSetup)
    {
        if (testSetup.isUnsupportedType()) {
            return;
        }
        try (TestTable table = new TestTable(getQueryRunner()::execute, "test_split_pruning_non_partitioned", "(row_id int, col " + testSetup.getTrinoTypeName() + ")")) {
            String tableName = table.getName();
            String sampleValue = testSetup.getSampleValueLiteral();
            String highValue = testSetup.getHighValueLiteral();
            // Insert separately to ensure two files with one value each
            assertUpdate("INSERT INTO " + tableName + " VALUES (1, " + sampleValue + ")", 1);
            assertUpdate("INSERT INTO " + tableName + " VALUES (2, " + highValue + ")", 1);
            assertQuery("select count(*) from \"" + tableName + "$files\"", "VALUES 2");

            int expectedSplitCount = supportsIcebergFileStatistics(testSetup.getTrinoTypeName()) ? 1 : 2;
            verifySplitCount("SELECT row_id FROM " + tableName, 2);
            verifySplitCount("SELECT row_id FROM " + tableName + " WHERE col = " + sampleValue, expectedSplitCount);
            verifySplitCount("SELECT row_id FROM " + tableName + " WHERE col = " + highValue, expectedSplitCount);

            // ORC max timestamp statistics are truncated to millisecond precision and then appended with 999 microseconds.
            // Therefore, sampleValue and highValue are within the max timestamp & there will be 2 splits.
            verifySplitCount("SELECT row_id FROM " + tableName + " WHERE col > " + sampleValue,
                    (format == ORC && testSetup.getTrinoTypeName().contains("timestamp") ? 2 : expectedSplitCount));
            verifySplitCount("SELECT row_id FROM " + tableName + " WHERE col < " + highValue,
                    (format == ORC && testSetup.getTrinoTypeName().contains("timestamp") ? 2 : expectedSplitCount));
        }
    }

    @Test
    public void testGetIcebergTableProperties()
    {
        assertUpdate("CREATE TABLE test_iceberg_get_table_props (x BIGINT)");
        assertThat(query("SELECT * FROM \"test_iceberg_get_table_props$properties\""))
                .matches(format("VALUES (VARCHAR 'write.format.default', VARCHAR '%s')", format.name()));
        dropTable("test_iceberg_get_table_props");
    }

    protected abstract boolean supportsIcebergFileStatistics(String typeName);
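    // Beyond file-level pruning, ORC stripes and Parquet row groups carry their own min/max statistics;
    // the next test checks that a pushed-down predicate also reduces the data actually read within a file.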
    @Test(dataProvider = "testDataMappingSmokeTestDataProvider")
    public void testSplitPruningFromDataFileStatistics(DataMappingTestSetup testSetup)
    {
        if (testSetup.isUnsupportedType()) {
            return;
        }
        try (TestTable table = new TestTable(
                getQueryRunner()::execute,
                "test_split_pruning_data_file_statistics",
                // Random double is needed to make sure rows are different. Otherwise compression may deduplicate rows, resulting in only one row group
                "(col " + testSetup.getTrinoTypeName() + ", r double)")) {
            String tableName = table.getName();
            String values = Stream.concat(
                    nCopies(100, testSetup.getSampleValueLiteral()).stream(),
                    nCopies(100, testSetup.getHighValueLiteral()).stream())
                    .map(value -> "(" + value + ", rand())")
                    .collect(Collectors.joining(", "));
            assertUpdate(withSmallRowGroups(getSession()), "INSERT INTO " + tableName + " VALUES " + values, 200);

            String query = "SELECT * FROM " + tableName + " WHERE col = " + testSetup.getSampleValueLiteral();
            verifyPredicatePushdownDataRead(query, supportsRowGroupStatistics(testSetup.getTrinoTypeName()));
        }
    }

    protected abstract Session withSmallRowGroups(Session session);

    protected abstract boolean supportsRowGroupStatistics(String typeName);

    private void verifySplitCount(String query, int expectedSplitCount)
    {
        ResultWithQueryId<MaterializedResult> selectAllPartitionsResult = getDistributedQueryRunner().executeWithQueryId(getSession(), query);
        assertEqualsIgnoreOrder(selectAllPartitionsResult.getResult().getMaterializedRows(), computeActual(withoutPredicatePushdown(getSession()), query).getMaterializedRows());
        verifySplitCount(selectAllPartitionsResult.getQueryId(), expectedSplitCount);
    }

    private void verifyPredicatePushdownDataRead(@Language("SQL") String query, boolean supportsPushdown)
    {
        ResultWithQueryId<MaterializedResult> resultWithPredicatePushdown = getDistributedQueryRunner().executeWithQueryId(getSession(), query);
        ResultWithQueryId<MaterializedResult> resultWithoutPredicatePushdown = getDistributedQueryRunner().executeWithQueryId(
                withoutPredicatePushdown(getSession()),
                query);

        DataSize withPushdownDataSize = getOperatorStats(resultWithPredicatePushdown.getQueryId()).getInputDataSize();
        DataSize withoutPushdownDataSize = getOperatorStats(resultWithoutPredicatePushdown.getQueryId()).getInputDataSize();
        if (supportsPushdown) {
            assertThat(withPushdownDataSize).isLessThan(withoutPushdownDataSize);
        }
        else {
            assertThat(withPushdownDataSize).isEqualTo(withoutPushdownDataSize);
        }
    }

    private Session withoutPredicatePushdown(Session session)
    {
        return Session.builder(session)
                .setSystemProperty("allow_pushdown_into_connectors", "false")
                .build();
    }

    private void verifySplitCount(QueryId queryId, long expectedSplitCount)
    {
        checkArgument(expectedSplitCount >= 0);
        OperatorStats operatorStats = getOperatorStats(queryId);
        if (expectedSplitCount > 0) {
            assertThat(operatorStats.getTotalDrivers()).isEqualTo(expectedSplitCount);
            assertThat(operatorStats.getPhysicalInputPositions()).isGreaterThan(0);
        }
        else {
            // expectedSplitCount == 0
            assertThat(operatorStats.getTotalDrivers()).isEqualTo(1);
            assertThat(operatorStats.getPhysicalInputPositions()).isEqualTo(0);
        }
    }

    private OperatorStats getOperatorStats(QueryId queryId)
    {
        try {
            return getDistributedQueryRunner().getCoordinator()
                    .getQueryManager()
                    .getFullQueryInfo(queryId)
                    .getQueryStats()
                    .getOperatorSummaries()
                    .stream()
                    .filter(summary -> summary.getOperatorType().startsWith("TableScan") || summary.getOperatorType().startsWith("Scan"))
                    .collect(onlyElement());
        }
        catch (NoSuchElementException e) {
            throw new RuntimeException("Couldn't find operator summary, probably due to query statistic collection error", e);
        }
    }

    @Override
    protected TestTable createTableWithDefaultColumns()
    {
        throw new SkipException("Iceberg connector does not support column default values");
    }
    @Override
    protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup)
    {
        String typeName = dataMappingTestSetup.getTrinoTypeName();
        if (typeName.equals("tinyint")
                || typeName.equals("smallint")
                || typeName.startsWith("char(")) {
            // These types are not supported by Iceberg
            return Optional.of(dataMappingTestSetup.asUnsupported());
        }

        // According to Iceberg specification all time and timestamp values are stored with microsecond precision.
        if (typeName.equals("time")
                || typeName.equals("timestamp")
                || typeName.equals("timestamp(3) with time zone")) {
            return Optional.of(dataMappingTestSetup.asUnsupported());
        }

        return Optional.of(dataMappingTestSetup);
    }

    @Override
    protected Optional<DataMappingTestSetup> filterCaseSensitiveDataMappingTestData(DataMappingTestSetup dataMappingTestSetup)
    {
        String typeName = dataMappingTestSetup.getTrinoTypeName();
        if (typeName.equals("char(1)")) {
            return Optional.of(dataMappingTestSetup.asUnsupported());
        }
        return Optional.of(dataMappingTestSetup);
    }

    @Test
    public void testAmbiguousColumnsWithDots()
    {
        assertThatThrownBy(() -> assertUpdate("CREATE TABLE ambiguous (\"a.cow\" BIGINT, a ROW(cow BIGINT))"))
                .hasMessage("Invalid schema: multiple fields for name a.cow: 1 and 3");

        assertUpdate("CREATE TABLE ambiguous (\"a.cow\" BIGINT, b ROW(cow BIGINT))");
        assertThatThrownBy(() -> assertUpdate("ALTER TABLE ambiguous RENAME COLUMN b TO a"))
                .hasMessage("Invalid schema: multiple fields for name a.cow: 1 and 3");
        assertUpdate("DROP TABLE ambiguous");

        assertUpdate("CREATE TABLE ambiguous (a ROW(cow BIGINT))");
        assertThatThrownBy(() -> assertUpdate("ALTER TABLE ambiguous ADD COLUMN \"a.cow\" BIGINT"))
                .hasMessage("Cannot add column with ambiguous name: a.cow, use addColumn(parent, name, type)");
        assertUpdate("DROP TABLE ambiguous");
    }

    @Test
    public void testSchemaEvolutionWithDereferenceProjections()
    {
        // Fields are identified uniquely based on unique IDs. If a column is dropped and recreated with the same name, it should not return dropped data.
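        // The old data files still contain the dropped column's values, but the re-added column gets a
        // fresh Iceberg field ID, so readers find no matching field in pre-existing files and return NULL.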
assertUpdate("CREATE TABLE evolve_test (dummy BIGINT, a row(b BIGINT, c VARCHAR))"); assertUpdate("INSERT INTO evolve_test VALUES (1, ROW(1, 'abc'))", 1); assertUpdate("ALTER TABLE evolve_test DROP COLUMN a"); assertUpdate("ALTER TABLE evolve_test ADD COLUMN a ROW(b VARCHAR, c BIGINT)"); assertQuery("SELECT a.b FROM evolve_test", "VALUES NULL"); assertUpdate("DROP TABLE evolve_test"); // Very changing subfield ordering does not revive dropped data assertUpdate("CREATE TABLE evolve_test (dummy BIGINT, a ROW(b BIGINT, c VARCHAR), d BIGINT) with (partitioning = ARRAY['d'])"); assertUpdate("INSERT INTO evolve_test VALUES (1, ROW(2, 'abc'), 3)", 1); assertUpdate("ALTER TABLE evolve_test DROP COLUMN a"); assertUpdate("ALTER TABLE evolve_test ADD COLUMN a ROW(c VARCHAR, b BIGINT)"); assertUpdate("INSERT INTO evolve_test VALUES (4, 5, ROW('def', 6))", 1); assertQuery("SELECT a.b FROM evolve_test WHERE d = 3", "VALUES NULL"); assertQuery("SELECT a.b FROM evolve_test WHERE d = 5", "VALUES 6"); assertUpdate("DROP TABLE evolve_test"); } @Test public void testHighlyNestedData() { assertUpdate("CREATE TABLE nested_data (id INT, row_t ROW(f1 INT, f2 INT, row_t ROW (f1 INT, f2 INT, row_t ROW(f1 INT, f2 INT))))"); assertUpdate("INSERT INTO nested_data VALUES (1, ROW(2, 3, ROW(4, 5, ROW(6, 7)))), (11, ROW(12, 13, ROW(14, 15, ROW(16, 17))))", 2); assertUpdate("INSERT INTO nested_data VALUES (21, ROW(22, 23, ROW(24, 25, ROW(26, 27))))", 1); // Test select projected columns, with and without their parent column assertQuery("SELECT id, row_t.row_t.row_t.f2 FROM nested_data", "VALUES (1, 7), (11, 17), (21, 27)"); assertQuery("SELECT id, row_t.row_t.row_t.f2, CAST(row_t AS JSON) FROM nested_data", "VALUES (1, 7, '{\"f1\":2,\"f2\":3,\"row_t\":{\"f1\":4,\"f2\":5,\"row_t\":{\"f1\":6,\"f2\":7}}}'), " + "(11, 17, '{\"f1\":12,\"f2\":13,\"row_t\":{\"f1\":14,\"f2\":15,\"row_t\":{\"f1\":16,\"f2\":17}}}'), " + "(21, 27, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')"); // Test predicates on immediate child column and deeper nested column assertQuery("SELECT id, CAST(row_t.row_t.row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 = 27", "VALUES (21, '{\"f1\":26,\"f2\":27}')"); assertQuery("SELECT id, CAST(row_t.row_t.row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 > 20", "VALUES (21, '{\"f1\":26,\"f2\":27}')"); assertQuery("SELECT id, CAST(row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 = 27", "VALUES (21, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')"); assertQuery("SELECT id, CAST(row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 > 20", "VALUES (21, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')"); // Test predicates on parent columns assertQuery("SELECT id, row_t.row_t.row_t.f1 FROM nested_data WHERE row_t.row_t.row_t = ROW(16, 17)", "VALUES (11, 16)"); assertQuery("SELECT id, row_t.row_t.row_t.f1 FROM nested_data WHERE row_t = ROW(22, 23, ROW(24, 25, ROW(26, 27)))", "VALUES (21, 26)"); assertUpdate("DROP TABLE IF EXISTS nested_data"); } @Test public void testProjectionPushdownAfterRename() { assertUpdate("CREATE TABLE projection_pushdown_after_rename (id INT, a ROW(b INT, c ROW (d INT)))"); assertUpdate("INSERT INTO projection_pushdown_after_rename VALUES (1, ROW(2, ROW(3))), (11, ROW(12, ROW(13)))", 2); assertUpdate("INSERT INTO projection_pushdown_after_rename VALUES (21, ROW(22, ROW(23)))", 1); String expected = "VALUES (11, JSON 
'{\"b\":12,\"c\":{\"d\":13}}', 13)"; assertQuery("SELECT id, CAST(a AS JSON), a.c.d FROM projection_pushdown_after_rename WHERE a.b = 12", expected); assertUpdate("ALTER TABLE projection_pushdown_after_rename RENAME COLUMN a TO row_t"); assertQuery("SELECT id, CAST(row_t AS JSON), row_t.c.d FROM projection_pushdown_after_rename WHERE row_t.b = 12", expected); assertUpdate("DROP TABLE IF EXISTS projection_pushdown_after_rename"); } @Test public void testProjectionWithCaseSensitiveField() { assertUpdate("CREATE TABLE projection_with_case_sensitive_field (id INT, a ROW(\"UPPER_CASE\" INT, \"lower_case\" INT, \"MiXeD_cAsE\" INT))"); assertUpdate("INSERT INTO projection_with_case_sensitive_field VALUES (1, ROW(2, 3, 4)), (5, ROW(6, 7, 8))", 2); String expected = "VALUES (2, 3, 4), (6, 7, 8)"; assertQuery("SELECT a.UPPER_CASE, a.lower_case, a.MiXeD_cAsE FROM projection_with_case_sensitive_field", expected); assertQuery("SELECT a.upper_case, a.lower_case, a.mixed_case FROM projection_with_case_sensitive_field", expected); assertQuery("SELECT a.UPPER_CASE, a.LOWER_CASE, a.MIXED_CASE FROM projection_with_case_sensitive_field", expected); assertUpdate("DROP TABLE IF EXISTS projection_with_case_sensitive_field"); } @Test public void testProjectionPushdownReadsLessData() { String largeVarchar = "ZZZ".repeat(1000); assertUpdate("CREATE TABLE projection_pushdown_reads_less_data (id INT, a ROW(b VARCHAR, c INT))"); assertUpdate( format("INSERT INTO projection_pushdown_reads_less_data VALUES (1, ROW('%s', 3)), (11, ROW('%1$s', 13)), (21, ROW('%1$s', 23)), (31, ROW('%1$s', 33))", largeVarchar), 4); String selectQuery = "SELECT a.c FROM projection_pushdown_reads_less_data"; Set<Integer> expected = ImmutableSet.of(3, 13, 23, 33); Session sessionWithoutPushdown = Session.builder(getSession()) .setCatalogSessionProperty(ICEBERG_CATALOG, "projection_pushdown_enabled", "false") .build(); assertQueryStats( getSession(), selectQuery, statsWithPushdown -> { DataSize processedDataSizeWithPushdown = statsWithPushdown.getProcessedInputDataSize(); assertQueryStats( sessionWithoutPushdown, selectQuery, statsWithoutPushdown -> assertThat(statsWithoutPushdown.getProcessedInputDataSize()).isGreaterThan(processedDataSizeWithPushdown), results -> assertEquals(results.getOnlyColumnAsSet(), expected)); }, results -> assertEquals(results.getOnlyColumnAsSet(), expected)); assertUpdate("DROP TABLE IF EXISTS projection_pushdown_reads_less_data"); } @Test public void testProjectionPushdownOnPartitionedTables() { assertUpdate("CREATE TABLE table_with_partition_at_beginning (id BIGINT, root ROW(f1 BIGINT, f2 BIGINT)) WITH (partitioning = ARRAY['id'])"); assertUpdate("INSERT INTO table_with_partition_at_beginning VALUES (1, ROW(1, 2)), (1, ROW(2, 3)), (1, ROW(3, 4))", 3); assertQuery("SELECT id, root.f2 FROM table_with_partition_at_beginning", "VALUES (1, 2), (1, 3), (1, 4)"); assertUpdate("DROP TABLE table_with_partition_at_beginning"); assertUpdate("CREATE TABLE table_with_partition_at_end (root ROW(f1 BIGINT, f2 BIGINT), id BIGINT) WITH (partitioning = ARRAY['id'])"); assertUpdate("INSERT INTO table_with_partition_at_end VALUES (ROW(1, 2), 1), (ROW(2, 3), 1), (ROW(3, 4), 1)", 3); assertQuery("SELECT root.f2, id FROM table_with_partition_at_end", "VALUES (2, 1), (3, 1), (4, 1)"); assertUpdate("DROP TABLE table_with_partition_at_end"); } @Test public void testProjectionPushdownOnPartitionedTableWithComments() { assertUpdate("CREATE TABLE test_projection_pushdown_comments (id BIGINT COMMENT 'id', qid BIGINT COMMENT 'QID', root ROW(f1 
    @Test
    public void testProjectionPushdownOnPartitionedTableWithComments()
    {
        assertUpdate("CREATE TABLE test_projection_pushdown_comments (id BIGINT COMMENT 'id', qid BIGINT COMMENT 'QID', root ROW(f1 BIGINT, f2 BIGINT) COMMENT 'root') WITH (partitioning = ARRAY['id'])");
        assertUpdate("INSERT INTO test_projection_pushdown_comments VALUES (1, 1, ROW(1, 2)), (1, 2, ROW(2, 3)), (1, 3, ROW(3, 4))", 3);

        assertQuery("SELECT id, root.f2 FROM test_projection_pushdown_comments", "VALUES (1, 2), (1, 3), (1, 4)");
        // Query with predicates on both nested and top-level columns (with partition column)
        assertQuery("SELECT id, root.f2 FROM test_projection_pushdown_comments WHERE id = 1 AND qid = 1 AND root.f1 = 1", "VALUES (1, 2)");
        // Query with predicates on both nested and top-level columns (no partition column)
        assertQuery("SELECT id, root.f2 FROM test_projection_pushdown_comments WHERE qid = 2 AND root.f1 = 2", "VALUES (1, 3)");
        // Query with predicates on top-level columns only
        assertQuery("SELECT id, root.f2 FROM test_projection_pushdown_comments WHERE id = 1 AND qid = 1", "VALUES (1, 2)");
        // Query with predicates on nested columns only
        assertQuery("SELECT id, root.f2 FROM test_projection_pushdown_comments WHERE root.f1 = 2", "VALUES (1, 3)");

        assertUpdate("DROP TABLE IF EXISTS test_projection_pushdown_comments");
    }
    @Test(dataProvider = "tableFormatVersion")
    public void testOptimize(int formatVersion)
            throws Exception
    {
        String tableName = "test_optimize_" + randomTableSuffix();
        assertUpdate("CREATE TABLE " + tableName + " (key integer, value varchar) WITH (format_version = " + formatVersion + ")");

        // DistributedQueryRunner sets node-scheduler.include-coordinator by default, so include coordinator
        int workerCount = getQueryRunner().getNodeCount();

        // optimize an empty table
        assertQuerySucceeds("ALTER TABLE " + tableName + " EXECUTE OPTIMIZE");
        assertThat(getActiveFiles(tableName)).isEmpty();

        assertUpdate("INSERT INTO " + tableName + " VALUES (11, 'eleven')", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES (12, 'zwölf')", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES (13, 'trzynaście')", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES (14, 'quatorze')", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES (15, 'пʼятнадцять')", 1);

        List<String> initialFiles = getActiveFiles(tableName);
        assertThat(initialFiles)
                .hasSize(5)
                // Verify we have sufficiently many test rows with respect to worker count.
                .hasSizeGreaterThan(workerCount);

        computeActual("ALTER TABLE " + tableName + " EXECUTE OPTIMIZE");
        assertThat(query("SELECT sum(key), listagg(value, ' ') WITHIN GROUP (ORDER BY key) FROM " + tableName))
                .matches("VALUES (BIGINT '65', VARCHAR 'eleven zwölf trzynaście quatorze пʼятнадцять')");
        List<String> updatedFiles = getActiveFiles(tableName);
        assertThat(updatedFiles)
                .hasSizeBetween(1, workerCount)
                .doesNotContainAnyElementsOf(initialFiles);
        // No files should be removed (this is expire_snapshots's job, when it exists)
        assertThat(getAllDataFilesFromTableDirectory(tableName))
                .containsExactlyInAnyOrderElementsOf(concat(initialFiles, updatedFiles));

        // optimize with a low file_size_threshold; every file is already larger than 33B, so nothing should change
        computeActual("ALTER TABLE " + tableName + " EXECUTE OPTIMIZE (file_size_threshold => '33B')");
        assertThat(query("SELECT sum(key), listagg(value, ' ') WITHIN GROUP (ORDER BY key) FROM " + tableName))
                .matches("VALUES (BIGINT '65', VARCHAR 'eleven zwölf trzynaście quatorze пʼятнадцять')");
        assertThat(getActiveFiles(tableName)).isEqualTo(updatedFiles);
        assertThat(getAllDataFilesFromTableDirectory(tableName))
                .containsExactlyInAnyOrderElementsOf(concat(initialFiles, updatedFiles));

        // optimize with delimited procedure name
        assertQueryFails("ALTER TABLE " + tableName + " EXECUTE \"optimize\"", "Procedure optimize not registered for catalog iceberg");
        assertUpdate("ALTER TABLE " + tableName + " EXECUTE \"OPTIMIZE\"");
        // optimize with delimited parameter name (and procedure name)
        assertUpdate("ALTER TABLE " + tableName + " EXECUTE \"OPTIMIZE\" (\"file_size_threshold\" => '33B')");
        // TODO (https://github.com/trinodb/trino/issues/11326) this should fail
        assertUpdate("ALTER TABLE " + tableName + " EXECUTE \"OPTIMIZE\" (\"FILE_SIZE_THRESHOLD\" => '33B')");
        assertUpdate("DROP TABLE " + tableName);
    }
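    // Hypothetical sketch (not part of the original suite): the "nothing should change" branch of
    // testOptimize generalizes to a standalone no-op check. Only helpers already defined in this
    // class are used; the table name is illustrative.
    @Test
    public void testOptimizeWithTinyFileSizeThresholdIsNoOp()
    {
        String tableName = "test_optimize_noop_" + randomTableSuffix();
        assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM nation", 25);
        List<String> files = getActiveFiles(tableName);
        // every data file written by the CTAS is larger than 33B, so nothing qualifies for compaction
        computeActual("ALTER TABLE " + tableName + " EXECUTE OPTIMIZE (file_size_threshold => '33B')");
        assertThat(getActiveFiles(tableName)).isEqualTo(files);
        assertUpdate("DROP TABLE " + tableName);
    }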
    @Test(dataProvider = "tableFormatVersion")
    public void testOptimizeForPartitionedTable(int formatVersion)
            throws IOException
    {
        // This test will have its own session to make sure partitioning is indeed forced and is not a result
        // of session configuration
        Session session = testSessionBuilder()
                .setCatalog(getQueryRunner().getDefaultSession().getCatalog())
                .setSchema(getQueryRunner().getDefaultSession().getSchema())
                .setSystemProperty("use_preferred_write_partitioning", "true")
                .setSystemProperty("preferred_write_partitioning_min_number_of_partitions", "100")
                .build();
        String tableName = "test_repartitioning_during_optimize_" + randomTableSuffix();
        assertUpdate(session, "CREATE TABLE " + tableName + " (key varchar, value integer) WITH (format_version = " + formatVersion + ", partitioning = ARRAY['key'])");
        // optimize an empty table
        assertQuerySucceeds(session, "ALTER TABLE " + tableName + " EXECUTE OPTIMIZE");

        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 1)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 2)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 3)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 4)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 5)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 6)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 7)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('two', 8)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('two', 9)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('three', 10)", 1);

        List<String> initialFiles = getActiveFiles(tableName);
        assertThat(initialFiles).hasSize(10);

        computeActual(session, "ALTER TABLE " + tableName + " EXECUTE OPTIMIZE");

        assertThat(query(session, "SELECT sum(value), listagg(key, ' ') WITHIN GROUP (ORDER BY key) FROM " + tableName))
                .matches("VALUES (BIGINT '55', VARCHAR 'one one one one one one one three two two')");

        List<String> updatedFiles = getActiveFiles(tableName);
        // as we force repartitioning there should be only 3 partitions
        assertThat(updatedFiles).hasSize(3);
        assertThat(getAllDataFilesFromTableDirectory(tableName)).containsExactlyInAnyOrderElementsOf(concat(initialFiles, updatedFiles));

        assertUpdate("DROP TABLE " + tableName);
    }

    @DataProvider
    public Object[][] tableFormatVersion()
    {
        return IntStream.rangeClosed(IcebergConfig.FORMAT_VERSION_SUPPORT_MIN, IcebergConfig.FORMAT_VERSION_SUPPORT_MAX).boxed()
                .collect(DataProviders.toDataProvider());
    }

    @Test
    public void testOptimizeTableAfterDeleteWithFormatVersion2()
    {
        String tableName = "test_optimize_" + randomTableSuffix();
        assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM nation", 25);

        List<String> initialFiles = getActiveFiles(tableName);

        assertUpdate("DELETE FROM " + tableName + " WHERE nationkey = 7", 1);

        // Verify that delete files exist
        assertQuery(
                "SELECT summary['total-delete-files'] FROM \"" + tableName + "$snapshots\" WHERE snapshot_id = " + getCurrentSnapshotId(tableName),
                "VALUES '1'");

        computeActual("ALTER TABLE " + tableName + " EXECUTE OPTIMIZE");

        List<String> updatedFiles = getActiveFiles(tableName);
        assertThat(updatedFiles)
                .hasSize(1)
                .isNotEqualTo(initialFiles);

        assertThat(query("SELECT * FROM " + tableName))
                .matches("SELECT * FROM nation WHERE nationkey != 7");

        assertUpdate("DROP TABLE " + tableName);
    }

    private List<String> getActiveFiles(String tableName)
    {
        return computeActual(format("SELECT file_path FROM \"%s$files\"", tableName)).getOnlyColumn()
                .map(String.class::cast)
                .collect(toImmutableList());
    }

    private List<String> getAllDataFilesFromTableDirectory(String tableName)
            throws IOException
    {
        String schema = getSession().getSchema().orElseThrow();
        Path tableDataDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().resolve("iceberg_data").resolve(schema).resolve(tableName).resolve("data");
        try (Stream<Path> walk = Files.walk(tableDataDir)) {
            return walk
                    .filter(Files::isRegularFile)
                    .filter(path -> !path.getFileName().toString().matches("\\..*\\.crc"))
                    .map(Path::toString)
                    .collect(toImmutableList());
        }
    }

    @Test
    public void testOptimizeParameterValidation()
    {
        assertQueryFails(
                "ALTER TABLE no_such_table_exists EXECUTE OPTIMIZE",
                "\\Qline 1:1: Table 'iceberg.tpch.no_such_table_exists' does not exist");
        assertQueryFails(
                "ALTER TABLE nation EXECUTE OPTIMIZE (file_size_threshold => '33')",
                "\\QUnable to set catalog 'iceberg' table procedure 'OPTIMIZE' property 'file_size_threshold' to ['33']: size is not a valid data size string: 33");
        assertQueryFails(
                "ALTER TABLE nation EXECUTE OPTIMIZE (file_size_threshold => '33s')",
                "\\QUnable to set catalog 'iceberg' table procedure 'OPTIMIZE' property 'file_size_threshold' to ['33s']: Unknown unit: s");
    }
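    // Note: the "tableFormatVersion" provider above expands to one test invocation per supported
    // Iceberg format version (the v1 and v2 tables used throughout this class), so OPTIMIZE is
    // exercised both without and with delete files in play.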
    @Test
    public void testTargetMaxFileSize()
    {
        String tableName = "test_default_max_file_size" + randomTableSuffix();
        @Language("SQL") String createTableSql = format("CREATE TABLE %s AS SELECT * FROM tpch.sf1.lineitem LIMIT 100000", tableName);

        Session session = Session.builder(getSession())
                .setSystemProperty("task_writer_count", "1")
                .build();
        assertUpdate(session, createTableSql, 100000);
        List<String> initialFiles = getActiveFiles(tableName);
        assertThat(initialFiles.size()).isLessThanOrEqualTo(3);
        assertUpdate(format("DROP TABLE %s", tableName));

        DataSize maxSize = DataSize.of(40, DataSize.Unit.KILOBYTE);
        session = Session.builder(getSession())
                .setSystemProperty("task_writer_count", "1")
                .setCatalogSessionProperty("iceberg", "target_max_file_size", maxSize.toString())
                .build();

        assertUpdate(session, createTableSql, 100000);
        assertThat(query(format("SELECT count(*) FROM %s", tableName))).matches("VALUES BIGINT '100000'");
        List<String> updatedFiles = getActiveFiles(tableName);
        assertThat(updatedFiles.size()).isGreaterThan(10);

        computeActual(format("SELECT file_size_in_bytes FROM \"%s$files\"", tableName))
                .getMaterializedRows()
                // because target_max_file_size is set to quite a low value, the files actually written can end up
                // somewhat bigger, so to be safe we only check that they are not much bigger
                .forEach(row -> assertThat((Long) row.getField(0)).isBetween(1L, maxSize.toBytes() * 3));
    }

    @Test
    public void testDroppingIcebergAndCreatingANewTableWithTheSameNameShouldBePossible()
    {
        assertUpdate("CREATE TABLE test_iceberg_recreate (a_int) AS VALUES (1)", 1);
        assertThat(query("SELECT min(a_int) FROM test_iceberg_recreate")).matches("VALUES 1");
        dropTable("test_iceberg_recreate");

        assertUpdate("CREATE TABLE test_iceberg_recreate (a_varchar) AS VALUES ('Trino')", 1);
        assertThat(query("SELECT min(a_varchar) FROM test_iceberg_recreate")).matches("VALUES CAST('Trino' AS varchar)");
        dropTable("test_iceberg_recreate");
    }

    @Test
    public void testPathHiddenColumn()
    {
        String tableName = "test_path_" + randomTableSuffix();
        @Language("SQL") String createTable = "CREATE TABLE " + tableName + " " +
                "WITH ( partitioning = ARRAY['zip'] ) AS " +
                "SELECT * FROM (VALUES " +
                "(0, 0), (3, 0), (6, 0), " +
                "(1, 1), (4, 1), (7, 1), " +
                "(2, 2), (5, 2) " +
                " ) t(userid, zip)";
        assertUpdate(createTable, 8);

        MaterializedResult expectedColumns = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR)
                .row("userid", "integer", "", "")
                .row("zip", "integer", "", "")
                .build();
        MaterializedResult actualColumns = computeActual(format("DESCRIBE %s", tableName));
        // Describe output should not have the $path hidden column
        assertEquals(actualColumns, expectedColumns);

        assertThat(query("SELECT file_path FROM \"" + tableName + "$files\""))
                .matches("SELECT DISTINCT \"$path\" as file_path FROM " + tableName);

        String somePath = (String) computeScalar("SELECT \"$path\" FROM " + tableName + " WHERE userid = 2");
        assertThat(query("SELECT userid FROM " + tableName + " WHERE \"$path\" = '" + somePath + "'"))
                .matches("VALUES 2, 5");
        assertThat(query("SELECT userid FROM " + tableName + " WHERE \"$path\" = '" + somePath + "' AND userid > 0"))
                .matches("VALUES 2, 5");

        assertUpdate("DROP TABLE " + tableName);
    }
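    // Note: beyond assertions, the "$path" hidden column exercised above is handy for ad-hoc
    // debugging. A hypothetical example (illustrative only): per-file row counts for a table t
    // can be read with
    //   SELECT "$path", count(*) FROM t GROUP BY 1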
TABLE " + tableName + " EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '0s')"); assertThat(query("SELECT sum(value), listagg(key, ' ') WITHIN GROUP (ORDER BY key) FROM " + tableName)) .matches("VALUES (BIGINT '3', VARCHAR 'one two')"); List<String> updatedFiles = getAllMetadataFilesFromTableDirectoryForTable(tableName); List<Long> updatedSnapshots = getSnapshotIds(tableName); assertThat(updatedFiles.size()).isEqualTo(initialFiles.size() - 1); assertThat(updatedSnapshots.size()).isLessThan(initialSnapshots.size()); assertThat(updatedSnapshots.size()).isEqualTo(1); assertThat(initialSnapshots).containsAll(updatedSnapshots); } @Test public void testExpireSnapshotsPartitionedTable() throws Exception { String tableName = "test_expiring_snapshots_partitioned_table" + randomTableSuffix(); Session sessionWithShortRetentionUnlocked = prepareCleanUpSession(); assertUpdate("CREATE TABLE " + tableName + " (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col1'])"); assertUpdate("INSERT INTO " + tableName + " VALUES(1, 100), (1, 101), (1, 102), (2, 200), (2, 201), (3, 300)", 6); assertUpdate("DELETE FROM " + tableName + " WHERE col1 = 1", 3); assertUpdate("INSERT INTO " + tableName + " VALUES(4, 400)", 1); assertQuery("SELECT sum(col2) FROM " + tableName, "SELECT 1101"); List<String> initialDataFiles = getAllDataFilesFromTableDirectory(tableName); List<Long> initialSnapshots = getSnapshotIds(tableName); assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '0s')"); List<String> updatedDataFiles = getAllDataFilesFromTableDirectory(tableName); List<Long> updatedSnapshots = getSnapshotIds(tableName); assertQuery("SELECT sum(col2) FROM " + tableName, "SELECT 1101"); assertThat(updatedDataFiles.size()).isLessThan(initialDataFiles.size()); assertThat(updatedSnapshots.size()).isLessThan(initialSnapshots.size()); } @Test public void testExplainExpireSnapshotOutput() { String tableName = "test_expiring_snapshots_output" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer) WITH (partitioning = ARRAY['key'])"); assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1); assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1); assertExplain("EXPLAIN ALTER TABLE " + tableName + " EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '0s')", "SimpleTableExecute\\[iceberg:schemaTableName:tpch.test_expiring_snapshots.*\\{retentionThreshold=0\\.00s}.*"); } @Test public void testExpireSnapshotsParameterValidation() { assertQueryFails( "ALTER TABLE no_such_table_exists EXECUTE EXPIRE_SNAPSHOTS", "\\Qline 1:1: Table 'iceberg.tpch.no_such_table_exists' does not exist"); assertQueryFails( "ALTER TABLE nation EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '33')", "\\QUnable to set catalog 'iceberg' table procedure 'EXPIRE_SNAPSHOTS' property 'retention_threshold' to ['33']: duration is not a valid data duration string: 33"); assertQueryFails( "ALTER TABLE nation EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '33mb')", "\\QUnable to set catalog 'iceberg' table procedure 'EXPIRE_SNAPSHOTS' property 'retention_threshold' to ['33mb']: Unknown time unit: mb"); assertQueryFails( "ALTER TABLE nation EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '33s')", "\\QRetention specified (33.00s) is shorter than the minimum retention configured in the system (7.00d). 
    @Test
    public void testExpireSnapshotsParameterValidation()
    {
        assertQueryFails(
                "ALTER TABLE no_such_table_exists EXECUTE EXPIRE_SNAPSHOTS",
                "\\Qline 1:1: Table 'iceberg.tpch.no_such_table_exists' does not exist");
        assertQueryFails(
                "ALTER TABLE nation EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '33')",
                "\\QUnable to set catalog 'iceberg' table procedure 'EXPIRE_SNAPSHOTS' property 'retention_threshold' to ['33']: duration is not a valid data duration string: 33");
        assertQueryFails(
                "ALTER TABLE nation EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '33mb')",
                "\\QUnable to set catalog 'iceberg' table procedure 'EXPIRE_SNAPSHOTS' property 'retention_threshold' to ['33mb']: Unknown time unit: mb");
        assertQueryFails(
                "ALTER TABLE nation EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '33s')",
                "\\QRetention specified (33.00s) is shorter than the minimum retention configured in the system (7.00d). Minimum retention can be changed with iceberg.expire_snapshots.min-retention configuration property or iceberg.expire_snapshots_min_retention session property");
    }

    @Test
    public void testRemoveOrphanFiles()
            throws Exception
    {
        String tableName = "test_deleting_orphan_files_unnecessary_files" + randomTableSuffix();
        Session sessionWithShortRetentionUnlocked = prepareCleanUpSession();
        assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer)");
        assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1);
        Path orphanFile = Files.createFile(Path.of(getIcebergTableDataPath(tableName).toString(), "invalidData." + format));
        List<String> initialDataFiles = getAllDataFilesFromTableDirectory(tableName);

        assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '0s')");

        List<String> updatedDataFiles = getAllDataFilesFromTableDirectory(tableName);
        assertThat(updatedDataFiles.size()).isLessThan(initialDataFiles.size());
        assertThat(updatedDataFiles).doesNotContain(orphanFile.toString());
    }

    @Test
    public void testIfRemoveOrphanFilesCleansUnnecessaryDataFilesInPartitionedTable()
            throws Exception
    {
        String tableName = "test_deleting_orphan_files_unnecessary_files" + randomTableSuffix();
        Session sessionWithShortRetentionUnlocked = prepareCleanUpSession();
        assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer) WITH (partitioning = ARRAY['key'])");
        assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1);
        Path orphanFile = Files.createFile(Path.of(getIcebergTableDataPath(tableName) + "/key=one/", "invalidData." + format));
        List<String> initialDataFiles = getAllDataFilesFromTableDirectory(tableName);

        assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '0s')");

        List<String> updatedDataFiles = getAllDataFilesFromTableDirectory(tableName);
        assertThat(updatedDataFiles.size()).isLessThan(initialDataFiles.size());
        assertThat(updatedDataFiles).doesNotContain(orphanFile.toString());
    }
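    // Note: the orphan-file tests plant their orphan by creating "invalidData.<format>" directly
    // in the table's data (or, below, metadata) directory, bypassing Iceberg's commit path. The
    // file is then unreachable from any snapshot, which is exactly what REMOVE_ORPHAN_FILES must
    // detect and delete while leaving every tracked file in place.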
    @Test
    public void testIfRemoveOrphanFilesCleansUnnecessaryMetadataFilesInPartitionedTable()
            throws Exception
    {
        String tableName = "test_deleting_orphan_files_unnecessary_files" + randomTableSuffix();
        Session sessionWithShortRetentionUnlocked = prepareCleanUpSession();
        assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer) WITH (partitioning = ARRAY['key'])");
        assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1);
        Path orphanMetadataFile = Files.createFile(Path.of(getIcebergTableMetadataPath(tableName).toString(), "invalidData." + format));
        List<String> initialMetadataFiles = getAllMetadataFilesFromTableDirectoryForTable(tableName);

        assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '0s')");

        List<String> updatedMetadataFiles = getAllMetadataFilesFromTableDirectoryForTable(tableName);
        assertThat(updatedMetadataFiles.size()).isLessThan(initialMetadataFiles.size());
        assertThat(updatedMetadataFiles).doesNotContain(orphanMetadataFile.toString());
    }

    @Test
    public void testCleaningUpWithTableWithSpecifiedLocationWithSlashAtTheEnd()
            throws IOException
    {
        testCleaningUpWithTableWithSpecifiedLocation("/");
    }

    @Test
    public void testCleaningUpWithTableWithSpecifiedLocationWithoutSlashAtTheEnd()
            throws IOException
    {
        testCleaningUpWithTableWithSpecifiedLocation("");
    }

    private void testCleaningUpWithTableWithSpecifiedLocation(String suffix)
            throws IOException
    {
        File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile();
        String tempDirPath = tempDir.toURI().toASCIIString() + randomTableSuffix() + suffix;
        String tableName = "test_table_cleaning_up_with_location" + randomTableSuffix();

        assertUpdate(format("CREATE TABLE %s (key varchar, value integer) WITH(location = '%s')", tableName, tempDirPath));
        assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1);

        List<String> initialFiles = getAllMetadataFilesFromTableDirectory(tempDirPath);
        List<Long> initialSnapshots = getSnapshotIds(tableName);

        Session sessionWithShortRetentionUnlocked = prepareCleanUpSession();
        assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '0s')");
        assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '0s')");
        List<String> updatedFiles = getAllMetadataFilesFromTableDirectory(tempDirPath);
        List<Long> updatedSnapshots = getSnapshotIds(tableName);
        assertThat(updatedFiles.size()).isEqualTo(initialFiles.size() - 1);
        assertThat(updatedSnapshots.size()).isLessThan(initialSnapshots.size());
        assertThat(updatedSnapshots.size()).isEqualTo(1);
        assertThat(initialSnapshots).containsAll(updatedSnapshots);
    }

    @Test
    public void testExplainRemoveOrphanFilesOutput()
    {
        String tableName = "test_remove_orphan_files_output" + randomTableSuffix();
        assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer) WITH (partitioning = ARRAY['key'])");
        assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1);

        assertExplain("EXPLAIN ALTER TABLE " + tableName + " EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '0s')",
                "SimpleTableExecute\\[iceberg:schemaTableName:tpch.test_remove_orphan_files.*\\{retentionThreshold=0\\.00s}.*");
    }
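    // Note: the WithSlashAtTheEnd/WithoutSlashAtTheEnd pair above exists because a user-supplied
    // location may or may not end with "/"; both spellings must resolve to the same table
    // directory when snapshots are expired and orphan files are removed.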
    @Test
    public void testRemoveOrphanFilesParameterValidation()
    {
        assertQueryFails(
                "ALTER TABLE no_such_table_exists EXECUTE REMOVE_ORPHAN_FILES",
                "\\Qline 1:1: Table 'iceberg.tpch.no_such_table_exists' does not exist");
        assertQueryFails(
                "ALTER TABLE nation EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '33')",
                "\\QUnable to set catalog 'iceberg' table procedure 'REMOVE_ORPHAN_FILES' property 'retention_threshold' to ['33']: duration is not a valid data duration string: 33");
        assertQueryFails(
                "ALTER TABLE nation EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '33mb')",
                "\\QUnable to set catalog 'iceberg' table procedure 'REMOVE_ORPHAN_FILES' property 'retention_threshold' to ['33mb']: Unknown time unit: mb");
        assertQueryFails(
                "ALTER TABLE nation EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '33s')",
                "\\QRetention specified (33.00s) is shorter than the minimum retention configured in the system (7.00d). Minimum retention can be changed with iceberg.remove_orphan_files.min-retention configuration property or iceberg.remove_orphan_files_min_retention session property");
    }

    @Test
    public void testIfDeletesReturnsNumberOfRemovedRows()
    {
        String tableName = "test_delete_returns_number_of_rows_" + randomTableSuffix();
        assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer) WITH (partitioning = ARRAY['key'])");
        assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 2)", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 3)", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 1)", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1);
        assertUpdate("DELETE FROM " + tableName + " WHERE key = 'one'", 3);
        assertUpdate("DELETE FROM " + tableName + " WHERE key = 'one'"); // TODO change this when iceberg will guarantee to always return this (https://github.com/apache/iceberg/issues/4647)
        assertUpdate("DELETE FROM " + tableName + " WHERE key = 'three'");
        assertUpdate("DELETE FROM " + tableName + " WHERE key = 'two'", 2);
    }

    @Test
    public void testUpdatingFileFormat()
    {
        String tableName = "test_updating_file_format_" + randomTableSuffix();

        assertUpdate("CREATE TABLE " + tableName + " WITH (format = 'orc') AS SELECT * FROM nation WHERE nationkey < 10", "SELECT count(*) FROM nation WHERE nationkey < 10");
        assertQuery("SELECT value FROM \"" + tableName + "$properties\" WHERE key = 'write.format.default'", "VALUES 'ORC'");

        assertUpdate("ALTER TABLE " + tableName + " SET PROPERTIES format = 'parquet'");
        assertQuery("SELECT value FROM \"" + tableName + "$properties\" WHERE key = 'write.format.default'", "VALUES 'PARQUET'");
        assertUpdate("INSERT INTO " + tableName + " SELECT * FROM nation WHERE nationkey >= 10", "SELECT count(*) FROM nation WHERE nationkey >= 10");

        assertQuery("SELECT * FROM " + tableName, "SELECT * FROM nation");
        assertQuery("SELECT count(*) FROM \"" + tableName + "$files\" WHERE file_path LIKE '%.orc'", "VALUES 1");
        assertQuery("SELECT count(*) FROM \"" + tableName + "$files\" WHERE file_path LIKE '%.parquet'", "VALUES 1");

        assertUpdate("DROP TABLE " + tableName);
    }

    @Test
    public void testUpdatingInvalidTableProperty()
    {
        String tableName = "test_updating_invalid_table_property_" + randomTableSuffix();
        assertUpdate("CREATE TABLE " + tableName + " (a INT, b INT)");
        assertThatThrownBy(() -> query("ALTER TABLE " + tableName + " SET PROPERTIES not_a_valid_table_property = 'a value'"))
                .hasMessage("Catalog 'iceberg' table property 'not_a_valid_table_property' does not exist");
        assertUpdate("DROP TABLE " + tableName);
    }

    @Test
    public void testEmptyCreateTableAsSelect()
    {
        String tableName = "test_empty_ctas_" + randomTableSuffix();

        assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM nation WHERE false", 0);
        List<Long> initialTableSnapshots = getSnapshotIds(tableName);
        assertThat(initialTableSnapshots.size())
                .withFailMessage("CTAS operations must create Iceberg snapshot independently whether the selection is empty or not")
                .isEqualTo(1);
        assertQueryReturnsEmptyResult("SELECT * FROM " + tableName);

        assertUpdate("DROP TABLE " + tableName);
    }
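    // Note: the next three tests pin down one invariant from three angles: INSERT, UPDATE and
    // DELETE statements that change no rows must not commit a new Iceberg snapshot, whereas an
    // empty CTAS (above) still creates exactly one snapshot.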
    @Test
    public void testEmptyInsert()
    {
        String tableName = "test_empty_insert_" + randomTableSuffix();

        assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM nation", "SELECT count(*) FROM nation");
        List<Long> initialTableSnapshots = getSnapshotIds(tableName);

        assertUpdate("INSERT INTO " + tableName + " SELECT * FROM nation WHERE false", 0);
        List<Long> updatedTableSnapshots = getSnapshotIds(tableName);

        assertThat(initialTableSnapshots)
                .withFailMessage("INSERT operations that are not changing the state of the table must not cause the creation of a new Iceberg snapshot")
                .hasSize(1)
                .isEqualTo(updatedTableSnapshots);

        assertUpdate("DROP TABLE " + tableName);
    }

    @Test
    public void testEmptyUpdate()
    {
        String tableName = "test_empty_update_" + randomTableSuffix();

        assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM nation", "SELECT count(*) FROM nation");
        List<Long> initialTableSnapshots = getSnapshotIds(tableName);

        assertUpdate("UPDATE " + tableName + " SET comment = 'new comment' WHERE nationkey IS NULL", 0);
        List<Long> updatedTableSnapshots = getSnapshotIds(tableName);

        assertThat(initialTableSnapshots)
                .withFailMessage("UPDATE operations that are not changing the state of the table must not cause the creation of a new Iceberg snapshot")
                .hasSize(1)
                .isEqualTo(updatedTableSnapshots);

        assertUpdate("DROP TABLE " + tableName);
    }

    @Test
    public void testEmptyDelete()
    {
        String tableName = "test_empty_delete_" + randomTableSuffix();

        assertUpdate("CREATE TABLE " + tableName + " WITH (format = '" + format.name() + "') AS SELECT * FROM nation", "SELECT count(*) FROM nation");
        List<Long> initialTableSnapshots = getSnapshotIds(tableName);

        assertUpdate("DELETE FROM " + tableName + " WHERE nationkey IS NULL", 0);
        List<Long> updatedTableSnapshots = getSnapshotIds(tableName);

        assertThat(initialTableSnapshots)
                .withFailMessage("DELETE operations that are not changing the state of the table must not cause the creation of a new Iceberg snapshot")
                .hasSize(1)
                .isEqualTo(updatedTableSnapshots);

        assertUpdate("DROP TABLE " + tableName);
    }

    private Session prepareCleanUpSession()
    {
        return Session.builder(getSession())
                .setCatalogSessionProperty("iceberg", "expire_snapshots_min_retention", "0s")
                .setCatalogSessionProperty("iceberg", "remove_orphan_files_min_retention", "0s")
                .build();
    }

    private List<String> getAllMetadataFilesFromTableDirectoryForTable(String tableName)
            throws IOException
    {
        String schema = getSession().getSchema().orElseThrow();
        Path tableDataDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().resolve("iceberg_data").resolve(schema).resolve(tableName).resolve("metadata");
        return listAllTableFilesInDirectory(tableDataDir);
    }

    private List<String> getAllMetadataFilesFromTableDirectory(String tableDataDir)
            throws IOException
    {
        return listAllTableFilesInDirectory(Path.of(URI.create(tableDataDir).getPath()));
    }

    private List<String> listAllTableFilesInDirectory(Path tableDataPath)
            throws IOException
    {
        try (Stream<Path> walk = Files.walk(tableDataPath)) {
            return walk
                    .filter(Files::isRegularFile)
                    .filter(path -> !path.getFileName().toString().matches("\\..*\\.crc"))
                    .map(Path::toString)
                    .collect(toImmutableList());
        }
    }

    private List<Long> getSnapshotIds(String tableName)
    {
        return getQueryRunner().execute(format("SELECT snapshot_id FROM \"%s$snapshots\"", tableName))
                .getOnlyColumn()
                .map(Long.class::cast)
                .collect(toUnmodifiableList());
    }
DESC LIMIT 1"); } private Path getIcebergTableDataPath(String tableName) { return getIcebergTablePath(tableName, "data"); } private Path getIcebergTableMetadataPath(String tableName) { return getIcebergTablePath(tableName, "metadata"); } private Path getIcebergTablePath(String tableName, String suffix) { String schema = getSession().getSchema().orElseThrow(); return getDistributedQueryRunner().getCoordinator().getBaseDataDir().resolve("iceberg_data").resolve(schema).resolve(tableName).resolve(suffix); } }
plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.plugin.iceberg;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.DataSize;
import io.trino.Session;
import io.trino.metadata.Metadata;
import io.trino.metadata.QualifiedObjectName;
import io.trino.metadata.TableHandle;
import io.trino.operator.OperatorStats;
import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.spi.QueryId;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.Constraint;
import io.trino.spi.connector.ConstraintApplicationResult;
import io.trino.spi.connector.TableNotFoundException;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.TupleDomain;
import io.trino.testing.BaseConnectorTest;
import io.trino.testing.DataProviders;
import io.trino.testing.MaterializedResult;
import io.trino.testing.MaterializedRow;
import io.trino.testing.QueryRunner;
import io.trino.testing.ResultWithQueryId;
import io.trino.testing.TestingConnectorBehavior;
import io.trino.testing.sql.TestTable;
import io.trino.tpch.TpchTable;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.hadoop.fs.FileSystem;
import org.intellij.lang.annotations.Language;
import org.testng.SkipException;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.Set;
import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.LongStream;
import java.util.stream.Stream;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.MoreCollectors.onlyElement;
import static io.trino.SystemSessionProperties.JOIN_DISTRIBUTION_TYPE;
import static io.trino.SystemSessionProperties.PREFERRED_WRITE_PARTITIONING_MIN_NUMBER_OF_PARTITIONS;
import static io.trino.SystemSessionProperties.SCALE_WRITERS;
import static io.trino.SystemSessionProperties.TASK_WRITER_COUNT;
import static io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import static io.trino.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT;
import static io.trino.plugin.iceberg.IcebergFileFormat.ORC;
import static io.trino.plugin.iceberg.IcebergFileFormat.PARQUET;
import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergSplitManager.ICEBERG_DOMAIN_COMPACTION_THRESHOLD;
import static io.trino.spi.predicate.Domain.multipleValues;
import static io.trino.spi.predicate.Domain.singleValue;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.sql.planner.OptimizerConfig.JoinDistributionType.BROADCAST;
import static io.trino.testing.MaterializedResult.resultBuilder;
import static io.trino.testing.QueryAssertions.assertEqualsIgnoreOrder;
import static io.trino.testing.TestingSession.testSessionBuilder;
import static io.trino.testing.assertions.Assert.assertEquals;
import static io.trino.testing.assertions.Assert.assertEventually;
import static io.trino.testing.sql.TestTable.randomTableSuffix;
import static io.trino.tpch.TpchTable.LINE_ITEM;
import static io.trino.transaction.TransactionBuilder.transaction;
import static java.lang.String.format;
import static java.lang.String.join;
import static java.util.Collections.nCopies;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toUnmodifiableList;
import static java.util.stream.IntStream.range;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertTrue;

public abstract class BaseIcebergConnectorTest
        extends BaseConnectorTest
{
    private static final Pattern WITH_CLAUSE_EXTRACTOR = Pattern.compile(".*(WITH\\s*\\([^)]*\\))\\s*$", Pattern.DOTALL);

    private final IcebergFileFormat format;

    protected BaseIcebergConnectorTest(IcebergFileFormat format)
    {
        this.format = requireNonNull(format, "format is null");
    }

    @Override
    protected QueryRunner createQueryRunner()
            throws Exception
    {
        return IcebergQueryRunner.builder()
                .setIcebergProperties(Map.of("iceberg.file-format", format.name()))
                .setInitialTables(ImmutableList.<TpchTable<?>>builder()
                        .addAll(REQUIRED_TPCH_TABLES)
                        .add(LINE_ITEM)
                        .build())
                .build();
    }

    @Override
    protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
    {
        switch (connectorBehavior) {
            case SUPPORTS_TOPN_PUSHDOWN:
                return false;
            case SUPPORTS_CREATE_VIEW:
                return true;
            case SUPPORTS_CREATE_MATERIALIZED_VIEW:
            case SUPPORTS_RENAME_MATERIALIZED_VIEW:
                return true;
            case SUPPORTS_RENAME_MATERIALIZED_VIEW_ACROSS_SCHEMAS:
                return false;
            case SUPPORTS_DELETE:
            case SUPPORTS_UPDATE:
                return true;
            default:
                return super.hasBehavior(connectorBehavior);
        }
    }

    @Override
    protected void verifyVersionedQueryFailurePermissible(Exception e)
    {
        assertThat(e)
                .hasMessageMatching("Version pointer type is not supported: .*|" +
                        "Unsupported type for temporal table version: .*|" +
                        "Unsupported type for table version: .*|" +
                        "No version history table tpch.nation at or before .*|" +
                        "Iceberg snapshot ID does not exists: .*");
    }

    @Override
    protected void verifyConcurrentUpdateFailurePermissible(Exception e)
    {
        assertThat(e).hasMessageContaining("Failed to commit Iceberg update to table");
    }
    @Override
    protected void verifyConcurrentAddColumnFailurePermissible(Exception e)
    {
        assertThat(e)
                .hasMessageContaining("Cannot update Iceberg table: supplied previous location does not match current location");
    }

    @Test
    public void testDeleteOnV1Table()
    {
        try (TestTable table = new TestTable(getQueryRunner()::execute, "test_delete_", "WITH (format_version = 1) AS SELECT * FROM orders")) {
            assertQueryFails("DELETE FROM " + table.getName() + " WHERE custkey <= 100", "Iceberg table updates require at least format version 2");
        }
    }

    @Override
    public void testCharVarcharComparison()
    {
        assertThatThrownBy(super::testCharVarcharComparison)
                .hasMessage("Type not supported for Iceberg: char(3)");
    }

    @Test
    @Override
    public void testShowCreateSchema()
    {
        assertThat(computeActual("SHOW CREATE SCHEMA tpch").getOnlyValue().toString())
                .matches("CREATE SCHEMA iceberg.tpch\n" +
                        "AUTHORIZATION USER user\n" +
                        "WITH \\(\n" +
                        "\\s+location = '.*/iceberg_data/tpch'\n" +
                        "\\)");
    }

    @Override
    @Test
    public void testDescribeTable()
    {
        MaterializedResult expectedColumns = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR)
                .row("orderkey", "bigint", "", "")
                .row("custkey", "bigint", "", "")
                .row("orderstatus", "varchar", "", "")
                .row("totalprice", "double", "", "")
                .row("orderdate", "date", "", "")
                .row("orderpriority", "varchar", "", "")
                .row("clerk", "varchar", "", "")
                .row("shippriority", "integer", "", "")
                .row("comment", "varchar", "", "")
                .build();
        MaterializedResult actualColumns = computeActual("DESCRIBE orders");
        assertEquals(actualColumns, expectedColumns);
    }

    @Override
    @Test
    public void testShowCreateTable()
    {
        File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile();
        assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue())
                .isEqualTo("CREATE TABLE iceberg.tpch.orders (\n" +
                        "   orderkey bigint,\n" +
                        "   custkey bigint,\n" +
                        "   orderstatus varchar,\n" +
                        "   totalprice double,\n" +
                        "   orderdate date,\n" +
                        "   orderpriority varchar,\n" +
                        "   clerk varchar,\n" +
                        "   shippriority integer,\n" +
                        "   comment varchar\n" +
                        ")\n" +
                        "WITH (\n" +
                        "   format = '" + format.name() + "',\n" +
                        "   format_version = 2,\n" +
                        "   location = '" + tempDir + "/iceberg_data/tpch/orders'\n" +
                        ")");
    }

    @Override
    protected void checkInformationSchemaViewsForMaterializedView(String schemaName, String viewName)
    {
        // TODO should probably return materialized view, as it's also a view -- to be double checked
        assertThatThrownBy(() -> super.checkInformationSchemaViewsForMaterializedView(schemaName, viewName))
                .hasMessageFindingMatch("(?s)Expecting.*to contain:.*\\Q[(" + viewName + ")]");
    }

    @Test
    public void testDecimal()
    {
        testDecimalWithPrecisionAndScale(1, 0);
        testDecimalWithPrecisionAndScale(8, 6);
        testDecimalWithPrecisionAndScale(9, 8);
        testDecimalWithPrecisionAndScale(10, 8);
        testDecimalWithPrecisionAndScale(18, 1);
        testDecimalWithPrecisionAndScale(18, 8);
        testDecimalWithPrecisionAndScale(18, 17);
        testDecimalWithPrecisionAndScale(17, 16);
        testDecimalWithPrecisionAndScale(18, 17);
        testDecimalWithPrecisionAndScale(24, 10);
        testDecimalWithPrecisionAndScale(30, 10);
        testDecimalWithPrecisionAndScale(37, 26);
        testDecimalWithPrecisionAndScale(38, 37);
        testDecimalWithPrecisionAndScale(38, 17);
        testDecimalWithPrecisionAndScale(38, 37);
    }
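    // Worked example for the helper below (derived from its substring arithmetic): with precision 10
    // and scale 8, beforeTheDecimalPoint = "12" and afterTheDecimalPoint = "09876543", so the
    // literal round-tripped through the table is CAST('12.09876543' AS DECIMAL(10,8)).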
format("DECIMAL(%d,%d)", precision, scale); String beforeTheDecimalPoint = "12345678901234567890123456789012345678".substring(0, precision - scale); String afterTheDecimalPoint = "09876543210987654321098765432109876543".substring(0, scale); String decimalValue = format("%s.%s", beforeTheDecimalPoint, afterTheDecimalPoint); assertUpdate(format("CREATE TABLE test_iceberg_decimal (x %s)", decimalType)); assertUpdate(format("INSERT INTO test_iceberg_decimal (x) VALUES (CAST('%s' AS %s))", decimalValue, decimalType), 1); assertQuery("SELECT * FROM test_iceberg_decimal", format("SELECT CAST('%s' AS %s)", decimalValue, decimalType)); dropTable("test_iceberg_decimal"); } @Test public void testTime() { testSelectOrPartitionedByTime(false); } @Test public void testPartitionedByTime() { testSelectOrPartitionedByTime(true); } private void testSelectOrPartitionedByTime(boolean partitioned) { String tableName = format("test_%s_by_time", partitioned ? "partitioned" : "selected"); String partitioning = partitioned ? "WITH(partitioning = ARRAY['x'])" : ""; assertUpdate(format("CREATE TABLE %s (x TIME(6), y BIGINT) %s", tableName, partitioning)); assertUpdate(format("INSERT INTO %s VALUES (TIME '10:12:34', 12345)", tableName), 1); assertQuery(format("SELECT COUNT(*) FROM %s", tableName), "SELECT 1"); assertQuery(format("SELECT x FROM %s", tableName), "SELECT CAST('10:12:34' AS TIME)"); assertUpdate(format("INSERT INTO %s VALUES (TIME '9:00:00', 67890)", tableName), 1); assertQuery(format("SELECT COUNT(*) FROM %s", tableName), "SELECT 2"); assertQuery(format("SELECT x FROM %s WHERE x = TIME '10:12:34'", tableName), "SELECT CAST('10:12:34' AS TIME)"); assertQuery(format("SELECT x FROM %s WHERE x = TIME '9:00:00'", tableName), "SELECT CAST('9:00:00' AS TIME)"); assertQuery(format("SELECT x FROM %s WHERE y = 12345", tableName), "SELECT CAST('10:12:34' AS TIME)"); assertQuery(format("SELECT x FROM %s WHERE y = 67890", tableName), "SELECT CAST('9:00:00' AS TIME)"); dropTable(tableName); } @Test public void testPartitionByTimestamp() { testSelectOrPartitionedByTimestamp(true); } @Test public void testSelectByTimestamp() { testSelectOrPartitionedByTimestamp(false); } private void testSelectOrPartitionedByTimestamp(boolean partitioned) { String tableName = format("test_%s_by_timestamp", partitioned ? "partitioned" : "selected"); assertUpdate(format("CREATE TABLE %s (_timestamp timestamp(6)) %s", tableName, partitioned ? 
"WITH (partitioning = ARRAY['_timestamp'])" : "")); @Language("SQL") String select1 = "SELECT TIMESTAMP '2017-05-01 10:12:34' _timestamp"; @Language("SQL") String select2 = "SELECT TIMESTAMP '2017-10-01 10:12:34' _timestamp"; @Language("SQL") String select3 = "SELECT TIMESTAMP '2018-05-01 10:12:34' _timestamp"; assertUpdate(format("INSERT INTO %s %s", tableName, select1), 1); assertUpdate(format("INSERT INTO %s %s", tableName, select2), 1); assertUpdate(format("INSERT INTO %s %s", tableName, select3), 1); assertQuery(format("SELECT COUNT(*) from %s", tableName), "SELECT 3"); assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2017-05-01 10:12:34'", tableName), select1); assertQuery(format("SELECT * from %s WHERE _timestamp < TIMESTAMP '2017-06-01 10:12:34'", tableName), select1); assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2017-10-01 10:12:34'", tableName), select2); assertQuery(format("SELECT * from %s WHERE _timestamp > TIMESTAMP '2017-06-01 10:12:34' AND _timestamp < TIMESTAMP '2018-05-01 10:12:34'", tableName), select2); assertQuery(format("SELECT * from %s WHERE _timestamp = TIMESTAMP '2018-05-01 10:12:34'", tableName), select3); assertQuery(format("SELECT * from %s WHERE _timestamp > TIMESTAMP '2018-01-01 10:12:34'", tableName), select3); dropTable(tableName); } @Test public void testPartitionByTimestampWithTimeZone() { testSelectOrPartitionedByTimestampWithTimeZone(true); } @Test public void testSelectByTimestampWithTimeZone() { testSelectOrPartitionedByTimestampWithTimeZone(false); } private void testSelectOrPartitionedByTimestampWithTimeZone(boolean partitioned) { String tableName = format("test_%s_by_timestamptz", partitioned ? "partitioned" : "selected"); assertUpdate(format( "CREATE TABLE %s (_timestamptz timestamp(6) with time zone) %s", tableName, partitioned ? 
"WITH (partitioning = ARRAY['_timestamptz'])" : "")); String instant1Utc = "TIMESTAMP '2021-10-31 00:30:00.005000 UTC'"; String instant1La = "TIMESTAMP '2021-10-30 17:30:00.005000 America/Los_Angeles'"; String instant2Utc = "TIMESTAMP '2021-10-31 00:30:00.006000 UTC'"; String instant2La = "TIMESTAMP '2021-10-30 17:30:00.006000 America/Los_Angeles'"; String instant3Utc = "TIMESTAMP '2021-10-31 00:30:00.007000 UTC'"; String instant3La = "TIMESTAMP '2021-10-30 17:30:00.007000 America/Los_Angeles'"; assertUpdate(format("INSERT INTO %s VALUES %s", tableName, instant1Utc), 1); assertUpdate(format("INSERT INTO %s VALUES %s", tableName, instant2La /* non-UTC for this one */), 1); assertUpdate(format("INSERT INTO %s VALUES %s", tableName, instant3Utc), 1); assertQuery(format("SELECT COUNT(*) from %s", tableName), "SELECT 3"); // = assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant1Utc))) .matches("VALUES " + instant1Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant1La))) .matches("VALUES " + instant1Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant2Utc))) .matches("VALUES " + instant2Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant2La))) .matches("VALUES " + instant2Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant3Utc))) .matches("VALUES " + instant3Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz = %s", tableName, instant3La))) .matches("VALUES " + instant3Utc); // < assertThat(query(format("SELECT * from %s WHERE _timestamptz < %s", tableName, instant2Utc))) .matches("VALUES " + instant1Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz < %s", tableName, instant2La))) .matches("VALUES " + instant1Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz < %s", tableName, instant3Utc))) .matches(format("VALUES %s, %s", instant1Utc, instant2Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz < %s", tableName, instant3La))) .matches(format("VALUES %s, %s", instant1Utc, instant2Utc)); // <= assertThat(query(format("SELECT * from %s WHERE _timestamptz <= %s", tableName, instant2Utc))) .matches(format("VALUES %s, %s", instant1Utc, instant2Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz <= %s", tableName, instant2La))) .matches(format("VALUES %s, %s", instant1Utc, instant2Utc)); // > assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s", tableName, instant2Utc))) .matches("VALUES " + instant3Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s", tableName, instant2La))) .matches("VALUES " + instant3Utc); assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s", tableName, instant1Utc))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s", tableName, instant1La))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); // >= assertThat(query(format("SELECT * from %s WHERE _timestamptz >= %s", tableName, instant2Utc))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); assertThat(query(format("SELECT * from %s WHERE _timestamptz >= %s", tableName, instant2La))) .matches(format("VALUES %s, %s", instant2Utc, instant3Utc)); // open range assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s AND _timestamptz < %s", tableName, instant1Utc, instant3Utc))) .matches("VALUES " + 
        // open range
        assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s AND _timestamptz < %s", tableName, instant1Utc, instant3Utc)))
                .matches("VALUES " + instant2Utc);
        assertThat(query(format("SELECT * from %s WHERE _timestamptz > %s AND _timestamptz < %s", tableName, instant1La, instant3La)))
                .matches("VALUES " + instant2Utc);

        // closed range
        assertThat(query(format("SELECT * from %s WHERE _timestamptz BETWEEN %s AND %s", tableName, instant1Utc, instant2Utc)))
                .matches(format("VALUES %s, %s", instant1Utc, instant2Utc));
        assertThat(query(format("SELECT * from %s WHERE _timestamptz BETWEEN %s AND %s", tableName, instant1La, instant2La)))
                .matches(format("VALUES %s, %s", instant1Utc, instant2Utc));

        // !=
        assertThat(query(format("SELECT * from %s WHERE _timestamptz != %s", tableName, instant1Utc)))
                .matches(format("VALUES %s, %s", instant2Utc, instant3Utc));
        assertThat(query(format("SELECT * from %s WHERE _timestamptz != %s", tableName, instant1La)))
                .matches(format("VALUES %s, %s", instant2Utc, instant3Utc));
        assertThat(query(format("SELECT * from %s WHERE _timestamptz != %s", tableName, instant2Utc)))
                .matches(format("VALUES %s, %s", instant1Utc, instant3Utc));
        assertThat(query(format("SELECT * from %s WHERE _timestamptz != %s", tableName, instant2La)))
                .matches(format("VALUES %s, %s", instant1Utc, instant3Utc));

        // IS DISTINCT FROM
        assertThat(query(format("SELECT * from %s WHERE _timestamptz IS DISTINCT FROM %s", tableName, instant1Utc)))
                .matches(format("VALUES %s, %s", instant2Utc, instant3Utc));
        assertThat(query(format("SELECT * from %s WHERE _timestamptz IS DISTINCT FROM %s", tableName, instant1La)))
                .matches(format("VALUES %s, %s", instant2Utc, instant3Utc));
        assertThat(query(format("SELECT * from %s WHERE _timestamptz IS DISTINCT FROM %s", tableName, instant2Utc)))
                .matches(format("VALUES %s, %s", instant1Utc, instant3Utc));
        assertThat(query(format("SELECT * from %s WHERE _timestamptz IS DISTINCT FROM %s", tableName, instant2La)))
                .matches(format("VALUES %s, %s", instant1Utc, instant3Utc));

        // IS NOT DISTINCT FROM
        assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant1Utc)))
                .matches("VALUES " + instant1Utc);
        assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant1La)))
                .matches("VALUES " + instant1Utc);
        assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant2Utc)))
                .matches("VALUES " + instant2Utc);
        assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant2La)))
                .matches("VALUES " + instant2Utc);
        assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant3Utc)))
                .matches("VALUES " + instant3Utc);
        assertThat(query(format("SELECT * from %s WHERE _timestamptz IS NOT DISTINCT FROM %s", tableName, instant3La)))
                .matches("VALUES " + instant3Utc);
"TIMESTAMP '2021-10-31 00:30:00.007999 UTC'" : instant3Utc)); } // show stats assertThat(query("SHOW STATS FOR " + tableName)) .skippingTypesCheck() .matches("VALUES " + "('_timestamptz', NULL, NULL, 0e0, NULL, '2021-10-31 00:30:00.005 UTC', '2021-10-31 00:30:00.007 UTC'), " + "(NULL, NULL, NULL, NULL, 3e0, NULL, NULL)"); if (partitioned) { // show stats with predicate assertThat(query("SHOW STATS FOR (SELECT * FROM " + tableName + " WHERE _timestamptz = " + instant1La + ")")) .skippingTypesCheck() .matches("VALUES " + // TODO (https://github.com/trinodb/trino/issues/9716) the min/max values are off by 1 millisecond "('_timestamptz', NULL, NULL, 0e0, NULL, '2021-10-31 00:30:00.005 UTC', '2021-10-31 00:30:00.005 UTC'), " + "(NULL, NULL, NULL, NULL, 1e0, NULL, NULL)"); } else { // show stats with predicate assertThat(query("SHOW STATS FOR (SELECT * FROM " + tableName + " WHERE _timestamptz = " + instant1La + ")")) .skippingTypesCheck() .matches("VALUES " + "('_timestamptz', NULL, NULL, NULL, NULL, NULL, NULL), " + "(NULL, NULL, NULL, NULL, NULL, NULL, NULL)"); } assertUpdate("DROP TABLE " + tableName); } @Test public void testUuid() { testSelectOrPartitionedByUuid(false); } @Test public void testPartitionedByUuid() { testSelectOrPartitionedByUuid(true); } private void testSelectOrPartitionedByUuid(boolean partitioned) { String tableName = format("test_%s_by_uuid", partitioned ? "partitioned" : "selected"); String partitioning = partitioned ? "WITH (partitioning = ARRAY['x'])" : ""; assertUpdate(format("DROP TABLE IF EXISTS %s", tableName)); assertUpdate(format("CREATE TABLE %s (x uuid, y bigint) %s", tableName, partitioning)); assertUpdate(format("INSERT INTO %s VALUES (UUID '406caec7-68b9-4778-81b2-a12ece70c8b1', 12345)", tableName), 1); assertQuery(format("SELECT count(*) FROM %s", tableName), "SELECT 1"); assertQuery(format("SELECT x FROM %s", tableName), "SELECT CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID)"); assertUpdate(format("INSERT INTO %s VALUES (UUID 'f79c3e09-677c-4bbd-a479-3f349cb785e7', 67890)", tableName), 1); assertUpdate(format("INSERT INTO %s VALUES (NULL, 7531)", tableName), 1); assertQuery(format("SELECT count(*) FROM %s", tableName), "SELECT 3"); assertQuery(format("SELECT * FROM %s WHERE x = UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'", tableName), "SELECT CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID), 12345"); assertQuery(format("SELECT * FROM %s WHERE x = UUID 'f79c3e09-677c-4bbd-a479-3f349cb785e7'", tableName), "SELECT CAST('f79c3e09-677c-4bbd-a479-3f349cb785e7' AS UUID), 67890"); assertQuery( format("SELECT * FROM %s WHERE x >= UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'", tableName), (format == ORC && partitioned || format == PARQUET) // TODO (https://github.com/trinodb/trino/issues/12834): reading Parquet, or partitioned ORC, with UUID filter yields incorrect results ? "VALUES (CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID), 12345)" : "VALUES (CAST('f79c3e09-677c-4bbd-a479-3f349cb785e7' AS UUID), 67890), (CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID), 12345)"); assertQuery( format("SELECT * FROM %s WHERE x >= UUID 'f79c3e09-677c-4bbd-a479-3f349cb785e7'", tableName), partitioned ? 
"VALUES (CAST('f79c3e09-677c-4bbd-a479-3f349cb785e7' AS UUID), 67890), (CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID), 12345)" : "SELECT CAST('f79c3e09-677c-4bbd-a479-3f349cb785e7' AS UUID), 67890"); assertQuery(format("SELECT * FROM %s WHERE x IS NULL", tableName), "SELECT NULL, 7531"); assertQuery(format("SELECT x FROM %s WHERE y = 12345", tableName), "SELECT CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID)"); assertQuery(format("SELECT x FROM %s WHERE y = 67890", tableName), "SELECT CAST('f79c3e09-677c-4bbd-a479-3f349cb785e7' AS UUID)"); assertQuery(format("SELECT x FROM %s WHERE y = 7531", tableName), "SELECT NULL"); assertUpdate(format("INSERT INTO %s VALUES (UUID '206caec7-68b9-4778-81b2-a12ece70c8b1', 313), (UUID '906caec7-68b9-4778-81b2-a12ece70c8b1', 314)", tableName), 2); assertThat(query("SELECT y FROM " + tableName + " WHERE x >= UUID '206caec7-68b9-4778-81b2-a12ece70c8b1'")) .matches( (partitioned) // TODO (https://github.com/trinodb/trino/issues/12834): reading Parquet with UUID filter yields incorrect results ? "VALUES BIGINT '12345', 313" : ((format == PARQUET) // TODO (https://github.com/trinodb/trino/issues/12834): reading Parquet with UUID filter yields incorrect results ? "VALUES BIGINT '12345'" // this one is correct : "VALUES BIGINT '12345', 67890, 313, 314")); assertUpdate("DROP TABLE " + tableName); } @Test public void testNestedUuid() { assertUpdate("CREATE TABLE test_nested_uuid (int_t int, row_t row(uuid_t uuid, int_t int), map_t map(int, uuid), array_t array(uuid))"); String uuid = "UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'"; String value = format("VALUES (2, row(%1$s, 1), map(array[1], array[%1$s]), array[%1$s, %1$s])", uuid); assertUpdate("INSERT INTO test_nested_uuid " + value, 1); assertThat(query("SELECT row_t.int_t, row_t.uuid_t FROM test_nested_uuid")) .matches("VALUES (1, UUID '406caec7-68b9-4778-81b2-a12ece70c8b1')"); assertThat(query("SELECT map_t[1] FROM test_nested_uuid")) .matches("VALUES UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'"); assertThat(query("SELECT array_t FROM test_nested_uuid")) .matches("VALUES ARRAY[UUID '406caec7-68b9-4778-81b2-a12ece70c8b1', UUID '406caec7-68b9-4778-81b2-a12ece70c8b1']"); assertQuery("SELECT row_t.int_t FROM test_nested_uuid WHERE row_t.uuid_t = UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'", "VALUES 1"); assertQuery("SELECT int_t FROM test_nested_uuid WHERE row_t.uuid_t = UUID '406caec7-68b9-4778-81b2-a12ece70c8b1'", "VALUES 2"); } @Test public void testCreatePartitionedTable() { assertUpdate("" + "CREATE TABLE test_partitioned_table (" + " a_boolean boolean, " + " an_integer integer, " + " a_bigint bigint, " + " a_real real, " + " a_double double, " + " a_short_decimal decimal(5,2), " + " a_long_decimal decimal(38,20), " + " a_varchar varchar, " + " a_varbinary varbinary, " + " a_date date, " + " a_time time(6), " + " a_timestamp timestamp(6), " + " a_timestamptz timestamp(6) with time zone, " + " a_uuid uuid, " + " a_row row(id integer , vc varchar), " + " an_array array(varchar), " + " a_map map(integer, varchar) " + ") " + "WITH (" + "partitioning = ARRAY[" + " 'a_boolean', " + " 'an_integer', " + " 'a_bigint', " + " 'a_real', " + " 'a_double', " + " 'a_short_decimal', " + " 'a_long_decimal', " + " 'a_varchar', " + " 'a_varbinary', " + " 'a_date', " + " 'a_time', " + " 'a_timestamp', " + " 'a_timestamptz', " + " 'a_uuid' " + // Note: partitioning on non-primitive columns is not allowed in Iceberg " ]" + ")"); assertQueryReturnsEmptyResult("SELECT * FROM test_partitioned_table"); String values = 
"VALUES (" + "true, " + "1, " + "BIGINT '1', " + "REAL '1.0', " + "DOUBLE '1.0', " + "CAST(1.0 AS decimal(5,2)), " + "CAST(11.0 AS decimal(38,20)), " + "VARCHAR 'onefsadfdsf', " + "X'000102f0feff', " + "DATE '2021-07-24'," + "TIME '02:43:57.987654', " + "TIMESTAMP '2021-07-24 03:43:57.987654'," + "TIMESTAMP '2021-07-24 04:43:57.987654 UTC', " + "UUID '20050910-1330-11e9-ffff-2a86e4085a59', " + "CAST(ROW(42, 'this is a random value') AS ROW(id int, vc varchar)), " + "ARRAY[VARCHAR 'uno', 'dos', 'tres'], " + "map(ARRAY[1,2], ARRAY['ek', VARCHAR 'one'])) "; String nullValues = nCopies(17, "NULL").stream() .collect(joining(", ", "VALUES (", ")")); assertUpdate("INSERT INTO test_partitioned_table " + values, 1); assertUpdate("INSERT INTO test_partitioned_table " + nullValues, 1); // SELECT assertThat(query("SELECT * FROM test_partitioned_table")) .matches(values + " UNION ALL " + nullValues); // SELECT with predicates assertThat(query("SELECT * FROM test_partitioned_table WHERE " + " a_boolean = true " + "AND an_integer = 1 " + "AND a_bigint = BIGINT '1' " + "AND a_real = REAL '1.0' " + "AND a_double = DOUBLE '1.0' " + "AND a_short_decimal = CAST(1.0 AS decimal(5,2)) " + "AND a_long_decimal = CAST(11.0 AS decimal(38,20)) " + "AND a_varchar = VARCHAR 'onefsadfdsf' " + "AND a_varbinary = X'000102f0feff' " + "AND a_date = DATE '2021-07-24' " + "AND a_time = TIME '02:43:57.987654' " + "AND a_timestamp = TIMESTAMP '2021-07-24 03:43:57.987654' " + "AND a_timestamptz = TIMESTAMP '2021-07-24 04:43:57.987654 UTC' " + "AND a_uuid = UUID '20050910-1330-11e9-ffff-2a86e4085a59' " + "AND a_row = CAST(ROW(42, 'this is a random value') AS ROW(id int, vc varchar)) " + "AND an_array = ARRAY[VARCHAR 'uno', 'dos', 'tres'] " + "AND a_map = map(ARRAY[1,2], ARRAY['ek', VARCHAR 'one']) " + "")) .matches(values); assertThat(query("SELECT * FROM test_partitioned_table WHERE " + " a_boolean IS NULL " + "AND an_integer IS NULL " + "AND a_bigint IS NULL " + "AND a_real IS NULL " + "AND a_double IS NULL " + "AND a_short_decimal IS NULL " + "AND a_long_decimal IS NULL " + "AND a_varchar IS NULL " + "AND a_varbinary IS NULL " + "AND a_date IS NULL " + "AND a_time IS NULL " + "AND a_timestamp IS NULL " + "AND a_timestamptz IS NULL " + "AND a_uuid IS NULL " + "AND a_row IS NULL " + "AND an_array IS NULL " + "AND a_map IS NULL " + "")) .skippingTypesCheck() .matches(nullValues); // SHOW STATS if (format == ORC) { assertQuery("SHOW STATS FOR test_partitioned_table", "VALUES " + " ('a_boolean', NULL, NULL, 0.5, NULL, 'true', 'true'), " + " ('an_integer', NULL, NULL, 0.5, NULL, '1', '1'), " + " ('a_bigint', NULL, NULL, 0.5, NULL, '1', '1'), " + " ('a_real', NULL, NULL, 0.5, NULL, '1.0', '1.0'), " + " ('a_double', NULL, NULL, 0.5, NULL, '1.0', '1.0'), " + " ('a_short_decimal', NULL, NULL, 0.5, NULL, '1.0', '1.0'), " + " ('a_long_decimal', NULL, NULL, 0.5, NULL, '11.0', '11.0'), " + " ('a_varchar', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('a_varbinary', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('a_date', NULL, NULL, 0.5, NULL, '2021-07-24', '2021-07-24'), " + " ('a_time', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('a_timestamp', NULL, NULL, 0.5, NULL, '2021-07-24 03:43:57.987654', '2021-07-24 03:43:57.987654'), " + " ('a_timestamptz', NULL, NULL, 0.5, NULL, '2021-07-24 04:43:57.987 UTC', '2021-07-24 04:43:57.987 UTC'), " + " ('a_uuid', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('a_row', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('an_array', NULL, NULL, 0.5, NULL, NULL, NULL), " + " ('a_map', NULL, NULL, 0.5, NULL, NULL, NULL), " + 
" (NULL, NULL, NULL, NULL, 2e0, NULL, NULL)"); } else { assertThat(query("SHOW STATS FOR test_partitioned_table")) .skippingTypesCheck() .matches("VALUES " + " ('a_boolean', NULL, NULL, 0.5e0, NULL, 'true', 'true'), " + " ('an_integer', NULL, NULL, 0.5e0, NULL, '1', '1'), " + " ('a_bigint', NULL, NULL, 0.5e0, NULL, '1', '1'), " + " ('a_real', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_double', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_short_decimal', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_long_decimal', NULL, NULL, 0.5e0, NULL, '11.0', '11.0'), " + " ('a_varchar', 87e0, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_varbinary', 82e0, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_date', NULL, NULL, 0.5e0, NULL, '2021-07-24', '2021-07-24'), " + " ('a_time', NULL, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_timestamp', NULL, NULL, 0.5e0, NULL, '2021-07-24 03:43:57.987654', '2021-07-24 03:43:57.987654'), " + " ('a_timestamptz', NULL, NULL, 0.5e0, NULL, '2021-07-24 04:43:57.987 UTC', '2021-07-24 04:43:57.987 UTC'), " + " ('a_uuid', NULL, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_row', NULL, NULL, NULL, NULL, NULL, NULL), " + " ('an_array', NULL, NULL, NULL, NULL, NULL, NULL), " + " ('a_map', NULL, NULL, NULL, NULL, NULL, NULL), " + " (NULL, NULL, NULL, NULL, 2e0, NULL, NULL)"); } // $partitions String schema = getSession().getSchema().orElseThrow(); assertThat(query("SELECT column_name FROM information_schema.columns WHERE table_schema = '" + schema + "' AND table_name = 'test_partitioned_table$partitions' ")) .skippingTypesCheck() .matches("VALUES 'partition', 'record_count', 'file_count', 'total_size'"); assertThat(query("SELECT " + " record_count," + " file_count, " + " partition.a_boolean, " + " partition.an_integer, " + " partition.a_bigint, " + " partition.a_real, " + " partition.a_double, " + " partition.a_short_decimal, " + " partition.a_long_decimal, " + " partition.a_varchar, " + " partition.a_varbinary, " + " partition.a_date, " + " partition.a_time, " + " partition.a_timestamp, " + " partition.a_timestamptz, " + " partition.a_uuid " + // Note: partitioning on non-primitive columns is not allowed in Iceberg " FROM \"test_partitioned_table$partitions\" ")) .matches("" + "VALUES (" + " BIGINT '1', " + " BIGINT '1', " + " true, " + " 1, " + " BIGINT '1', " + " REAL '1.0', " + " DOUBLE '1.0', " + " CAST(1.0 AS decimal(5,2)), " + " CAST(11.0 AS decimal(38,20)), " + " VARCHAR 'onefsadfdsf', " + " X'000102f0feff', " + " DATE '2021-07-24'," + " TIME '02:43:57.987654', " + " TIMESTAMP '2021-07-24 03:43:57.987654'," + " TIMESTAMP '2021-07-24 04:43:57.987654 UTC', " + " UUID '20050910-1330-11e9-ffff-2a86e4085a59' " + ")" + "UNION ALL " + "VALUES (" + " BIGINT '1', " + " BIGINT '1', " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL, " + " NULL " + ")"); assertUpdate("DROP TABLE test_partitioned_table"); } @Test public void testCreatePartitionedTableWithNestedTypes() { assertUpdate("" + "CREATE TABLE test_partitioned_table_nested_type (" + " _string VARCHAR" + ", _struct ROW(_field1 INT, _field2 VARCHAR)" + ", _date DATE" + ") " + "WITH (" + " partitioning = ARRAY['_date']" + ")"); dropTable("test_partitioned_table_nested_type"); } @Test public void testCreatePartitionedTableAs() { File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile(); String tempDirPath = tempDir.toURI().toASCIIString() + randomTableSuffix(); assertUpdate( "CREATE TABLE 
test_create_partitioned_table_as " + "WITH (" + "format_version = 2," + "location = '" + tempDirPath + "', " + "partitioning = ARRAY['ORDER_STATUS', 'Ship_Priority', 'Bucket(order_key,9)']" + ") " + "AS " + "SELECT orderkey AS order_key, shippriority AS ship_priority, orderstatus AS order_status " + "FROM tpch.tiny.orders", "SELECT count(*) from orders"); assertEquals( computeScalar("SHOW CREATE TABLE test_create_partitioned_table_as"), format( "CREATE TABLE %s.%s.%s (\n" + " order_key bigint,\n" + " ship_priority integer,\n" + " order_status varchar\n" + ")\n" + "WITH (\n" + " format = '%s',\n" + " format_version = 2,\n" + " location = '%s',\n" + " partitioning = ARRAY['order_status','ship_priority','bucket(order_key, 9)']\n" + ")", getSession().getCatalog().orElseThrow(), getSession().getSchema().orElseThrow(), "test_create_partitioned_table_as", format, tempDirPath)); assertQuery("SELECT * from test_create_partitioned_table_as", "SELECT orderkey, shippriority, orderstatus FROM orders"); dropTable("test_create_partitioned_table_as"); } @Test public void testTableComments() { File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile(); String tempDirPath = tempDir.toURI().toASCIIString() + randomTableSuffix(); String createTableTemplate = "" + "CREATE TABLE iceberg.tpch.test_table_comments (\n" + " _x bigint\n" + ")\n" + "COMMENT '%s'\n" + "WITH (\n" + format(" format = '%s',\n", format) + " format_version = 2,\n" + format(" location = '%s'\n", tempDirPath) + ")"; String createTableWithoutComment = "" + "CREATE TABLE iceberg.tpch.test_table_comments (\n" + " _x bigint\n" + ")\n" + "WITH (\n" + " format = '" + format + "',\n" + " format_version = 2,\n" + " location = '" + tempDirPath + "'\n" + ")"; String createTableSql = format(createTableTemplate, "test table comment", format); assertUpdate(createTableSql); assertEquals(computeScalar("SHOW CREATE TABLE test_table_comments"), createTableSql); assertUpdate("COMMENT ON TABLE test_table_comments IS 'different test table comment'"); assertEquals(computeScalar("SHOW CREATE TABLE test_table_comments"), format(createTableTemplate, "different test table comment", format)); assertUpdate("COMMENT ON TABLE test_table_comments IS NULL"); assertEquals(computeScalar("SHOW CREATE TABLE test_table_comments"), createTableWithoutComment); dropTable("iceberg.tpch.test_table_comments"); assertUpdate(createTableWithoutComment); assertEquals(computeScalar("SHOW CREATE TABLE test_table_comments"), createTableWithoutComment); dropTable("iceberg.tpch.test_table_comments"); } @Test public void testRollbackSnapshot() { assertUpdate("CREATE TABLE test_rollback (col0 INTEGER, col1 BIGINT)"); long afterCreateTableId = getLatestSnapshotId("test_rollback"); assertUpdate("INSERT INTO test_rollback (col0, col1) VALUES (123, CAST(987 AS BIGINT))", 1); long afterFirstInsertId = getLatestSnapshotId("test_rollback"); assertUpdate("INSERT INTO test_rollback (col0, col1) VALUES (456, CAST(654 AS BIGINT))", 1); assertQuery("SELECT * FROM test_rollback ORDER BY col0", "VALUES (123, CAST(987 AS BIGINT)), (456, CAST(654 AS BIGINT))"); assertUpdate(format("CALL system.rollback_to_snapshot('tpch', 'test_rollback', %s)", afterFirstInsertId)); assertQuery("SELECT * FROM test_rollback ORDER BY col0", "VALUES (123, CAST(987 AS BIGINT))"); assertUpdate(format("CALL system.rollback_to_snapshot('tpch', 'test_rollback', %s)", afterCreateTableId)); assertEquals((long) computeActual("SELECT COUNT(*) FROM test_rollback").getOnlyValue(), 0); assertUpdate("INSERT INTO 
test_rollback (col0, col1) VALUES (789, CAST(987 AS BIGINT))", 1); long afterSecondInsertId = getLatestSnapshotId("test_rollback"); // extra insert which should be dropped on rollback assertUpdate("INSERT INTO test_rollback (col0, col1) VALUES (999, CAST(999 AS BIGINT))", 1); assertUpdate(format("CALL system.rollback_to_snapshot('tpch', 'test_rollback', %s)", afterSecondInsertId)); assertQuery("SELECT * FROM test_rollback ORDER BY col0", "VALUES (789, CAST(987 AS BIGINT))"); dropTable("test_rollback"); } private long getLatestSnapshotId(String tableName) { return (long) computeActual(format("SELECT snapshot_id FROM \"%s$snapshots\" ORDER BY committed_at DESC LIMIT 1", tableName)) .getOnlyValue(); } @Override protected String errorMessageForInsertIntoNotNullColumn(String columnName) { return "NULL value not allowed for NOT NULL column: " + columnName; } @Test public void testSchemaEvolution() { assertUpdate("CREATE TABLE test_schema_evolution_drop_end (col0 INTEGER, col1 INTEGER, col2 INTEGER)"); assertUpdate("INSERT INTO test_schema_evolution_drop_end VALUES (0, 1, 2)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_end DROP COLUMN col2"); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_end ADD COLUMN col2 INTEGER"); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, NULL)"); assertUpdate("INSERT INTO test_schema_evolution_drop_end VALUES (3, 4, 5)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_end", "VALUES(0, 1, NULL), (3, 4, 5)"); dropTable("test_schema_evolution_drop_end"); assertUpdate("CREATE TABLE test_schema_evolution_drop_middle (col0 INTEGER, col1 INTEGER, col2 INTEGER)"); assertUpdate("INSERT INTO test_schema_evolution_drop_middle VALUES (0, 1, 2)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 1, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_middle DROP COLUMN col1"); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 2)"); assertUpdate("ALTER TABLE test_schema_evolution_drop_middle ADD COLUMN col1 INTEGER"); assertUpdate("INSERT INTO test_schema_evolution_drop_middle VALUES (3, 4, 5)", 1); assertQuery("SELECT * FROM test_schema_evolution_drop_middle", "VALUES(0, 2, NULL), (3, 4, 5)"); dropTable("test_schema_evolution_drop_middle"); } @Test public void testShowStatsAfterAddColumn() { assertUpdate("CREATE TABLE test_show_stats_after_add_column (col0 INTEGER, col1 INTEGER, col2 INTEGER)"); // Insert separately to ensure the table has multiple data files assertUpdate("INSERT INTO test_show_stats_after_add_column VALUES (1, 2, 3)", 1); assertUpdate("INSERT INTO test_show_stats_after_add_column VALUES (4, 5, 6)", 1); assertUpdate("INSERT INTO test_show_stats_after_add_column VALUES (NULL, NULL, NULL)", 1); assertUpdate("INSERT INTO test_show_stats_after_add_column VALUES (7, 8, 9)", 1); assertThat(query("SHOW STATS FOR test_show_stats_after_add_column")) .skippingTypesCheck() .matches("VALUES " + " ('col0', NULL, NULL, 25e-2, NULL, '1', '7')," + " ('col1', NULL, NULL, 25e-2, NULL, '2', '8'), " + " ('col2', NULL, NULL, 25e-2, NULL, '3', '9'), " + " (NULL, NULL, NULL, NULL, 4e0, NULL, NULL)"); // Columns added after some data files exist will not have valid statistics because not all files have min/max/null count statistics for the new column assertUpdate("ALTER TABLE test_show_stats_after_add_column ADD COLUMN 
col3 INTEGER"); assertUpdate("INSERT INTO test_show_stats_after_add_column VALUES (10, 11, 12, 13)", 1); assertThat(query("SHOW STATS FOR test_show_stats_after_add_column")) .skippingTypesCheck() .matches("VALUES " + " ('col0', NULL, NULL, 2e-1, NULL, '1', '10')," + " ('col1', NULL, NULL, 2e-1, NULL, '2', '11'), " + " ('col2', NULL, NULL, 2e-1, NULL, '3', '12'), " + " ('col3', NULL, NULL, NULL, NULL, NULL, NULL), " + " (NULL, NULL, NULL, NULL, 5e0, NULL, NULL)"); } @Test public void testLargeInOnPartitionedColumns() { assertUpdate("CREATE TABLE test_in_predicate_large_set (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col2'])"); assertUpdate("INSERT INTO test_in_predicate_large_set VALUES (1, 10)", 1L); assertUpdate("INSERT INTO test_in_predicate_large_set VALUES (2, 20)", 1L); List<String> predicates = IntStream.range(0, 25_000).boxed() .map(Object::toString) .collect(toImmutableList()); String filter = format("col2 IN (%s)", join(",", predicates)); assertThat(query("SELECT * FROM test_in_predicate_large_set WHERE " + filter)) .matches("TABLE test_in_predicate_large_set"); dropTable("test_in_predicate_large_set"); } @Test public void testCreateTableFailsOnNonEmptyPath() { String tableName = "test_rename_table_" + randomTableSuffix(); String tmpName = "test_rename_table_tmp_" + randomTableSuffix(); try { assertUpdate("CREATE TABLE " + tmpName + " AS SELECT 1 as a", 1); assertUpdate("ALTER TABLE " + tmpName + " RENAME TO " + tableName); assertQueryFails("CREATE TABLE " + tmpName + " AS SELECT 1 as a", "Cannot create a table on a non-empty location.*"); } finally { assertUpdate("DROP TABLE IF EXISTS " + tableName); assertUpdate("DROP TABLE IF EXISTS " + tmpName); } } @Test public void testCreateTableSucceedsOnEmptyDirectory() { File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile(); String tmpName = "test_rename_table_tmp_" + randomTableSuffix(); Path newPath = tempDir.toPath().resolve(tmpName); File directory = newPath.toFile(); verify(directory.mkdirs(), "Could not make directory on filesystem"); try { assertUpdate("CREATE TABLE " + tmpName + " WITH (location='" + directory + "') AS SELECT 1 as a", 1); } finally { assertUpdate("DROP TABLE IF EXISTS " + tmpName); } } @Test public void testCreateTableLike() { IcebergFileFormat otherFormat = (format == PARQUET) ? ORC : PARQUET; testCreateTableLikeForFormat(otherFormat); } private void testCreateTableLikeForFormat(IcebergFileFormat otherFormat) { File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile(); String tempDirPath = tempDir.toURI().toASCIIString() + randomTableSuffix(); // LIKE source INCLUDING PROPERTIES copies all the properties of the source table, including the `location`. // For this reason the source and the copied table will share the same directory. // This test does not drop intentionally the created tables to avoid affecting the source table or the information_schema. 
assertUpdate(format("CREATE TABLE test_create_table_like_original (col1 INTEGER, aDate DATE) WITH(format = '%s', location = '%s', partitioning = ARRAY['aDate'])", format, tempDirPath)); assertEquals(getTablePropertiesString("test_create_table_like_original"), "WITH (\n" + format(" format = '%s',\n", format) + " format_version = 2,\n" + format(" location = '%s',\n", tempDirPath) + " partitioning = ARRAY['adate']\n" + ")"); assertUpdate("CREATE TABLE test_create_table_like_copy0 (LIKE test_create_table_like_original, col2 INTEGER)"); assertUpdate("INSERT INTO test_create_table_like_copy0 (col1, aDate, col2) VALUES (1, CAST('1950-06-28' AS DATE), 3)", 1); assertQuery("SELECT * from test_create_table_like_copy0", "VALUES(1, CAST('1950-06-28' AS DATE), 3)"); assertUpdate("CREATE TABLE test_create_table_like_copy1 (LIKE test_create_table_like_original)"); assertEquals(getTablePropertiesString("test_create_table_like_copy1"), "WITH (\n" + format(" format = '%s',\n format_version = 2,\n location = '%s'\n)", format, tempDir + "/iceberg_data/tpch/test_create_table_like_copy1")); assertUpdate("CREATE TABLE test_create_table_like_copy2 (LIKE test_create_table_like_original EXCLUDING PROPERTIES)"); assertEquals(getTablePropertiesString("test_create_table_like_copy2"), "WITH (\n" + format(" format = '%s',\n format_version = 2,\n location = '%s'\n)", format, tempDir + "/iceberg_data/tpch/test_create_table_like_copy2")); dropTable("test_create_table_like_copy2"); assertQueryFails("CREATE TABLE test_create_table_like_copy3 (LIKE test_create_table_like_original INCLUDING PROPERTIES)", "Cannot create a table on a non-empty location.*"); assertQueryFails(format("CREATE TABLE test_create_table_like_copy4 (LIKE test_create_table_like_original INCLUDING PROPERTIES) WITH (format = '%s')", otherFormat), "Cannot create a table on a non-empty location.*"); } private String getTablePropertiesString(String tableName) { MaterializedResult showCreateTable = computeActual("SHOW CREATE TABLE " + tableName); String createTable = (String) getOnlyElement(showCreateTable.getOnlyColumnAsSet()); Matcher matcher = WITH_CLAUSE_EXTRACTOR.matcher(createTable); return matcher.matches() ? 
                matcher.group(1) : null;
    }

    @Test
    public void testPredicating()
    {
        assertUpdate("CREATE TABLE test_predicating_on_real (col REAL)");
        assertUpdate("INSERT INTO test_predicating_on_real VALUES 1.2", 1);
        assertQuery("SELECT * FROM test_predicating_on_real WHERE col = 1.2", "VALUES 1.2");
        dropTable("test_predicating_on_real");
    }

    @Test
    public void testHourTransform()
    {
        assertUpdate("CREATE TABLE test_hour_transform (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['hour(d)'])");

        @Language("SQL") String values = "VALUES " +
                "(TIMESTAMP '1969-12-31 22:22:22.222222', 8)," +
                "(TIMESTAMP '1969-12-31 23:33:11.456789', 9)," +
                "(TIMESTAMP '1969-12-31 23:44:55.567890', 10)," +
                "(TIMESTAMP '1970-01-01 00:55:44.765432', 11)," +
                "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," +
                "(TIMESTAMP '2015-01-01 10:10:02.987654', 2)," +
                "(TIMESTAMP '2015-01-01 10:55:00.456789', 3)," +
                "(TIMESTAMP '2015-05-15 12:05:01.234567', 4)," +
                "(TIMESTAMP '2015-05-15 12:21:02.345678', 5)," +
                "(TIMESTAMP '2020-02-21 13:11:11.876543', 6)," +
                "(TIMESTAMP '2020-02-21 13:12:12.654321', 7)";
        assertUpdate("INSERT INTO test_hour_transform " + values, 11);
        assertQuery("SELECT * FROM test_hour_transform", values);

        @Language("SQL") String expected = "VALUES " +
                "(-2, 1, TIMESTAMP '1969-12-31 22:22:22.222222', TIMESTAMP '1969-12-31 22:22:22.222222', 8, 8), " +
                "(-1, 2, TIMESTAMP '1969-12-31 23:33:11.456789', TIMESTAMP '1969-12-31 23:44:55.567890', 9, 10), " +
                "(0, 1, TIMESTAMP '1970-01-01 00:55:44.765432', TIMESTAMP '1970-01-01 00:55:44.765432', 11, 11), " +
                "(394474, 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 10:55:00.456789', 1, 3), " +
                "(397692, 2, TIMESTAMP '2015-05-15 12:05:01.234567', TIMESTAMP '2015-05-15 12:21:02.345678', 4, 5), " +
                "(439525, 2, TIMESTAMP '2020-02-21 13:11:11.876543', TIMESTAMP '2020-02-21 13:12:12.654321', 6, 7)";
        String expectedTimestampStats = "'1969-12-31 22:22:22.222222', '2020-02-21 13:12:12.654321'";
        if (format == ORC) {
            expected = "VALUES " +
                    "(-2, 1, TIMESTAMP '1969-12-31 22:22:22.222000', TIMESTAMP '1969-12-31 22:22:22.222999', 8, 8), " +
                    "(-1, 2, TIMESTAMP '1969-12-31 23:33:11.456000', TIMESTAMP '1969-12-31 23:44:55.567999', 9, 10), " +
                    "(0, 1, TIMESTAMP '1970-01-01 00:55:44.765000', TIMESTAMP '1970-01-01 00:55:44.765999', 11, 11), " +
                    "(394474, 3, TIMESTAMP '2015-01-01 10:01:23.123000', TIMESTAMP '2015-01-01 10:55:00.456999', 1, 3), " +
                    "(397692, 2, TIMESTAMP '2015-05-15 12:05:01.234000', TIMESTAMP '2015-05-15 12:21:02.345999', 4, 5), " +
                    "(439525, 2, TIMESTAMP '2020-02-21 13:11:11.876000', TIMESTAMP '2020-02-21 13:12:12.654999', 6, 7)";
            expectedTimestampStats = "'1969-12-31 22:22:22.222000', '2020-02-21 13:12:12.654999'";
        }

        assertQuery("SELECT partition.d_hour, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_hour_transform$partitions\"", expected);

        // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates
        assertQuery(
                "SELECT * FROM test_hour_transform WHERE day_of_week(d) = 3 AND b % 7 = 3",
                "VALUES (TIMESTAMP '1969-12-31 23:44:55.567890', 10)");

        assertThat(query("SHOW STATS FOR test_hour_transform"))
                .skippingTypesCheck()
                .matches("VALUES " +
                        "  ('d', NULL, NULL, 0e0, NULL, " + expectedTimestampStats + "), " +
                        "  ('b', NULL, NULL, 0e0, NULL, '1', '11'), " +
                        "  (NULL, NULL, NULL, NULL, 11e0, NULL, NULL)");

        dropTable("test_hour_transform");
    }

    @Test
    public void testDayTransformDate()
    {
        assertUpdate("CREATE TABLE test_day_transform_date (d DATE, b BIGINT) WITH (partitioning = ARRAY['day(d)'])");
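        // The day(d) transform buckets rows by calendar day; the $partitions metadata table
        // surfaces the partition value as a DATE (queried as partition.d_day below).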
@Language("SQL") String values = "VALUES " + "(DATE '1969-01-01', 10), " + "(DATE '1969-12-31', 11), " + "(DATE '1970-01-01', 1), " + "(DATE '1970-03-04', 2), " + "(DATE '2015-01-01', 3), " + "(DATE '2015-01-13', 4), " + "(DATE '2015-01-13', 5), " + "(DATE '2015-05-15', 6), " + "(DATE '2015-05-15', 7), " + "(DATE '2020-02-21', 8), " + "(DATE '2020-02-21', 9)"; assertUpdate("INSERT INTO test_day_transform_date " + values, 11); assertQuery("SELECT * FROM test_day_transform_date", values); assertQuery( "SELECT partition.d_day, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_day_transform_date$partitions\"", "VALUES " + "(DATE '1969-01-01', 1, DATE '1969-01-01', DATE '1969-01-01', 10, 10), " + "(DATE '1969-12-31', 1, DATE '1969-12-31', DATE '1969-12-31', 11, 11), " + "(DATE '1970-01-01', 1, DATE '1970-01-01', DATE '1970-01-01', 1, 1), " + "(DATE '1970-03-04', 1, DATE '1970-03-04', DATE '1970-03-04', 2, 2), " + "(DATE '2015-01-01', 1, DATE '2015-01-01', DATE '2015-01-01', 3, 3), " + "(DATE '2015-01-13', 2, DATE '2015-01-13', DATE '2015-01-13', 4, 5), " + "(DATE '2015-05-15', 2, DATE '2015-05-15', DATE '2015-05-15', 6, 7), " + "(DATE '2020-02-21', 2, DATE '2020-02-21', DATE '2020-02-21', 8, 9)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_day_transform_date WHERE day_of_week(d) = 3 AND b % 7 = 3", "VALUES (DATE '1969-01-01', 10)"); assertThat(query("SHOW STATS FOR test_day_transform_date")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, '1969-01-01', '2020-02-21'), " + " ('b', NULL, NULL, 0e0, NULL, '1', '11'), " + " (NULL, NULL, NULL, NULL, 11e0, NULL, NULL)"); dropTable("test_day_transform_date"); } @Test public void testDayTransformTimestamp() { assertUpdate("CREATE TABLE test_day_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['day(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1969-12-25 15:13:12.876543', 8)," + "(TIMESTAMP '1969-12-30 18:47:33.345678', 9)," + "(TIMESTAMP '1969-12-31 00:00:00.000000', 10)," + "(TIMESTAMP '1969-12-31 05:06:07.234567', 11)," + "(TIMESTAMP '1970-01-01 12:03:08.456789', 12)," + "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," + "(TIMESTAMP '2015-01-01 11:10:02.987654', 2)," + "(TIMESTAMP '2015-01-01 12:55:00.456789', 3)," + "(TIMESTAMP '2015-05-15 13:05:01.234567', 4)," + "(TIMESTAMP '2015-05-15 14:21:02.345678', 5)," + "(TIMESTAMP '2020-02-21 15:11:11.876543', 6)," + "(TIMESTAMP '2020-02-21 16:12:12.654321', 7)"; assertUpdate("INSERT INTO test_day_transform_timestamp " + values, 12); assertQuery("SELECT * FROM test_day_transform_timestamp", values); @Language("SQL") String expected = "VALUES " + "(DATE '1969-12-25', 1, TIMESTAMP '1969-12-25 15:13:12.876543', TIMESTAMP '1969-12-25 15:13:12.876543', 8, 8), " + "(DATE '1969-12-30', 1, TIMESTAMP '1969-12-30 18:47:33.345678', TIMESTAMP '1969-12-30 18:47:33.345678', 9, 9), " + "(DATE '1969-12-31', 2, TIMESTAMP '1969-12-31 00:00:00.000000', TIMESTAMP '1969-12-31 05:06:07.234567', 10, 11), " + "(DATE '1970-01-01', 1, TIMESTAMP '1970-01-01 12:03:08.456789', TIMESTAMP '1970-01-01 12:03:08.456789', 12, 12), " + "(DATE '2015-01-01', 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 12:55:00.456789', 1, 3), " + "(DATE '2015-05-15', 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-05-15 14:21:02.345678', 4, 5), " + "(DATE '2020-02-21', 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-02-21 
16:12:12.654321', 6, 7)"; String expectedTimestampStats = "'1969-12-25 15:13:12.876543', '2020-02-21 16:12:12.654321'"; if (format == ORC) { expected = "VALUES " + "(DATE '1969-12-25', 1, TIMESTAMP '1969-12-25 15:13:12.876000', TIMESTAMP '1969-12-25 15:13:12.876999', 8, 8), " + "(DATE '1969-12-30', 1, TIMESTAMP '1969-12-30 18:47:33.345000', TIMESTAMP '1969-12-30 18:47:33.345999', 9, 9), " + "(DATE '1969-12-31', 2, TIMESTAMP '1969-12-31 00:00:00.000000', TIMESTAMP '1969-12-31 05:06:07.234999', 10, 11), " + "(DATE '1970-01-01', 1, TIMESTAMP '1970-01-01 12:03:08.456000', TIMESTAMP '1970-01-01 12:03:08.456999', 12, 12), " + "(DATE '2015-01-01', 3, TIMESTAMP '2015-01-01 10:01:23.123000', TIMESTAMP '2015-01-01 12:55:00.456999', 1, 3), " + "(DATE '2015-05-15', 2, TIMESTAMP '2015-05-15 13:05:01.234000', TIMESTAMP '2015-05-15 14:21:02.345999', 4, 5), " + "(DATE '2020-02-21', 2, TIMESTAMP '2020-02-21 15:11:11.876000', TIMESTAMP '2020-02-21 16:12:12.654999', 6, 7)"; expectedTimestampStats = "'1969-12-25 15:13:12.876000', '2020-02-21 16:12:12.654999'"; } assertQuery("SELECT partition.d_day, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_day_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_day_transform_timestamp WHERE day_of_week(d) = 3 AND b % 7 = 3", "VALUES (TIMESTAMP '1969-12-31 00:00:00.000000', 10)"); assertThat(query("SHOW STATS FOR test_day_transform_timestamp")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, " + expectedTimestampStats + "), " + " ('b', NULL, NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_day_transform_timestamp"); } @Test public void testMonthTransformDate() { assertUpdate("CREATE TABLE test_month_transform_date (d DATE, b BIGINT) WITH (partitioning = ARRAY['month(d)'])"); @Language("SQL") String values = "VALUES " + "(DATE '1969-11-13', 1)," + "(DATE '1969-12-01', 2)," + "(DATE '1969-12-02', 3)," + "(DATE '1969-12-31', 4)," + "(DATE '1970-01-01', 5), " + "(DATE '1970-05-13', 6), " + "(DATE '1970-12-31', 7), " + "(DATE '2020-01-01', 8), " + "(DATE '2020-06-16', 9), " + "(DATE '2020-06-28', 10), " + "(DATE '2020-06-06', 11), " + "(DATE '2020-07-18', 12), " + "(DATE '2020-07-28', 13), " + "(DATE '2020-12-31', 14)"; assertUpdate("INSERT INTO test_month_transform_date " + values, 14); assertQuery("SELECT * FROM test_month_transform_date", values); assertQuery( "SELECT partition.d_month, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_month_transform_date$partitions\"", "VALUES " + "(-2, 1, DATE '1969-11-13', DATE '1969-11-13', 1, 1), " + "(-1, 3, DATE '1969-12-01', DATE '1969-12-31', 2, 4), " + "(0, 1, DATE '1970-01-01', DATE '1970-01-01', 5, 5), " + "(4, 1, DATE '1970-05-13', DATE '1970-05-13', 6, 6), " + "(11, 1, DATE '1970-12-31', DATE '1970-12-31', 7, 7), " + "(600, 1, DATE '2020-01-01', DATE '2020-01-01', 8, 8), " + "(605, 3, DATE '2020-06-06', DATE '2020-06-28', 9, 11), " + "(606, 2, DATE '2020-07-18', DATE '2020-07-28', 12, 13), " + "(611, 1, DATE '2020-12-31', DATE '2020-12-31', 14, 14)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_month_transform_date WHERE day_of_week(d) = 7 AND b % 7 = 3", "VALUES (DATE '2020-06-28', 10)"); assertThat(query("SHOW STATS FOR test_month_transform_date")) .skippingTypesCheck() 
.matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, '1969-11-13', '2020-12-31'), " + " ('b', NULL, NULL, 0e0, NULL, '1', '14'), " + " (NULL, NULL, NULL, NULL, 14e0, NULL, NULL)"); dropTable("test_month_transform_date"); } @Test public void testMonthTransformTimestamp() { assertUpdate("CREATE TABLE test_month_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['month(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1969-11-15 15:13:12.876543', 8)," + "(TIMESTAMP '1969-11-19 18:47:33.345678', 9)," + "(TIMESTAMP '1969-12-01 00:00:00.000000', 10)," + "(TIMESTAMP '1969-12-01 05:06:07.234567', 11)," + "(TIMESTAMP '1970-01-01 12:03:08.456789', 12)," + "(TIMESTAMP '2015-01-01 10:01:23.123456', 1)," + "(TIMESTAMP '2015-01-01 11:10:02.987654', 2)," + "(TIMESTAMP '2015-01-01 12:55:00.456789', 3)," + "(TIMESTAMP '2015-05-15 13:05:01.234567', 4)," + "(TIMESTAMP '2015-05-15 14:21:02.345678', 5)," + "(TIMESTAMP '2020-02-21 15:11:11.876543', 6)," + "(TIMESTAMP '2020-02-21 16:12:12.654321', 7)"; assertUpdate("INSERT INTO test_month_transform_timestamp " + values, 12); assertQuery("SELECT * FROM test_month_transform_timestamp", values); @Language("SQL") String expected = "VALUES " + "(-2, 2, TIMESTAMP '1969-11-15 15:13:12.876543', TIMESTAMP '1969-11-19 18:47:33.345678', 8, 9), " + "(-1, 2, TIMESTAMP '1969-12-01 00:00:00.000000', TIMESTAMP '1969-12-01 05:06:07.234567', 10, 11), " + "(0, 1, TIMESTAMP '1970-01-01 12:03:08.456789', TIMESTAMP '1970-01-01 12:03:08.456789', 12, 12), " + "(540, 3, TIMESTAMP '2015-01-01 10:01:23.123456', TIMESTAMP '2015-01-01 12:55:00.456789', 1, 3), " + "(544, 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-05-15 14:21:02.345678', 4, 5), " + "(601, 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-02-21 16:12:12.654321', 6, 7)"; String expectedTimestampStats = "'1969-11-15 15:13:12.876543', '2020-02-21 16:12:12.654321'"; if (format == ORC) { expected = "VALUES " + "(-2, 2, TIMESTAMP '1969-11-15 15:13:12.876000', TIMESTAMP '1969-11-19 18:47:33.345999', 8, 9), " + "(-1, 2, TIMESTAMP '1969-12-01 00:00:00.000000', TIMESTAMP '1969-12-01 05:06:07.234999', 10, 11), " + "(0, 1, TIMESTAMP '1970-01-01 12:03:08.456000', TIMESTAMP '1970-01-01 12:03:08.456999', 12, 12), " + "(540, 3, TIMESTAMP '2015-01-01 10:01:23.123000', TIMESTAMP '2015-01-01 12:55:00.456999', 1, 3), " + "(544, 2, TIMESTAMP '2015-05-15 13:05:01.234000', TIMESTAMP '2015-05-15 14:21:02.345999', 4, 5), " + "(601, 2, TIMESTAMP '2020-02-21 15:11:11.876000', TIMESTAMP '2020-02-21 16:12:12.654999', 6, 7)"; expectedTimestampStats = "'1969-11-15 15:13:12.876000', '2020-02-21 16:12:12.654999'"; } assertQuery("SELECT partition.d_month, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_month_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_month_transform_timestamp WHERE day_of_week(d) = 1 AND b % 7 = 3", "VALUES (TIMESTAMP '1969-12-01 00:00:00.000000', 10)"); assertThat(query("SHOW STATS FOR test_month_transform_timestamp")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, " + expectedTimestampStats + "), " + " ('b', NULL, NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_month_transform_timestamp"); } @Test public void testYearTransformDate() { assertUpdate("CREATE TABLE test_year_transform_date (d DATE, b BIGINT) WITH (partitioning = 
ARRAY['year(d)'])"); @Language("SQL") String values = "VALUES " + "(DATE '1968-10-13', 1), " + "(DATE '1969-01-01', 2), " + "(DATE '1969-03-15', 3), " + "(DATE '1970-01-01', 4), " + "(DATE '1970-03-05', 5), " + "(DATE '2015-01-01', 6), " + "(DATE '2015-06-16', 7), " + "(DATE '2015-07-28', 8), " + "(DATE '2016-05-15', 9), " + "(DATE '2016-06-06', 10), " + "(DATE '2020-02-21', 11), " + "(DATE '2020-11-10', 12)"; assertUpdate("INSERT INTO test_year_transform_date " + values, 12); assertQuery("SELECT * FROM test_year_transform_date", values); assertQuery( "SELECT partition.d_year, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_year_transform_date$partitions\"", "VALUES " + "(-2, 1, DATE '1968-10-13', DATE '1968-10-13', 1, 1), " + "(-1, 2, DATE '1969-01-01', DATE '1969-03-15', 2, 3), " + "(0, 2, DATE '1970-01-01', DATE '1970-03-05', 4, 5), " + "(45, 3, DATE '2015-01-01', DATE '2015-07-28', 6, 8), " + "(46, 2, DATE '2016-05-15', DATE '2016-06-06', 9, 10), " + "(50, 2, DATE '2020-02-21', DATE '2020-11-10', 11, 12)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_year_transform_date WHERE day_of_week(d) = 1 AND b % 7 = 3", "VALUES (DATE '2016-06-06', 10)"); assertThat(query("SHOW STATS FOR test_year_transform_date")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, '1968-10-13', '2020-11-10'), " + " ('b', NULL, NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_year_transform_date"); } @Test public void testYearTransformTimestamp() { assertUpdate("CREATE TABLE test_year_transform_timestamp (d TIMESTAMP(6), b BIGINT) WITH (partitioning = ARRAY['year(d)'])"); @Language("SQL") String values = "VALUES " + "(TIMESTAMP '1968-03-15 15:13:12.876543', 1)," + "(TIMESTAMP '1968-11-19 18:47:33.345678', 2)," + "(TIMESTAMP '1969-01-01 00:00:00.000000', 3)," + "(TIMESTAMP '1969-01-01 05:06:07.234567', 4)," + "(TIMESTAMP '1970-01-18 12:03:08.456789', 5)," + "(TIMESTAMP '1970-03-14 10:01:23.123456', 6)," + "(TIMESTAMP '1970-08-19 11:10:02.987654', 7)," + "(TIMESTAMP '1970-12-31 12:55:00.456789', 8)," + "(TIMESTAMP '2015-05-15 13:05:01.234567', 9)," + "(TIMESTAMP '2015-09-15 14:21:02.345678', 10)," + "(TIMESTAMP '2020-02-21 15:11:11.876543', 11)," + "(TIMESTAMP '2020-08-21 16:12:12.654321', 12)"; assertUpdate("INSERT INTO test_year_transform_timestamp " + values, 12); assertQuery("SELECT * FROM test_year_transform_timestamp", values); @Language("SQL") String expected = "VALUES " + "(-2, 2, TIMESTAMP '1968-03-15 15:13:12.876543', TIMESTAMP '1968-11-19 18:47:33.345678', 1, 2), " + "(-1, 2, TIMESTAMP '1969-01-01 00:00:00.000000', TIMESTAMP '1969-01-01 05:06:07.234567', 3, 4), " + "(0, 4, TIMESTAMP '1970-01-18 12:03:08.456789', TIMESTAMP '1970-12-31 12:55:00.456789', 5, 8), " + "(45, 2, TIMESTAMP '2015-05-15 13:05:01.234567', TIMESTAMP '2015-09-15 14:21:02.345678', 9, 10), " + "(50, 2, TIMESTAMP '2020-02-21 15:11:11.876543', TIMESTAMP '2020-08-21 16:12:12.654321', 11, 12)"; String expectedTimestampStats = "'1968-03-15 15:13:12.876543', '2020-08-21 16:12:12.654321'"; if (format == ORC) { expected = "VALUES " + "(-2, 2, TIMESTAMP '1968-03-15 15:13:12.876000', TIMESTAMP '1968-11-19 18:47:33.345999', 1, 2), " + "(-1, 2, TIMESTAMP '1969-01-01 00:00:00.000000', TIMESTAMP '1969-01-01 05:06:07.234999', 3, 4), " + "(0, 4, TIMESTAMP '1970-01-18 12:03:08.456000', TIMESTAMP '1970-12-31 12:55:00.456999', 5, 8), " + "(45, 2, TIMESTAMP 
'2015-05-15 13:05:01.234000', TIMESTAMP '2015-09-15 14:21:02.345999', 9, 10), " + "(50, 2, TIMESTAMP '2020-02-21 15:11:11.876000', TIMESTAMP '2020-08-21 16:12:12.654999', 11, 12)"; expectedTimestampStats = "'1968-03-15 15:13:12.876000', '2020-08-21 16:12:12.654999'"; } assertQuery("SELECT partition.d_year, record_count, data.d.min, data.d.max, data.b.min, data.b.max FROM \"test_year_transform_timestamp$partitions\"", expected); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_year_transform_timestamp WHERE day_of_week(d) = 2 AND b % 7 = 3", "VALUES (TIMESTAMP '2015-09-15 14:21:02.345678', 10)"); assertThat(query("SHOW STATS FOR test_year_transform_timestamp")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, " + expectedTimestampStats + "), " + " ('b', NULL, NULL, 0e0, NULL, '1', '12'), " + " (NULL, NULL, NULL, NULL, 12e0, NULL, NULL)"); dropTable("test_year_transform_timestamp"); } @Test public void testTruncateTextTransform() { assertUpdate("CREATE TABLE test_truncate_text_transform (d VARCHAR, b BIGINT) WITH (partitioning = ARRAY['truncate(d, 2)'])"); String select = "SELECT partition.d_trunc, record_count, data.d.min AS d_min, data.d.max AS d_max, data.b.min AS b_min, data.b.max AS b_max FROM \"test_truncate_text_transform$partitions\""; assertUpdate("INSERT INTO test_truncate_text_transform VALUES" + "('abcd', 1)," + "('abxy', 2)," + "('ab598', 3)," + "('mommy', 4)," + "('moscow', 5)," + "('Greece', 6)," + "('Grozny', 7)", 7); assertQuery("SELECT partition.d_trunc FROM \"test_truncate_text_transform$partitions\"", "VALUES 'ab', 'mo', 'Gr'"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'ab'", "VALUES 1, 2, 3"); assertQuery(select + " WHERE partition.d_trunc = 'ab'", "VALUES ('ab', 3, 'ab598', 'abxy', 1, 3)"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'mo'", "VALUES 4, 5"); assertQuery(select + " WHERE partition.d_trunc = 'mo'", "VALUES ('mo', 2, 'mommy', 'moscow', 4, 5)"); assertQuery("SELECT b FROM test_truncate_text_transform WHERE substring(d, 1, 2) = 'Gr'", "VALUES 6, 7"); assertQuery(select + " WHERE partition.d_trunc = 'Gr'", "VALUES ('Gr', 2, 'Greece', 'Grozny', 6, 7)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_truncate_text_transform WHERE length(d) = 4 AND b % 7 = 2", "VALUES ('abxy', 2)"); assertThat(query("SHOW STATS FOR test_truncate_text_transform")) .skippingTypesCheck() .matches("VALUES " + " ('d', " + (format == PARQUET ? 
"169e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " + " ('b', NULL, NULL, 0e0, NULL, '1', '7'), " + " (NULL, NULL, NULL, NULL, 7e0, NULL, NULL)"); dropTable("test_truncate_text_transform"); } @Test(dataProvider = "truncateNumberTypesProvider") public void testTruncateIntegerTransform(String dataType) { String table = format("test_truncate_%s_transform", dataType); assertUpdate(format("CREATE TABLE " + table + " (d %s, b BIGINT) WITH (partitioning = ARRAY['truncate(d, 10)'])", dataType)); String select = "SELECT partition.d_trunc, record_count, data.d.min AS d_min, data.d.max AS d_max, data.b.min AS b_min, data.b.max AS b_max FROM \"" + table + "$partitions\""; assertUpdate("INSERT INTO " + table + " VALUES" + "(0, 1)," + "(1, 2)," + "(5, 3)," + "(9, 4)," + "(10, 5)," + "(11, 6)," + "(120, 7)," + "(121, 8)," + "(123, 9)," + "(-1, 10)," + "(-5, 11)," + "(-10, 12)," + "(-11, 13)," + "(-123, 14)," + "(-130, 15)", 15); assertQuery("SELECT partition.d_trunc FROM \"" + table + "$partitions\"", "VALUES 0, 10, 120, -10, -20, -130"); assertQuery("SELECT b FROM " + table + " WHERE d IN (0, 1, 5, 9)", "VALUES 1, 2, 3, 4"); assertQuery(select + " WHERE partition.d_trunc = 0", "VALUES (0, 4, 0, 9, 1, 4)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (10, 11)", "VALUES 5, 6"); assertQuery(select + " WHERE partition.d_trunc = 10", "VALUES (10, 2, 10, 11, 5, 6)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (120, 121, 123)", "VALUES 7, 8, 9"); assertQuery(select + " WHERE partition.d_trunc = 120", "VALUES (120, 3, 120, 123, 7, 9)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (-1, -5, -10)", "VALUES 10, 11, 12"); assertQuery(select + " WHERE partition.d_trunc = -10", "VALUES (-10, 3, -10, -1, 10, 12)"); assertQuery("SELECT b FROM " + table + " WHERE d = -11", "VALUES 13"); assertQuery(select + " WHERE partition.d_trunc = -20", "VALUES (-20, 1, -11, -11, 13, 13)"); assertQuery("SELECT b FROM " + table + " WHERE d IN (-123, -130)", "VALUES 14, 15"); assertQuery(select + " WHERE partition.d_trunc = -130", "VALUES (-130, 2, -130, -123, 14, 15)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM " + table + " WHERE d % 10 = -1 AND b % 7 = 3", "VALUES (-1, 10)"); assertThat(query("SHOW STATS FOR " + table)) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, '-130', '123'), " + " ('b', NULL, NULL, 0e0, NULL, '1', '15'), " + " (NULL, NULL, NULL, NULL, 15e0, NULL, NULL)"); dropTable(table); } @DataProvider public Object[][] truncateNumberTypesProvider() { return new Object[][] { {"integer"}, {"bigint"}, }; } @Test public void testTruncateDecimalTransform() { assertUpdate("CREATE TABLE test_truncate_decimal_transform (d DECIMAL(9, 2), b BIGINT) WITH (partitioning = ARRAY['truncate(d, 10)'])"); String select = "SELECT partition.d_trunc, record_count, data.d.min AS d_min, data.d.max AS d_max, data.b.min AS b_min, data.b.max AS b_max FROM \"test_truncate_decimal_transform$partitions\""; assertUpdate("INSERT INTO test_truncate_decimal_transform VALUES" + "(12.34, 1)," + "(12.30, 2)," + "(12.29, 3)," + "(0.05, 4)," + "(-0.05, 5)", 5); assertQuery("SELECT partition.d_trunc FROM \"test_truncate_decimal_transform$partitions\"", "VALUES 12.30, 12.20, 0.00, -0.10"); assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d IN (12.34, 12.30)", "VALUES 1, 2"); assertQuery(select + " WHERE partition.d_trunc = 12.30", "VALUES (12.30, 2, 12.30, 12.34, 1, 2)"); assertQuery("SELECT b FROM 
test_truncate_decimal_transform WHERE d = 12.29", "VALUES 3"); assertQuery(select + " WHERE partition.d_trunc = 12.20", "VALUES (12.20, 1, 12.29, 12.29, 3, 3)"); assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d = 0.05", "VALUES 4"); assertQuery(select + " WHERE partition.d_trunc = 0.00", "VALUES (0.00, 1, 0.05, 0.05, 4, 4)"); assertQuery("SELECT b FROM test_truncate_decimal_transform WHERE d = -0.05", "VALUES 5"); assertQuery(select + " WHERE partition.d_trunc = -0.10", "VALUES (-0.10, 1, -0.05, -0.05, 5, 5)"); // Exercise IcebergMetadata.applyFilter with non-empty Constraint.predicate, via non-pushdownable predicates assertQuery( "SELECT * FROM test_truncate_decimal_transform WHERE d * 100 % 10 = 9 AND b % 7 = 3", "VALUES (12.29, 3)"); assertThat(query("SHOW STATS FOR test_truncate_decimal_transform")) .skippingTypesCheck() .matches("VALUES " + " ('d', NULL, NULL, 0e0, NULL, '-0.05', '12.34'), " + " ('b', NULL, NULL, 0e0, NULL, '1', '5'), " + " (NULL, NULL, NULL, NULL, 5e0, NULL, NULL)"); dropTable("test_truncate_decimal_transform"); } @Test public void testBucketTransform() { testBucketTransformForType("DATE", "DATE '2020-05-19'", "DATE '2020-08-19'", "DATE '2020-11-19'"); testBucketTransformForType("VARCHAR", "CAST('abcd' AS VARCHAR)", "CAST('mommy' AS VARCHAR)", "CAST('abxy' AS VARCHAR)"); testBucketTransformForType("BIGINT", "CAST(100000000 AS BIGINT)", "CAST(200000002 AS BIGINT)", "CAST(400000001 AS BIGINT)"); testBucketTransformForType( "UUID", "CAST('206caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID)", "CAST('906caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID)", "CAST('406caec7-68b9-4778-81b2-a12ece70c8b1' AS UUID)"); } protected void testBucketTransformForType( String type, String value, String greaterValueInSameBucket, String valueInOtherBucket) { String tableName = format("test_bucket_transform%s", type.toLowerCase(Locale.ENGLISH)); assertUpdate(format("CREATE TABLE %s (d %s) WITH (partitioning = ARRAY['bucket(d, 2)'])", tableName, type)); assertUpdate(format("INSERT INTO %s VALUES (%s), (%s), (%s)", tableName, value, greaterValueInSameBucket, valueInOtherBucket), 3); assertThat(query(format("SELECT * FROM %s", tableName))).matches(format("VALUES (%s), (%s), (%s)", value, greaterValueInSameBucket, valueInOtherBucket)); String selectFromPartitions = format("SELECT partition.d_bucket, record_count, data.d.min AS d_min, data.d.max AS d_max FROM \"%s$partitions\"", tableName); if (supportsIcebergFileStatistics(type)) { assertQuery(selectFromPartitions + " WHERE partition.d_bucket = 0", format("VALUES(0, %d, %s, %s)", 2, value, greaterValueInSameBucket)); assertQuery(selectFromPartitions + " WHERE partition.d_bucket = 1", format("VALUES(1, %d, %s, %s)", 1, valueInOtherBucket, valueInOtherBucket)); } else { assertQuery(selectFromPartitions + " WHERE partition.d_bucket = 0", format("VALUES(0, %d, null, null)", 2)); assertQuery(selectFromPartitions + " WHERE partition.d_bucket = 1", format("VALUES(1, %d, null, null)", 1)); } assertThat(query("SHOW STATS FOR " + tableName)) .skippingTypesCheck() .projected(0, 2, 3, 4) // data size, min and max may vary between types .matches("VALUES " + " ('d', NULL, 0e0, NULL), " + " (NULL, NULL, NULL, 3e0)"); dropTable(tableName); } @Test public void testApplyFilterWithNonEmptyConstraintPredicate() { assertUpdate("CREATE TABLE test_bucket_transform (d VARCHAR, b BIGINT) WITH (partitioning = ARRAY['bucket(d, 2)'])"); assertUpdate( "INSERT INTO test_bucket_transform VALUES" + "('abcd', 1)," + "('abxy', 2)," + "('ab598', 3)," + "('mommy', 4)," + 
"('moscow', 5)," + "('Greece', 6)," + "('Grozny', 7)", 7); assertQuery( "SELECT * FROM test_bucket_transform WHERE length(d) = 4 AND b % 7 = 2", "VALUES ('abxy', 2)"); } @Test public void testVoidTransform() { assertUpdate("CREATE TABLE test_void_transform (d VARCHAR, b BIGINT) WITH (partitioning = ARRAY['void(d)'])"); String values = "VALUES " + "('abcd', 1)," + "('abxy', 2)," + "('ab598', 3)," + "('mommy', 4)," + "('Warsaw', 5)," + "(NULL, 6)," + "(NULL, 7)"; assertUpdate("INSERT INTO test_void_transform " + values, 7); assertQuery("SELECT * FROM test_void_transform", values); assertQuery("SELECT COUNT(*) FROM \"test_void_transform$partitions\"", "SELECT 1"); assertQuery( "SELECT partition.d_null, record_count, file_count, data.d.min, data.d.max, data.d.null_count, data.d.nan_count, data.b.min, data.b.max, data.b.null_count, data.b.nan_count FROM \"test_void_transform$partitions\"", "VALUES (NULL, 7, 1, 'Warsaw', 'mommy', 2, NULL, 1, 7, 0, NULL)"); assertQuery( "SELECT d, b FROM test_void_transform WHERE d IS NOT NULL", "VALUES " + "('abcd', 1)," + "('abxy', 2)," + "('ab598', 3)," + "('mommy', 4)," + "('Warsaw', 5)"); assertQuery("SELECT b FROM test_void_transform WHERE d IS NULL", "VALUES 6, 7"); assertThat(query("SHOW STATS FOR test_void_transform")) .skippingTypesCheck() .matches("VALUES " + " ('d', " + (format == PARQUET ? "76e0" : "NULL") + ", NULL, 0.2857142857142857, NULL, NULL, NULL), " + " ('b', NULL, NULL, 0e0, NULL, '1', '7'), " + " (NULL, NULL, NULL, NULL, 7e0, NULL, NULL)"); assertUpdate("DROP TABLE " + "test_void_transform"); } @Test public void testMetadataDeleteSimple() { assertUpdate("CREATE TABLE test_metadata_delete_simple (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col1'])"); assertUpdate("INSERT INTO test_metadata_delete_simple VALUES(1, 100), (1, 101), (1, 102), (2, 200), (2, 201), (3, 300)", 6); assertQuery("SELECT sum(col2) FROM test_metadata_delete_simple", "SELECT 1004"); assertQuery("SELECT count(*) FROM \"test_metadata_delete_simple$partitions\"", "SELECT 3"); assertUpdate("DELETE FROM test_metadata_delete_simple WHERE col1 = 1", 3); assertQuery("SELECT sum(col2) FROM test_metadata_delete_simple", "SELECT 701"); assertQuery("SELECT count(*) FROM \"test_metadata_delete_simple$partitions\"", "SELECT 2"); dropTable("test_metadata_delete_simple"); } @Test public void testMetadataDelete() { assertUpdate("CREATE TABLE test_metadata_delete (" + " orderkey BIGINT," + " linenumber INTEGER," + " linestatus VARCHAR" + ") " + "WITH (" + " partitioning = ARRAY[ 'linenumber', 'linestatus' ]" + ")"); assertUpdate( "" + "INSERT INTO test_metadata_delete " + "SELECT orderkey, linenumber, linestatus " + "FROM tpch.tiny.lineitem", "SELECT count(*) FROM lineitem"); assertQuery("SELECT COUNT(*) FROM \"test_metadata_delete$partitions\"", "SELECT 14"); assertUpdate("DELETE FROM test_metadata_delete WHERE linestatus = 'F' AND linenumber = 3", 5378); assertQuery("SELECT * FROM test_metadata_delete", "SELECT orderkey, linenumber, linestatus FROM lineitem WHERE linestatus <> 'F' or linenumber <> 3"); assertQuery("SELECT count(*) FROM \"test_metadata_delete$partitions\"", "SELECT 13"); assertUpdate("DELETE FROM test_metadata_delete WHERE linestatus='O'", 30049); assertQuery("SELECT count(*) FROM \"test_metadata_delete$partitions\"", "SELECT 6"); assertQuery("SELECT * FROM test_metadata_delete", "SELECT orderkey, linenumber, linestatus FROM lineitem WHERE linestatus <> 'O' AND linenumber <> 3"); dropTable("test_metadata_delete"); } @Test public void testInSet() { testInSet(31); 
        testInSet(35);
    }

    private void testInSet(int inCount)
    {
        String values = range(1, inCount + 1)
                .mapToObj(n -> format("(%s, %s)", n, n + 10))
                .collect(joining(", "));
        String inList = range(1, inCount + 1)
                .mapToObj(Integer::toString)
                .collect(joining(", "));

        assertUpdate("CREATE TABLE test_in_set (col1 INTEGER, col2 BIGINT)");
        assertUpdate(format("INSERT INTO test_in_set VALUES %s", values), inCount);
        // This proves that SELECTs with large IN phrases work correctly
        computeActual(format("SELECT col1 FROM test_in_set WHERE col1 IN (%s)", inList));
        dropTable("test_in_set");
    }

    @Test
    public void testBasicTableStatistics()
    {
        String tableName = "test_basic_table_statistics";
        assertUpdate(format("CREATE TABLE %s (col REAL)", tableName));

        assertThat(query("SHOW STATS FOR " + tableName))
                .skippingTypesCheck()
                .matches("VALUES " +
                        "  ('col', 0e0, 0e0, 1e0, NULL, NULL, NULL), " +
                        "  (NULL, NULL, NULL, NULL, 0e0, NULL, NULL)");

        assertUpdate("INSERT INTO " + tableName + " VALUES -10", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES 100", 1);

        assertThat(query("SHOW STATS FOR " + tableName))
                .skippingTypesCheck()
                .matches("VALUES " +
                        "  ('col', NULL, NULL, 0e0, NULL, '-10.0', '100.0'), " +
                        "  (NULL, NULL, NULL, NULL, 2e0, NULL, NULL)");

        assertUpdate("INSERT INTO " + tableName + " VALUES 200", 1);

        assertThat(query("SHOW STATS FOR " + tableName))
                .skippingTypesCheck()
                .matches("VALUES " +
                        "  ('col', NULL, NULL, 0e0, NULL, '-10.0', '200.0'), " +
                        "  (NULL, NULL, NULL, NULL, 3e0, NULL, NULL)");

        dropTable(tableName);
    }

    @Test
    public void testMultipleColumnTableStatistics()
    {
        String tableName = "test_multiple_table_statistics";
        assertUpdate(format("CREATE TABLE %s (col1 REAL, col2 INTEGER, col3 DATE)", tableName));
        assertUpdate("INSERT INTO " + tableName + " VALUES (-10, -1, DATE '2019-06-28')", 1);
        assertUpdate("INSERT INTO " + tableName + " VALUES (100, 10, DATE '2020-01-01')", 1);

        MaterializedResult result = computeActual("SHOW STATS FOR " + tableName);

        MaterializedResult expectedStatistics =
                resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR)
                        .row("col1", null, null, 0.0, null, "-10.0", "100.0")
                        .row("col2", null, null, 0.0, null, "-1", "10")
                        .row("col3", null, null, 0.0, null, "2019-06-28", "2020-01-01")
                        .row(null, null, null, null, 2.0, null, null)
                        .build();
        assertEquals(result, expectedStatistics);

        assertUpdate("INSERT INTO " + tableName + " VALUES (200, 20, DATE '2020-06-28')", 1);
        result = computeActual("SHOW STATS FOR " + tableName);
        expectedStatistics =
                resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR)
                        .row("col1", null, null, 0.0, null, "-10.0", "200.0")
                        .row("col2", null, null, 0.0, null, "-1", "20")
                        .row("col3", null, null, 0.0, null, "2019-06-28", "2020-06-28")
                        .row(null, null, null, null, 3.0, null, null)
                        .build();
        assertEquals(result, expectedStatistics);

        assertUpdate("INSERT INTO " + tableName + " VALUES " + IntStream.rangeClosed(21, 25)
                .mapToObj(i -> format("(200, %d, DATE '2020-07-%d')", i, i))
                .collect(joining(", ")), 5);

        assertUpdate("INSERT INTO " + tableName + " VALUES " + IntStream.rangeClosed(26, 30)
                .mapToObj(i -> format("(NULL, %d, DATE '2020-06-%d')", i, i))
                .collect(joining(", ")), 5);

        result = computeActual("SHOW STATS FOR " + tableName);
        expectedStatistics =
                resultBuilder(getSession(), VARCHAR, DOUBLE, DOUBLE, DOUBLE, DOUBLE, VARCHAR, VARCHAR)
                        .row("col1", null, null, 5.0 / 13.0, null, "-10.0", "200.0")
                        .row("col2", null, null, 0.0, null, "-1", "30")
                        .row("col3", null, null, 0.0, null, "2019-06-28",
"2020-07-25") .row(null, null, null, null, 13.0, null, null) .build(); assertEquals(result, expectedStatistics); dropTable(tableName); } @Test public void testPartitionedTableStatistics() { assertUpdate("CREATE TABLE iceberg.tpch.test_partitioned_table_statistics (col1 REAL, col2 BIGINT) WITH (partitioning = ARRAY['col2'])"); assertUpdate("INSERT INTO test_partitioned_table_statistics VALUES (-10, -1)", 1); assertUpdate("INSERT INTO test_partitioned_table_statistics VALUES (100, 10)", 1); MaterializedResult result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics"); assertEquals(result.getRowCount(), 3); MaterializedRow row0 = result.getMaterializedRows().get(0); assertEquals(row0.getField(0), "col1"); assertEquals(row0.getField(3), 0.0); assertEquals(row0.getField(5), "-10.0"); assertEquals(row0.getField(6), "100.0"); MaterializedRow row1 = result.getMaterializedRows().get(1); assertEquals(row1.getField(0), "col2"); assertEquals(row1.getField(3), 0.0); assertEquals(row1.getField(5), "-1"); assertEquals(row1.getField(6), "10"); MaterializedRow row2 = result.getMaterializedRows().get(2); assertEquals(row2.getField(4), 2.0); assertUpdate("INSERT INTO test_partitioned_table_statistics VALUES " + IntStream.rangeClosed(1, 5) .mapToObj(i -> format("(%d, 10)", i + 100)) .collect(joining(", ")), 5); assertUpdate("INSERT INTO test_partitioned_table_statistics VALUES " + IntStream.rangeClosed(6, 10) .mapToObj(i -> "(NULL, 10)") .collect(joining(", ")), 5); result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics"); assertEquals(result.getRowCount(), 3); row0 = result.getMaterializedRows().get(0); assertEquals(row0.getField(0), "col1"); assertEquals(row0.getField(3), 5.0 / 12.0); assertEquals(row0.getField(5), "-10.0"); assertEquals(row0.getField(6), "105.0"); row1 = result.getMaterializedRows().get(1); assertEquals(row1.getField(0), "col2"); assertEquals(row1.getField(3), 0.0); assertEquals(row1.getField(5), "-1"); assertEquals(row1.getField(6), "10"); row2 = result.getMaterializedRows().get(2); assertEquals(row2.getField(4), 12.0); assertUpdate("INSERT INTO test_partitioned_table_statistics VALUES " + IntStream.rangeClosed(6, 10) .mapToObj(i -> "(100, NULL)") .collect(joining(", ")), 5); result = computeActual("SHOW STATS FOR iceberg.tpch.test_partitioned_table_statistics"); row0 = result.getMaterializedRows().get(0); assertEquals(row0.getField(0), "col1"); assertEquals(row0.getField(3), 5.0 / 17.0); assertEquals(row0.getField(5), "-10.0"); assertEquals(row0.getField(6), "105.0"); row1 = result.getMaterializedRows().get(1); assertEquals(row1.getField(0), "col2"); assertEquals(row1.getField(3), 5.0 / 17.0); assertEquals(row1.getField(5), "-1"); assertEquals(row1.getField(6), "10"); row2 = result.getMaterializedRows().get(2); assertEquals(row2.getField(4), 17.0); dropTable("iceberg.tpch.test_partitioned_table_statistics"); } @Test public void testPredicatePushdown() { QualifiedObjectName tableName = new QualifiedObjectName("iceberg", "tpch", "test_predicate"); assertUpdate(format("CREATE TABLE %s (col1 BIGINT, col2 BIGINT, col3 BIGINT) WITH (partitioning = ARRAY['col2', 'col3'])", tableName)); assertUpdate(format("INSERT INTO %s VALUES (1, 10, 100)", tableName), 1L); assertUpdate(format("INSERT INTO %s VALUES (2, 20, 200)", tableName), 1L); assertQuery(format("SELECT * FROM %s WHERE col1 = 1", tableName), "VALUES (1, 10, 100)"); assertFilterPushdown( tableName, ImmutableMap.of("col1", singleValue(BIGINT, 1L)), ImmutableMap.of(), 
ImmutableMap.of("col1", singleValue(BIGINT, 1L))); assertQuery(format("SELECT * FROM %s WHERE col2 = 10", tableName), "VALUES (1, 10, 100)"); assertFilterPushdown( tableName, ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of()); assertQuery(format("SELECT * FROM %s WHERE col1 = 1 AND col2 = 10", tableName), "VALUES (1, 10, 100)"); assertFilterPushdown( tableName, ImmutableMap.of("col1", singleValue(BIGINT, 1L), "col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col2", singleValue(BIGINT, 10L)), ImmutableMap.of("col1", singleValue(BIGINT, 1L))); // Assert pushdown for an IN predicate with value count above the default compaction threshold List<Long> values = LongStream.range(1L, 1010L).boxed() .filter(index -> index != 20L) .collect(toImmutableList()); assertThat(values).hasSizeGreaterThan(ICEBERG_DOMAIN_COMPACTION_THRESHOLD); String valuesString = join(",", values.stream().map(Object::toString).collect(toImmutableList())); String inPredicate = "%s IN (" + valuesString + ")"; assertQuery( format("SELECT * FROM %s WHERE %s AND %s", tableName, format(inPredicate, "col1"), format(inPredicate, "col2")), "VALUES (1, 10, 100)"); assertFilterPushdown( tableName, ImmutableMap.of("col1", multipleValues(BIGINT, values), "col2", multipleValues(BIGINT, values)), ImmutableMap.of("col2", multipleValues(BIGINT, values)), // Unenforced predicate is simplified during split generation, but not reflected here ImmutableMap.of("col1", multipleValues(BIGINT, values))); dropTable(tableName.getObjectName()); } @Test public void testPredicatesWithStructuralTypes() { String tableName = "test_predicate_with_structural_types"; assertUpdate("CREATE TABLE " + tableName + " (id INT, array_t ARRAY(BIGINT), map_t MAP(BIGINT, BIGINT), struct_t ROW(f1 BIGINT, f2 BIGINT))"); assertUpdate("INSERT INTO " + tableName + " VALUES " + "(1, ARRAY[1, 2, 3], MAP(ARRAY[1,3], ARRAY[2,4]), ROW(1, 2)), " + "(11, ARRAY[11, 12, 13], MAP(ARRAY[11, 13], ARRAY[12, 14]), ROW(11, 12)), " + "(11, ARRAY[111, 112, 113], MAP(ARRAY[111, 13], ARRAY[112, 114]), ROW(111, 112)), " + "(21, ARRAY[21, 22, 23], MAP(ARRAY[21, 23], ARRAY[22, 24]), ROW(21, 22))", 4); assertQuery("SELECT id FROM " + tableName + " WHERE array_t = ARRAY[1, 2, 3]", "VALUES 1"); assertQuery("SELECT id FROM " + tableName + " WHERE map_t = MAP(ARRAY[11, 13], ARRAY[12, 14])", "VALUES 11"); assertQuery("SELECT id FROM " + tableName + " WHERE struct_t = ROW(21, 22)", "VALUES 21"); assertQuery("SELECT struct_t.f1 FROM " + tableName + " WHERE id = 11 AND map_t = MAP(ARRAY[11, 13], ARRAY[12, 14])", "VALUES 11"); dropTable(tableName); } @Test(dataProviderClass = DataProviders.class, dataProvider = "trueFalse") public void testPartitionsTableWithColumnNameConflict(boolean partitioned) { assertUpdate("DROP TABLE IF EXISTS test_partitions_with_conflict"); assertUpdate("CREATE TABLE test_partitions_with_conflict (" + " p integer, " + " row_count integer, " + " record_count integer, " + " file_count integer, " + " total_size integer " + ") " + (partitioned ? "WITH(partitioning = ARRAY['p'])" : "")); assertUpdate("INSERT INTO test_partitions_with_conflict VALUES (11, 12, 13, 14, 15)", 1); // sanity check assertThat(query("SELECT * FROM test_partitions_with_conflict")) .matches("VALUES (11, 12, 13, 14, 15)"); // test $partitions assertThat(query("SELECT * FROM \"test_partitions_with_conflict$partitions\"")) .matches("SELECT " + (partitioned ? 
"CAST(ROW(11) AS row(p integer)), " : "") + "BIGINT '1', " + "BIGINT '1', " + // total_size is not exactly deterministic, so grab whatever value there is "(SELECT total_size FROM \"test_partitions_with_conflict$partitions\"), " + "CAST(" + " ROW (" + (partitioned ? "" : " ROW(11, 11, 0, NULL), ") + " ROW(12, 12, 0, NULL), " + " ROW(13, 13, 0, NULL), " + " ROW(14, 14, 0, NULL), " + " ROW(15, 15, 0, NULL) " + " ) " + " AS row(" + (partitioned ? "" : " p row(min integer, max integer, null_count bigint, nan_count bigint), ") + " row_count row(min integer, max integer, null_count bigint, nan_count bigint), " + " record_count row(min integer, max integer, null_count bigint, nan_count bigint), " + " file_count row(min integer, max integer, null_count bigint, nan_count bigint), " + " total_size row(min integer, max integer, null_count bigint, nan_count bigint) " + " )" + ")"); assertUpdate("DROP TABLE test_partitions_with_conflict"); } private void assertFilterPushdown( QualifiedObjectName tableName, Map<String, Domain> filter, Map<String, Domain> expectedEnforcedPredicate, Map<String, Domain> expectedUnenforcedPredicate) { Metadata metadata = getQueryRunner().getMetadata(); newTransaction().execute(getSession(), session -> { TableHandle table = metadata.getTableHandle(session, tableName) .orElseThrow(() -> new TableNotFoundException(tableName.asSchemaTableName())); Map<String, ColumnHandle> columns = metadata.getColumnHandles(session, table); TupleDomain<ColumnHandle> domains = TupleDomain.withColumnDomains( filter.entrySet().stream() .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue))); Optional<ConstraintApplicationResult<TableHandle>> result = metadata.applyFilter(session, table, new Constraint(domains)); assertTrue(result.isEmpty() == (expectedUnenforcedPredicate == null && expectedEnforcedPredicate == null)); if (result.isPresent()) { IcebergTableHandle newTable = (IcebergTableHandle) result.get().getHandle().getConnectorHandle(); assertEquals( newTable.getEnforcedPredicate(), TupleDomain.withColumnDomains(expectedEnforcedPredicate.entrySet().stream() .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue)))); assertEquals( newTable.getUnenforcedPredicate(), TupleDomain.withColumnDomains(expectedUnenforcedPredicate.entrySet().stream() .collect(toImmutableMap(entry -> columns.get(entry.getKey()), Map.Entry::getValue)))); } }); } @Test public void testCreateNestedPartitionedTable() { assertUpdate("CREATE TABLE test_nested_table_1 (" + " bool BOOLEAN" + ", int INTEGER" + ", arr ARRAY(VARCHAR)" + ", big BIGINT" + ", rl REAL" + ", dbl DOUBLE" + ", mp MAP(INTEGER, VARCHAR)" + ", dec DECIMAL(5,2)" + ", vc VARCHAR" + ", vb VARBINARY" + ", ts TIMESTAMP(6)" + ", tstz TIMESTAMP(6) WITH TIME ZONE" + ", str ROW(id INTEGER , vc VARCHAR)" + ", dt DATE)" + " WITH (partitioning = ARRAY['int'])"); assertUpdate( "INSERT INTO test_nested_table_1 " + " select true, 1, array['uno', 'dos', 'tres'], BIGINT '1', REAL '1.0', DOUBLE '1.0', map(array[1,2,3,4], array['ek','don','teen','char'])," + " CAST(1.0 as DECIMAL(5,2))," + " 'one', VARBINARY 'binary0/1values',\n" + " TIMESTAMP '2021-07-24 02:43:57.348000'," + " TIMESTAMP '2021-07-24 02:43:57.348000 UTC'," + " (CAST(ROW(null, 'this is a random value') AS ROW(int, varchar))), " + " DATE '2021-07-24'", 1); assertEquals(computeActual("SELECT * from test_nested_table_1").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_1")) .skippingTypesCheck() .matches("VALUES " + " ('bool', NULL, NULL, 
0e0, NULL, 'true', 'true'), " + " ('int', NULL, NULL, 0e0, NULL, '1', '1'), " + " ('arr', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('big', NULL, NULL, 0e0, NULL, '1', '1'), " + " ('rl', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('dbl', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('mp', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dec', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('vc', " + (format == PARQUET ? "43e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " + " ('vb', " + (format == PARQUET ? "55e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " + " ('ts', NULL, NULL, 0e0, NULL, '2021-07-24 02:43:57.348000', " + (format == ORC ? "'2021-07-24 02:43:57.348999'" : "'2021-07-24 02:43:57.348000'") + "), " + " ('tstz', NULL, NULL, 0e0, NULL, '2021-07-24 02:43:57.348 UTC', '2021-07-24 02:43:57.348 UTC'), " + " ('str', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dt', NULL, NULL, 0e0, NULL, '2021-07-24', '2021-07-24'), " + " (NULL, NULL, NULL, NULL, 1e0, NULL, NULL)"); dropTable("test_nested_table_1"); assertUpdate("" + "CREATE TABLE test_nested_table_2 (" + " int INTEGER" + ", arr ARRAY(ROW(id INTEGER, vc VARCHAR))" + ", big BIGINT" + ", rl REAL" + ", dbl DOUBLE" + ", mp MAP(INTEGER, ARRAY(VARCHAR))" + ", dec DECIMAL(5,2)" + ", str ROW(id INTEGER, vc VARCHAR, arr ARRAY(INTEGER))" + ", vc VARCHAR)" + " WITH (partitioning = ARRAY['int'])"); assertUpdate( "INSERT INTO test_nested_table_2 " + " select 1, array[cast(row(1, null) as row(int, varchar)), cast(row(2, 'dos') as row(int, varchar))], BIGINT '1', REAL '1.0', DOUBLE '1.0', " + "map(array[1,2], array[array['ek', 'one'], array['don', 'do', 'two']]), CAST(1.0 as DECIMAL(5,2)), " + "CAST(ROW(1, 'this is a random value', null) AS ROW(int, varchar, array(int))), 'one'", 1); assertEquals(computeActual("SELECT * from test_nested_table_2").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_2")) .skippingTypesCheck() .matches("VALUES " + " ('int', NULL, NULL, 0e0, NULL, '1', '1'), " + " ('arr', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('big', NULL, NULL, 0e0, NULL, '1', '1'), " + " ('rl', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('dbl', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('mp', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " + " ('dec', NULL, NULL, 0e0, NULL, '1.0', '1.0'), " + " ('vc', " + (format == PARQUET ? "43e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " + " ('str', NULL, NULL, " + (format == ORC ? 
"0e0" : "NULL") + ", NULL, NULL, NULL), " + " (NULL, NULL, NULL, NULL, 1e0, NULL, NULL)"); assertUpdate("CREATE TABLE test_nested_table_3 WITH (partitioning = ARRAY['int']) AS SELECT * FROM test_nested_table_2", 1); assertEquals(computeActual("SELECT * FROM test_nested_table_3").getRowCount(), 1); assertThat(query("SHOW STATS FOR test_nested_table_3")) .matches("SHOW STATS FOR test_nested_table_2"); dropTable("test_nested_table_2"); dropTable("test_nested_table_3"); } @Test public void testSerializableReadIsolation() { assertUpdate("CREATE TABLE test_read_isolation (x int)"); assertUpdate("INSERT INTO test_read_isolation VALUES 123, 456", 2); withTransaction(session -> { assertQuery(session, "SELECT * FROM test_read_isolation", "VALUES 123, 456"); assertUpdate("INSERT INTO test_read_isolation VALUES 789", 1); assertQuery("SELECT * FROM test_read_isolation", "VALUES 123, 456, 789"); assertQuery(session, "SELECT * FROM test_read_isolation", "VALUES 123, 456"); }); assertQuery("SELECT * FROM test_read_isolation", "VALUES 123, 456, 789"); dropTable("test_read_isolation"); } private void withTransaction(Consumer<Session> consumer) { transaction(getQueryRunner().getTransactionManager(), getQueryRunner().getAccessControl()) .readCommitted() .execute(getSession(), consumer); } private void dropTable(String table) { Session session = getSession(); assertUpdate(session, "DROP TABLE " + table); assertFalse(getQueryRunner().tableExists(session, table)); } @Test public void testOptimizedMetadataQueries() { Session session = Session.builder(getSession()) .setSystemProperty("optimize_metadata_queries", "true") .build(); assertUpdate("CREATE TABLE test_metadata_optimization (a BIGINT, b BIGINT, c BIGINT) WITH (PARTITIONING = ARRAY['b', 'c'])"); assertUpdate("INSERT INTO test_metadata_optimization VALUES (5, 6, 7), (8, 9, 10)", 2); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization", "VALUES (6), (9)"); assertQuery(session, "SELECT DISTINCT b, c FROM test_metadata_optimization", "VALUES (6, 7), (9, 10)"); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization WHERE b < 7", "VALUES (6)"); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization WHERE c > 8", "VALUES (9)"); // Assert behavior after metadata delete assertUpdate("DELETE FROM test_metadata_optimization WHERE b = 6", 1); assertQuery(session, "SELECT DISTINCT b FROM test_metadata_optimization", "VALUES (9)"); // TODO: assert behavior after deleting the last row of a partition, once row-level deletes are supported. // i.e. 
a query like 'DELETE FROM test_metadata_optimization WHERE b = 6 AND a = 5' dropTable("test_metadata_optimization"); } @Test public void testFileSizeInManifest() throws Exception { assertUpdate("CREATE TABLE test_file_size_in_manifest (" + "a_bigint bigint, " + "a_varchar varchar, " + "a_long_decimal decimal(38,20), " + "a_map map(varchar, integer))"); assertUpdate( "INSERT INTO test_file_size_in_manifest VALUES " + "(NULL, NULL, NULL, NULL), " + "(42, 'some varchar value', DECIMAL '123456789123456789.123456789123456789', map(ARRAY['abc', 'def'], ARRAY[113, -237843832]))", 2); MaterializedResult files = computeActual("SELECT file_path, record_count, file_size_in_bytes FROM \"test_file_size_in_manifest$files\""); long totalRecordCount = 0; for (MaterializedRow row : files.getMaterializedRows()) { String path = (String) row.getField(0); Long recordCount = (Long) row.getField(1); Long fileSizeInBytes = (Long) row.getField(2); totalRecordCount += recordCount; assertThat(fileSizeInBytes).isEqualTo(Files.size(Paths.get(path))); } // Verify sum(record_count) to make sure we have all the files. assertThat(totalRecordCount).isEqualTo(2); } @Test public void testIncorrectIcebergFileSizes() throws Exception { // Create a table with a single insert assertUpdate("CREATE TABLE test_iceberg_file_size (x BIGINT)"); assertUpdate("INSERT INTO test_iceberg_file_size VALUES (123), (456), (758)", 3); // Get manifest file MaterializedResult result = computeActual("SELECT path FROM \"test_iceberg_file_size$manifests\""); assertEquals(result.getRowCount(), 1); String manifestFile = (String) result.getOnlyValue(); // Read manifest file Schema schema; GenericData.Record entry = null; try (DataFileReader<GenericData.Record> dataFileReader = new DataFileReader<>(new File(manifestFile), new GenericDatumReader<>())) { schema = dataFileReader.getSchema(); int recordCount = 0; while (dataFileReader.hasNext()) { entry = dataFileReader.next(); recordCount++; } assertEquals(recordCount, 1); } // Alter data file entry to store incorrect file size GenericData.Record dataFile = (GenericData.Record) entry.get("data_file"); long alteredValue = 50L; assertNotEquals((long) dataFile.get("file_size_in_bytes"), alteredValue); dataFile.put("file_size_in_bytes", alteredValue); // Replace the file through HDFS client. This is required for correct checksums. HdfsEnvironment.HdfsContext context = new HdfsContext(getSession().toConnectorSession()); org.apache.hadoop.fs.Path manifestFilePath = new org.apache.hadoop.fs.Path(manifestFile); FileSystem fs = HDFS_ENVIRONMENT.getFileSystem(context, manifestFilePath); // Write altered metadata try (OutputStream out = fs.create(manifestFilePath); DataFileWriter<GenericData.Record> dataFileWriter = new DataFileWriter<>(new GenericDatumWriter<>(schema))) { dataFileWriter.create(schema, out); dataFileWriter.append(entry); } // Ignoring Iceberg provided file size makes the query succeed Session session = Session.builder(getSession()) .setCatalogSessionProperty("iceberg", "use_file_size_from_metadata", "false") .build(); assertQuery(session, "SELECT * FROM test_iceberg_file_size", "VALUES (123), (456), (758)"); // Using Iceberg provided file size fails the query assertQueryFails("SELECT * FROM test_iceberg_file_size", format == ORC ? 
format(".*Error opening Iceberg split.*\\QIncorrect file size (%s) for file (end of stream not reached)\\E.*", alteredValue) : format("Error reading tail from .* with length %d", alteredValue)); dropTable("test_iceberg_file_size"); } @Test public void testSplitPruningForFilterOnPartitionColumn() { String tableName = "nation_partitioned_pruning"; assertUpdate("DROP TABLE IF EXISTS " + tableName); // disable writes redistribution to have predictable number of files written per partition (one). Session noRedistributeWrites = Session.builder(getSession()) .setSystemProperty("redistribute_writes", "false") .build(); assertUpdate(noRedistributeWrites, "CREATE TABLE " + tableName + " WITH (partitioning = ARRAY['regionkey']) AS SELECT * FROM nation", 25); // sanity check that table contains exactly 5 files assertThat(query("SELECT count(*) FROM \"" + tableName + "$files\"")).matches("VALUES CAST(5 AS BIGINT)"); verifySplitCount("SELECT * FROM " + tableName, 5); verifySplitCount("SELECT * FROM " + tableName + " WHERE regionkey = 3", 1); verifySplitCount("SELECT * FROM " + tableName + " WHERE regionkey < 2", 2); verifySplitCount("SELECT * FROM " + tableName + " WHERE regionkey < 0", 0); verifySplitCount("SELECT * FROM " + tableName + " WHERE regionkey > 1 AND regionkey < 4", 2); verifySplitCount("SELECT * FROM " + tableName + " WHERE regionkey % 5 = 3", 1); assertUpdate("DROP TABLE " + tableName); } @Test public void testAllAvailableTypes() { assertUpdate("CREATE TABLE test_all_types (" + " a_boolean boolean, " + " an_integer integer, " + " a_bigint bigint, " + " a_real real, " + " a_double double, " + " a_short_decimal decimal(5,2), " + " a_long_decimal decimal(38,20), " + " a_varchar varchar, " + " a_varbinary varbinary, " + " a_date date, " + " a_time time(6), " + " a_timestamp timestamp(6), " + " a_timestamptz timestamp(6) with time zone, " + " a_uuid uuid, " + " a_row row(id integer , vc varchar), " + " an_array array(varchar), " + " a_map map(integer, varchar) " + ")"); String values = "VALUES (" + "true, " + "1, " + "BIGINT '1', " + "REAL '1.0', " + "DOUBLE '1.0', " + "CAST(1.0 AS decimal(5,2)), " + "CAST(11.0 AS decimal(38,20)), " + "VARCHAR 'onefsadfdsf', " + "X'000102f0feff', " + "DATE '2021-07-24'," + "TIME '02:43:57.987654', " + "TIMESTAMP '2021-07-24 03:43:57.987654'," + "TIMESTAMP '2021-07-24 04:43:57.987654 UTC', " + "UUID '20050910-1330-11e9-ffff-2a86e4085a59', " + "CAST(ROW(42, 'this is a random value') AS ROW(id int, vc varchar)), " + "ARRAY[VARCHAR 'uno', 'dos', 'tres'], " + "map(ARRAY[1,2], ARRAY['ek', VARCHAR 'one'])) "; String nullValues = nCopies(17, "NULL").stream() .collect(joining(", ", "VALUES (", ")")); assertUpdate("INSERT INTO test_all_types " + values, 1); assertUpdate("INSERT INTO test_all_types " + nullValues, 1); // SELECT assertThat(query("SELECT * FROM test_all_types")) .matches(values + " UNION ALL " + nullValues); // SELECT with predicates assertThat(query("SELECT * FROM test_all_types WHERE " + " a_boolean = true " + "AND an_integer = 1 " + "AND a_bigint = BIGINT '1' " + "AND a_real = REAL '1.0' " + "AND a_double = DOUBLE '1.0' " + "AND a_short_decimal = CAST(1.0 AS decimal(5,2)) " + "AND a_long_decimal = CAST(11.0 AS decimal(38,20)) " + "AND a_varchar = VARCHAR 'onefsadfdsf' " + "AND a_varbinary = X'000102f0feff' " + "AND a_date = DATE '2021-07-24' " + "AND a_time = TIME '02:43:57.987654' " + "AND a_timestamp = TIMESTAMP '2021-07-24 03:43:57.987654' " + "AND a_timestamptz = TIMESTAMP '2021-07-24 04:43:57.987654 UTC' " + "AND a_uuid = UUID 
'20050910-1330-11e9-ffff-2a86e4085a59' " + "AND a_row = CAST(ROW(42, 'this is a random value') AS ROW(id int, vc varchar)) " + "AND an_array = ARRAY[VARCHAR 'uno', 'dos', 'tres'] " + "AND a_map = map(ARRAY[1,2], ARRAY['ek', VARCHAR 'one']) " + "")) .matches(values); assertThat(query("SELECT * FROM test_all_types WHERE " + " a_boolean IS NULL " + "AND an_integer IS NULL " + "AND a_bigint IS NULL " + "AND a_real IS NULL " + "AND a_double IS NULL " + "AND a_short_decimal IS NULL " + "AND a_long_decimal IS NULL " + "AND a_varchar IS NULL " + "AND a_varbinary IS NULL " + "AND a_date IS NULL " + "AND a_time IS NULL " + "AND a_timestamp IS NULL " + "AND a_timestamptz IS NULL " + "AND a_uuid IS NULL " + "AND a_row IS NULL " + "AND an_array IS NULL " + "AND a_map IS NULL " + "")) .skippingTypesCheck() .matches(nullValues); // SHOW STATS assertThat(query("SHOW STATS FOR test_all_types")) .skippingTypesCheck() .matches("VALUES " + " ('a_boolean', NULL, NULL, 0.5e0, NULL, 'true', 'true'), " + " ('an_integer', NULL, NULL, 0.5e0, NULL, '1', '1'), " + " ('a_bigint', NULL, NULL, 0.5e0, NULL, '1', '1'), " + " ('a_real', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_double', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_short_decimal', NULL, NULL, 0.5e0, NULL, '1.0', '1.0'), " + " ('a_long_decimal', NULL, NULL, 0.5e0, NULL, '11.0', '11.0'), " + " ('a_varchar', " + (format == PARQUET ? "87e0" : "NULL") + ", NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_varbinary', " + (format == PARQUET ? "82e0" : "NULL") + ", NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_date', NULL, NULL, 0.5e0, NULL, '2021-07-24', '2021-07-24'), " + " ('a_time', NULL, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_timestamp', NULL, NULL, 0.5e0, NULL, " + (format == ORC ? "'2021-07-24 03:43:57.987000', '2021-07-24 03:43:57.987999'" : "'2021-07-24 03:43:57.987654', '2021-07-24 03:43:57.987654'") + "), " + " ('a_timestamptz', NULL, NULL, 0.5e0, NULL, '2021-07-24 04:43:57.987 UTC', '2021-07-24 04:43:57.987 UTC'), " + " ('a_uuid', NULL, NULL, 0.5e0, NULL, NULL, NULL), " + " ('a_row', NULL, NULL, " + (format == ORC ? "0.5" : "NULL") + ", NULL, NULL, NULL), " + " ('an_array', NULL, NULL, " + (format == ORC ? "0.5" : "NULL") + ", NULL, NULL, NULL), " + " ('a_map', NULL, NULL, " + (format == ORC ? 
"0.5" : "NULL") + ", NULL, NULL, NULL), " + " (NULL, NULL, NULL, NULL, 2e0, NULL, NULL)"); // $partitions String schema = getSession().getSchema().orElseThrow(); assertThat(query("SELECT column_name FROM information_schema.columns WHERE table_schema = '" + schema + "' AND table_name = 'test_all_types$partitions' ")) .skippingTypesCheck() .matches("VALUES 'record_count', 'file_count', 'total_size', 'data'"); assertThat(query("SELECT " + " record_count," + " file_count, " + " data.a_boolean, " + " data.an_integer, " + " data.a_bigint, " + " data.a_real, " + " data.a_double, " + " data.a_short_decimal, " + " data.a_long_decimal, " + " data.a_varchar, " + " data.a_varbinary, " + " data.a_date, " + " data.a_time, " + " data.a_timestamp, " + " data.a_timestamptz, " + " data.a_uuid " + " FROM \"test_all_types$partitions\" ")) .matches( "VALUES (" + " BIGINT '2', " + " BIGINT '2', " + " CAST(ROW(true, true, 1, NULL) AS ROW(min boolean, max boolean, null_count bigint, nan_count bigint)), " + " CAST(ROW(1, 1, 1, NULL) AS ROW(min integer, max integer, null_count bigint, nan_count bigint)), " + " CAST(ROW(1, 1, 1, NULL) AS ROW(min bigint, max bigint, null_count bigint, nan_count bigint)), " + " CAST(ROW(1, 1, 1, NULL) AS ROW(min real, max real, null_count bigint, nan_count bigint)), " + " CAST(ROW(1, 1, 1, NULL) AS ROW(min double, max double, null_count bigint, nan_count bigint)), " + " CAST(ROW(1, 1, 1, NULL) AS ROW(min decimal(5,2), max decimal(5,2), null_count bigint, nan_count bigint)), " + " CAST(ROW(11, 11, 1, NULL) AS ROW(min decimal(38,20), max decimal(38,20), null_count bigint, nan_count bigint)), " + " CAST(ROW('onefsadfdsf', 'onefsadfdsf', 1, NULL) AS ROW(min varchar, max varchar, null_count bigint, nan_count bigint)), " + (format == ORC ? " CAST(ROW(NULL, NULL, 1, NULL) AS ROW(min varbinary, max varbinary, null_count bigint, nan_count bigint)), " : " CAST(ROW(X'000102f0feff', X'000102f0feff', 1, NULL) AS ROW(min varbinary, max varbinary, null_count bigint, nan_count bigint)), ") + " CAST(ROW(DATE '2021-07-24', DATE '2021-07-24', 1, NULL) AS ROW(min date, max date, null_count bigint, nan_count bigint)), " + " CAST(ROW(TIME '02:43:57.987654', TIME '02:43:57.987654', 1, NULL) AS ROW(min time(6), max time(6), null_count bigint, nan_count bigint)), " + (format == ORC ? " CAST(ROW(TIMESTAMP '2021-07-24 03:43:57.987000', TIMESTAMP '2021-07-24 03:43:57.987999', 1, NULL) AS ROW(min timestamp(6), max timestamp(6), null_count bigint, nan_count bigint)), " : " CAST(ROW(TIMESTAMP '2021-07-24 03:43:57.987654', TIMESTAMP '2021-07-24 03:43:57.987654', 1, NULL) AS ROW(min timestamp(6), max timestamp(6), null_count bigint, nan_count bigint)), ") + (format == ORC ? " CAST(ROW(TIMESTAMP '2021-07-24 04:43:57.987000 UTC', TIMESTAMP '2021-07-24 04:43:57.987999 UTC', 1, NULL) AS ROW(min timestamp(6) with time zone, max timestamp(6) with time zone, null_count bigint, nan_count bigint)), " : " CAST(ROW(TIMESTAMP '2021-07-24 04:43:57.987654 UTC', TIMESTAMP '2021-07-24 04:43:57.987654 UTC', 1, NULL) AS ROW(min timestamp(6) with time zone, max timestamp(6) with time zone, null_count bigint, nan_count bigint)), ") + (format == ORC ? 
" CAST(ROW(NULL, NULL, 1, NULL) AS ROW(min uuid, max uuid, null_count bigint, nan_count bigint)) " : " CAST(ROW(UUID '20050910-1330-11e9-ffff-2a86e4085a59', UUID '20050910-1330-11e9-ffff-2a86e4085a59', 1, NULL) AS ROW(min uuid, max uuid, null_count bigint, nan_count bigint)) " ) + ")"); assertUpdate("DROP TABLE test_all_types"); } @Test public void testLocalDynamicFilteringWithSelectiveBuildSizeJoin() { // We need to prepare tables for this test. The test is required to use tables that are backed by at lest two files Session session = Session.builder(getSession()) .setSystemProperty(TASK_WRITER_COUNT, "2") .build(); getQueryRunner().execute(session, format("CREATE TABLE IF NOT EXISTS %s AS SELECT * FROM %s", "linetime_multiple_file_backed", "tpch.tiny.lineitem")).getMaterializedRows(); getQueryRunner().execute(session, format("CREATE TABLE IF NOT EXISTS %s AS SELECT * FROM %s", "orders_multiple_file_backed", "tpch.tiny.orders")).getMaterializedRows(); long fullTableScan = (Long) computeActual("SELECT count(*) FROM linetime_multiple_file_backed").getOnlyValue(); // Pick a value for totalprice where file level stats will not be able to filter out any data // This assumes the totalprice ranges in every file have some overlap, otherwise this test will fail. MaterializedRow range = getOnlyElement(computeActual("SELECT max(lower_bounds[4]), min(upper_bounds[4]) FROM \"orders_multiple_file_backed$files\"").getMaterializedRows()); double totalPrice = (Double) computeActual(format( "SELECT totalprice FROM orders_multiple_file_backed WHERE totalprice > %s AND totalprice < %s LIMIT 1", range.getField(0), range.getField(1))) .getOnlyValue(); session = Session.builder(getSession()) .setSystemProperty(JOIN_DISTRIBUTION_TYPE, BROADCAST.name()) .build(); ResultWithQueryId<MaterializedResult> result = getDistributedQueryRunner().executeWithQueryId( session, "SELECT * FROM linetime_multiple_file_backed JOIN orders_multiple_file_backed ON linetime_multiple_file_backed.orderkey = orders_multiple_file_backed.orderkey AND orders_multiple_file_backed.totalprice = " + totalPrice); OperatorStats probeStats = searchScanFilterAndProjectOperatorStats( result.getQueryId(), new QualifiedObjectName(ICEBERG_CATALOG, "tpch", "linetime_multiple_file_backed")); // Assert some lineitem rows were filtered out on file level assertThat(probeStats.getInputPositions()).isLessThan(fullTableScan); } @Test(dataProvider = "repartitioningDataProvider") public void testRepartitionDataOnCtas(Session session, String partitioning, int expectedFiles) { testRepartitionData(session, "tpch.tiny.orders", true, partitioning, expectedFiles); } @Test(dataProvider = "repartitioningDataProvider") public void testRepartitionDataOnInsert(Session session, String partitioning, int expectedFiles) { testRepartitionData(session, "tpch.tiny.orders", false, partitioning, expectedFiles); } @DataProvider public Object[][] repartitioningDataProvider() { Session defaultSession = getSession(); // For identity-only partitioning, Iceberg connector returns ConnectorTableLayout with partitionColumns set, but without partitioning. // This is treated by engine as "preferred", but not mandatory partitioning, and gets ignored if stats suggest number of partitions // written is low. Without partitioning, number of files created is nondeterministic, as a writer (worker node) may or may not receive data. 
Session obeyConnectorPartitioning = Session.builder(defaultSession) .setSystemProperty(PREFERRED_WRITE_PARTITIONING_MIN_NUMBER_OF_PARTITIONS, "1") .build(); return new Object[][] { // identity partitioning column {obeyConnectorPartitioning, "'orderstatus'", 3}, // bucketing {defaultSession, "'bucket(custkey, 13)'", 13}, // varchar-based {defaultSession, "'truncate(comment, 1)'", 35}, // complex; would exceed 100 open writers limit in IcebergPageSink without write repartitioning {defaultSession, "'bucket(custkey, 4)', 'truncate(comment, 1)'", 131}, // same column multiple times {defaultSession, "'truncate(comment, 1)', 'orderstatus', 'bucket(comment, 2)'", 180}, }; } @Test public void testStatsBasedRepartitionDataOnCtas() { testStatsBasedRepartitionData(true); } @Test public void testStatsBasedRepartitionDataOnInsert() { testStatsBasedRepartitionData(false); } private void testStatsBasedRepartitionData(boolean ctas) { Session sessionRepartitionSmall = Session.builder(getSession()) .setSystemProperty(PREFERRED_WRITE_PARTITIONING_MIN_NUMBER_OF_PARTITIONS, "2") .build(); Session sessionRepartitionMany = Session.builder(getSession()) .setSystemProperty(PREFERRED_WRITE_PARTITIONING_MIN_NUMBER_OF_PARTITIONS, "5") .setSystemProperty(SCALE_WRITERS, "false") .build(); // Use DISTINCT to add data redistribution between source table and the writer. This makes it more likely that all writers get some data. String sourceRelation = "(SELECT DISTINCT orderkey, custkey, orderstatus FROM tpch.tiny.orders)"; testRepartitionData( sessionRepartitionSmall, sourceRelation, ctas, "'orderstatus'", 3); // Test uses relatively small table (60K rows). When engine doesn't redistribute data for writes, // occasionally a worker node doesn't get any data and fewer files get created. assertEventually(() -> { testRepartitionData( sessionRepartitionMany, sourceRelation, ctas, "'orderstatus'", 9); }); } private void testRepartitionData(Session session, String sourceRelation, boolean ctas, String partitioning, int expectedFiles) { String tableName = "repartition" + "_" + sourceRelation.replaceAll("[^a-zA-Z0-9]", "") + (ctas ? "ctas" : "insert") + "_" + partitioning.replaceAll("[^a-zA-Z0-9]", "") + "_" + randomTableSuffix(); long rowCount = (long) computeScalar(session, "SELECT count(*) FROM " + sourceRelation); if (ctas) { assertUpdate( session, "CREATE TABLE " + tableName + " WITH (partitioning = ARRAY[" + partitioning + "]) " + "AS SELECT * FROM " + sourceRelation, rowCount); } else { assertUpdate( session, "CREATE TABLE " + tableName + " WITH (partitioning = ARRAY[" + partitioning + "]) " + "AS SELECT * FROM " + sourceRelation + " WITH NO DATA", 0); // Use source table big enough so that there will be multiple pages being written. assertUpdate(session, "INSERT INTO " + tableName + " SELECT * FROM " + sourceRelation, rowCount); } // verify written data assertThat(query(session, "TABLE " + tableName)) .skippingTypesCheck() .matches("SELECT * FROM " + sourceRelation); // verify data files, i.e. 
repartitioning took place assertThat(query(session, "SELECT count(*) FROM \"" + tableName + "$files\"")) .matches("VALUES BIGINT '" + expectedFiles + "'"); assertUpdate(session, "DROP TABLE " + tableName); } @Test(dataProvider = "testDataMappingSmokeTestDataProvider") public void testSplitPruningForFilterOnNonPartitionColumn(DataMappingTestSetup testSetup) { if (testSetup.isUnsupportedType()) { return; } try (TestTable table = new TestTable(getQueryRunner()::execute, "test_split_pruning_non_partitioned", "(row_id int, col " + testSetup.getTrinoTypeName() + ")")) { String tableName = table.getName(); String sampleValue = testSetup.getSampleValueLiteral(); String highValue = testSetup.getHighValueLiteral(); // Insert separately to ensure two files with one value each assertUpdate("INSERT INTO " + tableName + " VALUES (1, " + sampleValue + ")", 1); assertUpdate("INSERT INTO " + tableName + " VALUES (2, " + highValue + ")", 1); assertQuery("select count(*) from \"" + tableName + "$files\"", "VALUES 2"); int expectedSplitCount = supportsIcebergFileStatistics(testSetup.getTrinoTypeName()) ? 1 : 2; verifySplitCount("SELECT row_id FROM " + tableName, 2); verifySplitCount("SELECT row_id FROM " + tableName + " WHERE col = " + sampleValue, expectedSplitCount); verifySplitCount("SELECT row_id FROM " + tableName + " WHERE col = " + highValue, expectedSplitCount); // ORC max timestamp statistics are truncated to millisecond precision and then appended with 999 microseconds. // Therefore, sampleValue and highValue are within the max timestamp & there will be 2 splits. verifySplitCount("SELECT row_id FROM " + tableName + " WHERE col > " + sampleValue, (format == ORC && testSetup.getTrinoTypeName().contains("timestamp") ? 2 : expectedSplitCount)); verifySplitCount("SELECT row_id FROM " + tableName + " WHERE col < " + highValue, (format == ORC && testSetup.getTrinoTypeName().contains("timestamp") ? 2 : expectedSplitCount)); } } @Test public void testGetIcebergTableProperties() { assertUpdate("CREATE TABLE test_iceberg_get_table_props (x BIGINT)"); assertThat(query("SELECT * FROM \"test_iceberg_get_table_props$properties\"")) .matches(format("VALUES (VARCHAR 'write.format.default', VARCHAR '%s')", format.name())); dropTable("test_iceberg_get_table_props"); } protected abstract boolean supportsIcebergFileStatistics(String typeName); @Test(dataProvider = "testDataMappingSmokeTestDataProvider") public void testSplitPruningFromDataFileStatistics(DataMappingTestSetup testSetup) { if (testSetup.isUnsupportedType()) { return; } try (TestTable table = new TestTable( getQueryRunner()::execute, "test_split_pruning_data_file_statistics", // Random double is needed to make sure rows are different. 
Otherwise compression may deduplicate rows, resulting in only one row group "(col " + testSetup.getTrinoTypeName() + ", r double)")) { String tableName = table.getName(); String values = Stream.concat( nCopies(100, testSetup.getSampleValueLiteral()).stream(), nCopies(100, testSetup.getHighValueLiteral()).stream()) .map(value -> "(" + value + ", rand())") .collect(Collectors.joining(", ")); assertUpdate(withSmallRowGroups(getSession()), "INSERT INTO " + tableName + " VALUES " + values, 200); String query = "SELECT * FROM " + tableName + " WHERE col = " + testSetup.getSampleValueLiteral(); verifyPredicatePushdownDataRead(query, supportsRowGroupStatistics(testSetup.getTrinoTypeName())); } } protected abstract Session withSmallRowGroups(Session session); protected abstract boolean supportsRowGroupStatistics(String typeName); private void verifySplitCount(String query, int expectedSplitCount) { ResultWithQueryId<MaterializedResult> selectAllPartitionsResult = getDistributedQueryRunner().executeWithQueryId(getSession(), query); assertEqualsIgnoreOrder(selectAllPartitionsResult.getResult().getMaterializedRows(), computeActual(withoutPredicatePushdown(getSession()), query).getMaterializedRows()); verifySplitCount(selectAllPartitionsResult.getQueryId(), expectedSplitCount); } private void verifyPredicatePushdownDataRead(@Language("SQL") String query, boolean supportsPushdown) { ResultWithQueryId<MaterializedResult> resultWithPredicatePushdown = getDistributedQueryRunner().executeWithQueryId(getSession(), query); ResultWithQueryId<MaterializedResult> resultWithoutPredicatePushdown = getDistributedQueryRunner().executeWithQueryId( withoutPredicatePushdown(getSession()), query); DataSize withPushdownDataSize = getOperatorStats(resultWithPredicatePushdown.getQueryId()).getInputDataSize(); DataSize withoutPushdownDataSize = getOperatorStats(resultWithoutPredicatePushdown.getQueryId()).getInputDataSize(); if (supportsPushdown) { assertThat(withPushdownDataSize).isLessThan(withoutPushdownDataSize); } else { assertThat(withPushdownDataSize).isEqualTo(withoutPushdownDataSize); } } private Session withoutPredicatePushdown(Session session) { return Session.builder(session) .setSystemProperty("allow_pushdown_into_connectors", "false") .build(); } private void verifySplitCount(QueryId queryId, long expectedSplitCount) { checkArgument(expectedSplitCount >= 0); OperatorStats operatorStats = getOperatorStats(queryId); if (expectedSplitCount > 0) { assertThat(operatorStats.getTotalDrivers()).isEqualTo(expectedSplitCount); assertThat(operatorStats.getPhysicalInputPositions()).isGreaterThan(0); } else { // expectedSplitCount == 0 assertThat(operatorStats.getTotalDrivers()).isEqualTo(1); assertThat(operatorStats.getPhysicalInputPositions()).isEqualTo(0); } } private OperatorStats getOperatorStats(QueryId queryId) { try { return getDistributedQueryRunner().getCoordinator() .getQueryManager() .getFullQueryInfo(queryId) .getQueryStats() .getOperatorSummaries() .stream() .filter(summary -> summary.getOperatorType().startsWith("TableScan") || summary.getOperatorType().startsWith("Scan")) .collect(onlyElement()); } catch (NoSuchElementException e) { throw new RuntimeException("Couldn't find operator summary, probably due to query statistic collection error", e); } } @Override protected TestTable createTableWithDefaultColumns() { throw new SkipException("Iceberg connector does not support column default values"); } @Override protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup 
dataMappingTestSetup) { String typeName = dataMappingTestSetup.getTrinoTypeName(); if (typeName.equals("tinyint") || typeName.equals("smallint") || typeName.startsWith("char(")) { // These types are not supported by Iceberg return Optional.of(dataMappingTestSetup.asUnsupported()); } // According to Iceberg specification all time and timestamp values are stored with microsecond precision. if (typeName.equals("time") || typeName.equals("timestamp") || typeName.equals("timestamp(3) with time zone")) { return Optional.of(dataMappingTestSetup.asUnsupported()); } return Optional.of(dataMappingTestSetup); } @Override protected Optional<DataMappingTestSetup> filterCaseSensitiveDataMappingTestData(DataMappingTestSetup dataMappingTestSetup) { String typeName = dataMappingTestSetup.getTrinoTypeName(); if (typeName.equals("char(1)")) { return Optional.of(dataMappingTestSetup.asUnsupported()); } return Optional.of(dataMappingTestSetup); } @Test public void testAmbiguousColumnsWithDots() { assertThatThrownBy(() -> assertUpdate("CREATE TABLE ambiguous (\"a.cow\" BIGINT, a ROW(cow BIGINT))")) .hasMessage("Invalid schema: multiple fields for name a.cow: 1 and 3"); assertUpdate("CREATE TABLE ambiguous (\"a.cow\" BIGINT, b ROW(cow BIGINT))"); assertThatThrownBy(() -> assertUpdate("ALTER TABLE ambiguous RENAME COLUMN b TO a")) .hasMessage("Invalid schema: multiple fields for name a.cow: 1 and 3"); assertUpdate("DROP TABLE ambiguous"); assertUpdate("CREATE TABLE ambiguous (a ROW(cow BIGINT))"); assertThatThrownBy(() -> assertUpdate("ALTER TABLE ambiguous ADD COLUMN \"a.cow\" BIGINT")) .hasMessage("Cannot add column with ambiguous name: a.cow, use addColumn(parent, name, type)"); assertUpdate("DROP TABLE ambiguous"); } @Test public void testSchemaEvolutionWithDereferenceProjections() { // Fields are identified uniquely based on unique id's. If a column is dropped and recreated with the same name it should not return dropped data. 
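        // Iceberg resolves columns by field id rather than by name, so the re-added column below gets fresh
        // field ids, and rows written before the re-add are expected to read back as NULL (asserted below).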
assertUpdate("CREATE TABLE evolve_test (dummy BIGINT, a row(b BIGINT, c VARCHAR))"); assertUpdate("INSERT INTO evolve_test VALUES (1, ROW(1, 'abc'))", 1); assertUpdate("ALTER TABLE evolve_test DROP COLUMN a"); assertUpdate("ALTER TABLE evolve_test ADD COLUMN a ROW(b VARCHAR, c BIGINT)"); assertQuery("SELECT a.b FROM evolve_test", "VALUES NULL"); assertUpdate("DROP TABLE evolve_test"); // Very changing subfield ordering does not revive dropped data assertUpdate("CREATE TABLE evolve_test (dummy BIGINT, a ROW(b BIGINT, c VARCHAR), d BIGINT) with (partitioning = ARRAY['d'])"); assertUpdate("INSERT INTO evolve_test VALUES (1, ROW(2, 'abc'), 3)", 1); assertUpdate("ALTER TABLE evolve_test DROP COLUMN a"); assertUpdate("ALTER TABLE evolve_test ADD COLUMN a ROW(c VARCHAR, b BIGINT)"); assertUpdate("INSERT INTO evolve_test VALUES (4, 5, ROW('def', 6))", 1); assertQuery("SELECT a.b FROM evolve_test WHERE d = 3", "VALUES NULL"); assertQuery("SELECT a.b FROM evolve_test WHERE d = 5", "VALUES 6"); assertUpdate("DROP TABLE evolve_test"); } @Test public void testHighlyNestedData() { assertUpdate("CREATE TABLE nested_data (id INT, row_t ROW(f1 INT, f2 INT, row_t ROW (f1 INT, f2 INT, row_t ROW(f1 INT, f2 INT))))"); assertUpdate("INSERT INTO nested_data VALUES (1, ROW(2, 3, ROW(4, 5, ROW(6, 7)))), (11, ROW(12, 13, ROW(14, 15, ROW(16, 17))))", 2); assertUpdate("INSERT INTO nested_data VALUES (21, ROW(22, 23, ROW(24, 25, ROW(26, 27))))", 1); // Test select projected columns, with and without their parent column assertQuery("SELECT id, row_t.row_t.row_t.f2 FROM nested_data", "VALUES (1, 7), (11, 17), (21, 27)"); assertQuery("SELECT id, row_t.row_t.row_t.f2, CAST(row_t AS JSON) FROM nested_data", "VALUES (1, 7, '{\"f1\":2,\"f2\":3,\"row_t\":{\"f1\":4,\"f2\":5,\"row_t\":{\"f1\":6,\"f2\":7}}}'), " + "(11, 17, '{\"f1\":12,\"f2\":13,\"row_t\":{\"f1\":14,\"f2\":15,\"row_t\":{\"f1\":16,\"f2\":17}}}'), " + "(21, 27, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')"); // Test predicates on immediate child column and deeper nested column assertQuery("SELECT id, CAST(row_t.row_t.row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 = 27", "VALUES (21, '{\"f1\":26,\"f2\":27}')"); assertQuery("SELECT id, CAST(row_t.row_t.row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 > 20", "VALUES (21, '{\"f1\":26,\"f2\":27}')"); assertQuery("SELECT id, CAST(row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 = 27", "VALUES (21, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')"); assertQuery("SELECT id, CAST(row_t AS JSON) FROM nested_data WHERE row_t.row_t.row_t.f2 > 20", "VALUES (21, '{\"f1\":22,\"f2\":23,\"row_t\":{\"f1\":24,\"f2\":25,\"row_t\":{\"f1\":26,\"f2\":27}}}')"); // Test predicates on parent columns assertQuery("SELECT id, row_t.row_t.row_t.f1 FROM nested_data WHERE row_t.row_t.row_t = ROW(16, 17)", "VALUES (11, 16)"); assertQuery("SELECT id, row_t.row_t.row_t.f1 FROM nested_data WHERE row_t = ROW(22, 23, ROW(24, 25, ROW(26, 27)))", "VALUES (21, 26)"); assertUpdate("DROP TABLE IF EXISTS nested_data"); } @Test public void testProjectionPushdownAfterRename() { assertUpdate("CREATE TABLE projection_pushdown_after_rename (id INT, a ROW(b INT, c ROW (d INT)))"); assertUpdate("INSERT INTO projection_pushdown_after_rename VALUES (1, ROW(2, ROW(3))), (11, ROW(12, ROW(13)))", 2); assertUpdate("INSERT INTO projection_pushdown_after_rename VALUES (21, ROW(22, ROW(23)))", 1); String expected = "VALUES (11, JSON 
'{\"b\":12,\"c\":{\"d\":13}}', 13)"; assertQuery("SELECT id, CAST(a AS JSON), a.c.d FROM projection_pushdown_after_rename WHERE a.b = 12", expected); assertUpdate("ALTER TABLE projection_pushdown_after_rename RENAME COLUMN a TO row_t"); assertQuery("SELECT id, CAST(row_t AS JSON), row_t.c.d FROM projection_pushdown_after_rename WHERE row_t.b = 12", expected); assertUpdate("DROP TABLE IF EXISTS projection_pushdown_after_rename"); } @Test public void testProjectionWithCaseSensitiveField() { assertUpdate("CREATE TABLE projection_with_case_sensitive_field (id INT, a ROW(\"UPPER_CASE\" INT, \"lower_case\" INT, \"MiXeD_cAsE\" INT))"); assertUpdate("INSERT INTO projection_with_case_sensitive_field VALUES (1, ROW(2, 3, 4)), (5, ROW(6, 7, 8))", 2); String expected = "VALUES (2, 3, 4), (6, 7, 8)"; assertQuery("SELECT a.UPPER_CASE, a.lower_case, a.MiXeD_cAsE FROM projection_with_case_sensitive_field", expected); assertQuery("SELECT a.upper_case, a.lower_case, a.mixed_case FROM projection_with_case_sensitive_field", expected); assertQuery("SELECT a.UPPER_CASE, a.LOWER_CASE, a.MIXED_CASE FROM projection_with_case_sensitive_field", expected); assertUpdate("DROP TABLE IF EXISTS projection_with_case_sensitive_field"); } @Test public void testProjectionPushdownReadsLessData() { String largeVarchar = "ZZZ".repeat(1000); assertUpdate("CREATE TABLE projection_pushdown_reads_less_data (id INT, a ROW(b VARCHAR, c INT))"); assertUpdate( format("INSERT INTO projection_pushdown_reads_less_data VALUES (1, ROW('%s', 3)), (11, ROW('%1$s', 13)), (21, ROW('%1$s', 23)), (31, ROW('%1$s', 33))", largeVarchar), 4); String selectQuery = "SELECT a.c FROM projection_pushdown_reads_less_data"; Set<Integer> expected = ImmutableSet.of(3, 13, 23, 33); Session sessionWithoutPushdown = Session.builder(getSession()) .setCatalogSessionProperty(ICEBERG_CATALOG, "projection_pushdown_enabled", "false") .build(); assertQueryStats( getSession(), selectQuery, statsWithPushdown -> { DataSize processedDataSizeWithPushdown = statsWithPushdown.getProcessedInputDataSize(); assertQueryStats( sessionWithoutPushdown, selectQuery, statsWithoutPushdown -> assertThat(statsWithoutPushdown.getProcessedInputDataSize()).isGreaterThan(processedDataSizeWithPushdown), results -> assertEquals(results.getOnlyColumnAsSet(), expected)); }, results -> assertEquals(results.getOnlyColumnAsSet(), expected)); assertUpdate("DROP TABLE IF EXISTS projection_pushdown_reads_less_data"); } @Test public void testProjectionPushdownOnPartitionedTables() { assertUpdate("CREATE TABLE table_with_partition_at_beginning (id BIGINT, root ROW(f1 BIGINT, f2 BIGINT)) WITH (partitioning = ARRAY['id'])"); assertUpdate("INSERT INTO table_with_partition_at_beginning VALUES (1, ROW(1, 2)), (1, ROW(2, 3)), (1, ROW(3, 4))", 3); assertQuery("SELECT id, root.f2 FROM table_with_partition_at_beginning", "VALUES (1, 2), (1, 3), (1, 4)"); assertUpdate("DROP TABLE table_with_partition_at_beginning"); assertUpdate("CREATE TABLE table_with_partition_at_end (root ROW(f1 BIGINT, f2 BIGINT), id BIGINT) WITH (partitioning = ARRAY['id'])"); assertUpdate("INSERT INTO table_with_partition_at_end VALUES (ROW(1, 2), 1), (ROW(2, 3), 1), (ROW(3, 4), 1)", 3); assertQuery("SELECT root.f2, id FROM table_with_partition_at_end", "VALUES (2, 1), (3, 1), (4, 1)"); assertUpdate("DROP TABLE table_with_partition_at_end"); } @Test public void testProjectionPushdownOnPartitionedTableWithComments() { assertUpdate("CREATE TABLE test_projection_pushdown_comments (id BIGINT COMMENT 'id', qid BIGINT COMMENT 'QID', root ROW(f1 
BIGINT, f2 BIGINT) COMMENT 'root') WITH (partitioning = ARRAY['id'])"); assertUpdate("INSERT INTO test_projection_pushdown_comments VALUES (1, 1, ROW(1, 2)), (1, 2, ROW(2, 3)), (1, 3, ROW(3, 4))", 3); assertQuery("SELECT id, root.f2 FROM test_projection_pushdown_comments", "VALUES (1, 2), (1, 3), (1, 4)"); // Query with predicates on both nested and top-level columns (with partition column) assertQuery("SELECT id, root.f2 FROM test_projection_pushdown_comments WHERE id = 1 AND qid = 1 AND root.f1 = 1", "VALUES (1, 2)"); // Query with predicates on both nested and top-level columns (no partition column) assertQuery("SELECT id, root.f2 FROM test_projection_pushdown_comments WHERE qid = 2 AND root.f1 = 2", "VALUES (1, 3)"); // Query with predicates on top-level columns only assertQuery("SELECT id, root.f2 FROM test_projection_pushdown_comments WHERE id = 1 AND qid = 1", "VALUES (1, 2)"); // Query with predicates on nested columns only assertQuery("SELECT id, root.f2 FROM test_projection_pushdown_comments WHERE root.f1 = 2", "VALUES (1, 3)"); assertUpdate("DROP TABLE IF EXISTS test_projection_pushdown_comments"); } @Test(dataProvider = "tableFormatVersion") public void testOptimize(int formatVersion) throws Exception { String tableName = "test_optimize_" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " (key integer, value varchar) WITH (format_version = " + formatVersion + ")"); // DistributedQueryRunner sets node-scheduler.include-coordinator by default, so include coordinator int workerCount = getQueryRunner().getNodeCount(); // optimize an empty table assertQuerySucceeds("ALTER TABLE " + tableName + " EXECUTE OPTIMIZE"); assertThat(getActiveFiles(tableName)).isEmpty(); assertUpdate("INSERT INTO " + tableName + " VALUES (11, 'eleven')", 1); assertUpdate("INSERT INTO " + tableName + " VALUES (12, 'zwölf')", 1); assertUpdate("INSERT INTO " + tableName + " VALUES (13, 'trzynaście')", 1); assertUpdate("INSERT INTO " + tableName + " VALUES (14, 'quatorze')", 1); assertUpdate("INSERT INTO " + tableName + " VALUES (15, 'пʼятнадцять')", 1); List<String> initialFiles = getActiveFiles(tableName); assertThat(initialFiles) .hasSize(5) // Verify we have sufficiently many test rows with respect to worker count. 
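                // (each of the five single-row INSERTs above wrote its own data file, so the file count exceeds the worker count)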
                .hasSizeGreaterThan(workerCount);

        computeActual("ALTER TABLE " + tableName + " EXECUTE OPTIMIZE");

        assertThat(query("SELECT sum(key), listagg(value, ' ') WITHIN GROUP (ORDER BY key) FROM " + tableName))
                .matches("VALUES (BIGINT '65', VARCHAR 'eleven zwölf trzynaście quatorze пʼятнадцять')");

        List<String> updatedFiles = getActiveFiles(tableName);
        assertThat(updatedFiles)
                .hasSizeBetween(1, workerCount)
                .doesNotContainAnyElementsOf(initialFiles);
        // No files should be removed (this is expire_snapshots's job, when it exists)
        assertThat(getAllDataFilesFromTableDirectory(tableName))
                .containsExactlyInAnyOrderElementsOf(concat(initialFiles, updatedFiles));

        // optimize with a low file_size_threshold; nothing should change, as no file is smaller than 33B
        computeActual("ALTER TABLE " + tableName + " EXECUTE OPTIMIZE (file_size_threshold => '33B')");
        assertThat(query("SELECT sum(key), listagg(value, ' ') WITHIN GROUP (ORDER BY key) FROM " + tableName))
                .matches("VALUES (BIGINT '65', VARCHAR 'eleven zwölf trzynaście quatorze пʼятнадцять')");
        assertThat(getActiveFiles(tableName)).isEqualTo(updatedFiles);
        assertThat(getAllDataFilesFromTableDirectory(tableName))
                .containsExactlyInAnyOrderElementsOf(concat(initialFiles, updatedFiles));

        // optimize with delimited procedure name
        assertQueryFails("ALTER TABLE " + tableName + " EXECUTE \"optimize\"", "Procedure optimize not registered for catalog iceberg");
        assertUpdate("ALTER TABLE " + tableName + " EXECUTE \"OPTIMIZE\"");
        // optimize with delimited parameter name (and procedure name)
        assertUpdate("ALTER TABLE " + tableName + " EXECUTE \"OPTIMIZE\" (\"file_size_threshold\" => '33B')");
        // TODO (https://github.com/trinodb/trino/issues/11326) this should fail
        assertUpdate("ALTER TABLE " + tableName + " EXECUTE \"OPTIMIZE\" (\"FILE_SIZE_THRESHOLD\" => '33B')");
        assertUpdate("DROP TABLE " + tableName);
    }

    @Test(dataProvider = "tableFormatVersion")
    public void testOptimizeForPartitionedTable(int formatVersion)
            throws IOException
    {
        // This test will have its own session to make sure partitioning is indeed forced and is not a result
        // of session configuration
        Session session = testSessionBuilder()
                .setCatalog(getQueryRunner().getDefaultSession().getCatalog())
                .setSchema(getQueryRunner().getDefaultSession().getSchema())
                .setSystemProperty("use_preferred_write_partitioning", "true")
                .setSystemProperty("preferred_write_partitioning_min_number_of_partitions", "100")
                .build();
        String tableName = "test_repartitioning_during_optimize_" + randomTableSuffix();
        assertUpdate(session, "CREATE TABLE " + tableName + " (key varchar, value integer) WITH (format_version = " + formatVersion + ", partitioning = ARRAY['key'])");
        // optimize an empty table
        assertQuerySucceeds(session, "ALTER TABLE " + tableName + " EXECUTE OPTIMIZE");

        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 1)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 2)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 3)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 4)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 5)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 6)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('one', 7)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('two', 8)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('two', 9)", 1);
        assertUpdate(session, "INSERT INTO " + tableName + " VALUES ('three', 10)", 1);

        List<String> initialFiles =
getActiveFiles(tableName); assertThat(initialFiles).hasSize(10); computeActual(session, "ALTER TABLE " + tableName + " EXECUTE OPTIMIZE"); assertThat(query(session, "SELECT sum(value), listagg(key, ' ') WITHIN GROUP (ORDER BY key) FROM " + tableName)) .matches("VALUES (BIGINT '55', VARCHAR 'one one one one one one one three two two')"); List<String> updatedFiles = getActiveFiles(tableName); // as we force repartitioning there should be only 3 partitions assertThat(updatedFiles).hasSize(3); assertThat(getAllDataFilesFromTableDirectory(tableName)).containsExactlyInAnyOrderElementsOf(concat(initialFiles, updatedFiles)); assertUpdate("DROP TABLE " + tableName); } @DataProvider public Object[][] tableFormatVersion() { return IntStream.rangeClosed(IcebergConfig.FORMAT_VERSION_SUPPORT_MIN, IcebergConfig.FORMAT_VERSION_SUPPORT_MAX).boxed() .collect(DataProviders.toDataProvider()); } @Test public void testOptimizeTableAfterDeleteWithFormatVersion2() { String tableName = "test_optimize_" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM nation", 25); List<String> initialFiles = getActiveFiles(tableName); assertUpdate("DELETE FROM " + tableName + " WHERE nationkey = 7", 1); // Verify that delete files exists assertQuery( "SELECT summary['total-delete-files'] FROM \"" + tableName + "$snapshots\" WHERE snapshot_id = " + getCurrentSnapshotId(tableName), "VALUES '1'"); computeActual("ALTER TABLE " + tableName + " EXECUTE OPTIMIZE"); List<String> updatedFiles = getActiveFiles(tableName); assertThat(updatedFiles) .hasSize(1) .isNotEqualTo(initialFiles); assertThat(query("SELECT * FROM " + tableName)) .matches("SELECT * FROM nation WHERE nationkey != 7"); assertUpdate("DROP TABLE " + tableName); } private List<String> getActiveFiles(String tableName) { return computeActual(format("SELECT file_path FROM \"%s$files\"", tableName)).getOnlyColumn() .map(String.class::cast) .collect(toImmutableList()); } private List<String> getAllDataFilesFromTableDirectory(String tableName) throws IOException { String schema = getSession().getSchema().orElseThrow(); Path tableDataDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().resolve("iceberg_data").resolve(schema).resolve(tableName).resolve("data"); try (Stream<Path> walk = Files.walk(tableDataDir)) { return walk .filter(Files::isRegularFile) .filter(path -> !path.getFileName().toString().matches("\\..*\\.crc")) .map(Path::toString) .collect(toImmutableList()); } } @Test public void testOptimizeParameterValidation() { assertQueryFails( "ALTER TABLE no_such_table_exists EXECUTE OPTIMIZE", "\\Qline 1:1: Table 'iceberg.tpch.no_such_table_exists' does not exist"); assertQueryFails( "ALTER TABLE nation EXECUTE OPTIMIZE (file_size_threshold => '33')", "\\QUnable to set catalog 'iceberg' table procedure 'OPTIMIZE' property 'file_size_threshold' to ['33']: size is not a valid data size string: 33"); assertQueryFails( "ALTER TABLE nation EXECUTE OPTIMIZE (file_size_threshold => '33s')", "\\QUnable to set catalog 'iceberg' table procedure 'OPTIMIZE' property 'file_size_threshold' to ['33s']: Unknown unit: s"); } @Test public void testTargetMaxFileSize() { String tableName = "test_default_max_file_size" + randomTableSuffix(); @Language("SQL") String createTableSql = format("CREATE TABLE %s AS SELECT * FROM tpch.sf1.lineitem LIMIT 100000", tableName); Session session = Session.builder(getSession()) .setSystemProperty("task_writer_count", "1") .build(); assertUpdate(session, createTableSql, 100000); List<String> initialFiles = 
getActiveFiles(tableName); assertThat(initialFiles.size()).isLessThanOrEqualTo(3); assertUpdate(format("DROP TABLE %s", tableName)); DataSize maxSize = DataSize.of(40, DataSize.Unit.KILOBYTE); session = Session.builder(getSession()) .setSystemProperty("task_writer_count", "1") .setCatalogSessionProperty("iceberg", "target_max_file_size", maxSize.toString()) .build(); assertUpdate(session, createTableSql, 100000); assertThat(query(format("SELECT count(*) FROM %s", tableName))).matches("VALUES BIGINT '100000'"); List<String> updatedFiles = getActiveFiles(tableName); assertThat(updatedFiles.size()).isGreaterThan(10); computeActual(format("SELECT file_size_in_bytes FROM \"%s$files\"", tableName)) .getMaterializedRows() // as target_max_file_size is set to quite low value it can happen that created files are bigger, // so just to be safe we check if it is not much bigger .forEach(row -> assertThat((Long) row.getField(0)).isBetween(1L, maxSize.toBytes() * 3)); } @Test public void testDroppingIcebergAndCreatingANewTableWithTheSameNameShouldBePossible() { assertUpdate("CREATE TABLE test_iceberg_recreate (a_int) AS VALUES (1)", 1); assertThat(query("SELECT min(a_int) FROM test_iceberg_recreate")).matches("VALUES 1"); dropTable("test_iceberg_recreate"); assertUpdate("CREATE TABLE test_iceberg_recreate (a_varchar) AS VALUES ('Trino')", 1); assertThat(query("SELECT min(a_varchar) FROM test_iceberg_recreate")).matches("VALUES CAST('Trino' AS varchar)"); dropTable("test_iceberg_recreate"); } @Test public void testPathHiddenColumn() { String tableName = "test_path_" + randomTableSuffix(); @Language("SQL") String createTable = "CREATE TABLE " + tableName + " " + "WITH ( partitioning = ARRAY['zip'] ) AS " + "SELECT * FROM (VALUES " + "(0, 0), (3, 0), (6, 0), " + "(1, 1), (4, 1), (7, 1), " + "(2, 2), (5, 2) " + " ) t(userid, zip)"; assertUpdate(createTable, 8); MaterializedResult expectedColumns = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR) .row("userid", "integer", "", "") .row("zip", "integer", "", "") .build(); MaterializedResult actualColumns = computeActual(format("DESCRIBE %s", tableName)); // Describe output should not have the $path hidden column assertEquals(actualColumns, expectedColumns); assertThat(query("SELECT file_path FROM \"" + tableName + "$files\"")) .matches("SELECT DISTINCT \"$path\" as file_path FROM " + tableName); String somePath = (String) computeScalar("SELECT \"$path\" FROM " + tableName + " WHERE userid = 2"); assertThat(query("SELECT userid FROM " + tableName + " WHERE \"$path\" = '" + somePath + "'")) .matches("VALUES 2, 5"); assertThat(query("SELECT userid FROM " + tableName + " WHERE \"$path\" = '" + somePath + "' AND userid > 0")) .matches("VALUES 2, 5"); assertUpdate("DROP TABLE " + tableName); } @Test public void testExpireSnapshots() throws Exception { String tableName = "test_expiring_snapshots_" + randomTableSuffix(); Session sessionWithShortRetentionUnlocked = prepareCleanUpSession(); assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer)"); assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1); assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1); assertThat(query("SELECT sum(value), listagg(key, ' ') WITHIN GROUP (ORDER BY key) FROM " + tableName)) .matches("VALUES (BIGINT '3', VARCHAR 'one two')"); List<Long> initialSnapshots = getSnapshotIds(tableName); List<String> initialFiles = getAllMetadataFilesFromTableDirectoryForTable(tableName); assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER 
TABLE " + tableName + " EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '0s')"); assertThat(query("SELECT sum(value), listagg(key, ' ') WITHIN GROUP (ORDER BY key) FROM " + tableName)) .matches("VALUES (BIGINT '3', VARCHAR 'one two')"); List<String> updatedFiles = getAllMetadataFilesFromTableDirectoryForTable(tableName); List<Long> updatedSnapshots = getSnapshotIds(tableName); assertThat(updatedFiles.size()).isEqualTo(initialFiles.size() - 1); assertThat(updatedSnapshots.size()).isLessThan(initialSnapshots.size()); assertThat(updatedSnapshots.size()).isEqualTo(1); assertThat(initialSnapshots).containsAll(updatedSnapshots); } @Test public void testExpireSnapshotsPartitionedTable() throws Exception { String tableName = "test_expiring_snapshots_partitioned_table" + randomTableSuffix(); Session sessionWithShortRetentionUnlocked = prepareCleanUpSession(); assertUpdate("CREATE TABLE " + tableName + " (col1 BIGINT, col2 BIGINT) WITH (partitioning = ARRAY['col1'])"); assertUpdate("INSERT INTO " + tableName + " VALUES(1, 100), (1, 101), (1, 102), (2, 200), (2, 201), (3, 300)", 6); assertUpdate("DELETE FROM " + tableName + " WHERE col1 = 1", 3); assertUpdate("INSERT INTO " + tableName + " VALUES(4, 400)", 1); assertQuery("SELECT sum(col2) FROM " + tableName, "SELECT 1101"); List<String> initialDataFiles = getAllDataFilesFromTableDirectory(tableName); List<Long> initialSnapshots = getSnapshotIds(tableName); assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '0s')"); List<String> updatedDataFiles = getAllDataFilesFromTableDirectory(tableName); List<Long> updatedSnapshots = getSnapshotIds(tableName); assertQuery("SELECT sum(col2) FROM " + tableName, "SELECT 1101"); assertThat(updatedDataFiles.size()).isLessThan(initialDataFiles.size()); assertThat(updatedSnapshots.size()).isLessThan(initialSnapshots.size()); } @Test public void testExplainExpireSnapshotOutput() { String tableName = "test_expiring_snapshots_output" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer) WITH (partitioning = ARRAY['key'])"); assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1); assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1); assertExplain("EXPLAIN ALTER TABLE " + tableName + " EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '0s')", "SimpleTableExecute\\[iceberg:schemaTableName:tpch.test_expiring_snapshots.*\\{retentionThreshold=0\\.00s}.*"); } @Test public void testExpireSnapshotsParameterValidation() { assertQueryFails( "ALTER TABLE no_such_table_exists EXECUTE EXPIRE_SNAPSHOTS", "\\Qline 1:1: Table 'iceberg.tpch.no_such_table_exists' does not exist"); assertQueryFails( "ALTER TABLE nation EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '33')", "\\QUnable to set catalog 'iceberg' table procedure 'EXPIRE_SNAPSHOTS' property 'retention_threshold' to ['33']: duration is not a valid data duration string: 33"); assertQueryFails( "ALTER TABLE nation EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '33mb')", "\\QUnable to set catalog 'iceberg' table procedure 'EXPIRE_SNAPSHOTS' property 'retention_threshold' to ['33mb']: Unknown time unit: mb"); assertQueryFails( "ALTER TABLE nation EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '33s')", "\\QRetention specified (33.00s) is shorter than the minimum retention configured in the system (7.00d). 
Minimum retention can be changed with iceberg.expire_snapshots.min-retention configuration property or iceberg.expire_snapshots_min_retention session property"); } @Test public void testRemoveOrphanFiles() throws Exception { String tableName = "test_deleting_orphan_files_unnecessary_files" + randomTableSuffix(); Session sessionWithShortRetentionUnlocked = prepareCleanUpSession(); assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer)"); assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1); Path orphanFile = Files.createFile(Path.of(getIcebergTableDataPath(tableName).toString(), "invalidData." + format)); List<String> initialDataFiles = getAllDataFilesFromTableDirectory(tableName); assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '0s')"); List<String> updatedDataFiles = getAllDataFilesFromTableDirectory(tableName); assertThat(updatedDataFiles.size()).isLessThan(initialDataFiles.size()); assertThat(updatedDataFiles).doesNotContain(orphanFile.toString()); } @Test public void testIfRemoveOrphanFilesCleansUnnecessaryDataFilesInPartitionedTable() throws Exception { String tableName = "test_deleting_orphan_files_unnecessary_files" + randomTableSuffix(); Session sessionWithShortRetentionUnlocked = prepareCleanUpSession(); assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer) WITH (partitioning = ARRAY['key'])"); assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1); assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1); Path orphanFile = Files.createFile(Path.of(getIcebergTableDataPath(tableName) + "/key=one/", "invalidData." + format)); List<String> initialDataFiles = getAllDataFilesFromTableDirectory(tableName); assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '0s')"); List<String> updatedDataFiles = getAllDataFilesFromTableDirectory(tableName); assertThat(updatedDataFiles.size()).isLessThan(initialDataFiles.size()); assertThat(updatedDataFiles).doesNotContain(orphanFile.toString()); } @Test public void testIfRemoveOrphanFilesCleansUnnecessaryMetadataFilesInPartitionedTable() throws Exception { String tableName = "test_deleting_orphan_files_unnecessary_files" + randomTableSuffix(); Session sessionWithShortRetentionUnlocked = prepareCleanUpSession(); assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer) WITH (partitioning = ARRAY['key'])"); assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1); assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1); Path orphanMetadataFile = Files.createFile(Path.of(getIcebergTableMetadataPath(tableName).toString(), "invalidData." 
+ format)); List<String> initialMetadataFiles = getAllMetadataFilesFromTableDirectoryForTable(tableName); assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '0s')"); List<String> updatedMetadataFiles = getAllMetadataFilesFromTableDirectoryForTable(tableName); assertThat(updatedMetadataFiles.size()).isLessThan(initialMetadataFiles.size()); assertThat(updatedMetadataFiles).doesNotContain(orphanMetadataFile.toString()); } @Test public void testCleaningUpWithTableWithSpecifiedLocationWithSlashAtTheEnd() throws IOException { testCleaningUpWithTableWithSpecifiedLocation("/"); } @Test public void testCleaningUpWithTableWithSpecifiedLocationWithoutSlashAtTheEnd() throws IOException { testCleaningUpWithTableWithSpecifiedLocation(""); } private void testCleaningUpWithTableWithSpecifiedLocation(String suffix) throws IOException { File tempDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().toFile(); String tempDirPath = tempDir.toURI().toASCIIString() + randomTableSuffix() + suffix; String tableName = "test_table_cleaning_up_with_location" + randomTableSuffix(); assertUpdate(format("CREATE TABLE %s (key varchar, value integer) WITH(location = '%s')", tableName, tempDirPath)); assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1); assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1); List<String> initialFiles = getAllMetadataFilesFromTableDirectory(tempDirPath); List<Long> initialSnapshots = getSnapshotIds(tableName); Session sessionWithShortRetentionUnlocked = prepareCleanUpSession(); assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE EXPIRE_SNAPSHOTS (retention_threshold => '0s')"); assertQuerySucceeds(sessionWithShortRetentionUnlocked, "ALTER TABLE " + tableName + " EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '0s')"); List<String> updatedFiles = getAllMetadataFilesFromTableDirectory(tempDirPath); List<Long> updatedSnapshots = getSnapshotIds(tableName); assertThat(updatedFiles.size()).isEqualTo(initialFiles.size() - 1); assertThat(updatedSnapshots.size()).isLessThan(initialSnapshots.size()); assertThat(updatedSnapshots.size()).isEqualTo(1); assertThat(initialSnapshots).containsAll(updatedSnapshots); } @Test public void testExplainRemoveOrphanFilesOutput() { String tableName = "test_remove_orphan_files_output" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer) WITH (partitioning = ARRAY['key'])"); assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1); assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1); assertExplain("EXPLAIN ALTER TABLE " + tableName + " EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '0s')", "SimpleTableExecute\\[iceberg:schemaTableName:tpch.test_remove_orphan_files.*\\{retentionThreshold=0\\.00s}.*"); } @Test public void testRemoveOrphanFilesParameterValidation() { assertQueryFails( "ALTER TABLE no_such_table_exists EXECUTE REMOVE_ORPHAN_FILES", "\\Qline 1:1: Table 'iceberg.tpch.no_such_table_exists' does not exist"); assertQueryFails( "ALTER TABLE nation EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '33')", "\\QUnable to set catalog 'iceberg' table procedure 'REMOVE_ORPHAN_FILES' property 'retention_threshold' to ['33']: duration is not a valid data duration string: 33"); assertQueryFails( "ALTER TABLE nation EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '33mb')", "\\QUnable to set catalog 'iceberg' table 
procedure 'REMOVE_ORPHAN_FILES' property 'retention_threshold' to ['33mb']: Unknown time unit: mb"); assertQueryFails( "ALTER TABLE nation EXECUTE REMOVE_ORPHAN_FILES (retention_threshold => '33s')", "\\QRetention specified (33.00s) is shorter than the minimum retention configured in the system (7.00d). Minimum retention can be changed with iceberg.remove_orphan_files.min-retention configuration property or iceberg.remove_orphan_files_min_retention session property"); } @Test public void testIfDeletesReturnsNumberOfRemovedRows() { String tableName = "test_delete_returns_number_of_rows_" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " (key varchar, value integer) WITH (partitioning = ARRAY['key'])"); assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 1)", 1); assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 2)", 1); assertUpdate("INSERT INTO " + tableName + " VALUES ('one', 3)", 1); assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 1)", 1); assertUpdate("INSERT INTO " + tableName + " VALUES ('two', 2)", 1); assertUpdate("DELETE FROM " + tableName + " WHERE key = 'one'", 3); assertUpdate("DELETE FROM " + tableName + " WHERE key = 'one'"); // TODO change this when iceberg will guarantee to always return this (https://github.com/apache/iceberg/issues/4647) assertUpdate("DELETE FROM " + tableName + " WHERE key = 'three'"); assertUpdate("DELETE FROM " + tableName + " WHERE key = 'two'", 2); } @Test public void testUpdatingFileFormat() { String tableName = "test_updating_file_format_" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " WITH (format = 'orc') AS SELECT * FROM nation WHERE nationkey < 10", "SELECT count(*) FROM nation WHERE nationkey < 10"); assertQuery("SELECT value FROM \"" + tableName + "$properties\" WHERE key = 'write.format.default'", "VALUES 'ORC'"); assertUpdate("ALTER TABLE " + tableName + " SET PROPERTIES format = 'parquet'"); assertQuery("SELECT value FROM \"" + tableName + "$properties\" WHERE key = 'write.format.default'", "VALUES 'PARQUET'"); assertUpdate("INSERT INTO " + tableName + " SELECT * FROM nation WHERE nationkey >= 10", "SELECT count(*) FROM nation WHERE nationkey >= 10"); assertQuery("SELECT * FROM " + tableName, "SELECT * FROM nation"); assertQuery("SELECT count(*) FROM \"" + tableName + "$files\" WHERE file_path LIKE '%.orc'", "VALUES 1"); assertQuery("SELECT count(*) FROM \"" + tableName + "$files\" WHERE file_path LIKE '%.parquet'", "VALUES 1"); assertUpdate("DROP TABLE " + tableName); } @Test public void testUpdatingInvalidTableProperty() { String tableName = "test_updating_invalid_table_property_" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " (a INT, b INT)"); assertThatThrownBy(() -> query("ALTER TABLE " + tableName + " SET PROPERTIES not_a_valid_table_property = 'a value'")) .hasMessage("Catalog 'iceberg' table property 'not_a_valid_table_property' does not exist"); assertUpdate("DROP TABLE " + tableName); } @Test public void testEmptyCreateTableAsSelect() { String tableName = "test_empty_ctas_" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM nation WHERE false", 0); List<Long> initialTableSnapshots = getSnapshotIds(tableName); assertThat(initialTableSnapshots.size()) .withFailMessage("CTAS operations must create Iceberg snapshot independently whether the selection is empty or not") .isEqualTo(1); assertQueryReturnsEmptyResult("SELECT * FROM " + tableName); assertUpdate("DROP TABLE " + tableName); } @Test public void 
testEmptyInsert() { String tableName = "test_empty_insert_" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM nation", "SELECT count(*) FROM nation"); List<Long> initialTableSnapshots = getSnapshotIds(tableName); assertUpdate("INSERT INTO " + tableName + " SELECT * FROM nation WHERE false", 0); List<Long> updatedTableSnapshots = getSnapshotIds(tableName); assertThat(initialTableSnapshots) .withFailMessage("INSERT operations that are not changing the state of the table must not cause the creation of a new Iceberg snapshot") .hasSize(1) .isEqualTo(updatedTableSnapshots); assertUpdate("DROP TABLE " + tableName); } @Test public void testEmptyUpdate() { String tableName = "test_empty_update_" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " AS SELECT * FROM nation", "SELECT count(*) FROM nation"); List<Long> initialTableSnapshots = getSnapshotIds(tableName); assertUpdate("UPDATE " + tableName + " SET comment = 'new comment' WHERE nationkey IS NULL", 0); List<Long> updatedTableSnapshots = getSnapshotIds(tableName); assertThat(initialTableSnapshots) .withFailMessage("UPDATE operations that are not changing the state of the table must not cause the creation of a new Iceberg snapshot") .hasSize(1) .isEqualTo(updatedTableSnapshots); assertUpdate("DROP TABLE " + tableName); } @Test public void testEmptyDelete() { String tableName = "test_empty_delete_" + randomTableSuffix(); assertUpdate("CREATE TABLE " + tableName + " WITH (format = '" + format.name() + "') AS SELECT * FROM nation", "SELECT count(*) FROM nation"); List<Long> initialTableSnapshots = getSnapshotIds(tableName); assertUpdate("DELETE FROM " + tableName + " WHERE nationkey IS NULL", 0); List<Long> updatedTableSnapshots = getSnapshotIds(tableName); assertThat(initialTableSnapshots) .withFailMessage("DELETE operations that are not changing the state of the table must not cause the creation of a new Iceberg snapshot") .hasSize(1) .isEqualTo(updatedTableSnapshots); assertUpdate("DROP TABLE " + tableName); } private Session prepareCleanUpSession() { return Session.builder(getSession()) .setCatalogSessionProperty("iceberg", "expire_snapshots_min_retention", "0s") .setCatalogSessionProperty("iceberg", "remove_orphan_files_min_retention", "0s") .build(); } private List<String> getAllMetadataFilesFromTableDirectoryForTable(String tableName) throws IOException { String schema = getSession().getSchema().orElseThrow(); Path tableDataDir = getDistributedQueryRunner().getCoordinator().getBaseDataDir().resolve("iceberg_data").resolve(schema).resolve(tableName).resolve("metadata"); return listAllTableFilesInDirectory(tableDataDir); } private List<String> getAllMetadataFilesFromTableDirectory(String tableDataDir) throws IOException { return listAllTableFilesInDirectory(Path.of(URI.create(tableDataDir).getPath())); } private List<String> listAllTableFilesInDirectory(Path tableDataPath) throws IOException { try (Stream<Path> walk = Files.walk(tableDataPath)) { return walk .filter(Files::isRegularFile) .filter(path -> !path.getFileName().toString().matches("\\..*\\.crc")) .map(Path::toString) .collect(toImmutableList()); } } private List<Long> getSnapshotIds(String tableName) { return getQueryRunner().execute(format("SELECT snapshot_id FROM \"%s$snapshots\"", tableName)) .getOnlyColumn() .map(Long.class::cast) .collect(toUnmodifiableList()); } private long getCurrentSnapshotId(String tableName) { return (long) computeScalar("SELECT snapshot_id FROM \"" + tableName + "$snapshots\" ORDER BY committed_at 
DESC LIMIT 1"); } private Path getIcebergTableDataPath(String tableName) { return getIcebergTablePath(tableName, "data"); } private Path getIcebergTableMetadataPath(String tableName) { return getIcebergTablePath(tableName, "metadata"); } private Path getIcebergTablePath(String tableName, String suffix) { String schema = getSession().getSchema().orElseThrow(); return getDistributedQueryRunner().getCoordinator().getBaseDataDir().resolve("iceberg_data").resolve(schema).resolve(tableName).resolve(suffix); } }
Rename test table name: `test_bucket_transform` was a copy-paste leftover. This also adds a missing DROP TABLE.
plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
Rename test table name
<ide><path>lugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java <ide> @Test <ide> public void testApplyFilterWithNonEmptyConstraintPredicate() <ide> { <del> assertUpdate("CREATE TABLE test_bucket_transform (d VARCHAR, b BIGINT) WITH (partitioning = ARRAY['bucket(d, 2)'])"); <add> assertUpdate("CREATE TABLE test_apply_functional_constraint (d VARCHAR, b BIGINT) WITH (partitioning = ARRAY['bucket(d, 2)'])"); <ide> assertUpdate( <del> "INSERT INTO test_bucket_transform VALUES" + <add> "INSERT INTO test_apply_functional_constraint VALUES" + <ide> "('abcd', 1)," + <ide> "('abxy', 2)," + <ide> "('ab598', 3)," + <ide> 7); <ide> <ide> assertQuery( <del> "SELECT * FROM test_bucket_transform WHERE length(d) = 4 AND b % 7 = 2", <add> "SELECT * FROM test_apply_functional_constraint WHERE length(d) = 4 AND b % 7 = 2", <ide> "VALUES ('abxy', 2)"); <add> <add> assertUpdate("DROP TABLE test_apply_functional_constraint"); <ide> } <ide> <ide> @Test
Java
apache-2.0
3a1a95ff3c394af7dc4afd2f6709024ab9b23f41
0
Hipparchus-Math/hipparchus,apache/commons-math,apache/commons-math,Hipparchus-Math/hipparchus,Hipparchus-Math/hipparchus,apache/commons-math,Hipparchus-Math/hipparchus,sdinot/hipparchus,sdinot/hipparchus,sdinot/hipparchus,apache/commons-math,sdinot/hipparchus
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math.linear; import org.apache.commons.math.exception.MathIllegalArgumentException; import org.apache.commons.math.fraction.Fraction; import org.apache.commons.math.fraction.FractionField; import org.junit.Assert; import org.junit.Test; public class FieldLUSolverTest { private int[][] testData = { { 1, 2, 3}, { 2, 5, 3}, { 1, 0, 8} }; private int[][] luData = { { 2, 3, 3 }, { 0, 5, 7 }, { 6, 9, 8 } }; // singular matrices private int[][] singular = { { 2, 3 }, { 2, 3 } }; private int[][] bigSingular = { { 1, 2, 3, 4 }, { 2, 5, 3, 4 }, { 7, 3, 256, 1930 }, { 3, 7, 6, 8 } }; // 4th row = 1st + 2nd public static FieldMatrix<Fraction> createFractionMatrix(final int[][] data) { final int numRows = data.length; final int numCols = data[0].length; final Array2DRowFieldMatrix<Fraction> m; m = new Array2DRowFieldMatrix<Fraction>(FractionField.getInstance(), numRows, numCols); for (int i = 0; i < numRows; i++) { for (int j = 0; j < numCols; j++) { m.setEntry(i, j, new Fraction(data[i][j], 1)); } } return m; } /** test singular */ @Test public void testSingular() { FieldDecompositionSolver<Fraction> solver; solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(testData)) .getSolver(); Assert.assertTrue(solver.isNonSingular()); solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(singular)) .getSolver(); Assert.assertFalse(solver.isNonSingular()); solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(bigSingular)) .getSolver(); Assert.assertFalse(solver.isNonSingular()); } /** test solve dimension errors */ @Test public void testSolveDimensionErrors() { FieldDecompositionSolver<Fraction> solver; solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(testData)) .getSolver(); FieldMatrix<Fraction> b = createFractionMatrix(new int[2][2]); try { solver.solve(b); Assert.fail("an exception should have been thrown"); } catch (MathIllegalArgumentException iae) { // expected behavior } try { solver.solve(b.getColumnVector(0)); Assert.fail("an exception should have been thrown"); } catch (MathIllegalArgumentException iae) { // expected behavior } } /** test solve singularity errors */ @Test public void testSolveSingularityErrors() { FieldDecompositionSolver<Fraction> solver; solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(singular)) .getSolver(); FieldMatrix<Fraction> b = createFractionMatrix(new int[2][2]); try { solver.solve(b); Assert.fail("an exception should have been thrown"); } catch (SingularMatrixException ime) { // expected behavior } try { solver.solve(b.getColumnVector(0)); Assert.fail("an exception should have been thrown"); } catch (SingularMatrixException ime) { // expected behavior } } /** test solve */ @Test public void testSolve() { 
FieldDecompositionSolver<Fraction> solver; solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(testData)) .getSolver(); FieldMatrix<Fraction> b = createFractionMatrix(new int[][] { { 1, 0 }, { 2, -5 }, { 3, 1 } }); FieldMatrix<Fraction> xRef = createFractionMatrix(new int[][] { { 19, -71 }, { -6, 22 }, { -2, 9 } }); // using FieldMatrix FieldMatrix<Fraction> x = solver.solve(b); for (int i = 0; i < x.getRowDimension(); i++){ for (int j = 0; j < x.getColumnDimension(); j++){ Assert.assertEquals("(" + i + ", " + j + ")", xRef.getEntry(i, j), x.getEntry(i, j)); } } // using ArrayFieldVector for (int j = 0; j < b.getColumnDimension(); j++) { final FieldVector<Fraction> xj = solver.solve(b.getColumnVector(j)); for (int i = 0; i < xj.getDimension(); i++){ Assert.assertEquals("(" + i + ", " + j + ")", xRef.getEntry(i, j), xj.getEntry(i)); } } // using SparseFieldVector for (int j = 0; j < b.getColumnDimension(); j++) { final SparseFieldVector<Fraction> bj; bj = new SparseFieldVector<Fraction>(FractionField.getInstance(), b.getColumn(j)); final FieldVector<Fraction> xj = solver.solve(bj); for (int i = 0; i < xj.getDimension(); i++) { Assert.assertEquals("(" + i + ", " + j + ")", xRef.getEntry(i, j), xj.getEntry(i)); } } } /** test determinant */ @Test public void testDeterminant() { Assert.assertEquals( -1, getDeterminant(createFractionMatrix(testData)), 1E-15); Assert.assertEquals(-10, getDeterminant(createFractionMatrix(luData)), 1E-14); Assert.assertEquals( 0, getDeterminant(createFractionMatrix(singular)), 1E-15); Assert.assertEquals( 0, getDeterminant(createFractionMatrix(bigSingular)), 1E-15); } private double getDeterminant(final FieldMatrix<Fraction> m) { return new FieldLUDecomposition<Fraction>(m).getDeterminant().doubleValue(); } }
src/test/java/org/apache/commons/math/linear/FieldLUSolverTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math.linear; import org.apache.commons.math.exception.MathIllegalArgumentException; import org.apache.commons.math.fraction.Fraction; import org.apache.commons.math.fraction.FractionField; import org.junit.Assert; import org.junit.Test; public class FieldLUSolverTest { private int[][] testData = { { 1, 2, 3}, { 2, 5, 3}, { 1, 0, 8} }; private int[][] luData = { { 2, 3, 3 }, { 0, 5, 7 }, { 6, 9, 8 } }; // singular matrices private int[][] singular = { { 2, 3 }, { 2, 3 } }; private int[][] bigSingular = { { 1, 2, 3, 4 }, { 2, 5, 3, 4 }, { 7, 3, 256, 1930 }, { 3, 7, 6, 8 } }; // 4th row = 1st + 2nd public static FieldMatrix<Fraction> createFractionMatrix(final int[][] data) { final int numRows = data.length; final int numCols = data[0].length; final Array2DRowFieldMatrix<Fraction> m; m = new Array2DRowFieldMatrix<Fraction>(FractionField.getInstance(), numRows, numCols); for (int i = 0; i < numRows; i++) { for (int j = 0; j < numCols; j++) { m.setEntry(i, j, new Fraction(data[i][j], 1)); } } return m; } /** test singular */ @Test public void testSingular() { FieldDecompositionSolver<Fraction> solver; solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(testData)) .getSolver(); Assert.assertTrue(solver.isNonSingular()); solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(singular)) .getSolver(); Assert.assertFalse(solver.isNonSingular()); solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(bigSingular)) .getSolver(); Assert.assertFalse(solver.isNonSingular()); } /** test solve dimension errors */ @Test public void testSolveDimensionErrors() { FieldDecompositionSolver<Fraction> solver; solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(testData)) .getSolver(); FieldMatrix<Fraction> b = createFractionMatrix(new int[2][2]); try { solver.solve(b); Assert.fail("an exception should have been thrown"); } catch (MathIllegalArgumentException iae) { // expected behavior } try { solver.solve(b.getColumnVector(0)); Assert.fail("an exception should have been thrown"); } catch (MathIllegalArgumentException iae) { // expected behavior } } /** test solve singularity errors */ @Test public void testSolveSingularityErrors() { FieldDecompositionSolver solver; solver = new FieldLUDecomposition(createFractionMatrix(singular)) .getSolver(); FieldMatrix b = createFractionMatrix(new int[2][2]); try { solver.solve(b); Assert.fail("an exception should have been thrown"); } catch (SingularMatrixException ime) { // expected behavior } try { solver.solve(b.getColumnVector(0)); Assert.fail("an exception should have been thrown"); } catch (SingularMatrixException ime) { // expected behavior } } /** test solve */ @Test public void testSolve() { FieldDecompositionSolver solver; 
solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(testData)) .getSolver(); FieldMatrix<Fraction> b = createFractionMatrix(new int[][] { { 1, 0 }, { 2, -5 }, { 3, 1 } }); FieldMatrix<Fraction> xRef = createFractionMatrix(new int[][] { { 19, -71 }, { -6, 22 }, { -2, 9 } }); // using FieldMatrix FieldMatrix<Fraction> x = solver.solve(b); for (int i = 0; i < x.getRowDimension(); i++){ for (int j = 0; j < x.getColumnDimension(); j++){ Assert.assertEquals("(" + i + ", " + j + ")", xRef.getEntry(i, j), x.getEntry(i, j)); } } // using ArrayFieldVector for (int j = 0; j < b.getColumnDimension(); j++) { final FieldVector<Fraction> xj = solver.solve(b.getColumnVector(j)); for (int i = 0; i < xj.getDimension(); i++){ Assert.assertEquals("(" + i + ", " + j + ")", xRef.getEntry(i, j), xj.getEntry(i)); } } // using SparseFieldVector for (int j = 0; j < b.getColumnDimension(); j++) { final SparseFieldVector<Fraction> bj; bj = new SparseFieldVector<Fraction>(FractionField.getInstance(), b.getColumn(j)); final FieldVector<Fraction> xj = solver.solve(bj); for (int i = 0; i < xj.getDimension(); i++) { Assert.assertEquals("(" + i + ", " + j + ")", xRef.getEntry(i, j), xj.getEntry(i)); } } } /** test determinant */ @Test public void testDeterminant() { Assert.assertEquals( -1, getDeterminant(createFractionMatrix(testData)), 1E-15); Assert.assertEquals(-10, getDeterminant(createFractionMatrix(luData)), 1E-14); Assert.assertEquals( 0, getDeterminant(createFractionMatrix(singular)), 1E-15); Assert.assertEquals( 0, getDeterminant(createFractionMatrix(bigSingular)), 1E-15); } private double getDeterminant(final FieldMatrix<Fraction> m) { return new FieldLUDecomposition<Fraction>(m).getDeterminant().doubleValue(); } }
Fix generics git-svn-id: 80d496c472b8b763a5e941dba212da9bf48aeceb@1197488 13f79535-47bb-0310-9956-ffa450edef68
src/test/java/org/apache/commons/math/linear/FieldLUSolverTest.java
Fix generics
<ide><path>rc/test/java/org/apache/commons/math/linear/FieldLUSolverTest.java <ide> /** test solve singularity errors */ <ide> @Test <ide> public void testSolveSingularityErrors() { <del> FieldDecompositionSolver solver; <del> solver = new FieldLUDecomposition(createFractionMatrix(singular)) <add> FieldDecompositionSolver<Fraction> solver; <add> solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(singular)) <ide> .getSolver(); <del> FieldMatrix b = createFractionMatrix(new int[2][2]); <add> FieldMatrix<Fraction> b = createFractionMatrix(new int[2][2]); <ide> try { <ide> solver.solve(b); <ide> Assert.fail("an exception should have been thrown"); <ide> /** test solve */ <ide> @Test <ide> public void testSolve() { <del> FieldDecompositionSolver solver; <add> FieldDecompositionSolver<Fraction> solver; <ide> solver = new FieldLUDecomposition<Fraction>(createFractionMatrix(testData)) <ide> .getSolver(); <ide> FieldMatrix<Fraction> b = createFractionMatrix(new int[][] {
Java
apache-2.0
2062c304985fc7aa62d5a14f6ebdff7fd4b78d32
0
jzachr/goldenorb,jzachr/goldenorb,jzachr/goldenorb
/** * Licensed to Ravel, Inc. under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Ravel, Inc. licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.goldenorb; import java.io.IOException; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.TaskID; import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.util.ReflectionUtils; import org.apache.zookeeper.ZooKeeper; import org.goldenorb.conf.OrbConfiguration; import org.goldenorb.event.OrbCallback; import org.goldenorb.event.OrbEvent; import org.goldenorb.event.OrbExceptionEvent; import org.goldenorb.io.InputSplitAllocator; import org.goldenorb.io.input.RawSplit; import org.goldenorb.io.input.VertexBuilder; import org.goldenorb.io.output.OrbContext; import org.goldenorb.io.output.VertexWriter; import org.goldenorb.jet.OrbPartitionMember; import org.goldenorb.net.OrbDNS; import org.goldenorb.queue.InboundMessageQueue; import org.goldenorb.queue.OutboundMessageQueue; import org.goldenorb.queue.OutboundVertexQueue; import org.goldenorb.zookeeper.AllDoneBarrier; import org.goldenorb.zookeeper.Barrier; import org.goldenorb.zookeeper.LeaderGroup; import org.goldenorb.zookeeper.OrbFastAllDoneBarrier; import org.goldenorb.zookeeper.OrbFastBarrier; import org.goldenorb.zookeeper.OrbZKFailure; import org.goldenorb.zookeeper.ZookeeperUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * OrbPartition, spawned via {@link OrbPartitionProcess}, is responsible for loading * input data, assigning file splits to other {@link OrbPartition} processes and coordinating with * other {@link OrbPartition} processes via the exchange of {@link Messages} and {@link Vertices}. * In addition to start-up and coordination, {@link OrbPartition} processes are responsible for * stepping through the graph algorithms themselves, via the compute method.
*/ public class OrbPartition extends OrbPartitionMember implements Runnable, OrbPartitionCommunicationProtocol, OrbPartitionManagerProtocol { private final static int PARTITION_JOIN_TIMEOUT = 60000; private static Logger LOG = LoggerFactory.getLogger(OrbPartition.class); /** * The unique identifier for this partition. */ private String jobPath; private String jobInProgressPath; private ZooKeeper zk; private boolean standby; private boolean waitingForAllToJoin = true; private LeaderGroup<OrbPartitionMember> leaderGroup; private Server interpartitionCommunicationServer; private Server trackerTetherCommunicationServer; private int interpartitionCommunicationPort; private int trackerTetherCommunicationPort; // private String hostname; private boolean runPartition; private boolean loadedVerticesComplete = false; private Set<InputSplitLoaderHandler> inputSplitLoaderHandlers = Collections.synchronizedSet(new HashSet<InputSplitLoaderHandler>()); private Set<MessagesHandler> messagesHandlers = Collections.synchronizedSet(new HashSet<MessagesHandler>()); private Set<LoadVerticesHandler> loadVerticesHandlers = Collections.synchronizedSet(new HashSet<LoadVerticesHandler>()); private InboundMessageQueue currentInboundMessageQueue; private InboundMessageQueue processingInboundMessageQueue; private OutboundMessageQueue outboundMessageQueue; private VoteToHaltSet processingVoteToHaltSet; private boolean hasMoreToProcess = true; private boolean computing = true; private ExecutorService inputSplitHandlerExecutor; private ExecutorService verticesLoaderHandlerExecutor; private ExecutorService messageHandlerExecutor; private ExecutorService computeExecutor; private Map<String,Vertex<?,?,?>> vertices = new HashMap<String, Vertex<?,?,?>>(); private OrbCommunicationInterface oci = new OrbCommunicationInterface(); Map<Integer,OrbPartitionCommunicationProtocol> orbClients; /** * Constructor * * @param String * jobNumber * @param int partitionID * @param boolean standby * @param int partitionBasePort */ public OrbPartition(String jobNumber, int partitionID, boolean standby, int partitionBasePort) { this.setOrbConf(new OrbConfiguration(true)); this.standby = standby; interpartitionCommunicationPort = partitionBasePort; trackerTetherCommunicationPort = partitionBasePort + 100; jobPath = "/GoldenOrb/" + getOrbConf().getOrbClusterName() + "/JobQueue/" + jobNumber; jobInProgressPath = "/GoldenOrb/" + getOrbConf().getOrbClusterName() + "/JobsInProgress/" + jobNumber; setPartitionID(partitionID); LOG.debug("Starting for job {}", jobInProgressPath); inputSplitHandlerExecutor = Executors.newFixedThreadPool(getOrbConf().getInputSplitHandlerThreads()); messageHandlerExecutor = Executors.newFixedThreadPool(getOrbConf().getMessageHandlerThreads()); computeExecutor = Executors.newFixedThreadPool(getOrbConf().getComputeThreads()); verticesLoaderHandlerExecutor = Executors.newFixedThreadPool(getOrbConf() .getVerticesLoaderHandlerThreads()); try { zk = ZookeeperUtils.connect(getOrbConf().getOrbZooKeeperQuorum()); } catch (Exception e) { LOG.error("Unable to establish a connection with ZooKeeper" + getOrbConf().getOrbZooKeeperQuorum(), e); System.exit(-1); } OrbConfiguration jobConf = null; try { jobConf = (OrbConfiguration) ZookeeperUtils.getNodeWritable(zk, jobPath, OrbConfiguration.class, getOrbConf()); } catch (OrbZKFailure e) { LOG.error("Unable to retrieve job from ZooKeeper: " + jobPath, e); System.exit(-1); } if (jobConf != null) { setOrbConf(jobConf); getOrbConf().setJobNumber(jobNumber); LOG.debug("setOrbConf with 
requested, reserved", jobConf.getOrbRequestedPartitions(), jobConf.getOrbReservedPartitions()); } setSuperStep(0); setNumberOfVertices(0); setMessagesSent(0); setPercentComplete(0.0F); setLeader(false); } /** * * @param String * [] args */ public static void main(String[] args) { if (args.length != 4) { LOG.error("OrbPartition cannot start unless it is passed both the partitionID and the jobNumber to the Jobs OrbConfiguration"); } LOG.debug("OrbPartition starting with args: {}", Arrays.toString(args)); String jobNumber = args[0]; int partitionID = Integer.parseInt(args[1]); boolean standby = Boolean.parseBoolean(args[2]); int partitionBasePort = Integer.parseInt(args[3]); new Thread(new OrbPartition(jobNumber, partitionID, standby, partitionBasePort)).start(); } /** * */ @Override public void run() { try { setHostname(OrbDNS.getDefaultHost(getOrbConf())); setPort(trackerTetherCommunicationPort); } catch (UnknownHostException e) { LOG.error("Unable to get hostname.", e); System.exit(-1); } try { // TODO make this use the configuration to set this up interpartitionCommunicationServer = RPC.getServer(this, getHostname(), this.interpartitionCommunicationPort, 10, false, getOrbConf()); interpartitionCommunicationServer.start(); LOG.info("Starting OrbPartition Interpartition Communication Server on: " + getHostname() + ":" + this.interpartitionCommunicationPort); } catch (IOException e) { LOG.error("Failed to start OrbPartition Interpartition Communication server!!", e); e.printStackTrace(); System.exit(-1); } try { trackerTetherCommunicationServer = RPC.getServer(this, getHostname(), this.trackerTetherCommunicationPort, getOrbConf()); trackerTetherCommunicationServer.start(); LOG.info("Starting OrbPartition Tracker Tether Communication Server on: " + getHostname() + ":" + this.trackerTetherCommunicationPort); } catch (IOException e) { LOG.error("Failed to start Tracker Tether Communcation server!!", e); e.printStackTrace(); System.exit(-1); } leaderGroup = new LeaderGroup<OrbPartitionMember>(zk, new OrbPartitionCallback(), jobInProgressPath + "/OrbPartitionLeaderGroup", this, OrbPartitionMember.class); LOG.debug("leaderGroup member paths {}", leaderGroup.getMembersPath().toString()); LOG.debug("requested {}, reserved {}", getOrbConf().getOrbRequestedPartitions(), getOrbConf().getOrbReservedPartitions()); synchronized (this) { while (leaderGroup.getNumOfMembers() < (getOrbConf().getOrbRequestedPartitions() + getOrbConf() .getOrbReservedPartitions())) { try { LOG.debug("partition {} is waiting", getPartitionID()); wait(PARTITION_JOIN_TIMEOUT); } catch (InterruptedException e) { e.printStackTrace(); } } } initializeOrbClients(); if (leaderGroup.isLeader()) { executeAsLeader(); } else { executeAsSlave(); } } /** * */ private void initializeOrbClients() { orbClients = new HashMap<Integer,OrbPartitionCommunicationProtocol>(); for (OrbPartitionMember orbPartitionMember : leaderGroup.getMembers()) { try { orbPartitionMember.initProxy(getOrbConf()); LOG.debug("partition {} proxy initialized", getPartitionID()); } catch (IOException e) { // TODO This is a significant error and should start the killing of the partition e.printStackTrace(); } orbClients.put(orbPartitionMember.getPartitionID(), orbPartitionMember); } } /** * */ private void executeAsSlave() { if (standby) { waitForActivate(); } synchronized (this) { setLeader(false); if (!loadedVerticesComplete) { loadVerticesSlave(); } } waitLoop(); } /** * */ private void executeAsLeader() { synchronized (this) { setLeader(true); new Thread(new 
HeartbeatGenerator()).start(); if (!loadedVerticesComplete) { loadVerticesLeader(); } } waitLoop(); } private void waitForActivate() { synchronized (this) { while (standby) { try { this.wait(); } catch (InterruptedException e) { e.printStackTrace(); } } // TODO need to have separate code for if it becomes active as a leader or as a slave } } /** * */ private void loadVerticesSlave() { enterBarrier("startLoadVerticesBarrier"); // since we are a slave we immediately jump into this barrier enterBarrier("sentInputSplitsBarrier"); // here we are handling our InputSplits by loading and sending vertices while (!inputSplitLoaderHandlers.isEmpty()) { synchronized (this) { try { wait(1000); } catch (InterruptedException e) { e.printStackTrace(); } } } enterBarrier("inputSplitHandlersCompleteBarrier"); // here we are handling all of the vertices that have been sent to us, and are loading them into vertices while (!loadVerticesHandlers.isEmpty()) { synchronized (this) { try { wait(1000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } enterBarrier("loadVerticesIntoPartitionBarrier"); LOG.info("Partition " + getPartitionID() + " completed Loading vertices!!!"); process(); // try { // ZookeeperUtils.tryToCreateNode(zk, jobInProgressPath + "/messages/complete"); // } catch (OrbZKFailure e) { // e.printStackTrace(); // } // System.exit(1); } /** * This is where the core processing of the vertices -- and running of the algorithm lives. */ private void process() { while (computing) { step(); compute(); LOG.info("Partition " + getPartitionID() + " back in run portion " + Integer.toString(getSuperStep())); } } public void compute() { if (getSuperStep() == 1) { enterBarrier("superStep1Barrier"); processingVoteToHaltSet = new VoteToHaltSet(vertices.keySet()); int count = 0; List<Vertex<?,?,?>> vertexList = new ArrayList<Vertex<?,?,?>>(); List<List<Message<? extends Writable>>> messageList = new ArrayList<List<Message<? extends Writable>>>(); int verticesLeft = vertices.keySet().size(); for (Vertex<?,?,?> v : vertices.values()) { // count += 1; // verticesLeft -= 1; // vertexList.add(v); // messageList.add(new ArrayList<Message<? extends Writable>>()); // // if (count >= getOrbConf().getVerticesPerBlock() || verticesLeft == 0) { // computeExecutor.execute(new VertexComputer(vertexList, messageList)); // vertexList = new ArrayList<Vertex<?,?,?>>(); // messageList = new ArrayList<List<Message<? extends Writable>>>(); // count = 0; // } v.compute(new ArrayList()); } synchronized (this) { while (!processingVoteToHaltSet.isEmpty()) { try { wait(1000); LOG.debug(Integer.toString(processingVoteToHaltSet.size())); } catch (InterruptedException e) { e.printStackTrace(); } } } } else { if (processingInboundMessageQueue.getVerticesWithMessages().size() == 0) { hasMoreToProcess = false; if (enterAllDoneBarrier("superStepBarrier", getSuperStep(), true)) { doneComputing(); } } else { enterAllDoneBarrier("superStepBarrier", getSuperStep(), false); int count = 0; List<Vertex<?,?,?>> vertexList = new ArrayList<Vertex<?,?,?>>(); List<List<Message<? extends Writable>>> messageList = new ArrayList<List<Message<?
extends Writable>>>(); int verticesLeft = processingInboundMessageQueue.getVerticesWithMessages().size(); for (String s : processingInboundMessageQueue.getVerticesWithMessages()) { // count += 1; // verticesLeft -= 1; // vertexList.add(vertices.get(s)); // messageList.add(processingInboundMessageQueue.getMessage(s)); // // if (count >= getOrbConf().getVerticesPerBlock() || verticesLeft == 0) { // computeExecutor.execute(new VertexComputer(vertexList, messageList)); // vertexList = new ArrayList<Vertex<?,?,?>>(); // messageList = new ArrayList<List<Message<? extends Writable>>>(); // count = 0; // } vertices.get(s).compute((Collection)processingInboundMessageQueue.getMessage(s)); } synchronized (this) { while (!processingVoteToHaltSet.isEmpty()) { try { wait(10000); } catch (InterruptedException e) { e.printStackTrace(); } LOG.debug(Integer.toString(processingVoteToHaltSet.size())); } } } } enterBarrier("doneComputingVerticesBarrier", getSuperStep()); outboundMessageQueue.sendRemainingMessages(); enterBarrier("doneSendingMessagesBarrier", getSuperStep()); LOG.info("Partition " + getPartitionID() + " going back to run portion " + Integer.toString(getSuperStep())); } private void doneComputing() { computing = false; LOG.info("Partition: (" + Integer.toString(getPartitionID()) + ") Done computing!!!!!!"); dumpData(); enterBarrier("doneDumpingDataBarrier"); try { ZookeeperUtils.tryToCreateNode(zk, jobInProgressPath + "/messages/complete"); } catch (OrbZKFailure e) { e.printStackTrace(); } System.exit(1); } private void dumpData() { // TODO Auto-generated method stub Configuration conf = new Configuration(); Job job = null; JobContext jobContext = null; TaskAttemptContext tao = null; RecordWriter rw; VertexWriter vw; FileOutputFormat outputFormat; boolean tryAgain = true; int count = 0; while (tryAgain && count < 3) try { count++; tryAgain = false; if (job == null) { job = new Job(conf); job.setOutputFormatClass(TextOutputFormat.class); FileOutputFormat.setOutputPath(job, new Path(new String(getOrbConf().getNameNode() + getOrbConf().getFileOutputPath()))); } if (jobContext == null) { jobContext = new JobContext(job.getConfiguration(), new JobID()); } System.out.println(jobContext.getConfiguration().get("mapred.output.dir")); tao = new TaskAttemptContext(jobContext.getConfiguration(), new TaskAttemptID(new TaskID( jobContext.getJobID(), true, getPartitionID()), 0)); outputFormat = (FileOutputFormat) tao.getOutputFormatClass().newInstance(); rw = outputFormat.getRecordWriter(tao); vw = (VertexWriter) getOrbConf().getVertexOutputFormatClass().newInstance(); for (Vertex v : vertices.values()) { OrbContext oc = vw.vertexWrite(v); rw.write(oc.getKey(), oc.getValue()); // orbLogger.info("Partition: " + Integer.toString(partitionId) + "writing: " + // oc.getKey().toString() + ", " + oc.getValue().toString()); } rw.close(tao); FileOutputCommitter cm = (FileOutputCommitter) outputFormat.getOutputCommitter(tao); if (cm.needsTaskCommit(tao)) { cm.commitTask(tao); cm.cleanupJob(jobContext); } else { cm.cleanupJob(jobContext); tryAgain = true; } } catch (IOException e) { // TODO Auto-generated catch block tryAgain = true; e.printStackTrace(); } catch (InstantiationException e) { // TODO Auto-generated catch block tryAgain = true; e.printStackTrace(); } catch (IllegalAccessException e) { // TODO Auto-generated catch block tryAgain = true; e.printStackTrace(); } catch (ClassNotFoundException e) { // TODO Auto-generated catch block tryAgain = true; e.printStackTrace(); } catch (InterruptedException e) { // TODO 
Auto-generated catch block tryAgain = true; e.printStackTrace(); } if (tryAgain) { synchronized (this) { try { wait(1000); } catch (InterruptedException e) { e.printStackTrace(); } } } } class VertexComputer implements Runnable { private List<Vertex<?,?,?>> vertexList; private List<List<Message<? extends Writable>>> messageList; VertexComputer(List<Vertex<?,?,?>> vertexList, List<List<Message<? extends Writable>>> messageList) { this.vertexList = vertexList; this.messageList = messageList; } public void run() { for (int i = 0; i < vertexList.size(); i++) { vertexList.get(i).compute((Collection) messageList.get(i)); } synchronized (OrbPartition.this) { if (processingVoteToHaltSet.isEmpty()) { OrbPartition.this.notify(); } } } } public void step() { synchronized (this) { while (!messagesHandlers.isEmpty()) { try { wait(1000); } catch (InterruptedException e) { e.printStackTrace(); } } enterBarrier("messageHandlersDoneReceivingBarrier", getSuperStep()); processingInboundMessageQueue = currentInboundMessageQueue; currentInboundMessageQueue = new InboundMessageQueue(); try { outboundMessageQueue = new OutboundMessageQueue(getOrbConf().getOrbRequestedPartitions(), getOrbConf().getMessagesPerBlock(), orbClients, (Class<? extends Message<? extends Writable>>) getOrbConf().getMessageClass(), getPartitionID()); } catch (ClassNotFoundException e) { e.printStackTrace(); throw new RuntimeException(e); } setSuperStep(getSuperStep() + 1); LOG.info("********Starting SuperStep " + getSuperStep() + "Partition: " + getPartitionID() + " *********"); if (getSuperStep() > 1) { processingVoteToHaltSet = new VoteToHaltSet(processingInboundMessageQueue.getVerticesWithMessages()); } } } /** * */ private void loadVerticesLeader() { enterBarrier("startLoadVerticesBarrier"); // Here InputSplits are sent to their constituent partitions for loading InputSplitAllocator inputSplitAllocator = new InputSplitAllocator(getOrbConf(), leaderGroup.getMembers()); Map<OrbPartitionMember,List<RawSplit>> inputSplitAssignments = inputSplitAllocator.assignInputSplits(); for (OrbPartitionMember orbPartitionMember : inputSplitAssignments.keySet()) { for (RawSplit rawSplit : inputSplitAssignments.get(orbPartitionMember)) { orbPartitionMember.loadVerticesFromInputSplit(rawSplit); } } enterBarrier("sentInputSplitsBarrier"); // just like the slave we have to wait for the InputSplitHandlers to finish loading and sending vertices while (!inputSplitLoaderHandlers.isEmpty()) { synchronized (this) { try { wait(1000); } catch (InterruptedException e) { e.printStackTrace(); } } } enterBarrier("inputSplitHandlersCompleteBarrier"); // just like the slave here we are handling all of the vertices that have been sent to us, and are loading // them into vertices while (!loadVerticesHandlers.isEmpty()) { synchronized (this) { try { wait(1000); } catch (InterruptedException e) { e.printStackTrace(); } } } enterBarrier("loadVerticesIntoPartitionBarrier"); LOG.debug("Completed Loading vertices!!!"); if (standby) { waitForActivate(); } process(); // try { // ZookeeperUtils.tryToCreateNode(zk, jobInProgressPath + "/messages/complete"); // } catch (OrbZKFailure e) { // e.printStackTrace(); // } // System.exit(1); } /** * */ private void waitLoop() { while (runPartition) { synchronized (this) { try { wait(); } catch (InterruptedException e) { LOG.error(e.getMessage()); } } if ((leaderGroup.isLeader() && !isLeader()) || (!leaderGroup.isLeader() && isLeader())){ if (leaderGroup.isLeader()) { executeAsLeader(); } else { executeAsSlave(); } } } } private class 
OrbPartitionCallback implements OrbCallback { /** * * @param OrbEvent * e */ @Override public void process(OrbEvent e) { int eventCode = e.getType(); if (eventCode == OrbEvent.ORB_EXCEPTION) { ((OrbExceptionEvent) e).getException().printStackTrace(); } else if (eventCode == OrbEvent.LEADERSHIP_CHANGE) { synchronized (OrbPartition.this) { if ((leaderGroup.isLeader() && !isLeader()) || (!leaderGroup.isLeader() && isLeader())) { OrbPartition.this.notify(); } } } else if (eventCode == OrbEvent.NEW_MEMBER) { synchronized (OrbPartition.this) { if (waitingForAllToJoin) { OrbPartition.this.notify(); } } } } } public class OrbCommunicationInterface { /** * * @return int */ public int superStep() { return getSuperStep(); } /** * * @param String * vertexID */ public void voteToHalt(String vertexID) { processingVoteToHaltSet.voteToHalt(vertexID); } /** * * @param Message * <? extends Writable> message */ public void sendMessage(Message<? extends Writable> message) { OrbPartition.this.outboundMessageQueue.sendMessage(message); } } /** * Returns the protocol version. */ @Override public long getProtocolVersion(String arg0, long arg1) throws IOException { return 0L; } /** * * @return int */ @Override public int stop() { // TODO Shutdown stuff return 0; } /** * Returns whether the partition is running. */ @Override public boolean isRunning() { // TODO what constitutes that it is no longer running? return true; } /** * * @param Messages * messages */ @Override public void sendMessages(Messages messages) { MessagesHandler messagesHandler = new MessagesHandler(messages); messagesHandlers.add(messagesHandler); messageHandlerExecutor.execute(messagesHandler); } class MessagesHandler implements Runnable { private Messages messages; MessagesHandler(Messages messages) { this.messages = messages; } public void run() { synchronized (currentInboundMessageQueue) { currentInboundMessageQueue.addMessages(messages); synchronized (OrbPartition.this) { messagesHandlers.remove(this); LOG.info("Partition " + getPartitionID() + " " + OrbPartition.this + " messagesHandlerNotifying Parent " + Integer.toString(getSuperStep())); OrbPartition.this.notify(); } } } } /** * * @param Vertices * vertices */ @Override public void sendVertices(Vertices vertices) { LoadVerticesHandler loadVerticesHandler = new LoadVerticesHandler(vertices, this); loadVerticesHandlers.add(loadVerticesHandler); verticesLoaderHandlerExecutor.execute(loadVerticesHandler); } class LoadVerticesHandler implements Runnable { private Vertices vertices; /** * Constructor * * @param Vertices * vertices * @param OrbPartition * orbPartition */ public LoadVerticesHandler(Vertices vertices, OrbPartition orbPartition) { this.vertices = vertices; } /** * */ public void run() { synchronized (vertices) { for (Vertex<?,?,?> vertex : vertices.getArrayList()) { vertex.setOci(oci); OrbPartition.this.vertices.put(vertex.getVertexID(), vertex); } LOG.info("( Partition: " + Integer.toString(getPartitionID()) + ") Loaded " + vertices.size() + " vertices."); } loadVerticesHandlers.remove(this); synchronized (OrbPartition.this) { OrbPartition.this.notify(); } } } /** * * @param int partitionID */ @Override public void becomeActive(int partitionID) { if (standby) { setPartitionID(partitionID); standby = false; synchronized (this) { notify(); } } } /** * * @param RawSplit * rawsplit */ @Override public void loadVerticesFromInputSplit(RawSplit rawsplit) { InputSplitLoaderHandler inputSplitLoaderHandler = new InputSplitLoaderHandler(rawsplit); inputSplitLoaderHandlers.add(inputSplitLoaderHandler);
inputSplitHandlerExecutor.execute(inputSplitLoaderHandler); } class InputSplitLoaderHandler implements Runnable { private RawSplit rawsplit; /** * Constructor * * @param RawSplit * rawsplit */ public InputSplitLoaderHandler(RawSplit rawsplit) { this.rawsplit = rawsplit; } /** * */ @SuppressWarnings("unchecked") @Override public void run() { OutboundVertexQueue outboundVertexQueue; outboundVertexQueue = new OutboundVertexQueue(getOrbConf().getOrbRequestedPartitions(), getOrbConf() .getVerticesPerBlock(), orbClients, (Class<? extends Vertex<?,?,?>>) getOrbConf().getVertexClass(), getPartitionID()); LOG.info("Loading on machine " + getHostname() + ":" + interpartitionCommunicationPort); VertexBuilder<?,?,?> vertexBuilder = ReflectionUtils.newInstance(getOrbConf() .getVertexInputFormatClass(), getOrbConf()); vertexBuilder.setOrbConf(getOrbConf()); vertexBuilder.setPartitionID(getPartitionID()); vertexBuilder.setRawSplit(rawsplit.getBytes()); vertexBuilder.setSplitClass(rawsplit.getClassName()); vertexBuilder.initialize(); try { while (vertexBuilder.nextVertex()) { outboundVertexQueue.sendVertex(vertexBuilder.getCurrentVertex()); } } catch (IOException e) { // TODO Data loading failed --- needs to fire a death event. e.printStackTrace(); } catch (InterruptedException e) { // TODO Data loading failed --- needs to fire a death event. e.printStackTrace(); } outboundVertexQueue.sendRemainingVertices(); inputSplitLoaderHandlers.remove(this); synchronized (OrbPartition.this) { OrbPartition.this.notify(); } } } private class HeartbeatGenerator implements Runnable, Killable { private boolean active = true; private Long heartbeat = 1L; /** * */ @Override public void run() { while (active) { synchronized (this) { try { wait((getOrbConf().getJobHeartbeatTimeout() / 10)); try { ZookeeperUtils.existsUpdateNodeData(zk, jobInProgressPath + "/messages/heartbeat", new LongWritable(heartbeat++)); LOG.debug("Creating heartbeat for: " + jobInProgressPath + "/messages/heartbeat" + " heartbeat is: " + heartbeat); } catch (OrbZKFailure e) { e.printStackTrace(); } } catch (InterruptedException e) { e.printStackTrace(); } } } } /** * */ @Override public void kill() { active = false; } /** * */ @Override public void restart() { active = true; } } /** * * @param String * barrierName * @param int superStep */ private void enterBarrier(String barrierName, int superStep) { enterBarrier(barrierName + Integer.toString(superStep)); } /** * * @param String * barrierName */ private void enterBarrier(String barrierName) { LOG.debug("creating barrier {}", barrierName); Barrier barrier = new OrbFastBarrier(getOrbConf(), jobInProgressPath + "/" + barrierName, leaderGroup.getNumOfMembers(), Integer.toString(getPartitionID()), zk); try { barrier.enter(); LOG.debug("{} entered " + getPartitionID(), barrierName); } catch (OrbZKFailure e) { LOG.error("Failed to complete barrier: " + barrierName, e); e.printStackTrace(); } } /** * @param boolean iAmDone * @param String * barrierName * @param int superStep * @return */ private boolean enterAllDoneBarrier(String barrierName, int superStep, boolean iAmDone) { return enterAllDoneBarrier(barrierName + Integer.toString(superStep), iAmDone); } /** * @param boolean iAmDone * @param String * barrierName */ private boolean enterAllDoneBarrier(String barrierName, boolean iAmDone) { AllDoneBarrier barrier = new OrbFastAllDoneBarrier(getOrbConf(), jobInProgressPath + "/" + barrierName, leaderGroup.getNumOfMembers(), Integer.toString(getPartitionID()), zk); try { return barrier.enter(iAmDone); } catch 
(OrbZKFailure e) { LOG.error("Failed to complete barrier: " + barrierName, e); e.printStackTrace(); } return false; } }
src/main/java/org/goldenorb/OrbPartition.java
/** * Licensed to Ravel, Inc. under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Ravel, Inc. licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.goldenorb; import java.io.IOException; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.TaskID; import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.util.ReflectionUtils; import org.apache.zookeeper.ZooKeeper; import org.goldenorb.conf.OrbConfiguration; import org.goldenorb.event.OrbCallback; import org.goldenorb.event.OrbEvent; import org.goldenorb.event.OrbExceptionEvent; import org.goldenorb.io.InputSplitAllocator; import org.goldenorb.io.input.RawSplit; import org.goldenorb.io.input.VertexBuilder; import org.goldenorb.io.output.OrbContext; import org.goldenorb.io.output.VertexWriter; import org.goldenorb.jet.OrbPartitionMember; import org.goldenorb.net.OrbDNS; import org.goldenorb.queue.InboundMessageQueue; import org.goldenorb.queue.OutboundMessageQueue; import org.goldenorb.queue.OutboundVertexQueue; import org.goldenorb.zookeeper.AllDoneBarrier; import org.goldenorb.zookeeper.Barrier; import org.goldenorb.zookeeper.LeaderGroup; import org.goldenorb.zookeeper.OrbFastAllDoneBarrier; import org.goldenorb.zookeeper.OrbFastBarrier; import org.goldenorb.zookeeper.OrbZKFailure; import org.goldenorb.zookeeper.ZookeeperUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * OrbPartition, spawned from via {@link OrbPartitionProcess}, is responsible for loading * input data, assigning file splits to other {@link OrbPartition} processes and coordinating with * other {@link OrbPartition} processes via the exchange of {@link Messages} and {@link Vertices}. * In addition to start up and coordination, {@link OrbPartition} processes are run responsible for * stepping through the graph algorithms themselves, via the compute method. 
*/ public class OrbPartition extends OrbPartitionMember implements Runnable, OrbPartitionCommunicationProtocol, OrbPartitionManagerProtocol { private final static int PARTITION_JOIN_TIMEOUT = 60000; private static Logger LOG = LoggerFactory.getLogger(OrbPartition.class); /** * The unique identifier for this partition. */ private String jobPath; private String jobInProgressPath; private ZooKeeper zk; private boolean standby; private boolean waitingForAllToJoin = true; private LeaderGroup<OrbPartitionMember> leaderGroup; private Server interpartitionCommunicationServer; private Server trackerTetherCommunicationServer; private int interpartitionCommunicationPort; private int trackerTetherCommunicationPort; // private String hostname; private boolean runPartition; private boolean loadedVerticesComplete = false; private Set<InputSplitLoaderHandler> inputSplitLoaderHandlers = Collections.synchronizedSet(new HashSet<InputSplitLoaderHandler>()); private Set<MessagesHandler> messagesHandlers = Collections.synchronizedSet(new HashSet<MessagesHandler>()); private Set<LoadVerticesHandler> loadVerticesHandlers = Collections.synchronizedSet(new HashSet<LoadVerticesHandler>()); private InboundMessageQueue currentInboundMessageQueue; private InboundMessageQueue processingInboundMessageQueue; private OutboundMessageQueue outboundMessageQueue; private VoteToHaltSet processingVoteToHaltSet; private boolean hasMoreToProcess = true; private boolean computing = true; private ExecutorService inputSplitHandlerExecutor; private ExecutorService verticesLoaderHandlerExecutor; private ExecutorService messageHandlerExecutor; private ExecutorService computeExecutor; private Map<String,Vertex<?,?,?>> vertices = new HashMap<String, Vertex<?,?,?>>(); private OrbCommunicationInterface oci = new OrbCommunicationInterface(); Map<Integer,OrbPartitionCommunicationProtocol> orbClients; /** * Constructor * * @param String * jobNumber * @param int partitionID * @param boolean standby * @param int partitionBasePort */ public OrbPartition(String jobNumber, int partitionID, boolean standby, int partitionBasePort) { this.setOrbConf(new OrbConfiguration(true)); this.standby = standby; interpartitionCommunicationPort = partitionBasePort; trackerTetherCommunicationPort = partitionBasePort + 100; jobPath = "/GoldenOrb/" + getOrbConf().getOrbClusterName() + "/JobQueue/" + jobNumber; jobInProgressPath = "/GoldenOrb/" + getOrbConf().getOrbClusterName() + "/JobsInProgress/" + jobNumber; setPartitionID(partitionID); LOG.debug("Starting for job {}", jobInProgressPath); inputSplitHandlerExecutor = Executors.newFixedThreadPool(getOrbConf().getInputSplitHandlerThreads()); messageHandlerExecutor = Executors.newFixedThreadPool(getOrbConf().getMessageHandlerThreads()); computeExecutor = Executors.newFixedThreadPool(getOrbConf().getComputeThreads()); verticesLoaderHandlerExecutor = Executors.newFixedThreadPool(getOrbConf() .getVerticesLoaderHandlerThreads()); try { zk = ZookeeperUtils.connect(getOrbConf().getOrbZooKeeperQuorum()); } catch (Exception e) { LOG.error("Unable to establish a connection with ZooKeeper" + getOrbConf().getOrbZooKeeperQuorum(), e); System.exit(-1); } OrbConfiguration jobConf = null; try { jobConf = (OrbConfiguration) ZookeeperUtils.getNodeWritable(zk, jobPath, OrbConfiguration.class, getOrbConf()); } catch (OrbZKFailure e) { LOG.error("Unable to retrieve job from ZooKeeper: " + jobPath, e); System.exit(-1); } if (jobConf != null) { setOrbConf(jobConf); getOrbConf().setJobNumber(jobNumber); LOG.debug("setOrbConf with 
requested, reserved", jobConf.getOrbRequestedPartitions(), jobConf.getOrbReservedPartitions()); } setSuperStep(0); setNumberOfVertices(0); setMessagesSent(0); setPercentComplete(0.0F); setLeader(false); } /** * * @param String * [] args */ public static void main(String[] args) { if (args.length != 4) { LOG.error("OrbPartition cannot start unless it is passed both the partitionID and the jobNumber to the Jobs OrbConfiguration"); } LOG.debug("OrbPartition starting with args: {}", Arrays.toString(args)); String jobNumber = args[0]; int partitionID = Integer.parseInt(args[1]); boolean standby = Boolean.parseBoolean(args[2]); int partitionBasePort = Integer.parseInt(args[3]); new Thread(new OrbPartition(jobNumber, partitionID, standby, partitionBasePort)).start(); } /** * */ @Override public void run() { try { setHostname(OrbDNS.getDefaultHost(getOrbConf())); setPort(trackerTetherCommunicationPort); } catch (UnknownHostException e) { LOG.error("Unable to get hostname.", e); System.exit(-1); } try { // TODO make this use the configuration to set this up interpartitionCommunicationServer = RPC.getServer(this, getHostname(), this.interpartitionCommunicationPort, 10, false, getOrbConf()); interpartitionCommunicationServer.start(); LOG.info("Starting OrbPartition Interpartition Communication Server on: " + getHostname() + ":" + this.interpartitionCommunicationPort); } catch (IOException e) { LOG.error("Failed to start OrbPartition Interpartition Communication server!!", e); e.printStackTrace(); System.exit(-1); } try { trackerTetherCommunicationServer = RPC.getServer(this, getHostname(), this.trackerTetherCommunicationPort, getOrbConf()); trackerTetherCommunicationServer.start(); LOG.info("Starting OrbPartition Tracker Tether Communication Server on: " + getHostname() + ":" + this.trackerTetherCommunicationPort); } catch (IOException e) { LOG.error("Failed to start Tracker Tether Communcation server!!", e); e.printStackTrace(); System.exit(-1); } leaderGroup = new LeaderGroup<OrbPartitionMember>(zk, new OrbPartitionCallback(), jobInProgressPath + "/OrbPartitionLeaderGroup", this, OrbPartitionMember.class); LOG.debug("leaderGroup member paths {}", leaderGroup.getMembersPath().toString()); LOG.debug("requested {}, reserved {}", getOrbConf().getOrbRequestedPartitions(), getOrbConf().getOrbReservedPartitions()); synchronized (this) { while (leaderGroup.getNumOfMembers() < (getOrbConf().getOrbRequestedPartitions() + getOrbConf() .getOrbReservedPartitions())) { try { LOG.debug("partition {} is waiting", getPartitionID()); wait(PARTITION_JOIN_TIMEOUT); } catch (InterruptedException e) { e.printStackTrace(); } } } initializeOrbClients(); if (leaderGroup.isLeader()) { executeAsLeader(); } else { executeAsSlave(); } } /** * */ private void initializeOrbClients() { orbClients = new HashMap<Integer,OrbPartitionCommunicationProtocol>(); for (OrbPartitionMember orbPartitionMember : leaderGroup.getMembers()) { try { orbPartitionMember.initProxy(getOrbConf()); LOG.debug("partition {} proxy initialized", getPartitionID()); } catch (IOException e) { // TODO This is a significant error and should start the killing of the partition e.printStackTrace(); } orbClients.put(orbPartitionMember.getPartitionID(), orbPartitionMember); } } /** * */ private void executeAsSlave() { if (standby) { waitForActivate(); } synchronized (this) { setLeader(false); if (!loadedVerticesComplete) { loadVerticesSlave(); } } waitLoop(); } /** * */ private void executeAsLeader() { synchronized (this) { setLeader(true); new Thread(new 
HeartbeatGenerator()).start(); if (!loadedVerticesComplete) { loadVerticesLeader(); } } waitLoop(); } private void waitForActivate() { synchronized (this) { while (standby) { try { this.wait(); } catch (InterruptedException e) { e.printStackTrace(); } } // TODO need to have separate code for if it becomes active as a leader or as a slave } } /** * */ private void loadVerticesSlave() { enterBarrier("startLoadVerticesBarrier"); // since we are a slave we immediately jump into this barrier enterBarrier("sentInputSplitsBarrier"); // here we are handling our InputSplits by loading and sending vertices while (!inputSplitLoaderHandlers.isEmpty()) { synchronized (this) { try { wait(1000); } catch (InterruptedException e) { e.printStackTrace(); } } } enterBarrier("inputSplitHandlersCompleteBarrier"); // here we are handling all of the vertices that have been sent to us, and are loading them into vertices while (!loadVerticesHandlers.isEmpty()) { synchronized (this) { try { wait(1000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } enterBarrier("loadVerticesIntoPartitionBarrier"); LOG.info("Partition " + getPartitionID() + " completed Loading vertices!!!"); process(); // try { // ZookeeperUtils.tryToCreateNode(zk, jobInProgressPath + "/messages/complete"); // } catch (OrbZKFailure e) { // e.printStackTrace(); // } // System.exit(1); } /** * This is where the core processing of the vertices -- and runnning of the algorithm lives. */ private void process() { while (computing) { step(); compute(); LOG.info("Partition " + getPartitionID() + " back in run portion " + Integer.toString(getSuperStep())); } } public void compute() { if (getSuperStep() == 1) { enterBarrier("superStep1Barrier"); processingVoteToHaltSet = new VoteToHaltSet(vertices.keySet()); int count = 0; List<Vertex<?,?,?>> vertexList = new ArrayList<Vertex<?,?,?>>(); List<List<Message<? extends Writable>>> messageList = new ArrayList<List<Message<? extends Writable>>>(); int verticesLeft = vertices.keySet().size(); for (Vertex<?,?,?> v : vertices.values()) { // count += 1; // verticesLeft -= 1; // vertexList.add(v); // messageList.add(new ArrayList<Message<? extends Writable>>()); // // if (count >= getOrbConf().getVerticesPerBlock() || verticesLeft == 0) { // computeExecutor.execute(new VertexComputer(vertexList, messageList)); // vertexList = new ArrayList<Vertex<?,?,?>>(); // messageList = new ArrayList<List<Message<? extends Writable>>>(); // count = 0; // } v.compute(new ArrayList()); } synchronized (this) { while (!processingVoteToHaltSet.isEmpty()) { try { wait(1000); LOG.debug(Integer.toString(processingVoteToHaltSet.size())); } catch (InterruptedException e) { e.printStackTrace(); } } } } else { if (processingInboundMessageQueue.getVerticesWithMessages().size() == 0) { hasMoreToProcess = false; if (enterAllDoneBarrier("superStepBarrier", getSuperStep(), true)) { doneComputing(); } } else { enterAllDoneBarrier("superStepBarrier", getSuperStep(), false); int count = 0; List<Vertex<?,?,?>> vertexList = new ArrayList<Vertex<?,?,?>>(); List<List<Message<? extends Writable>>> messageList = new ArrayList<List<Message<? 
extends Writable>>>(); int verticesLeft = processingInboundMessageQueue.getVerticesWithMessages().size(); for (String s : processingInboundMessageQueue.getVerticesWithMessages()) { // count += 1; // verticesLeft -= 1; // vertexList.add(vertices.get(s)); // messageList.add(processingInboundMessageQueue.getMessage(s)); // // if (count >= getOrbConf().getVerticesPerBlock() || verticesLeft == 0) { // computeExecutor.execute(new VertexComputer(vertexList, messageList)); // vertexList = new ArrayList<Vertex<?,?,?>>(); // messageList = new ArrayList<List<Message<? extends Writable>>>(); // count = 0; // } vertices.get(s).compute((Collection)processingInboundMessageQueue.getMessage(s)); } synchronized (this) { while (!processingVoteToHaltSet.isEmpty()) { try { wait(10000); } catch (InterruptedException e) { e.printStackTrace(); } LOG.debug(Integer.toString(processingVoteToHaltSet.size())); } } } } enterBarrier("doneComputingVerticesBarrier", getSuperStep()); outboundMessageQueue.sendRemainingMessages(); enterBarrier("doneSendingMessagesBarrier", getSuperStep()); LOG.info("Partition " + getPartitionID() + " going back to run portion " + Integer.toString(getSuperStep())); } private void doneComputing() { computing = false; LOG.info("Partition: (" + Integer.toString(getPartitionID()) + ") Done computing!!!!!!"); dumpData(); try { ZookeeperUtils.tryToCreateNode(zk, jobInProgressPath + "/messages/complete"); } catch (OrbZKFailure e) { e.printStackTrace(); } System.exit(1); } private void dumpData() { // TODO Auto-generated method stub Configuration conf = new Configuration(); Job job = null; JobContext jobContext = null; TaskAttemptContext tao = null; RecordWriter rw; VertexWriter vw; FileOutputFormat outputFormat; boolean tryAgain = true; int count = 0; while (tryAgain && count < 3) try { count++; tryAgain = false; if (job == null) { job = new Job(conf); job.setOutputFormatClass(TextOutputFormat.class); FileOutputFormat.setOutputPath(job, new Path(new String(getOrbConf().getNameNode() + getOrbConf().getFileOutputPath()))); } if (jobContext == null) { jobContext = new JobContext(job.getConfiguration(), new JobID()); } System.out.println(jobContext.getConfiguration().get("mapred.output.dir")); tao = new TaskAttemptContext(jobContext.getConfiguration(), new TaskAttemptID(new TaskID( jobContext.getJobID(), true, getPartitionID()), 0)); outputFormat = (FileOutputFormat) tao.getOutputFormatClass().newInstance(); rw = outputFormat.getRecordWriter(tao); vw = (VertexWriter) getOrbConf().getVertexOutputFormatClass().newInstance(); for (Vertex v : vertices.values()) { OrbContext oc = vw.vertexWrite(v); rw.write(oc.getKey(), oc.getValue()); // orbLogger.info("Partition: " + Integer.toString(partitionId) + "writing: " + // oc.getKey().toString() + ", " + oc.getValue().toString()); } rw.close(tao); FileOutputCommitter cm = (FileOutputCommitter) outputFormat.getOutputCommitter(tao); if (cm.needsTaskCommit(tao)) { cm.commitTask(tao); cm.cleanupJob(jobContext); } else { cm.cleanupJob(jobContext); tryAgain = true; } } catch (IOException e) { // TODO Auto-generated catch block tryAgain = true; e.printStackTrace(); } catch (InstantiationException e) { // TODO Auto-generated catch block tryAgain = true; e.printStackTrace(); } catch (IllegalAccessException e) { // TODO Auto-generated catch block tryAgain = true; e.printStackTrace(); } catch (ClassNotFoundException e) { // TODO Auto-generated catch block tryAgain = true; e.printStackTrace(); } catch (InterruptedException e) { // TODO Auto-generated catch block tryAgain = 
true; e.printStackTrace(); } if (tryAgain) { synchronized (this) { try { wait(1000); } catch (InterruptedException e) { e.printStackTrace(); } } } } class VertexComputer implements Runnable { private List<Vertex<?,?,?>> vertexList; private List<List<Message<? extends Writable>>> messageList; VertexComputer(List<Vertex<?,?,?>> vertexList, List<List<Message<? extends Writable>>> messageList) { this.vertexList = vertexList; this.messageList = messageList; } public void run() { for (int i = 0; i < vertexList.size(); i++) { vertexList.get(i).compute((Collection) messageList.get(i)); } synchronized (OrbPartition.this) { if (processingVoteToHaltSet.isEmpty()) { OrbPartition.this.notify(); } } } } public void step() { synchronized (this) { while (!messagesHandlers.isEmpty()) { try { wait(1000); } catch (InterruptedException e) { e.printStackTrace(); } } enterBarrier("messageHandlersDoneReceivingBarrier", getSuperStep()); processingInboundMessageQueue = currentInboundMessageQueue; currentInboundMessageQueue = new InboundMessageQueue(); try { outboundMessageQueue = new OutboundMessageQueue(getOrbConf().getOrbRequestedPartitions(), getOrbConf().getMessagesPerBlock(), orbClients, (Class<? extends Message<? extends Writable>>) getOrbConf().getMessageClass(), getPartitionID()); } catch (ClassNotFoundException e) { e.printStackTrace(); throw new RuntimeException(e); } setSuperStep(getSuperStep() + 1); LOG.info("********Starting SuperStep " + getSuperStep() + "Partition: " + getPartitionID() + " *********"); if (getSuperStep() > 1) { processingVoteToHaltSet = new VoteToHaltSet(processingInboundMessageQueue.getVerticesWithMessages()); } } } /** * */ private void loadVerticesLeader() { enterBarrier("startLoadVerticesBarrier"); // Here InputSplits are sent to their constituent partitions for loading InputSplitAllocator inputSplitAllocator = new InputSplitAllocator(getOrbConf(), leaderGroup.getMembers()); Map<OrbPartitionMember,List<RawSplit>> inputSplitAssignments = inputSplitAllocator.assignInputSplits(); for (OrbPartitionMember orbPartitionMember : inputSplitAssignments.keySet()) { for (RawSplit rawSplit : inputSplitAssignments.get(orbPartitionMember)) { orbPartitionMember.loadVerticesFromInputSplit(rawSplit); } } enterBarrier("sentInputSplitsBarrier"); // just like the slave we have to wait for the InputSplitHandlers to finish loading and sending vertices while (!inputSplitLoaderHandlers.isEmpty()) { synchronized (this) { try { wait(1000); } catch (InterruptedException e) { e.printStackTrace(); } } } enterBarrier("inputSplitHandlersCompleteBarrier"); // just like the slave here we are handling all of the vertices that have been sent to us, and are loading // them into vertices while (!loadVerticesHandlers.isEmpty()) { synchronized (this) { try { wait(1000); } catch (InterruptedException e) { e.printStackTrace(); } } } enterBarrier("loadVerticesIntoPartitionBarrier"); LOG.debug("Completed Loading vertices!!!"); if (standby) { waitForActivate(); } process(); // try { // ZookeeperUtils.tryToCreateNode(zk, jobInProgressPath + "/messages/complete"); // } catch (OrbZKFailure e) { // e.printStackTrace(); // } // System.exit(1); } /** * */ private void waitLoop() { while (runPartition) { synchronized (this) { try { wait(); } catch (InterruptedException e) { LOG.error(e.getMessage()); } } if ((leaderGroup.isLeader() && !isLeader()) || (!leaderGroup.isLeader() && isLeader())){ if (leaderGroup.isLeader()) { executeAsLeader(); } else { executeAsSlave(); } } } } private class OrbPartitionCallback implements 
OrbCallback { /** * * @param OrbEvent * e */ @Override public void process(OrbEvent e) { int eventCode = e.getType(); if (eventCode == OrbEvent.ORB_EXCEPTION) { ((OrbExceptionEvent) e).getException().printStackTrace(); } else if (eventCode == OrbEvent.LEADERSHIP_CHANGE) { synchronized (OrbPartition.this) { if ((leaderGroup.isLeader() && !isLeader()) || (!leaderGroup.isLeader() && isLeader())) { OrbPartition.this.notify(); } } } else if (eventCode == OrbEvent.NEW_MEMBER) { synchronized (OrbPartition.this) { if (waitingForAllToJoin) { OrbPartition.this.notify(); } } } } } public class OrbCommunicationInterface { /** * * @returns int */ public int superStep() { return getSuperStep(); } /** * * @param String * vertexID */ public void voteToHalt(String vertexID) { processingVoteToHaltSet.voteToHalt(vertexID); } /** * * @param Message * <? extends Writable> message */ public void sendMessage(Message<? extends Writable> message) { OrbPartition.this.outboundMessageQueue.sendMessage(message); } } /** * Return the protocolVersion */ @Override public long getProtocolVersion(String arg0, long arg1) throws IOException { return 0L; } /** * * @returns int */ @Override public int stop() { // TODO Shutdown stuff return 0; } /** * Return the unning */ @Override public boolean isRunning() { // TODO what constitutes that it is no longer running? return true; } /** * * @param Messages * messages */ @Override public void sendMessages(Messages messages) { MessagesHandler messagesHandler = new MessagesHandler(messages); messagesHandlers.add(messagesHandler); messageHandlerExecutor.execute(messagesHandler); } class MessagesHandler implements Runnable { private Messages messages; MessagesHandler(Messages messages) { this.messages = messages; } public void run() { synchronized (currentInboundMessageQueue) { currentInboundMessageQueue.addMessages(messages); synchronized (OrbPartition.this) { messagesHandlers.remove(this); LOG.info("Partition " + getPartitionID() + " " + OrbPartition.this + " messagesHandlerNotifying Parent " + Integer.toString(getSuperStep())); OrbPartition.this.notify(); } } } } /** * * @param Vertices * vertices */ @Override public void sendVertices(Vertices vertices) { LoadVerticesHandler loadVerticesHandler = new LoadVerticesHandler(vertices, this); loadVerticesHandlers.add(loadVerticesHandler); verticesLoaderHandlerExecutor.execute(loadVerticesHandler); } class LoadVerticesHandler implements Runnable { private Vertices vertices; /** * Constructor * * @param Vertices * vertices * @param OrbPartition * orbPartition */ public LoadVerticesHandler(Vertices vertices, OrbPartition orbPartition) { this.vertices = vertices; } /** * */ public void run() { synchronized (vertices) { for (Vertex<?,?,?> vertex : vertices.getArrayList()) { vertex.setOci(oci); OrbPartition.this.vertices.put(vertex.getVertexID(), vertex); } LOG.info("( Partition: " + Integer.toString(getPartitionID()) + ") Loaded " + vertices.size() + " vertices."); } loadVerticesHandlers.remove(this); synchronized (OrbPartition.this) { OrbPartition.this.notify(); } } } /** * * @param int partitionID */ @Override public void becomeActive(int partitionID) { if (standby) { setPartitionID(partitionID); standby = false; synchronized (this) { notify(); } } } /** * * @param RawSplit * rawsplit */ @Override public void loadVerticesFromInputSplit(RawSplit rawsplit) { InputSplitLoaderHandler inputSplitLoaderHandler = new InputSplitLoaderHandler(rawsplit); inputSplitLoaderHandlers.add(inputSplitLoaderHandler); 
inputSplitHandlerExecutor.execute(inputSplitLoaderHandler); } class InputSplitLoaderHandler implements Runnable { private RawSplit rawsplit; /** * Constructor * * @param RawSplit * rawsplit */ public InputSplitLoaderHandler(RawSplit rawsplit) { this.rawsplit = rawsplit; } /** * */ @SuppressWarnings("unchecked") @Override public void run() { OutboundVertexQueue outboundVertexQueue; outboundVertexQueue = new OutboundVertexQueue(getOrbConf().getOrbRequestedPartitions(), getOrbConf() .getVerticesPerBlock(), orbClients, (Class<? extends Vertex<?,?,?>>) getOrbConf().getVertexClass(), getPartitionID()); LOG.info("Loading on machine " + getHostname() + ":" + interpartitionCommunicationPort); VertexBuilder<?,?,?> vertexBuilder = ReflectionUtils.newInstance(getOrbConf() .getVertexInputFormatClass(), getOrbConf()); vertexBuilder.setOrbConf(getOrbConf()); vertexBuilder.setPartitionID(getPartitionID()); vertexBuilder.setRawSplit(rawsplit.getBytes()); vertexBuilder.setSplitClass(rawsplit.getClassName()); vertexBuilder.initialize(); try { while (vertexBuilder.nextVertex()) { outboundVertexQueue.sendVertex(vertexBuilder.getCurrentVertex()); } } catch (IOException e) { // TODO Data loading failed --- needs to fire a death event. e.printStackTrace(); } catch (InterruptedException e) { // TODO Data loading failed --- needs to fire a death event. e.printStackTrace(); } outboundVertexQueue.sendRemainingVertices(); inputSplitLoaderHandlers.remove(this); synchronized (OrbPartition.this) { OrbPartition.this.notify(); } } } private class HeartbeatGenerator implements Runnable, Killable { private boolean active = true; private Long heartbeat = 1L; /** * */ @Override public void run() { while (active) { synchronized (this) { try { wait((getOrbConf().getJobHeartbeatTimeout() / 10)); try { ZookeeperUtils.existsUpdateNodeData(zk, jobInProgressPath + "/messages/heartbeat", new LongWritable(heartbeat++)); LOG.debug("Creating heartbeat for: " + jobInProgressPath + "/messages/heartbeat" + " heartbeat is: " + heartbeat); } catch (OrbZKFailure e) { e.printStackTrace(); } } catch (InterruptedException e) { e.printStackTrace(); } } } } /** * */ @Override public void kill() { active = false; } /** * */ @Override public void restart() { active = true; } } /** * * @param String * barrierName * @param int superStep */ private void enterBarrier(String barrierName, int superStep) { enterBarrier(barrierName + Integer.toString(superStep)); } /** * * @param String * barrierName */ private void enterBarrier(String barrierName) { LOG.debug("creating barrier {}", barrierName); Barrier barrier = new OrbFastBarrier(getOrbConf(), jobInProgressPath + "/" + barrierName, leaderGroup.getNumOfMembers(), Integer.toString(getPartitionID()), zk); try { barrier.enter(); LOG.debug("{} entered " + getPartitionID(), barrierName); } catch (OrbZKFailure e) { LOG.error("Failed to complete barrier: " + barrierName, e); e.printStackTrace(); } } /** * @param boolean iAmDone * @param String * barrierName * @param int superStep * @return */ private boolean enterAllDoneBarrier(String barrierName, int superStep, boolean iAmDone) { return enterAllDoneBarrier(barrierName + Integer.toString(superStep), iAmDone); } /** * @param boolean iAmDone * @param String * barrierName */ private boolean enterAllDoneBarrier(String barrierName, boolean iAmDone) { AllDoneBarrier barrier = new OrbFastAllDoneBarrier(getOrbConf(), jobInProgressPath + "/" + barrierName, leaderGroup.getNumOfMembers(), Integer.toString(getPartitionID()), zk); try { return barrier.enter(iAmDone); } catch 
(OrbZKFailure e) { LOG.error("Failed to complete barrier: " + barrierName, e); e.printStackTrace(); } return false; } }
no message
src/main/java/org/goldenorb/OrbPartition.java
no message
<ide><path>src/main/java/org/goldenorb/OrbPartition.java <ide> computing = false; <ide> LOG.info("Partition: (" + Integer.toString(getPartitionID()) + ") Done computing!!!!!!"); <ide> dumpData(); <add> enterBarrier("doneDumpingDataBarrier"); <ide> try { <ide> ZookeeperUtils.tryToCreateNode(zk, jobInProgressPath + "/messages/complete"); <ide> } catch (OrbZKFailure e) {
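The diff above inserts enterBarrier("doneDumpingDataBarrier") between dumpData() and the creation of the /messages/complete node, so no partition can signal job completion (which triggers teardown) before every partition has finished writing its output. As a rough sketch of what entering such a ZooKeeper-backed barrier involves -- assumed semantics only, not OrbFastBarrier's actual implementation; the BarrierSketch class, its polling loop, and the memberId parameter are all hypothetical -- each member announces itself under the barrier node and blocks until all expected members have arrived:

import java.util.List;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.ZooKeeper;

public class BarrierSketch {
  private final ZooKeeper zk;
  private final String barrierPath; // e.g. jobInProgressPath + "/doneDumpingDataBarrier"
  private final int memberCount;    // leaderGroup.getNumOfMembers() in OrbPartition

  public BarrierSketch(ZooKeeper zk, String barrierPath, int memberCount) {
    this.zk = zk;
    this.barrierPath = barrierPath;
    this.memberCount = memberCount;
  }

  // Blocks until memberCount members have registered under barrierPath.
  // Assumes the barrierPath node itself already exists.
  public void enter(String memberId) throws KeeperException, InterruptedException {
    // Announce arrival; an ephemeral node frees the slot if this partition dies.
    zk.create(barrierPath + "/" + memberId, new byte[0],
        Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
    while (true) {
      List<String> arrived = zk.getChildren(barrierPath, false);
      if (arrived.size() >= memberCount) {
        return;
      }
      Thread.sleep(100); // a production barrier would use a watcher instead of polling
    }
  }
}

A real implementation must also handle leaving the barrier and member failure; the point here is only the arrival-count rendezvous that makes the added doneDumpingDataBarrier safe.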
Java
mit
eb6877d034fb1d3ac57457b73733b90778d9ca85
0
AbeSkray/poker-calculator
package com.skraylabs.poker; import com.skraylabs.poker.model.Board; import com.skraylabs.poker.model.Card; import com.skraylabs.poker.model.CardUtils; import com.skraylabs.poker.model.GameState; import com.skraylabs.poker.model.Pocket; import com.skraylabs.poker.model.Rank; import java.awt.Point; import java.util.ArrayList; import java.util.Collection; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; /** * For a given {@link com.skraylabs.poker.model.GameState}, calculates the outcome probability for * each Player. */ class ProbabilityCalculator { private GameState gameState; public ProbabilityCalculator(GameState gameState) { this.gameState = gameState; } /** * Generic helper method that calculates a given outcome for a given player. * * @param outcomeEvaluator evaluates if a hand meets the criteria of a categorical poker outcome * (Two Of A Kind, Full House, etc...) * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting the specified poker outcome */ double outcomeForAPlayer(Function<Collection<Card>, Boolean> outcomeEvaluator, int playerIndex) { if (playerIndex < 0 || playerIndex >= GameState.MAX_PLAYERS) { throw new IllegalArgumentException(String .format("Parameter \"playerIndex\" must be in range [0, %d].", GameState.MAX_PLAYERS)); } Collection<Card> dealtCards = CardUtils.collectCards(gameState); // Make a deck of undealt cards ArrayList<Card> deck = new ArrayList<Card>(); for (int i = 0; i < 52; i++) { Card card = CardUtils.cardFromNumber(i); if (!dealtCards.contains(card)) { deck.add(card); } } // Iterate through every possible GameState branch Board board = gameState.getBoard(); Pocket pocket = gameState.getPockets()[playerIndex]; Point count = countOutcomes(outcomeEvaluator, CardUtils.collectCards(board), CardUtils.collectCards(pocket), deck); return ((double) count.x) / count.y; } /** * Helper method that evaluates all the remaining combinations for a given set of board cards and * counts how many contain a given Poker type (e.g. Two of a Kind). * * @param evaluator tests if a Card Collection contains a target Poker hand (e.g. Two of a Kind, * Full House, etc...)
* @param board cards collected from a {@link Board} * @param pocket cards collected from a {@link Pocket} * @param undealtCards collection of cards that have yet to be dealt * @return a pair of numbers (x, y) where x is the number of target outcomes, and y is the total * number of outcomes */ static Point countOutcomes(Function<Collection<Card>, Boolean> evaluator, Collection<Card> board, Collection<Card> pocket, Collection<Card> undealtCards) { int winOutcomes = 0; int totalOutcomes = 0; if (board.size() == 5) { // Board is complete Collection<Card> cards = collectHandCards(board, pocket); if (evaluator.apply(cards)) { winOutcomes++; } totalOutcomes++; } else { // Board is incomplete // Recurse on all possible cards that could be dealt next Collection<Card> dealtCards = new ArrayList<Card>(); for (Card card : undealtCards) { Collection<Card> nextBoard = new ArrayList<Card>(board); nextBoard.add(card); dealtCards.add(card); Collection<Card> nextUndealtCards = new ArrayList<Card>(undealtCards); nextUndealtCards.removeAll(dealtCards); Point nextCount = countOutcomes(evaluator, nextBoard, pocket, nextUndealtCards); winOutcomes += nextCount.x; totalOutcomes += nextCount.y; } } return new Point(winOutcomes, totalOutcomes); } /** * Helper method to gather cards that could form a player's hand -- the combination of community * cards (Board) and Pocket cards. * * @param board Board cards * @param pocket Pocket cards * @return Collection of cards drawn from {@code board} and {@code pocket} */ static Collection<Card> collectHandCards(Collection<Card> board, Collection<Card> pocket) { Collection<Card> cards = new ArrayList<Card>(board); cards.addAll(pocket); return cards; } /** * Report the probability of a player getting a Two Of A Kind. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Two Of A Kind. */ public double twoOfAKindForPlayer(int playerIndex) { return outcomeForAPlayer(ProbabilityCalculator::hasTwoOfAKind, playerIndex); } /** * Report the probability of a player getting a Two Pair. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Two Pair. */ public double twoPairForPlayer(int playerIndex) { return 0.0; } /** * Report the probability of a player getting a Three Of A Kind. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Three Of A Kind. */ public double threeOfAKindForPlayer(int playerIndex) { return outcomeForAPlayer(ProbabilityCalculator::hasThreeOfAKind, playerIndex); } /** * Report the probability of a player getting a Straight. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Straight. */ public double straightForPlayer(int playerIndex) { return 0.0; } /** * Report the probability of a player getting a Flush. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Flush. */ public double flushForPlayer(int playerIndex) { return 0.0; } /** * Report the probability of a player getting a Full House. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Full House. */ public double fullHouseForPlayer(int playerIndex) { return 0.0; } /** * Report the probability of a player getting a Four Of A Kind. * * @param playerIndex index of Player in the GameState. 
A number in range [0, 9]. * @return the probability of getting a Four Of A Kind. */ public double fourOfAKindForPlayer(int playerIndex) { return outcomeForAPlayer(ProbabilityCalculator::hasFourOfAKind, playerIndex); } /** * Report the probability of a player getting a Straight Flush. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Straight Flush. */ public double straightFlushForPlayer(int playerIndex) { return 0.0; } /** * Report the probability of a player getting a Royal Flush. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Royal Flush. */ public double royalFlushForPlayer(int playerIndex) { return 0.0; } /** * Helper method that determines if an <i>n</i> of a Kind exists on a given combination of board * and pocket cards -- e.g. for n = 3, it will determine if there is a Three of a Kind. * * @param cards combined cards from a player's Pocket and the community Board * @param number a positive integer <i>n</i> * @return {@code true} if there are {@code number} or more cards of the same rank. */ static boolean hasNOfAKind(Collection<Card> cards, int number) { boolean result = false; Map<Rank, Long> countByRank = cards.stream().collect(Collectors.groupingBy(Card::getRank, Collectors.counting())); for (Long count : countByRank.values()) { if (count >= number) { result = true; break; } } return result; } /** * Helper method that determines if a Two of a Kind exists on a given combination of board and * pocket cards. * * @param cards combined cards from a player's Pocket and the community Board * @return {@code true} if there is at least one Two of a Kind; {@code false} otherwise */ static boolean hasTwoOfAKind(Collection<Card> cards) { return hasNOfAKind(cards, 2); } /** * Helper method that determines if a Three of a Kind exists on a given combination of board and * pocket cards. * * @param cards combined cards from a player's Pocket and the community Board * @return {@code true} if there is a Three of a Kind; {@code false} otherwise */ static boolean hasThreeOfAKind(Collection<Card> cards) { return hasNOfAKind(cards, 3); } /** * Helper method that determines if a Four of a Kind exists on a given combination of board and * pocket cards. * * @param cards combined cards from a player's Pocket and the community Board * @return {@code true} if there is a Four of a Kind; {@code false} otherwise */ static boolean hasFourOfAKind(Collection<Card> cards) { return hasNOfAKind(cards, 4); } }
src/main/java/com/skraylabs/poker/ProbabilityCalculator.java
package com.skraylabs.poker; import com.skraylabs.poker.model.Board; import com.skraylabs.poker.model.Card; import com.skraylabs.poker.model.CardUtils; import com.skraylabs.poker.model.GameState; import com.skraylabs.poker.model.Pocket; import com.skraylabs.poker.model.Rank; import java.awt.Point; import java.util.ArrayList; import java.util.Collection; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; /** * For a given {@link com.skraylabs.poker.model.GameState}, calculates the outcome probability for * each Player. */ class ProbabilityCalculator { private GameState gameState; public ProbabilityCalculator(GameState gameState) { this.gameState = gameState; } /** * Generic helper method that calculates a given outcome for a given player. * * @param outcomeEvaluator evaluates if a hand meets the criteria of a categorical poker outcome * (Two Of A Kind, Full House, etc...) * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting the specified poker outcome */ double outcomeForAPlayer(Function<Collection<Card>, Boolean> outcomeEvaluator, int playerIndex) { if (playerIndex < 0 || playerIndex >= GameState.MAX_PLAYERS) { throw new IllegalArgumentException(String .format("Parameter \"playerIndex\" must be in range [0, %d].", GameState.MAX_PLAYERS)); } Collection<Card> dealtCards = CardUtils.collectCards(gameState); // Make a deck of undealt cards ArrayList<Card> deck = new ArrayList<Card>(); for (int i = 0; i < 52; i++) { Card card = CardUtils.cardFromNumber(i); if (!dealtCards.contains(card)) { deck.add(card); } } // Iterate through every possible GameState branch Board board = gameState.getBoard(); Pocket pocket = gameState.getPockets()[playerIndex]; Point count = countOutcomes(outcomeEvaluator, CardUtils.collectCards(board), CardUtils.collectCards(pocket), deck); return ((double) count.x) / count.y; } /** * Helper method that evaluates all the remaining combinations for a given set of board cards and * counts how many contain a given Poker type (e.g. Two of a Kind). * * @param evaluator tests if a Card Collection contains a target Poker hand (e.g. Two of a Kind, * Full House, etc...)
* @param board cards collected from a {@link Board} * @param pocket cards collected from a {@link Pocket} * @param undealtCards collection of cards that have yet to be dealt * @return a pair of numbers (x, y) where x is the number of target outcomes, and y is the total * number of outcomes */ static Point countOutcomes(Function<Collection<Card>, Boolean> evaluator, Collection<Card> board, Collection<Card> pocket, Collection<Card> undealtCards) { int winOutcomes = 0; int totalOutcomes = 0; if (board.size() == 5) { // Board is complete Collection<Card> cards = collectHandCards(board, pocket); if (evaluator.apply(cards)) { winOutcomes++; } totalOutcomes++; } else { // Board is incomplete // Recurse on all possible cards that could be dealt next Collection<Card> dealtCards = new ArrayList<Card>(); for (Card card : undealtCards) { Collection<Card> nextBoard = new ArrayList<Card>(board); nextBoard.add(card); dealtCards.add(card); Collection<Card> nextUndealtCards = new ArrayList<Card>(undealtCards); nextUndealtCards.removeAll(dealtCards); Point nextCount = countOutcomes(evaluator, nextBoard, pocket, nextUndealtCards); winOutcomes += nextCount.x; totalOutcomes += nextCount.y; } } return new Point(winOutcomes, totalOutcomes); } /** * Helper method to gather cards that could form a player's hand -- the combination of community * cards (Board) and Pocket cards. * * @param board Board cards * @param pocket Pocket cards * @return Collection of cards drawn from {@code board} and {@code pocket} */ static Collection<Card> collectHandCards(Collection<Card> board, Collection<Card> pocket) { Collection<Card> cards = new ArrayList<Card>(board); cards.addAll(pocket); return cards; } /** * Report the probability of a player getting a Two Of A Kind. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Two Of A Kind. */ public double twoOfAKindForPlayer(int playerIndex) { return outcomeForAPlayer(ProbabilityCalculator::hasTwoOfAKind, playerIndex); } /** * Report the probability of a player getting a Two Pair. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Two Pair. */ public double twoPairForPlayer(int playerIndex) { return 0.0; } /** * Report the probability of a player getting a Three Of A Kind. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Three Of A Kind. */ public double threeOfAKindForPlayer(int playerIndex) { return outcomeForAPlayer(ProbabilityCalculator::hasThreeOfAKind, playerIndex); } /** * Report the probability of a player getting a Straight. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Straight. */ public double straightForPlayer(int playerIndex) { return 0.0; } /** * Report the probability of a player getting a Flush. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Flush. */ public double flushForPlayer(int playerIndex) { return 0.0; } /** * Report the probability of a player getting a Full House. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Full House. */ public double fullHouseForPlayer(int playerIndex) { return 0.0; } /** * Report the probability of a player getting a Four Of A Kind. * * @param playerIndex index of Player in the GameState. 
A number in range [0, 9]. * @return the probability of getting a Four Of A Kind. */ public double fourOfAKindForPlayer(int playerIndex) { return outcomeForAPlayer(ProbabilityCalculator::hasFourOfAKind, playerIndex); } /** * Report the probability of a player getting a Straight Flush. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Straight Flush. */ public double straightFlushForPlayer(int playerIndex) { return 0.0; } /** * Report the probability of a player getting a Royal Flush. * * @param playerIndex index of Player in the GameState. A number in range [0, 9]. * @return the probability of getting a Royal Flush. */ public double royalFlushForPlayer(int playerIndex) { return 0.0; } /** * Helper method that determines if a Two of a Kind exists on a given combination of board and * pocket cards. * * @param cards combined cards from a player's Pocket and the community Board * @return {@code true} if there is at least one Two of a Kind; {@code false} otherwise */ static boolean hasTwoOfAKind(Collection<Card> cards) { boolean result = false; Map<Rank, Long> countByRank = cards.stream().collect(Collectors.groupingBy(Card::getRank, Collectors.counting())); for (Long count : countByRank.values()) { if (count >= 2) { result = true; break; } } return result; } /** * Helper method that determines if a Three of a Kind exists on a given combination of board and * pocket cards. * * @param cards combined cards from a player's Pocket and the community Board * @return {@code true} if there is a Three of a Kind; {@code false} otherwise */ static boolean hasThreeOfAKind(Collection<Card> cards) { boolean result = false; Map<Rank, Long> countByRank = cards.stream().collect(Collectors.groupingBy(Card::getRank, Collectors.counting())); for (Long count : countByRank.values()) { if (count >= 3) { result = true; break; } } return result; } /** * Helper method that determines if a Four of a Kind exists on a given combination of board and * pocket cards. * * @param cards combined cards from a player's Pocket and the community Board * @return {@code true} if there is a Four of a Kind; {@code false} otherwise */ static boolean hasFourOfAKind(Collection<Card> cards) { boolean result = false; Map<Rank, Long> countByRank = cards.stream().collect(Collectors.groupingBy(Card::getRank, Collectors.counting())); for (Long count : countByRank.values()) { if (count >= 4) { result = true; break; } } return result; } }
Factored out common code for “n” of a Kind methods.
src/main/java/com/skraylabs/poker/ProbabilityCalculator.java
Factored out common code for “n” of a Kind methods.
<ide><path>src/main/java/com/skraylabs/poker/ProbabilityCalculator.java <ide> } <ide> <ide> /** <del> * Helper method that determines if a Two of a Kind exists on a given combination of board and <del> * pocket cards. <del> * <del> * @param cards combined cards from a player's Pocket and the community Board <del> * @return {@code true} if there is at least one Two of a Kind; {@code false} otherwise <del> */ <del> static boolean hasTwoOfAKind(Collection<Card> cards) { <add> * Helper method that determines if an <i>n</i> of a Kind exists on a given combination of board <add> * and pocket cards -- e.g. for n = 3, it will determine if there is a Three of a Kind. <add> * <add> * @param cards combined cards from a player's Pocket and the community Board <add> * @param number a positive integer <i>n</i> <add> * @return {@code true} if there are {@code number} or more cards of the same rank. <add> */ <add> static boolean hasNOfAKind(Collection<Card> cards, int number) { <ide> boolean result = false; <ide> Map<Rank, Long> countByRank = <ide> cards.stream().collect(Collectors.groupingBy(Card::getRank, Collectors.counting())); <ide> for (Long count : countByRank.values()) { <del> if (count >= 2) { <add> if (count >= number) { <ide> result = true; <ide> break; <ide> } <ide> } <ide> <ide> /** <add> * Helper method that determines if a Two of a Kind exists on a given combination of board and <add> * pocket cards. <add> * <add> * @param cards combined cards from a player's Pocket and the community Board <add> * @return {@code true} if there is at least one Two of a Kind; {@code false} otherwise <add> */ <add> static boolean hasTwoOfAKind(Collection<Card> cards) { <add> return hasNOfAKind(cards, 2); <add> } <add> <add> /** <ide> * Helper method that determines if a Three of a Kind exists on a given combination of board and <ide> * pocket cards. <ide> * <ide> * @return {@code true} if there is a Three of a Kind; {@code false} otherwise <ide> */ <ide> static boolean hasThreeOfAKind(Collection<Card> cards) { <del> boolean result = false; <del> Map<Rank, Long> countByRank = <del> cards.stream().collect(Collectors.groupingBy(Card::getRank, Collectors.counting())); <del> for (Long count : countByRank.values()) { <del> if (count >= 3) { <del> result = true; <del> break; <del> } <del> } <del> return result; <add> return hasNOfAKind(cards, 3); <ide> } <ide> <ide> /** <ide> * @return {@code true} if there is a Four of a Kind; {@code false} otherwise <ide> */ <ide> static boolean hasFourOfAKind(Collection<Card> cards) { <del> boolean result = false; <del> Map<Rank, Long> countByRank = <del> cards.stream().collect(Collectors.groupingBy(Card::getRank, Collectors.counting())); <del> for (Long count : countByRank.values()) { <del> if (count >= 4) { <del> result = true; <del> break; <del> } <del> } <del> return result; <add> return hasNOfAKind(cards, 4); <ide> } <ide> }
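The refactor above collapses three near-identical counting loops into a single hasNOfAKind helper parameterized by the threshold n. The following self-contained sketch reproduces the same grouping-and-threshold idea so it can be run in isolation -- the NOfAKindSketch class is hypothetical, and plain string ranks stand in for the real Card/Rank model classes:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class NOfAKindSketch {

  // Mirrors hasNOfAKind: group cards by rank, then test whether any rank's
  // count reaches the threshold.
  static boolean hasNOfAKind(List<String> ranks, int number) {
    Map<String, Long> countByRank =
        ranks.stream().collect(Collectors.groupingBy(r -> r, Collectors.counting()));
    return countByRank.values().stream().anyMatch(count -> count >= number);
  }

  public static void main(String[] args) {
    List<String> hand = Arrays.asList("K", "K", "7", "K", "2", "9", "K");
    System.out.println(hasNOfAKind(hand, 2)); // true: at least a pair of kings
    System.out.println(hasNOfAKind(hand, 4)); // true: four kings
    System.out.println(hasNOfAKind(hand, 5)); // false: no rank appears five times
  }
}

With the helper in place, hasTwoOfAKind, hasThreeOfAKind, and hasFourOfAKind each reduce to a one-line delegation (hasNOfAKind with thresholds 2, 3, and 4 respectively), which is exactly what the diff records.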