lang
stringclasses
2 values
license
stringclasses
13 values
stderr
stringlengths
0
343
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
6
87.7k
new_contents
stringlengths
0
6.23M
new_file
stringlengths
3
311
old_contents
stringlengths
0
6.23M
message
stringlengths
6
9.1k
old_file
stringlengths
3
311
subject
stringlengths
0
4k
git_diff
stringlengths
0
6.31M
JavaScript
mit
679d6ee3a7a2f97cd5680acd230c05b6b4687e4c
0
dontcallmedom/specberus,w3c/specberus,w3c/specberus,w3c/specberus,dontcallmedom/specberus
lib/l10n-wording.js
'use strict'; exports.selectors = { // These sections not necessary, for obvious reasons: // dummy/h2-foo, dummy/h1, dummy/dahut. // headers/title 'headers.title.not-found': "profiles.WD.section.['front-matter'].rules.titleTest" // headers/logo , 'headers.logo.not-found': "profiles.WD.section.['front-matter'].rules.logoTest" // headers/subm-logo // @tripu couldn't find this one. // headers/hr , 'headers.hr.not-found': "profiles.WD.section.['front-matter'].rules.hrAfterCopyrightTest" , 'headers.hr.duplicate': "profiles.WD.section.['front-matter'].rules.hrAfterCopyrightTest" // headers/h2-status , 'headers.h2-status.no-h2': "profiles.WD.section.['front-matter'].rules.dateTitleH2Test" , 'headers.h2-status.bad-h2': "profiles.WD.section.['front-matter'].rules.dateTitleH2Test" // [TODO] // headers/h2-toc // [TODO] // headers/h1-title // [TODO] // headers/dl // [TODO] // headers/errata // [TODO] // headers/translations // [TODO] // headers/div-head // [TODO] // headers/copyright // [TODO] // links/internal // [TODO] // links/linkchecker // [TODO] // links/compound // [TODO] // sotd/supersedable // [TODO] // sotd/submission // [TODO] // sotd/team-subm-link // [TODO] // sotd/stability // [TODO] // sotd/status // [TODO] // sotd/review-end // [TODO] // sotd/cr-end // [TODO] // sotd/pp // [TODO] // sotd/charter-disclosure // [TODO] // sotd/mailing-list // [TODO] // sotd/group-name // [TODO] // sotd/processDocument // [TODO] // sotd/implementation // [TODO] // sotd/ac-review // [TODO] // sotd/diff // [TODO] // structure/name // [TODO] // structure/section-ids // [TODO] // structure/h2 // [TODO] // structure/display-only // [TODO] // style/sheet // [TODO] // validation/css // [TODO] // validation/html // [TODO] // validation/wcag // [TODO] // heuristic/group // [TODO] // heuristic/date-format // [TODO] // Echidna // [TODO] };
Deleted redundant, deprecated selectors file.
lib/l10n-wording.js
Deleted redundant, deprecated selectors file.
<ide><path>ib/l10n-wording.js <del> <del>'use strict'; <del> <del>exports.selectors = { <del> <del> // These sections not necessary, for obvious reasons: <del> // dummy/h2-foo, dummy/h1, dummy/dahut. <del> <del> // headers/title <del> 'headers.title.not-found': "profiles.WD.section.['front-matter'].rules.titleTest" <del> // headers/logo <del>, 'headers.logo.not-found': "profiles.WD.section.['front-matter'].rules.logoTest" <del> // headers/subm-logo <del> // @tripu couldn't find this one. <del> // headers/hr <del>, 'headers.hr.not-found': "profiles.WD.section.['front-matter'].rules.hrAfterCopyrightTest" <del>, 'headers.hr.duplicate': "profiles.WD.section.['front-matter'].rules.hrAfterCopyrightTest" <del> // headers/h2-status <del>, 'headers.h2-status.no-h2': "profiles.WD.section.['front-matter'].rules.dateTitleH2Test" <del>, 'headers.h2-status.bad-h2': "profiles.WD.section.['front-matter'].rules.dateTitleH2Test" <del> // [TODO] <del> // headers/h2-toc <del> // [TODO] <del> // headers/h1-title <del> // [TODO] <del> // headers/dl <del> // [TODO] <del> // headers/errata <del> // [TODO] <del> // headers/translations <del> // [TODO] <del> // headers/div-head <del> // [TODO] <del> // headers/copyright <del> // [TODO] <del> // links/internal <del> // [TODO] <del> // links/linkchecker <del> // [TODO] <del> // links/compound <del> // [TODO] <del> // sotd/supersedable <del> // [TODO] <del> // sotd/submission <del> // [TODO] <del> // sotd/team-subm-link <del> // [TODO] <del> // sotd/stability <del> // [TODO] <del> // sotd/status <del> // [TODO] <del> // sotd/review-end <del> // [TODO] <del> // sotd/cr-end <del> // [TODO] <del> // sotd/pp <del> // [TODO] <del> // sotd/charter-disclosure <del> // [TODO] <del> // sotd/mailing-list <del> // [TODO] <del> // sotd/group-name <del> // [TODO] <del> // sotd/processDocument <del> // [TODO] <del> // sotd/implementation <del> // [TODO] <del> // sotd/ac-review <del> // [TODO] <del> // sotd/diff <del> // [TODO] <del> // structure/name <del> 
// [TODO] <del> // structure/section-ids <del> // [TODO] <del> // structure/h2 <del> // [TODO] <del> // structure/display-only <del> // [TODO] <del> // style/sheet <del> // [TODO] <del> // validation/css <del> // [TODO] <del> // validation/html <del> // [TODO] <del> // validation/wcag <del> // [TODO] <del> // heuristic/group <del> // [TODO] <del> // heuristic/date-format <del> // [TODO] <del> // Echidna <del> // [TODO] <del>}; <del>
Java
apache-2.0
6a7558bcdfc2f9950ff3265e6ab32ea9d259f843
0
lorenamgUMU/sakai,zqian/sakai,willkara/sakai,OpenCollabZA/sakai,joserabal/sakai,noondaysun/sakai,lorenamgUMU/sakai,introp-software/sakai,introp-software/sakai,frasese/sakai,puramshetty/sakai,kingmook/sakai,liubo404/sakai,surya-janani/sakai,bkirschn/sakai,introp-software/sakai,duke-compsci290-spring2016/sakai,introp-software/sakai,bkirschn/sakai,joserabal/sakai,lorenamgUMU/sakai,rodriguezdevera/sakai,OpenCollabZA/sakai,whumph/sakai,pushyamig/sakai,kingmook/sakai,rodriguezdevera/sakai,bkirschn/sakai,rodriguezdevera/sakai,noondaysun/sakai,surya-janani/sakai,pushyamig/sakai,kingmook/sakai,colczr/sakai,conder/sakai,noondaysun/sakai,hackbuteer59/sakai,willkara/sakai,buckett/sakai-gitflow,lorenamgUMU/sakai,kwedoff1/sakai,kingmook/sakai,joserabal/sakai,clhedrick/sakai,puramshetty/sakai,whumph/sakai,liubo404/sakai,puramshetty/sakai,hackbuteer59/sakai,ouit0408/sakai,udayg/sakai,Fudan-University/sakai,bzhouduke123/sakai,kingmook/sakai,colczr/sakai,conder/sakai,joserabal/sakai,bkirschn/sakai,whumph/sakai,wfuedu/sakai,lorenamgUMU/sakai,ktakacs/sakai,ouit0408/sakai,ouit0408/sakai,tl-its-umich-edu/sakai,frasese/sakai,surya-janani/sakai,OpenCollabZA/sakai,ktakacs/sakai,lorenamgUMU/sakai,duke-compsci290-spring2016/sakai,buckett/sakai-gitflow,willkara/sakai,frasese/sakai,whumph/sakai,surya-janani/sakai,frasese/sakai,rodriguezdevera/sakai,clhedrick/sakai,introp-software/sakai,OpenCollabZA/sakai,Fudan-University/sakai,liubo404/sakai,wfuedu/sakai,rodriguezdevera/sakai,puramshetty/sakai,wfuedu/sakai,bkirschn/sakai,ktakacs/sakai,clhedrick/sakai,zqian/sakai,wfuedu/sakai,colczr/sakai,willkara/sakai,noondaysun/sakai,joserabal/sakai,noondaysun/sakai,udayg/sakai,tl-its-umich-edu/sakai,udayg/sakai,udayg/sakai,zqian/sakai,willkara/sakai,frasese/sakai,bkirschn/sakai,OpenCollabZA/sakai,introp-software/sakai,kwedoff1/sakai,OpenCollabZA/sakai,ktakacs/sakai,joserabal/sakai,surya-janani/sakai,conder/sakai,wfuedu/sakai,noondaysun/sakai,rodriguezdevera/sakai,udayg/sakai,colczr/sakai,bzhouduke123/sakai,c
onder/sakai,bzhouduke123/sakai,buckett/sakai-gitflow,hackbuteer59/sakai,surya-janani/sakai,conder/sakai,OpenCollabZA/sakai,zqian/sakai,tl-its-umich-edu/sakai,puramshetty/sakai,hackbuteer59/sakai,noondaysun/sakai,puramshetty/sakai,frasese/sakai,duke-compsci290-spring2016/sakai,Fudan-University/sakai,lorenamgUMU/sakai,pushyamig/sakai,pushyamig/sakai,hackbuteer59/sakai,kingmook/sakai,conder/sakai,whumph/sakai,bzhouduke123/sakai,bkirschn/sakai,ouit0408/sakai,udayg/sakai,zqian/sakai,OpenCollabZA/sakai,frasese/sakai,zqian/sakai,liubo404/sakai,bkirschn/sakai,buckett/sakai-gitflow,clhedrick/sakai,duke-compsci290-spring2016/sakai,willkara/sakai,ktakacs/sakai,puramshetty/sakai,kingmook/sakai,duke-compsci290-spring2016/sakai,joserabal/sakai,surya-janani/sakai,frasese/sakai,zqian/sakai,kwedoff1/sakai,whumph/sakai,conder/sakai,buckett/sakai-gitflow,udayg/sakai,kwedoff1/sakai,Fudan-University/sakai,udayg/sakai,whumph/sakai,bzhouduke123/sakai,duke-compsci290-spring2016/sakai,kwedoff1/sakai,tl-its-umich-edu/sakai,clhedrick/sakai,Fudan-University/sakai,joserabal/sakai,kingmook/sakai,rodriguezdevera/sakai,ouit0408/sakai,buckett/sakai-gitflow,introp-software/sakai,ouit0408/sakai,introp-software/sakai,willkara/sakai,bzhouduke123/sakai,willkara/sakai,kwedoff1/sakai,duke-compsci290-spring2016/sakai,zqian/sakai,whumph/sakai,Fudan-University/sakai,conder/sakai,Fudan-University/sakai,wfuedu/sakai,liubo404/sakai,colczr/sakai,ktakacs/sakai,tl-its-umich-edu/sakai,wfuedu/sakai,liubo404/sakai,colczr/sakai,clhedrick/sakai,colczr/sakai,surya-janani/sakai,lorenamgUMU/sakai,hackbuteer59/sakai,hackbuteer59/sakai,tl-its-umich-edu/sakai,ouit0408/sakai,ouit0408/sakai,buckett/sakai-gitflow,liubo404/sakai,clhedrick/sakai,liubo404/sakai,ktakacs/sakai,kwedoff1/sakai,tl-its-umich-edu/sakai,pushyamig/sakai,puramshetty/sakai,rodriguezdevera/sakai,bzhouduke123/sakai,bzhouduke123/sakai,colczr/sakai,buckett/sakai-gitflow,noondaysun/sakai,kwedoff1/sakai,duke-compsci290-spring2016/sakai,ktakacs/sakai,tl-its-umich-e
du/sakai,pushyamig/sakai,pushyamig/sakai,clhedrick/sakai,Fudan-University/sakai,hackbuteer59/sakai,pushyamig/sakai,wfuedu/sakai
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2003, 2004, 2005, 2006 The Sakai Foundation. * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.content.tool; import java.io.IOException; import java.io.StringReader; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.Stack; import java.util.TreeSet; import java.util.Vector; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jdom.JDOMException; import org.jdom.input.SAXBuilder; import org.sakaiproject.authz.api.PermissionsHelper; import org.sakaiproject.authz.cover.AuthzGroupService; import org.sakaiproject.authz.cover.SecurityService; import 
org.sakaiproject.cheftool.Context; import org.sakaiproject.cheftool.JetspeedRunData; import org.sakaiproject.cheftool.PagedResourceHelperAction; import org.sakaiproject.cheftool.PortletConfig; import org.sakaiproject.cheftool.RunData; import org.sakaiproject.cheftool.VelocityPortlet; import org.sakaiproject.cheftool.VelocityPortletPaneledAction; import org.sakaiproject.component.cover.ComponentManager; import org.sakaiproject.component.cover.ServerConfigurationService; import org.sakaiproject.content.api.ContentCollection; import org.sakaiproject.content.api.ContentCollectionEdit; import org.sakaiproject.content.api.ContentEntity; import org.sakaiproject.content.api.ContentResource; import org.sakaiproject.content.api.ContentResourceEdit; import org.sakaiproject.content.api.ContentResourceFilter; import org.sakaiproject.content.api.FilePickerHelper; import org.sakaiproject.content.api.GroupAwareEdit; import org.sakaiproject.content.api.GroupAwareEntity; import org.sakaiproject.content.api.GroupAwareEntity.AccessMode; import org.sakaiproject.content.cover.ContentHostingService; import org.sakaiproject.content.cover.ContentTypeImageService; import org.sakaiproject.entity.api.Entity; import org.sakaiproject.entity.api.EntityPropertyNotDefinedException; import org.sakaiproject.entity.api.EntityPropertyTypeException; import org.sakaiproject.entity.api.Reference; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.entity.api.ResourcePropertiesEdit; import org.sakaiproject.entity.cover.EntityManager; import org.sakaiproject.event.api.SessionState; import org.sakaiproject.event.api.UsageSession; import org.sakaiproject.event.cover.NotificationService; import org.sakaiproject.event.cover.UsageSessionService; import org.sakaiproject.exception.IdInvalidException; import org.sakaiproject.exception.IdLengthException; import org.sakaiproject.exception.IdUniquenessException; import org.sakaiproject.exception.IdUnusedException; import 
org.sakaiproject.exception.IdUsedException; import org.sakaiproject.exception.InUseException; import org.sakaiproject.exception.InconsistentException; import org.sakaiproject.exception.OverQuotaException; import org.sakaiproject.exception.PermissionException; import org.sakaiproject.exception.ServerOverloadException; import org.sakaiproject.exception.TypeException; import org.sakaiproject.metaobj.shared.control.SchemaBean; import org.sakaiproject.metaobj.shared.mgt.HomeFactory; import org.sakaiproject.metaobj.shared.mgt.StructuredArtifactValidationService; import org.sakaiproject.metaobj.shared.mgt.home.StructuredArtifactHomeInterface; import org.sakaiproject.metaobj.shared.model.ElementBean; import org.sakaiproject.metaobj.shared.model.ValidationError; import org.sakaiproject.metaobj.utils.xml.SchemaNode; import org.sakaiproject.site.api.Group; import org.sakaiproject.site.api.Site; import org.sakaiproject.site.cover.SiteService; import org.sakaiproject.time.api.Time; import org.sakaiproject.time.api.TimeBreakdown; import org.sakaiproject.time.cover.TimeService; import org.sakaiproject.tool.cover.ToolManager; import org.sakaiproject.user.api.User; import org.sakaiproject.user.api.UserNotDefinedException; import org.sakaiproject.user.cover.UserDirectoryService; import org.sakaiproject.util.FileItem; import org.sakaiproject.util.FormattedText; import org.sakaiproject.util.ParameterParser; import org.sakaiproject.util.ResourceLoader; import org.sakaiproject.util.StringUtil; import org.sakaiproject.util.Validator; import org.sakaiproject.util.Xml; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.dom.Text; /** * <p>ResourceAction is a ContentHosting application</p> * * @author University of Michigan, CHEF Software Development Team * @version $Revision$ */ public class ResourcesAction extends PagedResourceHelperAction // VelocityPortletPaneledAction { /** Resource bundle using current language 
locale */ private static ResourceLoader rb = new ResourceLoader("content"); private static final Log logger = LogFactory.getLog(ResourcesAction.class); /** Name of state attribute containing a list of opened/expanded collections */ private static final String STATE_EXPANDED_COLLECTIONS = "resources.expanded_collections"; /** Name of state attribute for status of initialization. */ private static final String STATE_INITIALIZED = "resources.initialized"; /** The content hosting service in the State. */ private static final String STATE_CONTENT_SERVICE = "resources.content_service"; /** The content type image lookup service in the State. */ private static final String STATE_CONTENT_TYPE_IMAGE_SERVICE = "resources.content_type_image_service"; /** The resources, helper or dropbox mode. */ public static final String STATE_MODE_RESOURCES = "resources.resources_mode"; /** The resources, helper or dropbox mode. */ public static final String STATE_RESOURCES_HELPER_MODE = "resources.resources_helper_mode"; /** state attribute for the maximum size for file upload */ private static final String STATE_FILE_UPLOAD_MAX_SIZE = "resources.file_upload_max_size"; /** state attribute indicating whether users in current site should be denied option of making resources public */ private static final String STATE_PREVENT_PUBLIC_DISPLAY = "resources.prevent_public_display"; /** The name of a state attribute indicating whether the resources tool/helper is allowed to show all sites the user has access to */ public static final String STATE_SHOW_ALL_SITES = "resources.allow_user_to_see_all_sites"; /** The name of a state attribute indicating whether the wants to see other sites if that is enabled */ public static final String STATE_SHOW_OTHER_SITES = "resources.user_chooses_to_see_other_sites"; /** The user copyright string */ private static final String STATE_MY_COPYRIGHT = "resources.mycopyright"; /** copyright path -- MUST have same value as AccessServlet.COPYRIGHT_PATH */ public static 
final String COPYRIGHT_PATH = Entity.SEPARATOR + "copyright"; /** The collection id being browsed. */ private static final String STATE_COLLECTION_ID = "resources.collection_id"; /** The id of the "home" collection (can't go up from here.) */ private static final String STATE_HOME_COLLECTION_ID = "resources.collection_home"; /** The display name of the "home" collection (can't go up from here.) */ private static final String STATE_HOME_COLLECTION_DISPLAY_NAME = "resources.collection_home_display_name"; /** The inqualified input field */ private static final String STATE_UNQUALIFIED_INPUT_FIELD = "resources.unqualified_input_field"; /** The collection id path */ private static final String STATE_COLLECTION_PATH = "resources.collection_path"; /** The name of the state attribute containing BrowseItems for all content collections the user has access to */ private static final String STATE_COLLECTION_ROOTS = "resources.collection_rootie_tooties"; /** The sort by */ private static final String STATE_SORT_BY = "resources.sort_by"; /** The sort ascending or decending */ private static final String STATE_SORT_ASC = "resources.sort_asc"; /** The copy flag */ private static final String STATE_COPY_FLAG = "resources.copy_flag"; /** The cut flag */ private static final String STATE_CUT_FLAG = "resources.cut_flag"; /** The can-paste flag */ private static final String STATE_PASTE_ALLOWED_FLAG = "resources.can_paste_flag"; /** The move flag */ private static final String STATE_MOVE_FLAG = "resources.move_flag"; /** The select all flag */ private static final String STATE_SELECT_ALL_FLAG = "resources.select_all_flag"; /** The name of the state attribute indicating whether the hierarchical list is expanded */ private static final String STATE_EXPAND_ALL_FLAG = "resources.expand_all_flag"; /** The name of the state attribute indicating whether the hierarchical list needs to be expanded */ private static final String STATE_NEED_TO_EXPAND_ALL = "resources.need_to_expand_all"; /** The 
name of the state attribute containing a java.util.Set with the id's of selected items */ private static final String STATE_LIST_SELECTIONS = "resources.ignore_delete_selections"; /** The root of the navigation breadcrumbs for a folder, either the home or another site the user belongs to */ private static final String STATE_NAVIGATION_ROOT = "resources.navigation_root"; /************** the more context *****************************************/ /** The more id */ private static final String STATE_MORE_ID = "resources.more_id"; /** The more collection id */ private static final String STATE_MORE_COLLECTION_ID = "resources.more_collection_id"; /************** the edit context *****************************************/ /** The edit id */ public static final String STATE_EDIT_ID = "resources.edit_id"; public static final String STATE_STACK_EDIT_ID = "resources.stack_edit_id"; public static final String STATE_EDIT_COLLECTION_ID = "resources.stack_edit_collection_id"; public static final String STATE_STACK_EDIT_COLLECTION_ID = "resources.stack_edit_collection_id"; private static final String STATE_EDIT_ALERTS = "resources.edit_alerts"; private static final String STATE_STACK_EDIT_ITEM = "resources.stack_edit_item"; private static final String STATE_STACK_EDIT_INTENT = "resources.stack_edit_intent"; private static final String STATE_SHOW_FORM_ITEMS = "resources.show_form_items"; private static final String STATE_STACK_EDIT_ITEM_TITLE = "resources.stack_title"; /************** the create contexts *****************************************/ public static final String STATE_SUSPENDED_OPERATIONS_STACK = "resources.suspended_operations_stack"; public static final String STATE_SUSPENDED_OPERATIONS_STACK_DEPTH = "resources.suspended_operations_stack_depth"; public static final String STATE_CREATE_TYPE = "resources.create_type"; public static final String STATE_CREATE_COLLECTION_ID = "resources.create_collection_id"; public static final String STATE_CREATE_NUMBER = 
"resources.create_number"; public static final String STATE_STRUCTOBJ_TYPE = "resources.create_structured_object_type"; public static final String STATE_STRUCTOBJ_TYPE_READONLY = "resources.create_structured_object_type_readonly"; public static final String STATE_STACK_CREATE_TYPE = "resources.stack_create_type"; public static final String STATE_STACK_CREATE_COLLECTION_ID = "resources.stack_create_collection_id"; public static final String STATE_STACK_CREATE_NUMBER = "resources.stack_create_number"; public static final String STATE_STACK_STRUCTOBJ_TYPE = "resources.stack_create_structured_object_type"; public static final String STATE_STACK_STRUCTOBJ_TYPE_READONLY = "resources.stack_create_structured_object_type_readonly"; private static final String STATE_STACK_CREATE_ITEMS = "resources.stack_create_items"; private static final String STATE_STACK_CREATE_ACTUAL_COUNT = "resources.stack_create_actual_count"; private static final String STATE_STACK_STRUCTOBJ_ROOTNAME = "resources.stack_create_structured_object_root"; private static final String STATE_CREATE_ALERTS = "resources.create_alerts"; protected static final String STATE_CREATE_MESSAGE = "resources.create_message"; private static final String STATE_CREATE_MISSING_ITEM = "resources.create_missing_item"; private static final String STATE_STRUCTOBJ_HOMES = "resources.create_structured_object_home"; private static final String STATE_STACK_STRUCT_OBJ_SCHEMA = "resources.stack_create_structured_object_schema"; private static final String MIME_TYPE_DOCUMENT_PLAINTEXT = "text/plain"; private static final String MIME_TYPE_DOCUMENT_HTML = "text/html"; public static final String MIME_TYPE_STRUCTOBJ = "application/x-osp"; public static final String TYPE_FOLDER = "folder"; public static final String TYPE_UPLOAD = "file"; public static final String TYPE_URL = "Url"; public static final String TYPE_FORM = MIME_TYPE_STRUCTOBJ; public static final String TYPE_HTML = MIME_TYPE_DOCUMENT_HTML; public static final String TYPE_TEXT 
= MIME_TYPE_DOCUMENT_PLAINTEXT; private static final int CREATE_MAX_ITEMS = 10; private static final int INTEGER_WIDGET_LENGTH = 12; private static final int DOUBLE_WIDGET_LENGTH = 18; private static final Pattern INDEXED_FORM_FIELD_PATTERN = Pattern.compile("(.+)\\.(\\d+)"); /************** the metadata extension of edit/create contexts *****************************************/ private static final String STATE_METADATA_GROUPS = "resources.metadata.types"; private static final String INTENT_REVISE_FILE = "revise"; private static final String INTENT_REPLACE_FILE = "replace"; /** State attribute for where there is at least one attachment before invoking attachment tool */ public static final String STATE_HAS_ATTACHMENT_BEFORE = "resources.has_attachment_before"; /** The name of the state attribute containing a list of new items to be attached */ private static final String STATE_HELPER_NEW_ITEMS = "resources.helper_new_items"; /** The name of the state attribute indicating that the list of new items has changed */ private static final String STATE_HELPER_CHANGED = "resources.helper_changed"; /** The name of the optional state attribute indicating the id of the collection that should be treated as the "home" collection */ public static final String STATE_ATTACH_COLLECTION_ID = "resources.attach_collection_id"; /** The name of the state attribute containing the name of the tool that invoked Resources as attachment helper */ public static final String STATE_ATTACH_TOOL_NAME = "resources.attach_tool_name"; /** The name of the state attribute for "new-item" attachment indicating the type of item */ public static final String STATE_ATTACH_TEXT = "resources.attach_text"; /** The name of the state attribute for "new-item" attachment indicating the id of the item to edit */ public static final String STATE_ATTACH_ITEM_ID = "resources.attach_collection_id"; /** The name of the state attribute for "new-item" attachment indicating the id of the form-type if item-type * is 
TYPE_FORM (ignored otherwise) */ public static final String STATE_ATTACH_FORM_ID = "resources.attach_form_id"; /** The name of the state attribute indicating which form field a resource should be attached to */ public static final String STATE_ATTACH_FORM_FIELD = "resources.attach_form_field"; /************** the helper context (file-picker) *****************************************/ /** * State attribute for the Vector of References, one for each attachment. * Using tools can pre-populate, and can read the results from here. */ public static final String STATE_ATTACHMENTS = "resources.state_attachments"; /** * The name of the state attribute indicating that the file picker should return links to * existing resources in an existing collection rather than copying it to the hidden attachments * area. If this value is not set, all attachments are to copies in the hidden attachments area. */ public static final String STATE_ATTACH_LINKS = "resources.state_attach_links"; /** * The name of the state attribute for the maximum number of items to attach. The attribute value will be an Integer, * usually CARDINALITY_SINGLE or CARDINALITY_MULTIPLE. */ public static final String STATE_ATTACH_CARDINALITY = "resources.state_attach_cardinality"; /** A constant indicating maximum of one item can be attached. */ public static final Integer CARDINALITY_SINGLE = FilePickerHelper.CARDINALITY_SINGLE; /** A constant indicating any the number of attachments is unlimited. 
*/ public static final Integer CARDINALITY_MULTIPLE = FilePickerHelper.CARDINALITY_MULTIPLE; /** * The name of the state attribute for the title when a tool uses Resources as attachment helper (for create or attach but not for edit mode) */ public static final String STATE_ATTACH_TITLE = "resources.state_attach_title_text"; /** * The name of the state attribute for the instructions when a tool uses Resources as attachment helper * (for create or attach but not for edit mode) */ public static final String STATE_ATTACH_INSTRUCTION = "resources.state_attach_instruction_text"; /** * State Attribute for the org.sakaiproject.content.api.ContentResourceFilter * object that the current filter should honor. If this is set to null, then all files will * be selectable and viewable */ public static final String STATE_ATTACH_FILTER = "resources.state_attach_filter"; /** * @deprecated use STATE_ATTACH_TITLE and STATE_ATTACH_INSTRUCTION instead */ public static final String STATE_FROM_TEXT = "attachment.from_text"; /** * the name of the state attribute indicating that the user canceled out of the helper. Is set only if the user canceled out of the helper. */ public static final String STATE_HELPER_CANCELED_BY_USER = "resources.state_attach_canceled_by_user"; /** * The name of the state attribute indicating that dropboxes should be shown as places from which * to select attachments. The value should be a List of user-id's. The file picker will attempt to show * the dropbox for each user whose id is included in the list. If this */ public static final String STATE_ATTACH_SHOW_DROPBOXES = "resources.state_attach_show_dropboxes"; /** * The name of the state attribute indicating that the current user's workspace Resources collection * should be shown as places from which to select attachments. The value should be "true". The file picker will attempt to show * the workspace if this attribute is set to "true". 
*/ public static final String STATE_ATTACH_SHOW_WORKSPACE = "resources.state_attach_show_workspace"; /************** the delete context *****************************************/ /** The delete ids */ private static final String STATE_DELETE_IDS = "resources.delete_ids"; /** The not empty delete ids */ private static final String STATE_NOT_EMPTY_DELETE_IDS = "resource.not_empty_delete_ids"; /** The name of the state attribute containing a list of BrowseItem objects corresponding to resources selected for deletion */ private static final String STATE_DELETE_ITEMS = "resources.delete_items"; /** The name of the state attribute containing a list of BrowseItem objects corresponding to nonempty folders selected for deletion */ private static final String STATE_DELETE_ITEMS_NOT_EMPTY = "resources.delete_items_not_empty"; /** The name of the state attribute containing a list of BrowseItem objects selected for deletion that cannot be deleted */ private static final String STATE_DELETE_ITEMS_CANNOT_DELETE = "resources.delete_items_cannot_delete"; /************** the cut items context *****************************************/ /** The cut item ids */ private static final String STATE_CUT_IDS = "resources.revise_cut_ids"; /************** the copied items context *****************************************/ /** The copied item ids */ private static final String STATE_COPIED_IDS = "resources.revise_copied_ids"; /** The copied item id */ private static final String STATE_COPIED_ID = "resources.revise_copied_id"; /************** the moved items context *****************************************/ /** The copied item ids */ private static final String STATE_MOVED_IDS = "resources.revise_moved_ids"; /** Modes. 
*/

	// Velocity "mode" keys: each value selects which view of the Resources tool
	// buildMainPanelContext() renders next.
	private static final String MODE_LIST = "list";
	private static final String MODE_EDIT = "edit";
	private static final String MODE_DAV = "webdav";
	private static final String MODE_CREATE = "create";
	public static final String MODE_HELPER = "helper";
	private static final String MODE_DELETE_CONFIRM = "deleteConfirm";
	private static final String MODE_MORE = "more";
	private static final String MODE_PROPERTIES = "properties";

	/** modes for attachment helper */
	public static final String MODE_ATTACHMENT_SELECT = "resources.attachment_select";
	public static final String MODE_ATTACHMENT_CREATE = "resources.attachment_create";
	public static final String MODE_ATTACHMENT_NEW_ITEM = "resources.attachment_new_item";
	public static final String MODE_ATTACHMENT_EDIT_ITEM = "resources.attachment_edit_item";
	public static final String MODE_ATTACHMENT_CONFIRM = "resources.attachment_confirm";
	// "*_INIT" variants mark that the corresponding helper mode has completed its initialization.
	public static final String MODE_ATTACHMENT_SELECT_INIT = "resources.attachment_select_initialized";
	public static final String MODE_ATTACHMENT_CREATE_INIT = "resources.attachment_create_initialized";
	public static final String MODE_ATTACHMENT_NEW_ITEM_INIT = "resources.attachment_new_item_initialized";
	public static final String MODE_ATTACHMENT_EDIT_ITEM_INIT = "resources.attachment_edit_item_initialized";
	public static final String MODE_ATTACHMENT_CONFIRM_INIT = "resources.attachment_confirm_initialized";
	public static final String MODE_ATTACHMENT_DONE = "resources.attachment_done";

	/** vm files for each mode.
	 */
	private static final String TEMPLATE_LIST = "content/chef_resources_list";
	private static final String TEMPLATE_EDIT = "content/chef_resources_edit";
	private static final String TEMPLATE_CREATE = "content/chef_resources_create";
	private static final String TEMPLATE_DAV = "content/chef_resources_webdav";
	private static final String TEMPLATE_ITEMTYPE = "content/chef_resources_itemtype";
	private static final String TEMPLATE_SELECT = "content/chef_resources_select";
	private static final String TEMPLATE_ATTACH = "content/chef_resources_attach";
	private static final String TEMPLATE_MORE = "content/chef_resources_more";
	private static final String TEMPLATE_DELETE_CONFIRM = "content/chef_resources_deleteConfirm";
	private static final String TEMPLATE_PROPERTIES = "content/chef_resources_properties";
	// private static final String TEMPLATE_REPLACE = "_replace";

	/** the site title */
	private static final String STATE_SITE_TITLE = "site_title";

	/** copyright related info */
	private static final String COPYRIGHT_TYPES = "copyright_types";
	private static final String COPYRIGHT_TYPE = "copyright_type";
	private static final String DEFAULT_COPYRIGHT = "default_copyright";
	private static final String COPYRIGHT_ALERT = "copyright_alert";
	private static final String DEFAULT_COPYRIGHT_ALERT = "default_copyright_alert";
	private static final String COPYRIGHT_FAIRUSE_URL = "copyright_fairuse_url";
	private static final String NEW_COPYRIGHT_INPUT = "new_copyright_input";
	// Localized copyright labels loaded from the resource bundle at class-init time.
	private static final String COPYRIGHT_SELF_COPYRIGHT = rb.getString("cpright2");
	private static final String COPYRIGHT_NEW_COPYRIGHT = rb.getString("cpright3");
	private static final String COPYRIGHT_ALERT_URL = ServerConfigurationService.getAccessUrl() + COPYRIGHT_PATH;

	/** state attribute indicating whether we're using the Creative Commons dialog instead of the "old" copyright dialog */
	protected static final String STATE_USING_CREATIVE_COMMONS = "resources.usingCreativeCommons";

	// Upper bound on retries when generating a unique id/name.
	private static final int MAXIMUM_ATTEMPTS_FOR_UNIQUENESS = 100;

	/** The default value for whether to show all sites in file-picker (used if global value can't be read from server config service) */
	public static final boolean SHOW_ALL_SITES_IN_FILE_PICKER = false;

	/** The default value for whether to show all sites in resources tool (used if global value can't be read from server config service) */
	private static final boolean SHOW_ALL_SITES_IN_RESOURCES = false;

	/** The default value for whether to show all sites in dropbox (used if global value can't be read from server config service) */
	private static final boolean SHOW_ALL_SITES_IN_DROPBOX = false;

	/** The number of members for a collection at which this tool should refuse to expand the collection */
	protected static final int EXPANDABLE_FOLDER_SIZE_LIMIT = 256;

	// Session-state keys controlling which list actions are offered in the UI.
	protected static final String STATE_SHOW_REMOVE_ACTION = "resources.show_remove_action";
	protected static final String STATE_SHOW_MOVE_ACTION = "resources.show_move_action";
	protected static final String STATE_SHOW_COPY_ACTION = "resources.show_copy_action";
	protected static final String STATE_HIGHLIGHTED_ITEMS = "resources.highlighted_items";

	/** The default number of site collections per page.
	 */
	protected static final int DEFAULT_PAGE_SIZE = 50;
	protected static final String PARAM_PAGESIZE = "collections_per_page";
	protected static final String STATE_TOP_MESSAGE_INDEX = "resources.top_message_index";
	protected static final String STATE_REMOVED_ATTACHMENTS = "resources.removed_attachments";

	/********* Global constants *********/

	/** The null/empty string */
	private static final String NULL_STRING = "";

	/** The string used when pasting the same resource to the same folder */
	private static final String DUPLICATE_STRING = rb.getString("copyof") + " ";

	/** The string used when pasting a shortcut of the same resource to the same folder */
	private static final String SHORTCUT_STRING = rb.getString("shortcut");

	/** The copyright character (Note: could be "\u00a9" if we supported UNICODE for specials -ggolden */
	private static final String COPYRIGHT_SYMBOL = rb.getString("cpright1");

	/** The String of new copyright */
	private static final String NEW_COPYRIGHT = "newcopyright";

	/** The resource not exist string */
	private static final String RESOURCE_NOT_EXIST_STRING = rb.getString("notexist1");

	/** The title invalid string */
	private static final String RESOURCE_INVALID_TITLE_STRING = rb.getString("titlecannot");

	/** The copy, cut, paste not operate on collection string */
	private static final String RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING = rb.getString("notsupported");

	/** The maximum number of suspended operations that can be on the stack.
*/
	private static final int MAXIMUM_SUSPENDED_OPERATIONS_STACK_DEPTH = 10;

	/** portlet configuration parameter values */
	public static final String RESOURCES_MODE_RESOURCES = "resources";
	public static final String RESOURCES_MODE_DROPBOX = "dropbox";
	public static final String RESOURCES_MODE_HELPER = "helper";

	/** The from state name */
	private static final String STATE_FROM = "resources.from";
	private static final String STATE_ENCODING = "resources.encoding";
	private static final String DELIM = "@";

	/** string used to represent "public" access mode in UI elements */
	protected static final String PUBLIC_ACCESS = "public";

	/**
	 * Build the context for normal display.
	 * Dispatches on the current STATE_MODE to the mode-specific build*Context
	 * method, deferring to the helper context first when a helper mode is
	 * pending but not yet done.
	 *
	 * @param portlet the current Velocity portlet
	 * @param context the Velocity context to populate
	 * @param data the current request's RunData
	 * @param state the current session state
	 * @return the name of the Velocity template to render, or null if the mode
	 *         matched none of the known modes
	 */
	public String buildMainPanelContext ( VelocityPortlet portlet,
	                                      Context context,
	                                      RunData data,
	                                      SessionState state)
	{
		context.put("tlang",rb);
		// find the ContentTypeImage service
		context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE));
		context.put("copyright_alert_url", COPYRIGHT_ALERT_URL);
		String template = null;

		// place if notification is enabled and current site is not of My Workspace type
		boolean isUserSite = SiteService.isUserSite(ToolManager.getCurrentPlacement().getContext());
		// NOTE(review): new Boolean(...) is deprecated in later JDKs; Boolean.valueOf would
		// be the modern equivalent — left as-is here.
		context.put("notification", new Boolean(!isUserSite && notificationEnabled(state)));

		// get the mode
		String mode = (String) state.getAttribute (STATE_MODE);
		String helper_mode = (String) state.getAttribute(ResourcesAction.STATE_RESOURCES_HELPER_MODE);

		if (!MODE_HELPER.equals(mode) && helper_mode != null)
		{
			// not in helper mode, but a helper context is needed
			// if the mode is not done, defer to the helper context
			if (!mode.equals(ResourcesAction.MODE_ATTACHMENT_DONE))
			{
				template = ResourcesAction.buildHelperContext(portlet, context, data, state);
				// template = AttachmentAction.buildHelperContext(portlet, context, runData, sstate);
				return template;
			}
			// clean up: the helper has finished, so drop its leftover state
			state.removeAttribute(ResourcesAction.STATE_RESOURCES_HELPER_MODE);
			state.removeAttribute(ResourcesAction.STATE_ATTACHMENTS);
		}

		// dispatch to the view-specific context builder for the current mode
		if (mode.equals (MODE_LIST))
		{
			// build the context for add item
			template = buildListContext (portlet, context, data, state);
		}
		else if (mode.equals (MODE_HELPER))
		{
			// build the context for add item
			template = buildHelperContext (portlet, context, data, state);
		}
		else if (mode.equals (MODE_CREATE))
		{
			// build the context for add item
			template = buildCreateContext (portlet, context, data, state);
		}
		else if (mode.equals (MODE_DELETE_CONFIRM))
		{
			// build the context for the basic step of delete confirm page
			template = buildDeleteConfirmContext (portlet, context, data, state);
		}
		else if (mode.equals (MODE_MORE))
		{
			// build the context to display the property list
			template = buildMoreContext (portlet, context, data, state);
		}
		else if (mode.equals (MODE_EDIT))
		{
			// build the context to display the property list
			template = buildEditContext (portlet, context, data, state);
		}
		else if (mode.equals (MODE_OPTIONS))
		{
			template = buildOptionsPanelContext (portlet, context, data, state);
		}
		else if(mode.equals(MODE_DAV))
		{
			template = buildWebdavContext (portlet, context, data, state);
		}

		return template;

	}	// buildMainPanelContext

	/**
	 * Build the context for the list view.
	 * Populates the Velocity context with the current collection, its expanded
	 * members, sort settings, selection/highlight state and the action flags
	 * (remove/move/copy), then returns the list template name.
	 *
	 * @param portlet the current Velocity portlet
	 * @param context the Velocity context to populate
	 * @param data the current request's RunData
	 * @param state the current session state
	 * @return the name of the list-view Velocity template
	 */
	public String buildListContext ( VelocityPortlet portlet,
	                                 Context context,
	                                 RunData data,
	                                 SessionState state)
	{
		context.put("tlang",rb);
		context.put("expandedCollections", state.getAttribute(STATE_EXPANDED_COLLECTIONS));
		// find the ContentTypeImage service
		context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE));
		context.put("TYPE_FOLDER", TYPE_FOLDER);
		context.put("TYPE_UPLOAD", TYPE_UPLOAD);
		context.put("SITE_ACCESS", AccessMode.SITE.toString());
		context.put("GROUP_ACCESS", AccessMode.GROUPED.toString());
		context.put("INHERITED_ACCESS", AccessMode.INHERITED.toString());
		context.put("PUBLIC_ACCESS", PUBLIC_ACCESS);

		// lazily create the set of items the user has checked in the list UI
		Set selectedItems = (Set) state.getAttribute(STATE_LIST_SELECTIONS);
		if(selectedItems == null)
		{
			selectedItems = new TreeSet();
			state.setAttribute(STATE_LIST_SELECTIONS, selectedItems);
		}
		context.put("selectedItems", selectedItems);

		// find the ContentHosting service
		org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE);
		context.put ("service", contentService);

		boolean inMyWorkspace = SiteService.isUserSite(ToolManager.getCurrentPlacement().getContext());
		context.put("inMyWorkspace", Boolean.toString(inMyWorkspace));

		boolean atHome = false;

		// %%STATE_MODE_RESOURCES%%
		boolean dropboxMode = RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES));
		if (dropboxMode)
		{
			// not show the public option or notification when in dropbox mode
			context.put("dropboxMode", Boolean.TRUE);
		}
		else
		{
			//context.put("dropboxMode", Boolean.FALSE);
		}

		// make sure the collectionId is set
		String collectionId = (String) state.getAttribute (STATE_COLLECTION_ID);
		context.put ("collectionId", collectionId);
		String navRoot = (String) state.getAttribute(STATE_NAVIGATION_ROOT);
		String homeCollectionId = (String) state.getAttribute(STATE_HOME_COLLECTION_ID);
		String siteTitle = (String) state.getAttribute (STATE_SITE_TITLE);

		if (collectionId.equals(homeCollectionId))
		{
			atHome = true;
			context.put ("collectionDisplayName", state.getAttribute (STATE_HOME_COLLECTION_DISPLAY_NAME));
		}
		else
		{
			// should be not PermissionException thrown at this time, when the user can successfully navigate to this collection
			// NOTE(review): the empty catch blocks below silently drop failures to look up
			// the display name; the list then renders without one — confirm this is intended.
			try
			{
				context.put("collectionDisplayName", contentService.getCollection(collectionId).getProperties().getProperty(ResourceProperties.PROP_DISPLAY_NAME));
			}
			catch (IdUnusedException e){}
			catch (TypeException e) {}
			catch (PermissionException e) {}
		}

		// offer the Permissions link only to site maintainers viewing the site's home collection
		if(!inMyWorkspace && !dropboxMode && atHome && SiteService.allowUpdateSite(ToolManager.getCurrentPlacement().getContext()))
		{
			context.put("showPermissions", Boolean.TRUE.toString());
			//buildListMenu(portlet, context, data, state);
		}

		context.put("atHome", Boolean.toString(atHome));

		List cPath = getCollectionPath(state);
		context.put ("collectionPath", cPath);

		// set the sort values
		String sortedBy = (String) state.getAttribute (STATE_SORT_BY);
		String sortedAsc = (String) state.getAttribute (STATE_SORT_ASC);
		context.put ("currentSortedBy", sortedBy);
		context.put ("currentSortAsc", sortedAsc);
		context.put("TRUE", Boolean.TRUE.toString());

		boolean showRemoveAction = false;
		boolean showMoveAction = false;
		boolean showCopyAction = false;

		// items to visually highlight in the list (recently copied/moved)
		Set highlightedItems = new TreeSet();

		try
		{
			try
			{
				contentService.checkCollection (collectionId);
				context.put ("collectionFlag", Boolean.TRUE.toString());
			}
			catch(IdUnusedException ex)
			{
				// the collection does not exist yet — try to create it on the fly
				logger.warn(this + "IdUnusedException: " + collectionId);
				try
				{
					ContentCollectionEdit coll = contentService.addCollection(collectionId);
					contentService.commitCollection(coll);
				}
				catch(IdUsedException inner)
				{
					// how can this happen??
					logger.warn(this + "IdUsedException: " + collectionId);
					throw ex;
				}
				catch(IdInvalidException inner)
				{
					logger.warn(this + "IdInvalidException: " + collectionId);
					// what now?
					throw ex;
				}
				catch(InconsistentException inner)
				{
					logger.warn(this + "InconsistentException: " + collectionId);
					// what now?
					throw ex;
				}
			}
			catch(TypeException ex)
			{
				logger.warn(this + "TypeException.");
				throw ex;
			}
			catch(PermissionException ex)
			{
				logger.warn(this + "PermissionException.");
				throw ex;
			}

			// NOTE(review): copyFlag/moveFlag would NPE here if STATE_COPY_FLAG/STATE_MOVE_FLAG
			// were ever unset — presumably they are initialized elsewhere; verify.
			String copyFlag = (String) state.getAttribute (STATE_COPY_FLAG);
			if (copyFlag.equals (Boolean.TRUE.toString()))
			{
				context.put ("copyFlag", copyFlag);
				List copiedItems = (List) state.getAttribute(STATE_COPIED_IDS);
				// context.put ("copiedItem", state.getAttribute (STATE_COPIED_ID));
				highlightedItems.addAll(copiedItems);
				// context.put("copiedItems", copiedItems);
			}

			String moveFlag = (String) state.getAttribute (STATE_MOVE_FLAG);
			if (moveFlag.equals (Boolean.TRUE.toString()))
			{
				context.put ("moveFlag", moveFlag);
				List movedItems = (List) state.getAttribute(STATE_MOVED_IDS);
				highlightedItems.addAll(movedItems);
				// context.put ("copiedItem", state.getAttribute (STATE_COPIED_ID));
				// context.put("movedItems", movedItems);
			}

			// the current collection is always treated as expanded
			HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
			ContentCollection coll = contentService.getCollection(collectionId);
			expandedCollections.put(collectionId, coll);

			state.removeAttribute(STATE_PASTE_ALLOWED_FLAG);

			List all_roots = new Vector();
			List this_site = new Vector();

			// build the browse tree rooted at the current collection
			List members = getBrowseItems(collectionId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, navRoot.equals(homeCollectionId), state);
			if(members != null && members.size() > 0)
			{
				BrowseItem root = (BrowseItem) members.remove(0);
				showRemoveAction = showRemoveAction || root.hasDeletableChildren();
				showMoveAction = showMoveAction || root.hasDeletableChildren();
				showCopyAction = showCopyAction || root.hasCopyableChildren();

				if(atHome && dropboxMode)
				{
					root.setName(siteTitle + " " + rb.getString("gen.drop"));
				}
				else if(atHome)
				{
					root.setName(siteTitle + " " + rb.getString("gen.reso"));
				}

				context.put("site", root);
				root.addMembers(members);
				this_site.add(root);
				all_roots.add(root);
			}
			context.put ("this_site", this_site);

			boolean show_all_sites = false;
			List other_sites = new Vector();

			String allowed_to_see_other_sites = (String) state.getAttribute(STATE_SHOW_ALL_SITES);
			String show_other_sites = (String) state.getAttribute(STATE_SHOW_OTHER_SITES);
			context.put("show_other_sites", show_other_sites);
			if(Boolean.TRUE.toString().equals(allowed_to_see_other_sites))
			{
				context.put("allowed_to_see_other_sites", Boolean.TRUE.toString());
				show_all_sites = Boolean.TRUE.toString().equals(show_other_sites);
			}

			if(atHome && show_all_sites)
			{
				state.setAttribute(STATE_HIGHLIGHTED_ITEMS, highlightedItems);
				// TODO: see call to prepPage below.  That also calls readAllResources.  Are both calls necessary?
				other_sites.addAll(readAllResources(state));
				all_roots.addAll(other_sites);

				// page the cross-site roots
				List messages = prepPage(state);
				context.put("other_sites", messages);

				if (state.getAttribute(STATE_NUM_MESSAGES) != null)
				{
					context.put("allMsgNumber", state.getAttribute(STATE_NUM_MESSAGES).toString());
					context.put("allMsgNumberInt", state.getAttribute(STATE_NUM_MESSAGES));
				}

				context.put("pagesize", ((Integer) state.getAttribute(STATE_PAGESIZE)).toString());

				// find the position of the message that is the top first on the page
				if ((state.getAttribute(STATE_TOP_MESSAGE_INDEX) != null) && (state.getAttribute(STATE_PAGESIZE) != null))
				{
					int topMsgPos = ((Integer)state.getAttribute(STATE_TOP_MESSAGE_INDEX)).intValue() + 1;
					context.put("topMsgPos", Integer.toString(topMsgPos));
					int btmMsgPos = topMsgPos + ((Integer)state.getAttribute(STATE_PAGESIZE)).intValue() - 1;
					if (state.getAttribute(STATE_NUM_MESSAGES) != null)
					{
						int allMsgNumber = ((Integer)state.getAttribute(STATE_NUM_MESSAGES)).intValue();
						if (btmMsgPos > allMsgNumber)
							btmMsgPos = allMsgNumber;
					}
					context.put("btmMsgPos", Integer.toString(btmMsgPos));
				}

				boolean goPPButton = state.getAttribute(STATE_PREV_PAGE_EXISTS) != null;
				context.put("goPPButton", Boolean.toString(goPPButton));
				boolean goNPButton = state.getAttribute(STATE_NEXT_PAGE_EXISTS) != null;
				context.put("goNPButton", Boolean.toString(goNPButton));

				/*
				boolean goFPButton = state.getAttribute(STATE_FIRST_PAGE_EXISTS) != null;
				context.put("goFPButton", Boolean.toString(goFPButton));
				boolean goLPButton = state.getAttribute(STATE_LAST_PAGE_EXISTS) != null;
				context.put("goLPButton", Boolean.toString(goLPButton));
				*/

				context.put("pagesize", state.getAttribute(STATE_PAGESIZE));
				// context.put("pagesizes", PAGESIZES);
			}

			// context.put ("other_sites", other_sites);
			state.setAttribute(STATE_COLLECTION_ROOTS, all_roots);
			// context.put ("root", root);

			if(state.getAttribute(STATE_PASTE_ALLOWED_FLAG) != null)
			{
				context.put("paste_place_showing", state.getAttribute(STATE_PASTE_ALLOWED_FLAG));
			}

			if(showRemoveAction)
			{
				context.put("showRemoveAction", Boolean.TRUE.toString());
			}

			if(showMoveAction)
			{
				context.put("showMoveAction", Boolean.TRUE.toString());
			}

			if(showCopyAction)
			{
				context.put("showCopyAction", Boolean.TRUE.toString());
			}
		}
		catch (IdUnusedException e)
		{
			addAlert(state, rb.getString("cannotfind"));
			context.put ("collectionFlag", Boolean.FALSE.toString());
		}
		catch(TypeException e)
		{
			logger.warn(this + "TypeException.");
			context.put ("collectionFlag", Boolean.FALSE.toString());
		}
		catch(PermissionException e)
		{
			addAlert(state, rb.getString("notpermis1"));
			context.put ("collectionFlag", Boolean.FALSE.toString());
		}

		context.put("homeCollection", (String) state.getAttribute (STATE_HOME_COLLECTION_ID));
		context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE));
		context.put ("resourceProperties", contentService.newResourceProperties ());

		try
		{
			// TODO: why 'site' here?
			Site site = SiteService.getSite(ToolManager.getCurrentPlacement().getContext());
			context.put("siteTitle", site.getTitle());
		}
		catch (IdUnusedException e)
		{
			// logger.warn(this + e.toString());
		}

		context.put("expandallflag", state.getAttribute(STATE_EXPAND_ALL_FLAG));
		state.removeAttribute(STATE_NEED_TO_EXPAND_ALL);

		// inform the observing courier that we just updated the page...
		// if there are pending requests to do so they can be cleared
		justDelivered(state);

		// pick the "show" template based on the standard template name
		// String template = (String) getContext(data).get("template");
		return TEMPLATE_LIST;

	}	// buildListContext

	/**
	 * Build the context for the helper view.
	 * Initializes state on first entry, normalizes the helper mode (pushing a
	 * new stack frame for the "*_SELECT/CREATE/NEW_ITEM/EDIT_ITEM" entry modes),
	 * then delegates to the matching helper-context builder.
	 *
	 * @param portlet the current Velocity portlet
	 * @param context the Velocity context to populate
	 * @param data the current request's RunData
	 * @param state the current session state
	 * @return the Velocity template name for the helper sub-view, or null if
	 *         no known helper mode is current
	 */
	public static String buildHelperContext ( VelocityPortlet portlet,
	                                          Context context,
	                                          RunData data,
	                                          SessionState state)
	{
		if(state.getAttribute(STATE_INITIALIZED) == null)
		{
			initStateAttributes(state, portlet);
			if(state.getAttribute(ResourcesAction.STATE_HELPER_CANCELED_BY_USER) != null)
			{
				state.removeAttribute(ResourcesAction.STATE_HELPER_CANCELED_BY_USER);
			}
		}

		String mode = (String) state.getAttribute(STATE_MODE);
		if(state.getAttribute(STATE_MODE_RESOURCES) == null && MODE_HELPER.equals(mode))
		{
			state.setAttribute(ResourcesAction.STATE_MODE_RESOURCES, ResourcesAction.MODE_HELPER);
		}

		Set selectedItems = (Set) state.getAttribute(STATE_LIST_SELECTIONS);
		if(selectedItems == null)
		{
			selectedItems = new TreeSet();
			state.setAttribute(STATE_LIST_SELECTIONS, selectedItems);
		}
		context.put("selectedItems", selectedItems);

		// map an entry-mode to its "*_INIT" counterpart and remember that we
		// must push a new frame on the suspended-operations stack
		String helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE);
		boolean need_to_push = false;

		if(MODE_ATTACHMENT_SELECT.equals(helper_mode))
		{
			need_to_push = true;
			helper_mode = MODE_ATTACHMENT_SELECT_INIT;
		}
		else if(MODE_ATTACHMENT_CREATE.equals(helper_mode))
		{
			need_to_push = true;
			helper_mode = MODE_ATTACHMENT_CREATE_INIT;
		}
		else if(MODE_ATTACHMENT_NEW_ITEM.equals(helper_mode))
		{
			need_to_push = true;
			helper_mode = MODE_ATTACHMENT_NEW_ITEM_INIT;
		}
		else if(MODE_ATTACHMENT_EDIT_ITEM.equals(helper_mode))
		{
			need_to_push = true;
			helper_mode = MODE_ATTACHMENT_EDIT_ITEM_INIT;
		}

		Map current_stack_frame = null;
		if(need_to_push)
		{
			current_stack_frame = pushOnStack(state);
			current_stack_frame.put(STATE_STACK_EDIT_INTENT, INTENT_REVISE_FILE);

			state.setAttribute(VelocityPortletPaneledAction.STATE_HELPER, ResourcesAction.class.getName());
			state.setAttribute(STATE_RESOURCES_HELPER_MODE, helper_mode);

			if(MODE_ATTACHMENT_EDIT_ITEM_INIT.equals(helper_mode))
			{
				// editing an existing attachment: seed the stack frame with the
				// item being edited
				String attachmentId = (String) state.getAttribute(STATE_EDIT_ID);
				if(attachmentId != null)
				{
					current_stack_frame.put(STATE_STACK_EDIT_ID, attachmentId);
					String collectionId = ContentHostingService.getContainingCollectionId(attachmentId);
					current_stack_frame.put(STATE_STACK_EDIT_COLLECTION_ID, collectionId);
					EditItem item = getEditItem(attachmentId, collectionId, data);

					if (state.getAttribute(STATE_MESSAGE) == null)
					{
						// got resource and successfully populated item with values
						state.setAttribute(STATE_EDIT_ALERTS, new HashSet());
						current_stack_frame.put(STATE_STACK_EDIT_ITEM, item);
					}
				}
			}
			else
			{
				// selecting/creating: seed the frame with AttachItem wrappers for
				// the caller's existing attachment references
				List attachments = (List) state.getAttribute(STATE_ATTACHMENTS);
				if(attachments == null)
				{
					attachments = EntityManager.newReferenceList();
				}
				List attached = new Vector();

				Iterator it = attachments.iterator();
				while(it.hasNext())
				{
					// NOTE(review): any failure here silently skips the reference;
					// confirm that dropping unreadable attachments is acceptable.
					try
					{
						Reference ref = (Reference) it.next();
						String itemId = ref.getId();
						ResourceProperties properties = ref.getProperties();
						String displayName = properties.getProperty(ResourceProperties.PROP_DISPLAY_NAME);
						String containerId = ref.getContainer();
						String accessUrl = ContentHostingService.getUrl(itemId);
						String contentType = properties.getProperty(ResourceProperties.PROP_CONTENT_TYPE);

						AttachItem item = new AttachItem(itemId, displayName, containerId, accessUrl);
						item.setContentType(contentType);
						attached.add(item);
					}
					catch(Exception ignore) {}
				}
				current_stack_frame.put(STATE_HELPER_NEW_ITEMS, attached);
			}
		}
		else
		{
			current_stack_frame = peekAtStack(state);
			if(current_stack_frame.get(STATE_STACK_EDIT_INTENT) == null)
			{
				current_stack_frame.put(STATE_STACK_EDIT_INTENT, INTENT_REVISE_FILE);
			}
		}

		if(helper_mode == null)
		{
			helper_mode = (String) current_stack_frame.get(STATE_RESOURCES_HELPER_MODE);
		}
		else
		{
			current_stack_frame.put(STATE_RESOURCES_HELPER_MODE, helper_mode);
		}

		// title/instruction/subtitle fall back from the stack frame to session
		// state, caching the session value in the frame when found
		String helper_title = (String) current_stack_frame.get(STATE_ATTACH_TITLE);
		if(helper_title == null)
		{
			helper_title = (String) state.getAttribute(STATE_ATTACH_TITLE);
			if(helper_title != null)
			{
				current_stack_frame.put(STATE_ATTACH_TITLE, helper_title);
			}
		}
		if(helper_title != null)
		{
			context.put("helper_title", helper_title);
		}

		String helper_instruction = (String) current_stack_frame.get(STATE_ATTACH_INSTRUCTION);
		if(helper_instruction == null)
		{
			helper_instruction = (String) state.getAttribute(STATE_ATTACH_INSTRUCTION);
			if(helper_instruction != null)
			{
				current_stack_frame.put(STATE_ATTACH_INSTRUCTION, helper_instruction);
			}
		}
		if(helper_instruction != null)
		{
			context.put("helper_instruction", helper_instruction);
		}

		String title = (String) current_stack_frame.get(STATE_STACK_EDIT_ITEM_TITLE);
		if(title == null)
		{
			title = (String) state.getAttribute(STATE_ATTACH_TEXT);
			if(title != null)
			{
				current_stack_frame.put(STATE_STACK_EDIT_ITEM_TITLE, title);
			}
		}
		if(title != null && title.trim().length() > 0)
		{
			context.put("helper_subtitle", title);
		}

		// delegate to the builder for the (now-initialized) helper mode
		String template = null;
		if(MODE_ATTACHMENT_SELECT_INIT.equals(helper_mode))
		{
			template = buildSelectAttachmentContext(portlet, context, data, state);
		}
		else if(MODE_ATTACHMENT_CREATE_INIT.equals(helper_mode))
		{
			template = buildCreateContext(portlet, context, data, state);
		}
		else if(MODE_ATTACHMENT_NEW_ITEM_INIT.equals(helper_mode))
		{
			template = buildItemTypeContext(portlet, context, data, state);
		}
		else if(MODE_ATTACHMENT_EDIT_ITEM_INIT.equals(helper_mode))
		{
			template = buildEditContext(portlet, context, data, state);
		}

		return template;
	}

	/**
	 * Build the context for choosing the type of a new item (folder, upload,
	 * HTML, text, URL or structured-object form) and for entering its initial
	 * values.
	 *
	 * @param portlet the current Velocity portlet
	 * @param context the Velocity context to populate
	 * @param data the current request's RunData
	 * @param state the current session state
	 * @return the name of the item-type Velocity template
	 */
	public static String buildItemTypeContext(VelocityPortlet portlet, Context context, RunData data, SessionState state)
	{
		context.put("tlang",rb);
		initStateAttributes(state, portlet);

		Map current_stack_frame = peekAtStack(state);

		String mode = (String) state.getAttribute(STATE_MODE);
		if(mode == null || mode.trim().length() == 0)
		{
			mode = MODE_HELPER;
			state.setAttribute(STATE_MODE, mode);
		}
		String helper_mode = null;
		if(MODE_HELPER.equals(mode))
		{
			// default the helper mode to "new item" when none is set yet
			helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE);
			if(helper_mode == null || helper_mode.trim().length() == 0)
			{
				helper_mode = MODE_ATTACHMENT_NEW_ITEM;
				state.setAttribute(STATE_RESOURCES_HELPER_MODE, helper_mode);
			}
			current_stack_frame.put(STATE_RESOURCES_HELPER_MODE, helper_mode);
			if(MODE_ATTACHMENT_NEW_ITEM_INIT.equals(helper_mode))
			{
				context.put("attaching_this_item", Boolean.TRUE.toString());
			}
			state.setAttribute(VelocityPortletPaneledAction.STATE_HELPER, ResourcesAction.class.getName());
		}

		String msg = (String) state.getAttribute(STATE_CREATE_MESSAGE);
		if (msg != null)
		{
			context.put("itemAlertMessage", msg);
			state.removeAttribute(STATE_CREATE_MESSAGE);
		}

		context.put("max_upload_size", state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE));

		// resolve the target collection: stack frame, then session state, then
		// the site's root collection
		String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID);
		if(collectionId == null || collectionId.trim().length() == 0)
		{
			collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID);
			if(collectionId == null || collectionId.trim().length() == 0)
			{
				collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
			}
			current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId);
		}
		context.put("collectionId", collectionId);

		// resolve the item type the same way, defaulting to file upload
		String itemType = (String) current_stack_frame.get(STATE_STACK_CREATE_TYPE);
		if(itemType == null || "".equals(itemType))
		{
			itemType = (String) state.getAttribute(STATE_CREATE_TYPE);
			if(itemType == null || "".equals(itemType))
			{
				itemType = TYPE_UPLOAD;
			}
			current_stack_frame.put(STATE_STACK_CREATE_TYPE, itemType);
		}
		context.put("itemType", itemType);

		Integer numberOfItems = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
		if(numberOfItems == null)
		{
			numberOfItems = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
			current_stack_frame.put(STATE_STACK_CREATE_NUMBER, numberOfItems);
		}
		if(numberOfItems == null)
		{
			numberOfItems = new Integer(1);
			current_stack_frame.put(STATE_STACK_CREATE_NUMBER, numberOfItems);
		}
		context.put("numberOfItems", numberOfItems);
		context.put("max_number", new Integer(1));

		Collection groups = ContentHostingService.getGroupsWithReadAccess(collectionId);
		// TODO: does this method filter groups for this subcollection??
		if(! groups.isEmpty())
		{
			context.put("siteHasGroups", Boolean.TRUE.toString());
		}

		List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
		if(new_items == null)
		{
			// first visit: build fresh edit items for the create form
			String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
			if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
			{
				defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
				state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
			}

			// NOTE(review): 'site' is never read after this lookup, and a failure
			// only prints a stack trace — looks like dead code; confirm before removing.
			Site site;
			try
			{
				site = SiteService.getSite(ToolManager.getCurrentPlacement().getContext());
			}
			catch (IdUnusedException e1)
			{
				// TODO Auto-generated catch block
				e1.printStackTrace();
			}

			String encoding = data.getRequest().getCharacterEncoding();

			// discover the access mode and groups inherited from the parent collection;
			// lookup failures silently leave the defaults (INHERITED, no groups)
			List inherited_access_groups = new Vector();
			AccessMode inherited_access = AccessMode.INHERITED;
			try
			{
				ContentCollection parent = ContentHostingService.getCollection(collectionId);
				inherited_access = parent.getInheritedAccess();
				inherited_access_groups.addAll(parent.getInheritedGroups());
			}
			catch (IdUnusedException e)
			{
			}
			catch (TypeException e)
			{
			}
			catch (PermissionException e)
			{
			}

			boolean isInDropbox = ContentHostingService.isInDropbox(collectionId);

			Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
			if(preventPublicDisplay == null)
			{
				preventPublicDisplay = Boolean.FALSE;
				state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
			}

			new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);
		}
		context.put("new_items", new_items);
		current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);

		String show_form_items = (String) current_stack_frame.get(STATE_SHOW_FORM_ITEMS);
		if(show_form_items == null)
		{
			show_form_items = (String) state.getAttribute(STATE_SHOW_FORM_ITEMS);
			if(show_form_items != null)
			{
				current_stack_frame.put(STATE_SHOW_FORM_ITEMS,show_form_items);
			}
		}
		if(show_form_items != null)
		{
			context.put("show_form_items", show_form_items);
		}

		context.put("TYPE_FOLDER", TYPE_FOLDER);
		context.put("TYPE_UPLOAD", TYPE_UPLOAD);
		context.put("TYPE_HTML", TYPE_HTML);
		context.put("TYPE_TEXT", TYPE_TEXT);
		context.put("TYPE_URL", TYPE_URL);
		context.put("TYPE_FORM", TYPE_FORM);

		// copyright
		copyrightChoicesIntoContext(state, context);

		// put schema for metadata into context
		metadataGroupsIntoContext(state, context);

		if(TYPE_FORM.equals(itemType))
		{
			// structured-object ("form") items need the form-type metadata too
			List listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES);
			if(listOfHomes == null)
			{
				setupStructuredObjects(state);
				listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES);
			}
			context.put("homes", listOfHomes);

			String formtype = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE);
			if(formtype == null)
			{
				formtype = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE);
				if(formtype == null)
				{
					formtype = "";
				}
				current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype);
			}
			context.put("formtype", formtype);

			String formtype_readonly = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE_READONLY);
			if(formtype_readonly == null)
			{
				formtype_readonly = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE_READONLY);
				if(formtype_readonly == null)
				{
					formtype_readonly = Boolean.FALSE.toString();
				}
				current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE_READONLY, formtype_readonly);
			}
			if(formtype_readonly != null && formtype_readonly.equals(Boolean.TRUE.toString()))
			{
				context.put("formtype_readonly", formtype_readonly);
			}

			String rootname = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_ROOTNAME);
			context.put("rootname", rootname);

			// expose the widget-type constants used by the form template
			context.put("STRING", ResourcesMetadata.WIDGET_STRING);
			context.put("TEXTAREA", ResourcesMetadata.WIDGET_TEXTAREA);
			context.put("BOOLEAN", ResourcesMetadata.WIDGET_BOOLEAN);
			context.put("INTEGER", ResourcesMetadata.WIDGET_INTEGER);
			context.put("DOUBLE", ResourcesMetadata.WIDGET_DOUBLE);
			context.put("DATE", ResourcesMetadata.WIDGET_DATE);
			context.put("TIME", ResourcesMetadata.WIDGET_TIME);
			context.put("DATETIME", ResourcesMetadata.WIDGET_DATETIME);
			context.put("ANYURI", ResourcesMetadata.WIDGET_ANYURI);
			context.put("ENUM", ResourcesMetadata.WIDGET_ENUM);
			context.put("NESTED", ResourcesMetadata.WIDGET_NESTED);
			context.put("WYSIWYG", ResourcesMetadata.WIDGET_WYSIWYG);
			context.put("today", TimeService.newTime());
			context.put("DOT", ResourcesMetadata.DOT);
		}

		return TEMPLATE_ITEMTYPE;
	}

	/**
	 * Access the top item on the suspended-operations stack without removing it.
	 * Lazily creates an empty stack in session state if none exists yet.
	 * @param state The current session state, including the STATE_SUSPENDED_OPERATIONS_STACK attribute.
	 * @return The top item on the stack, or null if the stack is empty.
	 */
	private static Map peekAtStack(SessionState state)
	{
		Map current_stack_frame = null;
		Stack operations_stack = (Stack) state.getAttribute(STATE_SUSPENDED_OPERATIONS_STACK);
		if(operations_stack == null)
		{
			operations_stack = new Stack();
			state.setAttribute(STATE_SUSPENDED_OPERATIONS_STACK, operations_stack);
		}
		if(! operations_stack.isEmpty())
		{
			current_stack_frame = (Map) operations_stack.peek();
		}
		return current_stack_frame;
	}

	/**
	 * Returns true if the suspended operations stack contains no elements.
	 * Lazily creates an empty stack in session state if none exists yet.
	 * @param state The current session state, including the STATE_SUSPENDED_OPERATIONS_STACK attribute.
	 * @return true if the suspended operations stack contains no elements
	 */
	private static boolean isStackEmpty(SessionState state)
	{
		Stack operations_stack = (Stack) state.getAttribute(STATE_SUSPENDED_OPERATIONS_STACK);
		if(operations_stack == null)
		{
			operations_stack = new Stack();
			state.setAttribute(STATE_SUSPENDED_OPERATIONS_STACK, operations_stack);
		}
		return operations_stack.isEmpty();
	}

	/**
	 * Push an item of the suspended-operations stack.
* @param state The current session state, including the STATE_SUSPENDED_OPERATIONS_STACK attribute.
	 * @return The new item that has just been added to the stack, or null if depth limit is exceeded.
	 */
	private static Map pushOnStack(SessionState state)
	{
		Map current_stack_frame = null;
		Stack operations_stack = (Stack) state.getAttribute(STATE_SUSPENDED_OPERATIONS_STACK);
		if(operations_stack == null)
		{
			operations_stack = new Stack();
			state.setAttribute(STATE_SUSPENDED_OPERATIONS_STACK, operations_stack);
		}
		// refuse to push past the depth limit (returns null in that case)
		if(operations_stack.size() < MAXIMUM_SUSPENDED_OPERATIONS_STACK_DEPTH)
		{
			current_stack_frame = (Map) operations_stack.push(new Hashtable());
		}
		// carry the current helper mode into the new frame
		// NOTE(review): if the depth limit was hit, current_stack_frame is null
		// here and this put would NPE when a helper mode is set — verify the
		// limit is unreachable in practice.
		Object helper_mode = state.getAttribute(STATE_RESOURCES_HELPER_MODE);
		if(helper_mode != null)
		{
			current_stack_frame.put(STATE_RESOURCES_HELPER_MODE, helper_mode);
		}
		return current_stack_frame;
	}

	/**
	 * Remove and return the top item from the suspended-operations stack.
	 * When the pop empties the stack, any "canceled by user" marker in the
	 * popped frame is promoted back into session state.
	 * @param state The current session state, including the STATE_SUSPENDED_OPERATIONS_STACK attribute.
	 * @return The item that has just been removed from the stack, or null if the stack was empty.
	 */
	private static Map popFromStack(SessionState state)
	{
		Map current_stack_frame = null;
		Stack operations_stack = (Stack) state.getAttribute(STATE_SUSPENDED_OPERATIONS_STACK);
		if(operations_stack == null)
		{
			operations_stack = new Stack();
			state.setAttribute(STATE_SUSPENDED_OPERATIONS_STACK, operations_stack);
		}
		if(! operations_stack.isEmpty())
		{
			current_stack_frame = (Map) operations_stack.pop();
			if(operations_stack.isEmpty())
			{
				String canceled = (String) current_stack_frame.get(STATE_HELPER_CANCELED_BY_USER);
				if(canceled != null)
				{
					state.setAttribute(STATE_HELPER_CANCELED_BY_USER, canceled);
				}
			}
		}
		return current_stack_frame;
	}

	/**
	 * Restore the tool's mode after a suspended operation completes.
	 * When the stack is empty: in helper mode, clean up and mark the
	 * attachment flow done; otherwise return to the list view. When frames
	 * remain, re-adopt the helper mode stored in the top frame.
	 * @param state the current session state
	 */
	private static void resetCurrentMode(SessionState state)
	{
		String mode = (String) state.getAttribute(STATE_MODE);
		if(isStackEmpty(state))
		{
			if(MODE_HELPER.equals(mode))
			{
				cleanupState(state);
				state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_DONE);
			}
			else
			{
				state.setAttribute(STATE_MODE, MODE_LIST);
				state.removeAttribute(STATE_RESOURCES_HELPER_MODE);
			}
			return;
		}
		Map current_stack_frame = peekAtStack(state);
		String helper_mode = (String) current_stack_frame.get(STATE_RESOURCES_HELPER_MODE);
		if(helper_mode != null)
		{
			state.setAttribute(STATE_RESOURCES_HELPER_MODE, helper_mode);
		}
	}

	/**
	 * Build the context for selecting attachments.
	 *
	 * @param portlet the current Velocity portlet
	 * @param context the Velocity context to populate
	 * @param data the current request's RunData
	 * @param state the current session state
	 * @return the name of the Velocity template for the select-attachment view
	 */
	public static String buildSelectAttachmentContext ( VelocityPortlet portlet,
	                                                    Context context,
	                                                    RunData data,
	                                                    SessionState state)
	{
		context.put("tlang",rb);
		initStateAttributes(state, portlet);

		Map current_stack_frame = peekAtStack(state);
		if(current_stack_frame == null)
		{
			current_stack_frame = pushOnStack(state);
		}

		state.setAttribute(VelocityPortletPaneledAction.STATE_HELPER, ResourcesAction.class.getName());

		Set highlightedItems = new TreeSet();

		// the items attached so far, falling back from the stack frame to session state
		List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS);
		if(new_items == null)
		{
			new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS);
			if(new_items == null)
			{
				new_items = new Vector();
			}
			current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items);
		}
		context.put("attached", new_items);
		context.put("last", new Integer(new_items.size() - 1));

		// how many attachments the caller allows, defaulting to "multiple"
		Integer max_cardinality = (Integer) current_stack_frame.get(STATE_ATTACH_CARDINALITY);
		if(max_cardinality
== null) { max_cardinality = CARDINALITY_MULTIPLE; } current_stack_frame.put(STATE_ATTACH_CARDINALITY, max_cardinality); } context.put("max_cardinality", max_cardinality); if(new_items.size() >= max_cardinality.intValue()) { context.put("disable_attach_links", Boolean.TRUE.toString()); } if(state.getAttribute(STATE_HELPER_CHANGED) != null) { context.put("list_has_changed", "true"); } String form_field = (String) current_stack_frame.get(ResourcesAction.STATE_ATTACH_FORM_FIELD); if(form_field == null) { form_field = (String) state.getAttribute(ResourcesAction.STATE_ATTACH_FORM_FIELD); if(form_field != null) { current_stack_frame.put(ResourcesAction.STATE_ATTACH_FORM_FIELD, form_field); state.removeAttribute(ResourcesAction.STATE_ATTACH_FORM_FIELD); } } // find the ContentTypeImage service context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); context.put("TYPE_FOLDER", TYPE_FOLDER); context.put("TYPE_UPLOAD", TYPE_UPLOAD); // find the ContentHosting service org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE); // context.put ("service", contentService); boolean inMyWorkspace = SiteService.isUserSite(ToolManager.getCurrentPlacement().getContext()); // context.put("inMyWorkspace", Boolean.toString(inMyWorkspace)); boolean atHome = false; // %%STATE_MODE_RESOURCES%% boolean dropboxMode = RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES)); // make sure the channedId is set String collectionId = (String) state.getAttribute(STATE_ATTACH_COLLECTION_ID); if(collectionId == null) { collectionId = (String) state.getAttribute (STATE_COLLECTION_ID); } context.put ("collectionId", collectionId); String navRoot = (String) state.getAttribute(STATE_NAVIGATION_ROOT); String homeCollectionId = (String) state.getAttribute(STATE_HOME_COLLECTION_ID); String siteTitle = (String) state.getAttribute 
(STATE_SITE_TITLE); if (collectionId.equals(homeCollectionId)) { atHome = true; //context.put ("collectionDisplayName", state.getAttribute (STATE_HOME_COLLECTION_DISPLAY_NAME)); } else { /* // should be not PermissionException thrown at this time, when the user can successfully navigate to this collection try { context.put("collectionDisplayName", contentService.getCollection(collectionId).getProperties().getProperty(ResourceProperties.PROP_DISPLAY_NAME)); } catch (IdUnusedException e){} catch (TypeException e) {} catch (PermissionException e) {} */ } List cPath = getCollectionPath(state); context.put ("collectionPath", cPath); // set the sort values String sortedBy = (String) state.getAttribute (STATE_SORT_BY); String sortedAsc = (String) state.getAttribute (STATE_SORT_ASC); context.put ("currentSortedBy", sortedBy); context.put ("currentSortAsc", sortedAsc); context.put("TRUE", Boolean.TRUE.toString()); // String current_user_id = UserDirectoryService.getCurrentUser().getId(); try { try { contentService.checkCollection (collectionId); context.put ("collectionFlag", Boolean.TRUE.toString()); } catch(IdUnusedException ex) { logger.warn("ResourcesAction.buildSelectAttachment (static) : IdUnusedException: " + collectionId); try { ContentCollectionEdit coll = contentService.addCollection(collectionId); contentService.commitCollection(coll); } catch(IdUsedException inner) { // how can this happen?? logger.warn("ResourcesAction.buildSelectAttachment (static) : IdUsedException: " + collectionId); throw ex; } catch(IdInvalidException inner) { logger.warn("ResourcesAction.buildSelectAttachment (static) : IdInvalidException: " + collectionId); // what now? throw ex; } catch(InconsistentException inner) { logger.warn("ResourcesAction.buildSelectAttachment (static) : InconsistentException: " + collectionId); // what now? 
throw ex; } } catch(TypeException ex) { logger.warn("ResourcesAction.buildSelectAttachment (static) : TypeException."); throw ex; } catch(PermissionException ex) { logger.warn("ResourcesAction.buildSelectAttachment (static) : PermissionException."); throw ex; } HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS); ContentCollection coll = contentService.getCollection(collectionId); expandedCollections.put(collectionId, coll); state.removeAttribute(STATE_PASTE_ALLOWED_FLAG); List this_site = new Vector(); User[] submitters = (User[]) state.getAttribute(STATE_ATTACH_SHOW_DROPBOXES); if(submitters != null) { String dropboxId = ContentHostingService.getDropboxCollection(); if(dropboxId == null) { ContentHostingService.createDropboxCollection(); dropboxId = ContentHostingService.getDropboxCollection(); } if(dropboxId == null) { // do nothing } else if(ContentHostingService.isDropboxMaintainer()) { for(int i = 0; i < submitters.length; i++) { User submitter = submitters[i]; String dbId = dropboxId + StringUtil.trimToZero(submitter.getId()) + "/"; try { ContentCollection db = ContentHostingService.getCollection(dbId); expandedCollections.put(dbId, db); List dbox = getBrowseItems(dbId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, false, state); if(dbox != null && dbox.size() > 0) { BrowseItem root = (BrowseItem) dbox.remove(0); // context.put("site", root); root.setName(submitter.getDisplayName() + " " + rb.getString("gen.drop")); root.addMembers(dbox); this_site.add(root); } } catch(IdUnusedException e) { // ignore a user's dropbox if it's not defined } } } else { try { ContentCollection db = ContentHostingService.getCollection(dropboxId); expandedCollections.put(dropboxId, db); List dbox = getBrowseItems(dropboxId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, false, state); if(dbox != null && dbox.size() > 0) { BrowseItem root = (BrowseItem) dbox.remove(0); // 
context.put("site", root); root.setName(ContentHostingService.getDropboxDisplayName()); root.addMembers(dbox); this_site.add(root); } } catch(IdUnusedException e) { // if an id is unused, ignore it } } } List members = getBrowseItems(collectionId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, navRoot.equals(homeCollectionId), state); if(members != null && members.size() > 0) { BrowseItem root = (BrowseItem) members.remove(0); if(atHome && dropboxMode) { root.setName(siteTitle + " " + rb.getString("gen.drop")); } else if(atHome) { root.setName(siteTitle + " " + rb.getString("gen.reso")); } context.put("site", root); root.addMembers(members); this_site.add(root); } context.put ("this_site", this_site); List other_sites = new Vector(); boolean show_all_sites = false; String allowed_to_see_other_sites = (String) state.getAttribute(STATE_SHOW_ALL_SITES); String show_other_sites = (String) state.getAttribute(STATE_SHOW_OTHER_SITES); context.put("show_other_sites", show_other_sites); if(Boolean.TRUE.toString().equals(allowed_to_see_other_sites)) { context.put("allowed_to_see_other_sites", Boolean.TRUE.toString()); show_all_sites = Boolean.TRUE.toString().equals(show_other_sites); } if(show_all_sites) { state.setAttribute(STATE_HIGHLIGHTED_ITEMS, highlightedItems); other_sites.addAll(readAllResources(state)); List messages = prepPage(state); context.put("other_sites", messages); if (state.getAttribute(STATE_NUM_MESSAGES) != null) { context.put("allMsgNumber", state.getAttribute(STATE_NUM_MESSAGES).toString()); context.put("allMsgNumberInt", state.getAttribute(STATE_NUM_MESSAGES)); } context.put("pagesize", ((Integer) state.getAttribute(STATE_PAGESIZE)).toString()); // find the position of the message that is the top first on the page if ((state.getAttribute(STATE_TOP_MESSAGE_INDEX) != null) && (state.getAttribute(STATE_PAGESIZE) != null)) { int topMsgPos = ((Integer)state.getAttribute(STATE_TOP_MESSAGE_INDEX)).intValue() + 1; 
context.put("topMsgPos", Integer.toString(topMsgPos)); int btmMsgPos = topMsgPos + ((Integer)state.getAttribute(STATE_PAGESIZE)).intValue() - 1; if (state.getAttribute(STATE_NUM_MESSAGES) != null) { int allMsgNumber = ((Integer)state.getAttribute(STATE_NUM_MESSAGES)).intValue(); if (btmMsgPos > allMsgNumber) btmMsgPos = allMsgNumber; } context.put("btmMsgPos", Integer.toString(btmMsgPos)); } boolean goPPButton = state.getAttribute(STATE_PREV_PAGE_EXISTS) != null; context.put("goPPButton", Boolean.toString(goPPButton)); boolean goNPButton = state.getAttribute(STATE_NEXT_PAGE_EXISTS) != null; context.put("goNPButton", Boolean.toString(goNPButton)); /* boolean goFPButton = state.getAttribute(STATE_FIRST_PAGE_EXISTS) != null; context.put("goFPButton", Boolean.toString(goFPButton)); boolean goLPButton = state.getAttribute(STATE_LAST_PAGE_EXISTS) != null; context.put("goLPButton", Boolean.toString(goLPButton)); */ context.put("pagesize", state.getAttribute(STATE_PAGESIZE)); // context.put("pagesizes", PAGESIZES); // List other_sites = new Vector(); /* * NOTE: This does not (and should not) get all sites for admin. * Getting all sites for admin is too big a request and * would result in too big a display to render in html. */ /* Map othersites = ContentHostingService.getCollectionMap(); Iterator siteIt = othersites.keySet().iterator(); while(siteIt.hasNext()) { String displayName = (String) siteIt.next(); String collId = (String) othersites.get(displayName); if(! 
collectionId.equals(collId)) { members = getBrowseItems(collId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, false, state); if(members != null && members.size() > 0) { BrowseItem root = (BrowseItem) members.remove(0); root.addMembers(members); root.setName(displayName); other_sites.add(root); } } } context.put ("other_sites", other_sites); */ } // context.put ("root", root); context.put("expandedCollections", expandedCollections); state.setAttribute(STATE_EXPANDED_COLLECTIONS, expandedCollections); } catch (IdUnusedException e) { addAlert(state, rb.getString("cannotfind")); context.put ("collectionFlag", Boolean.FALSE.toString()); } catch(TypeException e) { // logger.warn(this + "TypeException."); context.put ("collectionFlag", Boolean.FALSE.toString()); } catch(PermissionException e) { addAlert(state, rb.getString("notpermis1")); context.put ("collectionFlag", Boolean.FALSE.toString()); } context.put("homeCollection", (String) state.getAttribute (STATE_HOME_COLLECTION_ID)); context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE)); context.put ("resourceProperties", contentService.newResourceProperties ()); try { // TODO: why 'site' here? Site site = SiteService.getSite(ToolManager.getCurrentPlacement().getContext()); context.put("siteTitle", site.getTitle()); } catch (IdUnusedException e) { // logger.warn(this + e.toString()); } context.put("expandallflag", state.getAttribute(STATE_EXPAND_ALL_FLAG)); state.removeAttribute(STATE_NEED_TO_EXPAND_ALL); // inform the observing courier that we just updated the page... 
// if there are pending requests to do so they can be cleared // justDelivered(state); // pick the template based on whether client wants links or copies String template = TEMPLATE_SELECT; Object attach_links = current_stack_frame.get(STATE_ATTACH_LINKS); if(attach_links == null) { attach_links = state.getAttribute(STATE_ATTACH_LINKS); if(attach_links != null) { current_stack_frame.put(STATE_ATTACH_LINKS, attach_links); } } if(attach_links == null) { // user wants copies in hidden attachments area template = TEMPLATE_ATTACH; } return template; } // buildSelectAttachmentContext /** * Expand all the collection resources and put in EXPANDED_COLLECTIONS attribute. */ public void doList ( RunData data) { // get the state object SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); state.setAttribute (STATE_MODE, MODE_LIST); } // doList /** * Build the context for add display */ public String buildWebdavContext ( VelocityPortlet portlet, Context context, RunData data, SessionState state) { context.put("tlang",rb); // find the ContentTypeImage service context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); boolean inMyWorkspace = SiteService.isUserSite(ToolManager.getCurrentPlacement().getContext()); context.put("inMyWorkspace", Boolean.toString(inMyWorkspace)); context.put("server_url", ServerConfigurationService.getServerUrl()); context.put("site_id", ToolManager.getCurrentPlacement().getContext()); context.put("site_title", state.getAttribute(STATE_SITE_TITLE)); context.put("user_id", UserDirectoryService.getCurrentUser().getId()); context.put ("dav_group", "/dav/group/"); context.put ("dav_user", "/dav/user/"); String webdav_instructions = ServerConfigurationService.getString("webdav.instructions.url"); context.put("webdav_instructions" ,webdav_instructions); // TODO: get browser id from somewhere. 
//Session session = SessionManager.getCurrentSession(); //String browserId = session.; String browserID = UsageSessionService.getSession().getBrowserId(); if(browserID.equals(UsageSession.WIN_IE)) { context.put("isWinIEBrowser", Boolean.TRUE.toString()); } return TEMPLATE_DAV; } // buildWebdavContext /** * Build the context for delete confirmation page */ public String buildDeleteConfirmContext ( VelocityPortlet portlet, Context context, RunData data, SessionState state) { context.put("tlang",rb); // find the ContentTypeImage service context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); context.put ("collectionId", state.getAttribute (STATE_COLLECTION_ID) ); //%%%% FIXME context.put ("collectionPath", state.getAttribute (STATE_COLLECTION_PATH)); List deleteItems = (List) state.getAttribute(STATE_DELETE_ITEMS); List nonEmptyFolders = (List) state.getAttribute(STATE_DELETE_ITEMS_NOT_EMPTY); context.put ("deleteItems", deleteItems); Iterator it = nonEmptyFolders.iterator(); while(it.hasNext()) { BrowseItem folder = (BrowseItem) it.next(); addAlert(state, rb.getString("folder2") + " " + folder.getName() + " " + rb.getString("contain2") + " "); } // find the ContentTypeImage service context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); context.put ("service", state.getAttribute (STATE_CONTENT_SERVICE)); // %%STATE_MODE_RESOURCES%% //not show the public option when in dropbox mode if (RESOURCES_MODE_RESOURCES.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { context.put("dropboxMode", Boolean.FALSE); } else if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { // not show the public option or notification when in dropbox mode context.put("dropboxMode", Boolean.TRUE); } context.put("homeCollection", (String) state.getAttribute (STATE_HOME_COLLECTION_ID)); context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE)); 
context.put ("resourceProperties", ContentHostingService.newResourceProperties ()); // String template = (String) getContext(data).get("template"); return TEMPLATE_DELETE_CONFIRM; } // buildDeleteConfirmContext /** * Build the context to show the list of resource properties */ public static String buildMoreContext ( VelocityPortlet portlet, Context context, RunData data, SessionState state) { context.put("tlang",rb); // find the ContentTypeImage service context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); // find the ContentHosting service org.sakaiproject.content.api.ContentHostingService service = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE); context.put ("service", service); Map current_stack_frame = peekAtStack(state); String id = (String) current_stack_frame.get(STATE_MORE_ID); context.put ("id", id); String collectionId = (String) current_stack_frame.get(STATE_MORE_COLLECTION_ID); context.put ("collectionId", collectionId); String homeCollectionId = (String) (String) state.getAttribute (STATE_HOME_COLLECTION_ID); context.put("homeCollectionId", homeCollectionId); List cPath = getCollectionPath(state); context.put ("collectionPath", cPath); EditItem item = getEditItem(id, collectionId, data); context.put("item", item); // for the resources of type URL or plain text, show the content also try { ResourceProperties properties = service.getProperties (id); context.put ("properties", properties); String isCollection = properties.getProperty (ResourceProperties.PROP_IS_COLLECTION); if ((isCollection != null) && isCollection.equals (Boolean.FALSE.toString())) { String copyrightAlert = properties.getProperty(properties.getNamePropCopyrightAlert()); context.put("hasCopyrightAlert", copyrightAlert); String type = properties.getProperty (ResourceProperties.PROP_CONTENT_TYPE); if (type.equalsIgnoreCase (MIME_TYPE_DOCUMENT_PLAINTEXT) || type.equalsIgnoreCase 
(MIME_TYPE_DOCUMENT_HTML) || type.equalsIgnoreCase (ResourceProperties.TYPE_URL)) { ContentResource moreResource = service.getResource (id); // read the body String body = ""; byte[] content = null; try { content = moreResource.getContent(); if (content != null) { body = new String(content); } } catch(ServerOverloadException e) { // this represents server's file system is temporarily unavailable // report problem to user? log problem? } context.put ("content", body); } // if } // if else { // setup for quota - ADMIN only, collection only if (SecurityService.isSuperUser()) { try { // Getting the quota as a long validates the property long quota = properties.getLongProperty(ResourceProperties.PROP_COLLECTION_BODY_QUOTA); context.put("hasQuota", Boolean.TRUE); context.put("quota", properties.getProperty(ResourceProperties.PROP_COLLECTION_BODY_QUOTA)); } catch (Exception any) {} } } } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); context.put("notExistFlag", new Boolean(true)); } catch (TypeException e) { addAlert(state, rb.getString("typeex") + " "); } catch (PermissionException e) { addAlert(state," " + rb.getString("notpermis2") + " " + id + ". 
"); } // try-catch if (state.getAttribute(STATE_MESSAGE) == null) { context.put("notExistFlag", new Boolean(false)); } if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { // notshow the public option or notification when in dropbox mode context.put("dropboxMode", Boolean.TRUE); } else { context.put("dropboxMode", Boolean.FALSE); Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } context.put("preventPublicDisplay", preventPublicDisplay); if(preventPublicDisplay.equals(Boolean.FALSE)) { // find out about pubview boolean pubview = ContentHostingService.isInheritingPubView(id); if (!pubview) pubview = ContentHostingService.isPubView(id); context.put("pubview", new Boolean(pubview)); } } context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE)); if (state.getAttribute(COPYRIGHT_TYPES) != null) { List copyrightTypes = (List) state.getAttribute(COPYRIGHT_TYPES); context.put("copyrightTypes", copyrightTypes); } metadataGroupsIntoContext(state, context); // String template = (String) getContext(data).get("template"); return TEMPLATE_MORE; } // buildMoreContext /** * Build the context to edit the editable list of resource properties */ public static String buildEditContext (VelocityPortlet portlet, Context context, RunData data, SessionState state) { context.put("tlang",rb); // find the ContentTypeImage service Map current_stack_frame = peekAtStack(state); context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); context.put ("from", state.getAttribute (STATE_FROM)); context.put ("mycopyright", (String) state.getAttribute (STATE_MY_COPYRIGHT)); context.put("SITE_ACCESS", AccessMode.SITE.toString()); context.put("GROUP_ACCESS", AccessMode.GROUPED.toString()); context.put("INHERITED_ACCESS", 
AccessMode.INHERITED.toString()); context.put("PUBLIC_ACCESS", PUBLIC_ACCESS); String collectionId = (String) current_stack_frame.get(STATE_STACK_EDIT_COLLECTION_ID); context.put ("collectionId", collectionId); String id = (String) current_stack_frame.get(STATE_STACK_EDIT_ID); if(id == null) { id = (String) state.getAttribute(STATE_EDIT_ID); if(id == null) { id = ""; } current_stack_frame.put(STATE_STACK_EDIT_ID, id); } context.put ("id", id); String homeCollectionId = (String) state.getAttribute (STATE_HOME_COLLECTION_ID); if(homeCollectionId == null) { homeCollectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext()); state.setAttribute(STATE_HOME_COLLECTION_ID, homeCollectionId); } context.put("homeCollectionId", homeCollectionId); List collectionPath = getCollectionPath(state); context.put ("collectionPath", collectionPath); if(homeCollectionId.equals(id)) { context.put("atHome", Boolean.TRUE.toString()); } String intent = (String) current_stack_frame.get(STATE_STACK_EDIT_INTENT); if(intent == null) { intent = INTENT_REVISE_FILE; current_stack_frame.put(STATE_STACK_EDIT_INTENT, intent); } context.put("intent", intent); context.put("REVISE", INTENT_REVISE_FILE); context.put("REPLACE", INTENT_REPLACE_FILE); String show_form_items = (String) state.getAttribute(STATE_SHOW_FORM_ITEMS); if(show_form_items == null) { show_form_items = (String) state.getAttribute(STATE_SHOW_FORM_ITEMS); if(show_form_items != null) { current_stack_frame.put(STATE_SHOW_FORM_ITEMS,show_form_items); } } if(show_form_items != null) { context.put("show_form_items", show_form_items); } Collection groups = ContentHostingService.getGroupsWithReadAccess(collectionId); // TODO: does this method filter groups for this subcollection?? if(! 
groups.isEmpty()) { context.put("siteHasGroups", Boolean.TRUE.toString()); context.put("theGroupsInThisSite", groups); } // put the item into context EditItem item = (EditItem) current_stack_frame.get(STATE_STACK_EDIT_ITEM); if(item == null) { item = getEditItem(id, collectionId, data); if(item == null) { // what?? } if (state.getAttribute(STATE_MESSAGE) == null) { // got resource and sucessfully populated item with values state.setAttribute(STATE_EDIT_ALERTS, new HashSet()); current_stack_frame.put(STATE_STACK_EDIT_ITEM, item); } } item.setPossibleGroups(groups); context.put("item", item); if(item.isStructuredArtifact()) { context.put("formtype", item.getFormtype()); current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, item.getFormtype()); List listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES); if(listOfHomes == null) { ResourcesAction.setupStructuredObjects(state); listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES); } context.put("homes", listOfHomes); String formtype_readonly = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE_READONLY); if(formtype_readonly == null) { formtype_readonly = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE_READONLY); if(formtype_readonly == null) { formtype_readonly = Boolean.FALSE.toString(); } current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE_READONLY, formtype_readonly); } if(formtype_readonly != null && formtype_readonly.equals(Boolean.TRUE.toString())) { context.put("formtype_readonly", formtype_readonly); } String rootname = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_ROOTNAME); context.put("rootname", rootname); context.put("STRING", ResourcesMetadata.WIDGET_STRING); context.put("TEXTAREA", ResourcesMetadata.WIDGET_TEXTAREA); context.put("BOOLEAN", ResourcesMetadata.WIDGET_BOOLEAN); context.put("INTEGER", ResourcesMetadata.WIDGET_INTEGER); context.put("DOUBLE", ResourcesMetadata.WIDGET_DOUBLE); context.put("DATE", ResourcesMetadata.WIDGET_DATE); context.put("TIME", 
ResourcesMetadata.WIDGET_TIME); context.put("DATETIME", ResourcesMetadata.WIDGET_DATETIME); context.put("ANYURI", ResourcesMetadata.WIDGET_ANYURI); context.put("ENUM", ResourcesMetadata.WIDGET_ENUM); context.put("NESTED", ResourcesMetadata.WIDGET_NESTED); context.put("WYSIWYG", ResourcesMetadata.WIDGET_WYSIWYG); context.put("today", TimeService.newTime()); context.put("TRUE", Boolean.TRUE.toString()); } // copyright copyrightChoicesIntoContext(state, context); // put schema for metadata into context metadataGroupsIntoContext(state, context); // %%STATE_MODE_RESOURCES%% if (RESOURCES_MODE_RESOURCES.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { context.put("dropboxMode", Boolean.FALSE); } else if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { // notshow the public option or notification when in dropbox mode context.put("dropboxMode", Boolean.TRUE); } context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE)); // String template = (String) getContext(data).get("template"); return TEMPLATE_EDIT; } // buildEditContext /** * Navigate in the resource hireachy */ public static void doNavigate ( RunData data ) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); if (state.getAttribute (STATE_SELECT_ALL_FLAG)!=null && state.getAttribute (STATE_SELECT_ALL_FLAG).equals (Boolean.TRUE.toString())) { state.setAttribute (STATE_SELECT_ALL_FLAG, Boolean.FALSE.toString()); } if (state.getAttribute (STATE_EXPAND_ALL_FLAG)!=null && state.getAttribute (STATE_EXPAND_ALL_FLAG).equals (Boolean.TRUE.toString())) { state.setAttribute (STATE_EXPAND_ALL_FLAG, Boolean.FALSE.toString()); } // save the current selections Set selectedSet = new TreeSet(); String[] selectedItems = data.getParameters ().getStrings ("selectedMembers"); if(selectedItems != null) { selectedSet.addAll(Arrays.asList(selectedItems)); } state.setAttribute(STATE_LIST_SELECTIONS, selectedSet); 
String collectionId = data.getParameters().getString ("collectionId");
	String navRoot = data.getParameters().getString("navRoot");
	state.setAttribute(STATE_NAVIGATION_ROOT, navRoot);

	// the exception message
	// Verify the target collection is accessible before switching to it;
	// each failure mode becomes a user-visible alert rather than an exception.
	try
	{
		ContentHostingService.checkCollection(collectionId);
	}
	catch(PermissionException e)
	{
		addAlert(state, " " + rb.getString("notpermis3") + " " );
	}
	catch (IdUnusedException e)
	{
		addAlert(state, " " + rb.getString("notexist2") + " ");
	}
	catch (TypeException e)
	{
		addAlert(state," " + rb.getString("notexist2") + " ");
	}

	// No alert was raised: commit the navigation.
	if (state.getAttribute(STATE_MESSAGE) == null)
	{
		String oldCollectionId = (String) state.getAttribute(STATE_COLLECTION_ID);
		// update this folder id in the set to be event-observed
		removeObservingPattern(oldCollectionId, state);
		addObservingPattern(collectionId, state);

		// Make the requested collection current and collapse all expanded folders.
		state.setAttribute(STATE_COLLECTION_ID, collectionId);
		state.setAttribute(STATE_EXPANDED_COLLECTIONS, new HashMap());
	}

}	// doNavigate

/**
 * Show information about WebDAV.
 * Switches the tool into MODE_DAV, clearing selections and cancelling any
 * copy/move operation that is in progress.
 */
public void doShow_webdav ( RunData data )
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());
	state.setAttribute (STATE_MODE, MODE_DAV);

	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}

	// cancel move if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
	{
		initMoveContext(state);
	}

}	// doShow_webdav

/**
 * initiate creation of one or more resource items (folders, file uploads, html docs, text docs, or urls)
 * default type is folder
 */
public static void doCreate(RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	ParameterParser params = data.getParameters ();

	// Accumulated validation messages for the create flow (created on demand).
	Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
	if(alerts == null)
	{
		alerts = new HashSet();
		state.setAttribute(STATE_CREATE_ALERTS, alerts);
	}

	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}

	// cancel move if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
	{
		initMoveContext(state);
	}

	state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());

	// Default to a file upload when no explicit type was requested.
	String itemType = params.getString("itemType");
	if(itemType == null || "".equals(itemType))
	{
		itemType = TYPE_UPLOAD;
	}

	// "peek" reuses the current suspended-operations frame; anything else
	// starts a fresh frame for this create operation.
	String stackOp = params.getString("suspended-operations-stack");
	Map current_stack_frame = null;
	if(stackOp != null && stackOp.equals("peek"))
	{
		current_stack_frame = peekAtStack(state);
	}
	else
	{
		current_stack_frame = pushOnStack(state);
	}

	String encoding = data.getRequest().getCharacterEncoding();

	// Resolve the default copyright status, falling back to server config.
	String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
	if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
	{
		defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
		state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
	}

	Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);

	String collectionId = params.getString ("collectionId");
	current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId);

	// Pre-build the blank edit items that back the create form.
	List new_items = newEditItems(collectionId, itemType, encoding,
defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS); current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items); current_stack_frame.put(STATE_STACK_CREATE_TYPE, itemType); current_stack_frame.put(STATE_STACK_CREATE_NUMBER, new Integer(1)); state.setAttribute(STATE_CREATE_ALERTS, new HashSet()); current_stack_frame.put(STATE_CREATE_MISSING_ITEM, new HashSet()); current_stack_frame.remove(STATE_STACK_STRUCTOBJ_TYPE); current_stack_frame.put(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_CREATE_INIT); state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_CREATE_INIT); } // doCreate protected static List newEditItems(String collectionId, String itemtype, String encoding, String defaultCopyrightStatus, boolean preventPublicDisplay, int number) { List new_items = new Vector(); ContentCollection collection = null; AccessMode inheritedAccess = AccessMode.INHERITED; // Collection inheritedGroups = new Vector(); try { collection = ContentHostingService.getCollection(collectionId); inheritedAccess = collection.getAccess(); // inheritedGroups = collection.getGroups(); if(AccessMode.INHERITED == inheritedAccess) { inheritedAccess = collection.getInheritedAccess(); // inheritedGroups = collection.getInheritedGroups(); } } catch(PermissionException e) { //alerts.add(rb.getString("notpermis4")); e.printStackTrace(); } catch (IdUnusedException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (TypeException e) { // TODO Auto-generated catch block e.printStackTrace(); } boolean pubviewset = ContentHostingService.isInheritingPubView(collectionId) || ContentHostingService.isPubView(collectionId); //Collection possibleGroups = ContentHostingService.getGroupsWithReadAccess(collectionId); boolean isInDropbox = ContentHostingService.isInDropbox(collectionId); Collection possibleGroups = ContentHostingService.getGroupsWithAddPermission(collectionId); Site site = null; Collection site_groups = null; try { site = 
// NOTE(review): this resumes mid-statement inside an enclosing factory method
// (apparently newEditItems) whose opening lines are above this excerpt; the
// code below finishes building the list of blank EditItem beans used by the
// "create" wizard. Confirm the enclosing signature before relying on this note.
SiteService.getSite(ToolManager.getCurrentPlacement().getContext());
}
catch (IdUnusedException e)
{
	// TODO Auto-generated catch block
	e.printStackTrace();
}
// fall back to an empty group list if the site lookup failed
if(site != null)
{
	site_groups = site.getGroups();
}
else
{
	site_groups = new Vector();
}
// groups whose access this collection inherits (explicit first, then inherited)
Collection inherited_access_groups = collection.getGroups();
if(inherited_access_groups == null || inherited_access_groups.isEmpty())
{
	inherited_access_groups = collection.getInheritedGroups();
}
if(inherited_access_groups == null)
{
	inherited_access_groups = new Vector();
}
// groups in which the current user is permitted to add resources
Collection allowedAddGroups = null;
if(AccessMode.GROUPED == inheritedAccess)
{
	allowedAddGroups = ContentHostingService.getGroupsWithAddPermission(collectionId);
}
else
{
	allowedAddGroups = ContentHostingService.getGroupsWithAddPermission(ContentHostingService.getSiteCollection(site.getId()));
}
if(allowedAddGroups == null)
{
	allowedAddGroups = new Vector();
}
// pre-create the maximum number of (initially blank) items the create form can show
for(int i = 0; i < CREATE_MAX_ITEMS; i++)
{
	EditItem item = new EditItem(itemtype);
	if(encoding != null)
	{
		item.setEncoding(encoding);
	}
	item.setInDropbox(isInDropbox);
	if(inheritedAccess == null || AccessMode.SITE == inheritedAccess)
	{
		item.setInheritedAccess(AccessMode.INHERITED.toString());
	}
	else
	{
		item.setInheritedAccess(inheritedAccess.toString());
	}
	item.setAllSiteGroups(site_groups);
	item.setInheritedGroupRefs(inherited_access_groups);
	item.setAllowedAddGroupRefs(allowedAddGroups);
	item.setCopyrightStatus(defaultCopyrightStatus);
	new_items.add(item);
	// item.setPossibleGroups(new Vector(possibleGroups));
	// if(inheritedGroups != null)
	// {
	// item.setInheritedGroups(inheritedGroups);
	// }
	if(preventPublicDisplay)
	{
		item.setPubviewPossible(false);
		item.setPubviewInherited(false);
		item.setPubview(false);
	}
	else
	{
		item.setPubviewPossible(true);
		item.setPubviewInherited(pubviewset);
		//item.setPubview(pubviewset);
	}
}
return new_items;
}

/**
 * Append a message to the accumulated "create"-context alert text held in the
 * session state under STATE_CREATE_MESSAGE, creating the attribute if absent.
 * @param state the tool session state holding the alert text
 * @param message the alert text to append
 */
public static void addCreateContextAlert(SessionState state, String message)
{
	String soFar = (String) state.getAttribute(STATE_CREATE_MESSAGE);
	if (soFar != null)
	{
		soFar = soFar + " " + message;
	}
	else
	{
		soFar = message;
	}
	state.setAttribute(STATE_CREATE_MESSAGE, soFar);
}	// addCreateContextAlert

/**
 * initiate creation of one or more resource items (file uploads, html docs, text docs, or urls -- not folders)
 * default type is file upload
 */
/**
 * Dispatcher for the "create" wizard: the request parameter "flow" selects
 * which action is processed (cancel, updateNumber, create, updateDocType,
 * addInstance, linkResource, showOptional, hideOptional).
 * @param data the current request
 */
public static void doCreateitem(RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	ParameterParser params = data.getParameters ();
	state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());
	Map current_stack_frame = peekAtStack(state);
	// whether to pop the helper stack frame when this request finishes
	boolean pop = false;
	String collectionId = params.getString("collectionId");
	String itemType = params.getString("itemType");
	String flow = params.getString("flow");
	Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
	Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
	if(alerts == null)
	{
		alerts = new HashSet();
		state.setAttribute(STATE_CREATE_ALERTS, alerts);
	}
	// names of required form fields the user left unset (reported back to the view)
	Set missing = new HashSet();
	if(flow == null || flow.equals("cancel"))
	{
		pop = true;
	}
	else if(flow.equals("updateNumber"))
	{
		captureMultipleValues(state, params, false);
		int number = params.getInt("numberOfItems");
		Integer numberOfItems = new Integer(number);
		current_stack_frame.put(ResourcesAction.STATE_STACK_CREATE_NUMBER, numberOfItems);
		// clear display of error messages
		state.setAttribute(STATE_CREATE_ALERTS, new HashSet());
		List items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
		if(items == null)
		{
			String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
			if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
			{
				defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
				state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
			}
			String encoding = data.getRequest().getCharacterEncoding();
			items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);
		}
		current_stack_frame.put(STATE_STACK_CREATE_ITEMS, items);
		Iterator it = items.iterator();
		while(it.hasNext())
		{
			EditItem item = (EditItem) it.next();
			item.clearMissing();
		}
		state.removeAttribute(STATE_MESSAGE);
	}
	else if(flow.equals("create") && TYPE_FOLDER.equals(itemType))
	{
		// Get the items
		captureMultipleValues(state, params, true);
		alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
		if(alerts.isEmpty())
		{
			// Save the items
			createFolders(state);
			alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
			if(alerts.isEmpty())
			{
				pop = true;
			}
		}
	}
	else if(flow.equals("create") && TYPE_UPLOAD.equals(itemType))
	{
		captureMultipleValues(state, params, true);
		alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
		if(alerts.isEmpty())
		{
			createFiles(state);
			alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
			if(alerts.isEmpty())
			{
				pop = true;
			}
		}
	}
	else if(flow.equals("create") && MIME_TYPE_DOCUMENT_HTML.equals(itemType))
	{
		captureMultipleValues(state, params, true);
		alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
		if(alerts.isEmpty())
		{
			createFiles(state);
			alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
			if(alerts.isEmpty())
			{
				pop = true;
			}
		}
	}
	else if(flow.equals("create") && MIME_TYPE_DOCUMENT_PLAINTEXT.equals(itemType))
	{
		captureMultipleValues(state, params, true);
		alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
		if(alerts.isEmpty())
		{
			createFiles(state);
			alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
			if(alerts.isEmpty())
			{
				pop = true;
			}
		}
	}
	else if(flow.equals("create") && TYPE_URL.equals(itemType))
	{
		captureMultipleValues(state, params, true);
		alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
		if(alerts.isEmpty())
		{
			createUrls(state);
			alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
			if(alerts.isEmpty())
			{
				pop = true;
			}
		}
	}
	else if(flow.equals("create") && TYPE_FORM.equals(itemType))
	{
		captureMultipleValues(state, params, true);
		alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
		if(alerts == null)
		{
			alerts = new HashSet();
			state.setAttribute(STATE_CREATE_ALERTS, alerts);
		}
		if(alerts.isEmpty())
		{
			createStructuredArtifacts(state);
			alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
			if(alerts.isEmpty())
			{
				pop = true;
			}
		}
	}
	else if(flow.equals("create"))
	{
		// "create" with an unrecognized item type
		captureMultipleValues(state, params, true);
		alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
		if(alerts == null)
		{
			alerts = new HashSet();
			state.setAttribute(STATE_CREATE_ALERTS, alerts);
		}
		alerts.add("Invalid item type");
		state.setAttribute(STATE_CREATE_ALERTS, alerts);
	}
	else if(flow.equals("updateDocType"))
	{
		// captureMultipleValues(state, params, false);
		String formtype = params.getString("formtype");
		if(formtype == null || formtype.equals(""))
		{
			alerts.add("Must select a form type");
			missing.add("formtype");
		}
		current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype);
		setupStructuredObjects(state);
	}
	else if(flow.equals("addInstance"))
	{
		captureMultipleValues(state, params, false);
		String field = params.getString("field");
		List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
		if(new_items == null)
		{
			String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
			if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
			{
				defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
				state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
			}
			String encoding = data.getRequest().getCharacterEncoding();
			new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);
			current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
		}
		// add a repeated instance of the named field to the first item's form
		EditItem item = (EditItem) new_items.get(0);
		addInstance(field, item.getProperties());
		ResourcesMetadata form = item.getForm();
		List flatList = form.getFlatList();
		item.setProperties(flatList);
	}
	else if(flow.equals("linkResource") && TYPE_FORM.equals(itemType))
	{
		captureMultipleValues(state, params, false);
		createLink(data, state);
	}
	else if(flow.equals("showOptional"))
	{
		captureMultipleValues(state, params, false);
		// "twiggle" = expand/collapse toggle for one item's optional metadata group
		int twiggleNumber = params.getInt("twiggleNumber", 0);
		String metadataGroup = params.getString("metadataGroup");
		List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
		if(new_items == null)
		{
			String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
			if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
			{
				defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
				state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
			}
			String encoding = data.getRequest().getCharacterEncoding();
			new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);
			current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
		}
		if(new_items != null && new_items.size() > twiggleNumber)
		{
			EditItem item = (EditItem) new_items.get(twiggleNumber);
			if(item != null)
			{
				item.showMetadataGroup(metadataGroup);
			}
		}
		// clear display of error messages
		state.setAttribute(STATE_CREATE_ALERTS, new HashSet());
		Iterator it = new_items.iterator();
		while(it.hasNext())
		{
			EditItem item = (EditItem) it.next();
			item.clearMissing();
		}
	}
	else if(flow.equals("hideOptional"))
	{
		captureMultipleValues(state, params, false);
		int twiggleNumber = params.getInt("twiggleNumber", 0);
		String metadataGroup = params.getString("metadataGroup");
		List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
		if(new_items == null)
		{
			String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
			if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
			{
				defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
				state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
			}
			String encoding = data.getRequest().getCharacterEncoding();
			new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);
			current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
		}
		if(new_items != null && new_items.size() > twiggleNumber)
		{
			EditItem item = (EditItem) new_items.get(twiggleNumber);
			if(item != null)
			{
				item.hideMetadataGroup(metadataGroup);
			}
		}
		// clear display of error messages
		state.setAttribute(STATE_CREATE_ALERTS, new HashSet());
		Iterator it = new_items.iterator();
		while(it.hasNext())
		{
			EditItem item = (EditItem) it.next();
			item.clearMissing();
		}
	}
	// flush accumulated alerts to the user-visible create-context message
	alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
	if(alerts == null)
	{
		alerts = new HashSet();
		state.setAttribute(STATE_CREATE_ALERTS, alerts);
	}
	Iterator alertIt = alerts.iterator();
	while(alertIt.hasNext())
	{
		String alert = (String) alertIt.next();
		addCreateContextAlert(state, alert);
		//addAlert(state, alert);
	}
	alerts.clear();
	current_stack_frame.put(STATE_CREATE_MISSING_ITEM, missing);
	if(pop)
	{
		List new_items = (List) current_stack_frame.get(ResourcesAction.STATE_HELPER_NEW_ITEMS);
		String helper_changed = (String) state.getAttribute(STATE_HELPER_CHANGED);
		if(Boolean.TRUE.toString().equals(helper_changed))
		{
			// get list of attachments?
if(new_items != null) { List attachments = (List) state.getAttribute(STATE_ATTACHMENTS); if(attachments == null) { attachments = EntityManager.newReferenceList(); state.setAttribute(STATE_ATTACHMENTS, attachments); } Iterator it = new_items.iterator(); while(it.hasNext()) { AttachItem item = (AttachItem) it.next(); try { ContentResource resource = ContentHostingService.getResource(item.getId()); if (checkSelctItemFilter(resource, state)) { attachments.add(resource.getReference()); } else { it.remove(); addAlert(state, (String) rb.getFormattedMessage("filter", new Object[]{item.getDisplayName()})); } } catch (PermissionException e) { addAlert(state, (String) rb.getFormattedMessage("filter", new Object[]{item.getDisplayName()})); } catch (IdUnusedException e) { addAlert(state, (String) rb.getFormattedMessage("filter", new Object[]{item.getDisplayName()})); } catch (TypeException e) { addAlert(state, (String) rb.getFormattedMessage("filter", new Object[]{item.getDisplayName()})); } Reference ref = EntityManager.newReference(ContentHostingService.getReference(item.getId())); } } } popFromStack(state); resetCurrentMode(state); if(!ResourcesAction.isStackEmpty(state) && new_items != null) { current_stack_frame = peekAtStack(state); List old_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS); if(old_items == null) { old_items = new Vector(); current_stack_frame.put(STATE_HELPER_NEW_ITEMS, old_items); } old_items.addAll(new_items); } } } // doCreateitem private static void createLink(RunData data, SessionState state) { ParameterParser params = data.getParameters (); Map current_stack_frame = peekAtStack(state); String field = params.getString("field"); if(field == null) { } else { current_stack_frame.put(ResourcesAction.STATE_ATTACH_FORM_FIELD, field); } //state.setAttribute(ResourcesAction.STATE_MODE, ResourcesAction.MODE_HELPER); state.setAttribute(ResourcesAction.STATE_RESOURCES_HELPER_MODE, ResourcesAction.MODE_ATTACHMENT_SELECT); 
	// (continuation of createLink: single attachment only, and cancel any
	// copy/move operation left in progress before entering the picker)
	state.setAttribute(ResourcesAction.STATE_ATTACH_CARDINALITY, ResourcesAction.CARDINALITY_SINGLE);
	// put a copy of the attachments into the state
	// state.setAttribute(ResourcesAction.STATE_ATTACHMENTS, EntityManager.newReferenceList());
	// whether there is already an attachment
	/*
	if (attachments.size() > 0)
	{
		sstate.setAttribute(ResourcesAction.STATE_HAS_ATTACHMENT_BEFORE, Boolean.TRUE);
	}
	else
	{
		sstate.setAttribute(ResourcesAction.STATE_HAS_ATTACHMENT_BEFORE, Boolean.FALSE);
	}
	*/
	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}
	// cancel move if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
	{
		initMoveContext(state);
	}
}

/**
 * Add a new StructuredArtifact to ContentHosting for each EditItem in the state attribute named STATE_STACK_CREATE_ITEMS.
 * The number of items to be added is indicated by the state attribute named STATE_STACK_CREATE_NUMBER, and
 * the items are added to the collection identified by the state attribute named STATE_STACK_CREATE_COLLECTION_ID.
 * @param state the tool session state
 */
private static void createStructuredArtifacts(SessionState state)
{
	Map current_stack_frame = peekAtStack(state);
	// resolve target collection: stack frame, then state, then the site root collection
	String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID);
	if(collectionId == null || collectionId.trim().length() == 0)
	{
		collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID);
		if(collectionId == null || collectionId.trim().length() == 0)
		{
			collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
		}
		current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId);
	}
	List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
	if(new_items == null)
	{
		// lazily build the item list if this frame has none yet
		String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
		if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
		{
			defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
			state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
		}
		String itemType = (String) current_stack_frame.get(STATE_STACK_CREATE_TYPE);
		if(itemType == null)
		{
			itemType = (String) state.getAttribute(STATE_CREATE_TYPE);
			if(itemType == null)
			{
				itemType = ResourcesAction.TYPE_FORM;
			}
			current_stack_frame.put(STATE_STACK_CREATE_TYPE, itemType);
		}
		String encoding = (String) state.getAttribute(STATE_ENCODING);
		new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, true, CREATE_MAX_ITEMS);
		current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
	}
	Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
	if(alerts == null)
	{
		alerts = new HashSet();
		state.setAttribute(STATE_CREATE_ALERTS, alerts);
	}
	Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
	if(number == null)
	{
		number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
		if(number == null)
		{
			number = new Integer(1);
		}
		current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
	}
	int numberOfItems = number.intValue();
	// (continuation of createStructuredArtifacts: validate each non-blank item
	// against the form schema and save it as an XML resource)
	SchemaBean rootSchema = (SchemaBean) current_stack_frame.get(STATE_STACK_STRUCT_OBJ_SCHEMA);
	SchemaNode rootNode = rootSchema.getSchema();
	outerloop: for(int i = 0; i < numberOfItems; i++)
	{
		EditItem item = (EditItem) new_items.get(i);
		if(item.isBlank())
		{
			continue;
		}
		SaveArtifactAttempt attempt = new SaveArtifactAttempt(item, rootNode);
		validateStructuredArtifact(attempt);
		List errors = attempt.getErrors();
		if(errors.isEmpty())
		{
			try
			{
				ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();
				resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, item.getName());
				resourceProperties.addProperty (ResourceProperties.PROP_DESCRIPTION, item.getDescription());
				resourceProperties.addProperty(ResourceProperties.PROP_CONTENT_ENCODING, "UTF-8");
				resourceProperties.addProperty(ResourceProperties.PROP_STRUCTOBJ_TYPE, item.getFormtype());
				resourceProperties.addProperty(ContentHostingService.PROP_ALTERNATE_REFERENCE, org.sakaiproject.metaobj.shared.mgt.MetaobjEntityManager.METAOBJ_ENTITY_PREFIX);
				List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
				saveMetadata(resourceProperties, metadataGroups, item);
				String filename = Validator.escapeResourceName(item.getName()).trim();
				String extension = ".xml";
				// NOTE(review): attemptNum/attemptStr are never updated here; uniqueness
				// is handled by addResource(..., MAXIMUM_ATTEMPTS_FOR_UNIQUENESS, ...)
				int attemptNum = 0;
				String attemptStr = "";
				String newResourceId = collectionId + filename + attemptStr + extension;
				if(newResourceId.length() > ContentHostingService.MAXIMUM_RESOURCE_ID_LENGTH)
				{
					alerts.add(rb.getString("toolong") + " " + newResourceId);
					continue outerloop;
				}
				// only keep requested groups the user is actually allowed to add to
				SortedSet groups = new TreeSet(item.getEntityGroupRefs());
				groups.retainAll(item.getAllowedAddGroupRefs());
				try
				{
					ContentResource resource = ContentHostingService.addResource (filename + extension, collectionId, MAXIMUM_ATTEMPTS_FOR_UNIQUENESS, MIME_TYPE_STRUCTOBJ, item.getContent(), resourceProperties, groups, item.getNotification());
					Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
					if(preventPublicDisplay == null)
					{
						preventPublicDisplay = Boolean.FALSE;
						state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
					}
					if(! preventPublicDisplay.booleanValue() && item.isPubview())
					{
						ContentHostingService.setPubView(resource.getId(), true);
					}
					String mode = (String) state.getAttribute(STATE_MODE);
					if(MODE_HELPER.equals(mode))
					{
						String helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE);
						if(helper_mode != null && MODE_ATTACHMENT_NEW_ITEM_INIT.equals(helper_mode))
						{
							// add to the attachments vector
							List attachments = EntityManager.newReferenceList();
							Reference ref = EntityManager.newReference(ContentHostingService.getReference(resource.getId()));
							attachments.add(ref);
							cleanupState(state);
							state.setAttribute(STATE_ATTACHMENTS, attachments);
						}
						else
						{
							Object attach_links = current_stack_frame.get(STATE_ATTACH_LINKS);
							if(attach_links == null)
							{
								attach_links = state.getAttribute(STATE_ATTACH_LINKS);
								if(attach_links != null)
								{
									current_stack_frame.put(STATE_ATTACH_LINKS, attach_links);
								}
							}
							if(attach_links == null)
							{
								attachItem(resource.getId(), state);
							}
							else
							{
								attachLink(resource.getId(), state);
							}
						}
					}
				}
				catch(PermissionException e)
				{
					alerts.add(rb.getString("notpermis12"));
					continue outerloop;
				}
				catch(IdInvalidException e)
				{
					alerts.add(rb.getString("title") + " " + e.getMessage ());
					continue outerloop;
				}
				catch(IdLengthException e)
				{
					alerts.add(rb.getString("toolong") + " " + e.getMessage());
					continue outerloop;
				}
				catch(IdUniquenessException e)
				{
					alerts.add("Could not add this item to this folder");
					continue outerloop;
				}
				catch(InconsistentException e)
				{
					alerts.add(RESOURCE_INVALID_TITLE_STRING);
					continue outerloop;
				}
				catch(OverQuotaException e)
				{
					alerts.add(rb.getString("overquota"));
					continue outerloop;
				}
				catch(ServerOverloadException e)
				{
					alerts.add(rb.getString("failed"));
					continue outerloop;
				}
			}
			catch(RuntimeException e)
			{
				logger.warn("ResourcesAction.createStructuredArtifacts ***** Unknown Exception ***** " + e.getMessage());
				alerts.add(rb.getString("failed"));
			}
			// make sure the target collection shows as expanded in the list view
			HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
			if(currentMap == null)
			{
				// do nothing
			}
			else if(!currentMap.containsKey(collectionId))
			{
				try
				{
					currentMap.put (collectionId,ContentHostingService.getCollection (collectionId));
					state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
					// add this folder id into the set to be event-observed
					addObservingPattern(collectionId, state);
				}
				catch (IdUnusedException ignore)
				{
				}
				catch (TypeException ignore)
				{
				}
				catch (PermissionException ignore)
				{
				}
			}
			state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
		}
		else
		{
			// validation failed: surface each schema error as an alert
			Iterator errorIt = errors.iterator();
			while(errorIt.hasNext())
			{
				ValidationError error = (ValidationError) errorIt.next();
				alerts.add(error.getDefaultMessage());
			}
		}
	}
	state.setAttribute(STATE_CREATE_ALERTS, alerts);
}

/**
 * Convert from a hierarchical list of ResourcesMetadata objects to an org.w3.dom.Document,
 * then to a string representation, then to a metaobj ElementBean. Validate the ElementBean
 * against a SchemaBean. If it validates, save the string representation. Otherwise, on
 * return, the parameter contains a non-empty list of ValidationError objects describing the
 * problems.
 * @param attempt A wrapper for the EditItem object which contains the hierarchical list of
 * ResourcesMetadata objects for this form. Also contains an initially empty list of
 * ValidationError objects that describes any of the problems found in validating the form.
 */
private static void validateStructuredArtifact(SaveArtifactAttempt attempt)
{
	EditItem item = attempt.getItem();
	ResourcesMetadata form = item.getForm();
	// depth-first walk of the form tree, building a DOM document as we go
	Stack processStack = new Stack();
	processStack.push(form);
	// maps a child's dotted name to the DOM node it should be appended under
	Map parents = new Hashtable();
	Document doc = Xml.createDocument();
	int count = 0;
	while(!processStack.isEmpty())
	{
		Object object = processStack.pop();
		if(object instanceof ResourcesMetadata)
		{
			ResourcesMetadata element = (ResourcesMetadata) object;
			Element node = doc.createElement(element.getLocalname());
			if(element.isNested())
			{
				// push a carrier so the node is attached after its children are processed
				processStack.push(new ElementCarrier(node, element.getDottedname()));
				List children = element.getNestedInstances();
				//List children = element.getNested();
				for(int k = children.size() - 1; k >= 0; k--)
				{
					ResourcesMetadata child = (ResourcesMetadata) children.get(k);
					processStack.push(child);
					parents.put(child.getDottedname(), node);
				}
			}
			else
			{
				// leaf: serialize each instance value into a text node
				List values = element.getInstanceValues();
				Iterator valueIt = values.iterator();
				while(valueIt.hasNext())
				{
					Object value = valueIt.next();
					if(value == null)
					{
						// do nothing
					}
					else if(value instanceof String)
					{
						node.appendChild(doc.createTextNode((String)value));
					}
					else if(value instanceof Time)
					{
						// format Sakai Time as zero-padded yyyy-MM-dd
						Time time = (Time) value;
						TimeBreakdown breakdown = time.breakdownLocal();
						int year = breakdown.getYear();
						int month = breakdown.getMonth();
						int day = breakdown.getDay();
						String date = "" + year + (month < 10 ? "-0" : "-") + month + (day < 10 ? "-0" : "-") + day;
						node.appendChild(doc.createTextNode(date));
					}
					else if(value instanceof Date)
					{
						Date date = (Date) value;
						SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
						String formatted = df.format(date);
						node.appendChild(doc.createTextNode(formatted));
					}
					else if(value instanceof Reference)
					{
						node.appendChild(doc.createTextNode(((Reference)value).getId()));
					}
					else
					{
						node.appendChild(doc.createTextNode(value.toString()));
					}
				}
				Element parent = (Element) parents.get(element.getDottedname());
				if(parent == null)
				{
					doc.appendChild(node);
					count++;
				}
				else
				{
					parent.appendChild(node);
				}
			}
		}
		else if(object instanceof ElementCarrier)
		{
			ElementCarrier carrier = (ElementCarrier) object;
			Element node = carrier.getElement();
			Element parent = (Element) parents.get(carrier.getParent());
			if(parent == null)
			{
				doc.appendChild(node);
				count++;
			}
			else
			{
				parent.appendChild(node);
			}
		}
	}
	String content = Xml.writeDocumentToString(doc);
	item.setContent(content);
	StructuredArtifactValidationService validator = (StructuredArtifactValidationService) ComponentManager.get("org.sakaiproject.metaobj.shared.mgt.StructuredArtifactValidationService");
	List errors = new ArrayList();
	// convert the String representation to an ElementBean object. If that fails,
	// add an error and return.
	ElementBean bean = null;
	SAXBuilder builder = new SAXBuilder();
	StringReader reader = new StringReader(content);
	try
	{
		org.jdom.Document jdoc = builder.build(reader);
		bean = new ElementBean(jdoc.getRootElement(), attempt.getSchema(), true);
	}
	catch (JDOMException e)
	{
		// add message to list of errors
		errors.add(new ValidationError("","",null,"JDOMException"));
	}
	catch (IOException e)
	{
		// add message to list of errors
		errors.add(new ValidationError("","",null,"IOException"));
	}
	// call this.validate(bean, rootSchema, errors) and add results to errors list.
	// (continuation of validateStructuredArtifact: run schema validation and
	// report the accumulated errors back through the attempt wrapper)
	if(bean == null)
	{
		// add message to list of errors
		errors.add(new ValidationError("","",null,"Bean is null"));
	}
	else
	{
		errors.addAll(validator.validate(bean));
	}
	attempt.setErrors(errors);
}	// validateStructuredArtifact

/**
 * Add a new folder to ContentHosting for each EditItem in the state attribute named STATE_STACK_CREATE_ITEMS.
 * The number of items to be added is indicated by the state attribute named STATE_STACK_CREATE_NUMBER, and
 * the items are added to the collection identified by the state attribute named STATE_STACK_CREATE_COLLECTION_ID.
 * @param state the tool session state
 */
protected static void createFolders(SessionState state)
{
	Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
	if(alerts == null)
	{
		alerts = new HashSet();
		state.setAttribute(STATE_CREATE_ALERTS, alerts);
	}
	Map current_stack_frame = peekAtStack(state);
	// resolve target collection: stack frame, then state, then the site root collection
	String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID);
	if(collectionId == null || collectionId.trim().length() == 0)
	{
		collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID);
		if(collectionId == null || collectionId.trim().length() == 0)
		{
			collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
		}
		current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId);
	}
	List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
	if(new_items == null)
	{
		String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
		if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
		{
			defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
			state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
		}
		String encoding = (String) state.getAttribute(STATE_ENCODING);
		Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
		new_items = newEditItems(collectionId, TYPE_FOLDER, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);
		current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
	}
	Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
	if(number == null)
	{
		number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
		current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
	}
	if(number == null)
	{
		number = new Integer(1);
		current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
	}
	int numberOfFolders = 1;
	numberOfFolders = number.intValue();
	outerloop: for(int i = 0; i < numberOfFolders; i++)
	{
		EditItem item = (EditItem) new_items.get(i);
		if(item.isBlank())
		{
			continue;
		}
		String newCollectionId = collectionId + Validator.escapeResourceName(item.getName()) + Entity.SEPARATOR;
		if(newCollectionId.length() > ContentHostingService.MAXIMUM_RESOURCE_ID_LENGTH)
		{
			alerts.add(rb.getString("toolong") + " " + newCollectionId);
			continue outerloop;
		}
		ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();
		try
		{
			resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, item.getName());
			resourceProperties.addProperty (ResourceProperties.PROP_DESCRIPTION, item.getDescription());
			List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
			saveMetadata(resourceProperties, metadataGroups, item);
			// only keep requested groups the user is actually allowed to add to
			SortedSet groups = new TreeSet(item.getEntityGroupRefs());
			groups.retainAll(item.getAllowedAddGroupRefs());
			ContentCollection collection = ContentHostingService.addCollection (newCollectionId, resourceProperties, groups);
			Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
			if(preventPublicDisplay == null)
			{
				preventPublicDisplay = Boolean.FALSE;
				state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
			}
			if(!preventPublicDisplay.booleanValue() && item.isPubview())
			{
				ContentHostingService.setPubView(collection.getId(), true);
			}
		}
		catch (IdUsedException e)
		{
			alerts.add(rb.getString("resotitle") + " " + item.getName() + " " + rb.getString("used4"));
		}
		catch (IdInvalidException e)
		{
			alerts.add(rb.getString("title") + " " + e.getMessage ());
		}
		catch (PermissionException e)
		{
			alerts.add(rb.getString("notpermis5") + " " + item.getName());
		}
		catch (InconsistentException e)
		{
			alerts.add(RESOURCE_INVALID_TITLE_STRING);
		}	// try-catch
	}
	// make sure the target collection shows as expanded in the list view
	// NOTE(review): unlike createFiles below, currentMap is not null-checked here
	// before containsKey() — confirm STATE_EXPANDED_COLLECTIONS is always set.
	HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
	if(!currentMap.containsKey(collectionId))
	{
		try
		{
			currentMap.put (collectionId,ContentHostingService.getCollection (collectionId));
			state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
			// add this folder id into the set to be event-observed
			addObservingPattern(collectionId, state);
		}
		catch (IdUnusedException ignore)
		{
		}
		catch (TypeException ignore)
		{
		}
		catch (PermissionException ignore)
		{
		}
	}
	state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
	state.setAttribute(STATE_CREATE_ALERTS, alerts);
}	// createFolders

/**
 * Add a new file to ContentHosting for each EditItem in the state attribute named STATE_STACK_CREATE_ITEMS.
 * The number of items to be added is indicated by the state attribute named STATE_STACK_CREATE_NUMBER, and
 * the items are added to the collection identified by the state attribute named STATE_STACK_CREATE_COLLECTION_ID.
 * @param state the tool session state
 */
protected static void createFiles(SessionState state)
{
	Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
	if(alerts == null)
	{
		alerts = new HashSet();
		state.setAttribute(STATE_CREATE_ALERTS, alerts);
	}
	Map current_stack_frame = peekAtStack(state);
	// resolve target collection: stack frame, then state, then the site root collection
	String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID);
	if(collectionId == null || collectionId.trim().length() == 0)
	{
		collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID);
		if(collectionId == null || collectionId.trim().length() == 0)
		{
			collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
		}
		current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId);
	}
	List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
	if(new_items == null)
	{
		String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
		if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
		{
			defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
			state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
		}
		String encoding = (String) state.getAttribute(STATE_ENCODING);
		Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
		// NOTE(review): TYPE_FOLDER is passed here even though this method creates
		// files — looks like a copy-paste from createFolders; confirm intent.
		new_items = newEditItems(collectionId, TYPE_FOLDER, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);
		current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
	}
	Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
	if(number == null)
	{
		number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
		current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
	}
	if(number == null)
	{
		number = new Integer(1);
		current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
	}
	int numberOfItems = 1;
	numberOfItems = number.intValue();
	outerloop: for(int i = 0; i < numberOfItems; i++)
	{
		EditItem item = (EditItem) new_items.get(i);
		if(item.isBlank())
		{
			continue;
		}
		ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();
		resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, item.getName());
		resourceProperties.addProperty (ResourceProperties.PROP_DESCRIPTION, item.getDescription());
		resourceProperties.addProperty (ResourceProperties.PROP_COPYRIGHT, item.getCopyrightInfo());
		resourceProperties.addProperty(ResourceProperties.PROP_COPYRIGHT_CHOICE, item.getCopyrightStatus());
		if (item.hasCopyrightAlert())
		{
			resourceProperties.addProperty (ResourceProperties.PROP_COPYRIGHT_ALERT, Boolean.toString(item.hasCopyrightAlert()));
		}
		else
		{
			resourceProperties.removeProperty (ResourceProperties.PROP_COPYRIGHT_ALERT);
		}
		BasicRightsAssignment rightsObj = item.getRights();
		rightsObj.addResourceProperties(resourceProperties);
		resourceProperties.addProperty(ResourceProperties.PROP_IS_COLLECTION, Boolean.FALSE.toString());
		if(item.isHtml())
		{
			resourceProperties.addProperty(ResourceProperties.PROP_CONTENT_ENCODING, "UTF-8");
		}
		List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
		saveMetadata(resourceProperties, metadataGroups, item);
		// prefer the uploaded filename; fall back to the display name
		String filename = Validator.escapeResourceName(item.getFilename().trim());
		if("".equals(filename))
		{
			filename = Validator.escapeResourceName(item.getName().trim());
		}
		resourceProperties.addProperty(ResourceProperties.PROP_ORIGINAL_FILENAME, filename);
		// only keep requested groups the user is actually allowed to add to
		SortedSet groups = new TreeSet(item.getEntityGroupRefs());
		groups.retainAll(item.getAllowedAddGroupRefs());
		try
		{
			ContentResource resource = ContentHostingService.addResource (filename, collectionId, MAXIMUM_ATTEMPTS_FOR_UNIQUENESS, item.getMimeType(), item.getContent(), resourceProperties, groups, item.getNotification());
			item.setAdded(true);
			Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
			if(preventPublicDisplay == null)
			{
				preventPublicDisplay = Boolean.FALSE;
				state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
			}
			if(!preventPublicDisplay.booleanValue() && item.isPubview())
			{
				ContentHostingService.setPubView(resource.getId(), true);
			}
			String mode = (String) state.getAttribute(STATE_MODE);
			if(MODE_HELPER.equals(mode))
			{
				String helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE);
				if(helper_mode != null && MODE_ATTACHMENT_NEW_ITEM_INIT.equals(helper_mode))
				{
					// add to the attachments vector
					List attachments = EntityManager.newReferenceList();
					Reference ref = EntityManager.newReference(ContentHostingService.getReference(resource.getId()));
					attachments.add(ref);
					cleanupState(state);
					state.setAttribute(STATE_ATTACHMENTS, attachments);
				}
				else
				{
					Object attach_links = current_stack_frame.get(STATE_ATTACH_LINKS);
					if(attach_links == null)
					{
						attach_links = state.getAttribute(STATE_ATTACH_LINKS);
						if(attach_links != null)
						{
							current_stack_frame.put(STATE_ATTACH_LINKS, attach_links);
						}
					}
					if(attach_links == null)
					{
						attachItem(resource.getId(), state);
					}
					else
					{
						attachLink(resource.getId(), state);
					}
				}
			}
		}
		catch(PermissionException e)
		{
			alerts.add(rb.getString("notpermis12"));
			continue outerloop;
		}
		catch(IdInvalidException e)
		{
			alerts.add(rb.getString("title") + " " + e.getMessage ());
			continue outerloop;
		}
		catch(IdLengthException e)
		{
			alerts.add(rb.getString("toolong") + " " + e.getMessage());
			continue outerloop;
		}
		catch(IdUniquenessException e)
		{
			alerts.add("Could not add this item to this folder");
			continue outerloop;
		}
		catch(InconsistentException e)
		{
			alerts.add(RESOURCE_INVALID_TITLE_STRING);
			continue outerloop;
		}
		catch(OverQuotaException e)
		{
			alerts.add(rb.getString("overquota"));
			continue outerloop;
		}
		catch(ServerOverloadException e)
		{
			alerts.add(rb.getString("failed"));
			continue outerloop;
		}
		catch(RuntimeException e)
		{
			logger.warn("ResourcesAction.createFiles ***** Unknown Exception ***** " + e.getMessage());
			alerts.add(rb.getString("failed"));
			continue outerloop;
		}
	}
	// make sure the target collection shows as expanded in the list view
	HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
	if(currentMap == null)
	{
		// do nothing
	}
	else
	{
		if(!currentMap.containsKey(collectionId))
		{
			try
			{
				currentMap.put (collectionId,ContentHostingService.getCollection (collectionId));
				state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
				// add this folder id into the set to be event-observed
				addObservingPattern(collectionId, state);
			}
			catch (IdUnusedException ignore)
			{
			}
			catch (TypeException ignore)
			{
			}
			catch (PermissionException ignore)
			{
			}
		}
		state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
	}
	state.setAttribute(STATE_CREATE_ALERTS, alerts);
}	// createFiles

/**
 * Process user's request to add an instance of a particular field to a structured object.
 * @param data the current request
 */
public static void doInsertValue(RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	ParameterParser params = data.getParameters ();
	captureMultipleValues(state, params, false);
	Map current_stack_frame = peekAtStack(state);
	String field = params.getString("field");
	// locate the item being edited, depending on create vs. edit mode
	EditItem item = null;
	String mode = (String) state.getAttribute(STATE_MODE);
	if (MODE_CREATE.equals(mode))
	{
		int index = params.getInt("index");
		List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
		if(new_items != null)
		{
			item = (EditItem) new_items.get(index);
		}
	}
	else if(MODE_EDIT.equals(mode))
	{
		item = (EditItem) current_stack_frame.get(STATE_STACK_EDIT_ITEM);
	}
	if(item != null)
	{
		addInstance(field, item.getProperties());
	}
}	// doInsertValue

/**
 * Search a flat list of ResourcesMetadata properties for one whose localname matches "field".
 * If found and the field can have additional instances, increment the count for that item.
 * @param field
 * @param properties
 * @return true if the field is found, false otherwise.
*/ protected static boolean addInstance(String field, List properties) { Iterator propIt = properties.iterator(); boolean found = false; while(!found && propIt.hasNext()) { ResourcesMetadata property = (ResourcesMetadata) propIt.next(); if(field.equals(property.getDottedname())) { found = true; property.incrementCount(); } } return found; } public static void doAttachitem(RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); ParameterParser params = data.getParameters (); state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet()); String itemId = params.getString("itemId"); Map current_stack_frame = peekAtStack(state); Object attach_links = current_stack_frame.get(STATE_ATTACH_LINKS); if(attach_links == null) { attach_links = state.getAttribute(STATE_ATTACH_LINKS); if(attach_links != null) { current_stack_frame.put(STATE_ATTACH_LINKS, attach_links); } } if(attach_links == null) { attachItem(itemId, state); } else { attachLink(itemId, state); } state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT_INIT); // popFromStack(state); // resetCurrentMode(state); } public static void doAttachupload(RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); ParameterParser params = data.getParameters (); Map current_stack_frame = peekAtStack(state); String max_file_size_mb = (String) state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE); int max_bytes = 1024 * 1024; try { max_bytes = Integer.parseInt(max_file_size_mb) * 1024 * 1024; } catch(Exception e) { // if unable to parse an integer from the value // in the properties file, use 1 MB as a default max_file_size_mb = "1"; max_bytes = 1024 * 1024; } FileItem fileitem = null; try { fileitem = params.getFileItem("upload"); } catch(Exception e) { } if(fileitem == null) { // "The user submitted a file to upload but it was too big!" 
addAlert(state, rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); } else if (fileitem.getFileName() == null || fileitem.getFileName().length() == 0) { addAlert(state, rb.getString("choosefile7")); } else if (fileitem.getFileName().length() > 0) { String filename = Validator.getFileName(fileitem.getFileName()); byte[] bytes = fileitem.get(); String contentType = fileitem.getContentType(); if(bytes.length >= max_bytes) { addAlert(state, rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); } else if(bytes.length > 0) { // we just want the file name part - strip off any drive and path stuff String name = Validator.getFileName(filename); String resourceId = Validator.escapeResourceName(name); // make a set of properties to add for the new resource ResourcePropertiesEdit props = ContentHostingService.newResourceProperties(); props.addProperty(ResourceProperties.PROP_DISPLAY_NAME, name); props.addProperty(ResourceProperties.PROP_DESCRIPTION, filename); // make an attachment resource for this URL try { String siteId = ToolManager.getCurrentPlacement().getContext(); String toolName = (String) current_stack_frame.get(STATE_ATTACH_TOOL_NAME); if(toolName == null) { toolName = (String) state.getAttribute(STATE_ATTACH_TOOL_NAME); if(toolName == null) { toolName = ToolManager.getCurrentTool().getTitle(); } current_stack_frame.put(STATE_ATTACH_TOOL_NAME, toolName); } ContentResource attachment = ContentHostingService.addAttachmentResource(resourceId, siteId, toolName, contentType, bytes, props); List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = new Vector(); } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } String containerId = ContentHostingService.getContainingCollectionId (attachment.getId()); String accessUrl = attachment.getUrl(); AttachItem item = new 
AttachItem(attachment.getId(), filename, containerId, accessUrl); item.setContentType(contentType); new_items.add(item); //check -- jim state.setAttribute(STATE_HELPER_CHANGED, Boolean.TRUE.toString()); current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } catch (PermissionException e) { addAlert(state, rb.getString("notpermis4")); } catch(OverQuotaException e) { addAlert(state, rb.getString("overquota")); } catch(ServerOverloadException e) { addAlert(state, rb.getString("failed")); } catch(IdInvalidException ignore) { // other exceptions should be caught earlier } catch(InconsistentException ignore) { // other exceptions should be caught earlier } catch(IdUsedException ignore) { // other exceptions should be caught earlier } catch(RuntimeException e) { logger.warn("ResourcesAction.doAttachupload ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); } } else { addAlert(state, rb.getString("choosefile7")); } } state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT_INIT); //popFromStack(state); //resetCurrentMode(state); } // doAttachupload public static void doAttachurl(RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); ParameterParser params = data.getParameters (); Map current_stack_frame = peekAtStack(state); String url = params.getCleanString("url"); ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties (); resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, url); resourceProperties.addProperty (ResourceProperties.PROP_DESCRIPTION, url); resourceProperties.addProperty(ResourceProperties.PROP_IS_COLLECTION, Boolean.FALSE.toString()); try { url = validateURL(url); byte[] newUrl = url.getBytes(); String newResourceId = Validator.escapeResourceName(url); String siteId = ToolManager.getCurrentPlacement().getContext(); String toolName = (String) 
current_stack_frame.get(STATE_ATTACH_TOOL_NAME); if(toolName == null) { toolName = (String) state.getAttribute(STATE_ATTACH_TOOL_NAME); if(toolName == null) { toolName = ToolManager.getCurrentTool().getTitle(); } current_stack_frame.put(STATE_ATTACH_TOOL_NAME, toolName); } ContentResource attachment = ContentHostingService.addAttachmentResource(newResourceId, siteId, toolName, ResourceProperties.TYPE_URL, newUrl, resourceProperties); List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = new Vector(); } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } String containerId = ContentHostingService.getContainingCollectionId (attachment.getId()); String accessUrl = attachment.getUrl(); AttachItem item = new AttachItem(attachment.getId(), url, containerId, accessUrl); item.setContentType(ResourceProperties.TYPE_URL); new_items.add(item); state.setAttribute(STATE_HELPER_CHANGED, Boolean.TRUE.toString()); current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } catch(MalformedURLException e) { // invalid url addAlert(state, rb.getString("validurl") + " \"" + url + "\" " + rb.getString("invalid")); } catch (PermissionException e) { addAlert(state, rb.getString("notpermis4")); } catch(OverQuotaException e) { addAlert(state, rb.getString("overquota")); } catch(ServerOverloadException e) { addAlert(state, rb.getString("failed")); } catch(IdInvalidException ignore) { // other exceptions should be caught earlier } catch(IdUsedException ignore) { // other exceptions should be caught earlier } catch(InconsistentException ignore) { // other exceptions should be caught earlier } catch(RuntimeException e) { logger.warn("ResourcesAction.doAttachurl ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); } state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT_INIT); // 
popFromStack(state); // resetCurrentMode(state); } public static void doRemoveitem(RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); ParameterParser params = data.getParameters (); Map current_stack_frame = peekAtStack(state); state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet()); String itemId = params.getString("itemId"); List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = new Vector(); } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } AttachItem item = null; boolean found = false; Iterator it = new_items.iterator(); while(!found && it.hasNext()) { item = (AttachItem) it.next(); if(item.getId().equals(itemId)) { found = true; } } if(found && item != null) { new_items.remove(item); List removed = (List) state.getAttribute(STATE_REMOVED_ATTACHMENTS); if(removed == null) { removed = new Vector(); state.setAttribute(STATE_REMOVED_ATTACHMENTS, removed); } removed.add(item); state.setAttribute(STATE_HELPER_CHANGED, Boolean.TRUE.toString()); } } // doRemoveitem public static void doAddattachments(RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); ParameterParser params = data.getParameters (); // cancel copy if there is one in progress if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG))) { initCopyContext(state); } // cancel move if there is one in progress if(! 
Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG))) { initMoveContext(state); } state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet()); Map current_stack_frame = peekAtStack(state); List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = new Vector(); } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } List removed = (List) current_stack_frame.get(STATE_REMOVED_ATTACHMENTS); if(removed == null) { removed = (List) state.getAttribute(STATE_REMOVED_ATTACHMENTS); if(removed == null) { removed = new Vector(); } current_stack_frame.put(STATE_REMOVED_ATTACHMENTS, removed); } Iterator removeIt = removed.iterator(); while(removeIt.hasNext()) { AttachItem item = (AttachItem) removeIt.next(); try { if(ContentHostingService.isAttachmentResource(item.getId())) { ContentResourceEdit edit = ContentHostingService.editResource(item.getId()); ContentHostingService.removeResource(edit); ContentCollectionEdit coll = ContentHostingService.editCollection(item.getCollectionId()); ContentHostingService.removeCollection(coll); } } catch(Exception ignore) { // log failure } } state.removeAttribute(STATE_REMOVED_ATTACHMENTS); // add to the attachments vector List attachments = EntityManager.newReferenceList(); Iterator it = new_items.iterator(); while(it.hasNext()) { AttachItem item = (AttachItem) it.next(); try { Reference ref = EntityManager.newReference(ContentHostingService.getReference(item.getId())); attachments.add(ref); } catch(Exception e) { } } cleanupState(state); state.setAttribute(STATE_ATTACHMENTS, attachments); // end up in main mode popFromStack(state); resetCurrentMode(state); current_stack_frame = peekAtStack(state); String field = null; // if there is at least one attachment if (attachments.size() > 0) { //check -- jim state.setAttribute(AttachmentAction.STATE_HAS_ATTACHMENT_BEFORE, Boolean.TRUE); 
if(current_stack_frame == null) { } else { field = (String) current_stack_frame.get(STATE_ATTACH_FORM_FIELD); } } if(field != null) { int index = 0; String fieldname = field; Matcher matcher = INDEXED_FORM_FIELD_PATTERN.matcher(field.trim()); if(matcher.matches()) { fieldname = matcher.group(0); index = Integer.parseInt(matcher.group(1)); } // we are trying to attach a link to a form field and there is at least one attachment if(new_items == null) { new_items = (List) current_stack_frame.get(ResourcesAction.STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = (List) state.getAttribute(ResourcesAction.STATE_HELPER_NEW_ITEMS); } } EditItem edit_item = null; List edit_items = (List) current_stack_frame.get(ResourcesAction.STATE_STACK_CREATE_ITEMS); if(edit_items == null) { edit_item = (EditItem) current_stack_frame.get(ResourcesAction.STATE_STACK_EDIT_ITEM); } else { edit_item = (EditItem) edit_items.get(0); } if(edit_item != null) { Reference ref = (Reference) attachments.get(0); edit_item.setPropertyValue(fieldname, index, ref); } } } public static void attachItem(String itemId, SessionState state) { org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE); Map current_stack_frame = peekAtStack(state); List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = new Vector(); } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } boolean found = false; Iterator it = new_items.iterator(); while(!found && it.hasNext()) { AttachItem item = (AttachItem) it.next(); if(item.getId().equals(itemId)) { found = true; } } if(!found) { try { ContentResource res = contentService.getResource(itemId); ResourceProperties props = res.getProperties(); ResourcePropertiesEdit newprops = contentService.newResourceProperties(); 
newprops.set(props); byte[] bytes = res.getContent(); String contentType = res.getContentType(); String filename = Validator.getFileName(itemId); String resourceId = Validator.escapeResourceName(filename); String siteId = ToolManager.getCurrentPlacement().getContext(); String toolName = (String) current_stack_frame.get(STATE_ATTACH_TOOL_NAME); if(toolName == null) { toolName = (String) state.getAttribute(STATE_ATTACH_TOOL_NAME); if(toolName == null) { toolName = ToolManager.getCurrentTool().getTitle(); } current_stack_frame.put(STATE_ATTACH_TOOL_NAME, toolName); } ContentResource attachment = ContentHostingService.addAttachmentResource(resourceId, siteId, toolName, contentType, bytes, props); String displayName = newprops.getPropertyFormatted(ResourceProperties.PROP_DISPLAY_NAME); String containerId = contentService.getContainingCollectionId (attachment.getId()); String accessUrl = attachment.getUrl(); AttachItem item = new AttachItem(attachment.getId(), displayName, containerId, accessUrl); item.setContentType(contentType); new_items.add(item); state.setAttribute(STATE_HELPER_CHANGED, Boolean.TRUE.toString()); } catch (PermissionException e) { addAlert(state, rb.getString("notpermis4")); } catch(OverQuotaException e) { addAlert(state, rb.getString("overquota")); } catch(ServerOverloadException e) { addAlert(state, rb.getString("failed")); } catch(IdInvalidException ignore) { // other exceptions should be caught earlier } catch(TypeException ignore) { // other exceptions should be caught earlier } catch(IdUnusedException ignore) { // other exceptions should be caught earlier } catch(IdUsedException ignore) { // other exceptions should be caught earlier } catch(InconsistentException ignore) { // other exceptions should be caught earlier } catch(RuntimeException e) { logger.warn("ResourcesAction.attachItem ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); } } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } 
public static void attachLink(String itemId, SessionState state) { org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE); Map current_stack_frame = peekAtStack(state); List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = new Vector(); } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } Integer max_cardinality = (Integer) current_stack_frame.get(STATE_ATTACH_CARDINALITY); if(max_cardinality == null) { max_cardinality = (Integer) state.getAttribute(STATE_ATTACH_CARDINALITY); if(max_cardinality == null) { max_cardinality = CARDINALITY_MULTIPLE; } current_stack_frame.put(STATE_ATTACH_CARDINALITY, max_cardinality); } boolean found = false; Iterator it = new_items.iterator(); while(!found && it.hasNext()) { AttachItem item = (AttachItem) it.next(); if(item.getId().equals(itemId)) { found = true; } } if(!found) { try { ContentResource res = contentService.getResource(itemId); ResourceProperties props = res.getProperties(); String contentType = res.getContentType(); String filename = Validator.getFileName(itemId); String resourceId = Validator.escapeResourceName(filename); String siteId = ToolManager.getCurrentPlacement().getContext(); String toolName = (String) current_stack_frame.get(STATE_ATTACH_TOOL_NAME); if(toolName == null) { toolName = (String) state.getAttribute(STATE_ATTACH_TOOL_NAME); if(toolName == null) { toolName = ToolManager.getCurrentTool().getTitle(); } current_stack_frame.put(STATE_ATTACH_TOOL_NAME, toolName); } String displayName = props.getPropertyFormatted(ResourceProperties.PROP_DISPLAY_NAME); String containerId = contentService.getContainingCollectionId (itemId); String accessUrl = res.getUrl(); AttachItem item = new AttachItem(itemId, displayName, containerId, accessUrl); 
item.setContentType(contentType); new_items.add(item); state.setAttribute(STATE_HELPER_CHANGED, Boolean.TRUE.toString()); } catch (PermissionException e) { addAlert(state, rb.getString("notpermis4")); } catch(TypeException ignore) { // other exceptions should be caught earlier } catch(IdUnusedException ignore) { // other exceptions should be caught earlier } catch(RuntimeException e) { logger.warn("ResourcesAction.attachItem ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); } } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } /** * Add a new URL to ContentHosting for each EditItem in the state attribute named STATE_STACK_CREATE_ITEMS. * The number of items to be added is indicated by the state attribute named STATE_STACK_CREATE_NUMBER, and * the items are added to the collection identified by the state attribute named STATE_STACK_CREATE_COLLECTION_ID. * @param state */ protected static void createUrls(SessionState state) { Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts == null) { alerts = new HashSet(); state.setAttribute(STATE_CREATE_ALERTS, alerts); } Map current_stack_frame = peekAtStack(state); List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS); Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER); if(number == null) { number = (Integer) state.getAttribute(STATE_CREATE_NUMBER); current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number); } if(number == null) { number = new Integer(1); current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number); } String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID); if(collectionId == null || collectionId.trim().length() == 0) { collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID); if(collectionId == null || collectionId.trim().length() == 0) { collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext()); } 
current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId); } int numberOfItems = 1; numberOfItems = number.intValue(); outerloop: for(int i = 0; i < numberOfItems; i++) { EditItem item = (EditItem) new_items.get(i); if(item.isBlank()) { continue; } ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties (); resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, item.getName()); resourceProperties.addProperty (ResourceProperties.PROP_DESCRIPTION, item.getDescription()); resourceProperties.addProperty(ResourceProperties.PROP_IS_COLLECTION, Boolean.FALSE.toString()); List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS); saveMetadata(resourceProperties, metadataGroups, item); byte[] newUrl = item.getFilename().getBytes(); String name = Validator.escapeResourceName(item.getName()); SortedSet groups = new TreeSet(item.getEntityGroupRefs()); groups.retainAll(item.getAllowedAddGroupRefs()); try { ContentResource resource = ContentHostingService.addResource (name, collectionId, MAXIMUM_ATTEMPTS_FOR_UNIQUENESS, item.getMimeType(), newUrl, resourceProperties, groups, item.getNotification()); item.setAdded(true); Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } if(!preventPublicDisplay.booleanValue() && item.isPubview()) { ContentHostingService.setPubView(resource.getId(), true); } String mode = (String) state.getAttribute(STATE_MODE); if(MODE_HELPER.equals(mode)) { String helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE); if(helper_mode != null && MODE_ATTACHMENT_NEW_ITEM.equals(helper_mode)) { // add to the attachments vector List attachments = EntityManager.newReferenceList(); Reference ref = EntityManager.newReference(ContentHostingService.getReference(resource.getId())); 
attachments.add(ref); cleanupState(state); state.setAttribute(STATE_ATTACHMENTS, attachments); } else { Object attach_links = current_stack_frame.get(STATE_ATTACH_LINKS); if(attach_links == null) { attach_links = state.getAttribute(STATE_ATTACH_LINKS); if(attach_links != null) { current_stack_frame.put(STATE_ATTACH_LINKS, attach_links); } } if(attach_links == null) { attachItem(resource.getId(), state); } else { attachLink(resource.getId(), state); } } } } catch(PermissionException e) { alerts.add(rb.getString("notpermis12")); continue outerloop; } catch(IdInvalidException e) { alerts.add(rb.getString("title") + " " + e.getMessage ()); continue outerloop; } catch(IdLengthException e) { alerts.add(rb.getString("toolong") + " " + e.getMessage()); continue outerloop; } catch(IdUniquenessException e) { alerts.add("Could not add this item to this folder"); continue outerloop; } catch(InconsistentException e) { alerts.add(RESOURCE_INVALID_TITLE_STRING); continue outerloop; } catch(OverQuotaException e) { alerts.add(rb.getString("overquota")); continue outerloop; } catch(ServerOverloadException e) { alerts.add(rb.getString("failed")); continue outerloop; } catch(RuntimeException e) { logger.warn("ResourcesAction.createFiles ***** Unknown Exception ***** " + e.getMessage()); alerts.add(rb.getString("failed")); continue outerloop; } } HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS); if(!currentMap.containsKey(collectionId)) { try { currentMap.put (collectionId,ContentHostingService.getCollection (collectionId)); state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap); // add this folder id into the set to be event-observed addObservingPattern(collectionId, state); } catch (IdUnusedException ignore) { } catch (TypeException ignore) { } catch (PermissionException ignore) { } } state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap); state.setAttribute(STATE_CREATE_ALERTS, alerts); } // createUrls /** * Build the context for creating folders 
and items */ public static String buildCreateContext (VelocityPortlet portlet, Context context, RunData data, SessionState state) { context.put("tlang",rb); // find the ContentTypeImage service context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); context.put("TYPE_FOLDER", TYPE_FOLDER); context.put("TYPE_UPLOAD", TYPE_UPLOAD); context.put("TYPE_HTML", TYPE_HTML); context.put("TYPE_TEXT", TYPE_TEXT); context.put("TYPE_URL", TYPE_URL); context.put("TYPE_FORM", TYPE_FORM); context.put("SITE_ACCESS", AccessMode.SITE.toString()); context.put("GROUP_ACCESS", AccessMode.GROUPED.toString()); context.put("INHERITED_ACCESS", AccessMode.INHERITED.toString()); context.put("PUBLIC_ACCESS", PUBLIC_ACCESS); context.put("max_upload_size", state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE)); Map current_stack_frame = peekAtStack(state); String itemType = (String) current_stack_frame.get(STATE_STACK_CREATE_TYPE); if(itemType == null || itemType.trim().equals("")) { itemType = (String) state.getAttribute(STATE_CREATE_TYPE); if(itemType == null || itemType.trim().equals("")) { itemType = TYPE_UPLOAD; } current_stack_frame.put(STATE_STACK_CREATE_TYPE, itemType); } context.put("itemType", itemType); String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID); if(collectionId == null || collectionId.trim().length() == 0) { collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID); if(collectionId == null || collectionId.trim().length() == 0) { collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext()); } current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId); } context.put("collectionId", collectionId); String field = (String) current_stack_frame.get(STATE_ATTACH_FORM_FIELD); if(field == null) { field = (String) state.getAttribute(STATE_ATTACH_FORM_FIELD); if(field != null) { current_stack_frame.put(STATE_ATTACH_FORM_FIELD, field); 
state.removeAttribute(STATE_ATTACH_FORM_FIELD); } } String msg = (String) state.getAttribute(STATE_CREATE_MESSAGE); if (msg != null) { context.put("createAlertMessage", msg); state.removeAttribute(STATE_CREATE_MESSAGE); } Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS); if(new_items == null) { String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT); if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals("")) { defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright"); state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus); } String encoding = data.getRequest().getCharacterEncoding(); new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS); current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items); } context.put("new_items", new_items); Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER); if(number == null) { number = (Integer) state.getAttribute(STATE_CREATE_NUMBER); current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number); } context.put("numberOfItems", number); context.put("max_number", new Integer(CREATE_MAX_ITEMS)); String homeCollectionId = (String) state.getAttribute (STATE_HOME_COLLECTION_ID); context.put("homeCollectionId", homeCollectionId); List collectionPath = getCollectionPath(state); context.put ("collectionPath", collectionPath); if(homeCollectionId.equals(collectionId)) { context.put("atHome", Boolean.TRUE.toString()); } Collection groups = ContentHostingService.getGroupsWithReadAccess(collectionId); if(! 
groups.isEmpty()) { context.put("siteHasGroups", Boolean.TRUE.toString()); List theGroupsInThisSite = new Vector(); for(int i = 0; i < CREATE_MAX_ITEMS; i++) { theGroupsInThisSite.add(groups.iterator()); } context.put("theGroupsInThisSite", theGroupsInThisSite); } String show_form_items = (String) state.getAttribute(STATE_SHOW_FORM_ITEMS); if(show_form_items == null) { show_form_items = (String) state.getAttribute(STATE_SHOW_FORM_ITEMS); if(show_form_items != null) { current_stack_frame.put(STATE_SHOW_FORM_ITEMS,show_form_items); } } if(show_form_items != null) { context.put("show_form_items", show_form_items); } // copyright copyrightChoicesIntoContext(state, context); // put schema for metadata into context metadataGroupsIntoContext(state, context); // %%STATE_MODE_RESOURCES%% if (RESOURCES_MODE_RESOURCES.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { context.put("dropboxMode", Boolean.FALSE); } else if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { // notshow the public option or notification when in dropbox mode context.put("dropboxMode", Boolean.TRUE); } context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE)); /* Collection groups = ContentHostingService.getGroupsWithReadAccess(collectionId); if(! 
groups.isEmpty()) { context.put("siteHasGroups", Boolean.TRUE.toString()); context.put("theGroupsInThisSite", groups); } */
// structured-artifact ("form") creation needs extra context: the available form
// homes, the current form type, and the widget-name constants the template uses
if(TYPE_FORM.equals(itemType)) { List listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES); if(listOfHomes == null) { setupStructuredObjects(state); listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES); } context.put("homes", listOfHomes); String formtype = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE); if(formtype == null) { formtype = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE); if(formtype == null) { formtype = ""; } current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype); } context.put("formtype", formtype); String rootname = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_ROOTNAME); context.put("rootname", rootname); context.put("STRING", ResourcesMetadata.WIDGET_STRING); context.put("TEXTAREA", ResourcesMetadata.WIDGET_TEXTAREA); context.put("BOOLEAN", ResourcesMetadata.WIDGET_BOOLEAN); context.put("INTEGER", ResourcesMetadata.WIDGET_INTEGER); context.put("DOUBLE", ResourcesMetadata.WIDGET_DOUBLE); context.put("DATE", ResourcesMetadata.WIDGET_DATE); context.put("TIME", ResourcesMetadata.WIDGET_TIME); context.put("DATETIME", ResourcesMetadata.WIDGET_DATETIME); context.put("ANYURI", ResourcesMetadata.WIDGET_ANYURI); context.put("ENUM", ResourcesMetadata.WIDGET_ENUM); context.put("NESTED", ResourcesMetadata.WIDGET_NESTED); context.put("WYSIWYG", ResourcesMetadata.WIDGET_WYSIWYG); context.put("today", TimeService.newTime()); context.put("DOT", ResourcesMetadata.DOT); }
// names of any required fields the user left blank on the previous submit
Set missing = (Set) current_stack_frame.remove(STATE_CREATE_MISSING_ITEM); context.put("missing", missing);
// String template = (String) getContext(data).get("template");
return TEMPLATE_CREATE; }
/**
 * Show the "more" view (properties/details) for a single collection or resource.
 * Reads the item id and collection id from the request parameters, falling back to
 * the current stack frame; cancels any copy/move operation in progress; and, if
 * the target is the home collection, verifies it exists (creating it when absent)
 * before switching the tool to MODE_MORE.
 * @param data the run data carrying the request parameters and session state
 */
public static void doMore ( RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); ParameterParser params = data.getParameters (); Map current_stack_frame = pushOnStack(state);
// cancel copy if there is one in progress
if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG))) { initCopyContext(state); }
// cancel move if there is one in progress
if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG))) { initMoveContext(state); }
state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());
// the hosted item ID
String id = NULL_STRING;
// the collection id
String collectionId = NULL_STRING;
try { id = params.getString ("id"); if (id!=null) {
// set the collection/resource id for more context
current_stack_frame.put(STATE_MORE_ID, id); } else {
// get collection/resource id from the state object
id =(String) current_stack_frame.get(STATE_MORE_ID); } collectionId = params.getString ("collectionId"); current_stack_frame.put(STATE_MORE_COLLECTION_ID, collectionId); if (collectionId.equals ((String) state.getAttribute(STATE_HOME_COLLECTION_ID))) { try {
// this is a test to see if the collection exists. If not, it is created.
ContentCollection collection = ContentHostingService.getCollection (collectionId); } catch (IdUnusedException e ) { try {
// default copyright
String mycopyright = (String) state.getAttribute (STATE_MY_COPYRIGHT); ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties (); String homeCollectionId = (String) state.getAttribute (STATE_HOME_COLLECTION_ID); resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, ContentHostingService.getProperties (homeCollectionId).getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME)); ContentCollection collection = ContentHostingService.addCollection (homeCollectionId, resourceProperties); } catch (IdUsedException ee) { addAlert(state, rb.getString("idused")); } catch (IdUnusedException ee) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } catch (IdInvalidException ee) { addAlert(state, rb.getString("title") + " " + ee.getMessage ()); } catch (PermissionException ee) { addAlert(state, rb.getString("permisex")); } catch (InconsistentException ee) { addAlert(state, RESOURCE_INVALID_TITLE_STRING); } } catch (TypeException e ) { addAlert(state, rb.getString("typeex")); } catch (PermissionException e ) { addAlert(state, rb.getString("permisex")); } } } catch (NullPointerException eE) { addAlert(state," " + rb.getString("nullex") + " " + id + ". "); }
EditItem item = getEditItem(id, collectionId, data);
// is there no error?
if (state.getAttribute(STATE_MESSAGE) == null) {
// go to the more state
state.setAttribute(STATE_MODE, MODE_MORE); } // if-else
} // doMore
/**
 * Delete the collection/resource items previously selected for deletion
 * (STATE_DELETE_ITEMS), removing the deepest items in the hierarchy first so
 * that folders are emptied before they are removed. Alerts are added to state
 * for any item that cannot be deleted; on success the tool returns to MODE_LIST.
 * @param data the run data carrying the request parameters and session state
 */
public void doDelete ( RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
// cancel copy if there is one in progress
if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG))) { initCopyContext(state); }
// cancel move if there is one in progress
if(!
Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG))) { initMoveContext(state); }
ParameterParser params = data.getParameters (); List Items = (List) state.getAttribute(STATE_DELETE_ITEMS);
// Vector deleteIds = (Vector) state.getAttribute (STATE_DELETE_IDS);
// delete the lowest item in the hireachy first
// bucket the selected items by their depth in the hierarchy so that the deepest
// ones can be removed first (children before their containing folders)
Hashtable deleteItems = new Hashtable();
// String collectionId = (String) state.getAttribute (STATE_COLLECTION_ID);
int maxDepth = 0; int depth = 0; Iterator it = Items.iterator(); while(it.hasNext()) { BrowseItem item = (BrowseItem) it.next(); depth = ContentHostingService.getDepth(item.getId(), item.getRoot()); if (depth > maxDepth) { maxDepth = depth; } List v = (List) deleteItems.get(new Integer(depth)); if(v == null) { v = new Vector(); } v.add(item); deleteItems.put(new Integer(depth), v); }
boolean isCollection = false;
// walk from the deepest bucket up, removing each item; folders and plain
// resources use different ContentHostingService removal calls
for (int j=maxDepth; j>0; j--) { List v = (List) deleteItems.get(new Integer(j)); if (v==null) { v = new Vector(); } Iterator itemIt = v.iterator(); while(itemIt.hasNext()) { BrowseItem item = (BrowseItem) itemIt.next(); try { if (item.isFolder()) { ContentHostingService.removeCollection(item.getId()); } else { ContentHostingService.removeResource(item.getId()); } } catch (PermissionException e) { addAlert(state, rb.getString("notpermis6") + " " + item.getName() + ". "); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } catch (TypeException e) { addAlert(state, rb.getString("deleteres") + " " + item.getName() + " " + rb.getString("wrongtype")); } catch (ServerOverloadException e) { addAlert(state, rb.getString("failed")); } catch (InUseException e) { addAlert(state, rb.getString("deleteres") + " " + item.getName() + " " + rb.getString("locked")); }// try - catch
catch(RuntimeException e) { logger.warn("ResourcesAction.doDelete ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); }
} // for
} // for
if (state.getAttribute(STATE_MESSAGE) == null) {
// delete sucessful
state.setAttribute (STATE_MODE, MODE_LIST); if (((String) state.getAttribute (STATE_SELECT_ALL_FLAG)).equals (Boolean.TRUE.toString())) { state.setAttribute (STATE_SELECT_ALL_FLAG, Boolean.FALSE.toString()); } } // if-else
} // doDelete
/**
 * Cancel the current operation and return to the previous state: marks the
 * current helper stack frame as canceled by the user, pops it, and resets the
 * current tool mode.
 * @param data the run data carrying the session state
 */
public static void doCancel ( RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet()); if(!isStackEmpty(state)) { Map current_stack_frame = peekAtStack(state); current_stack_frame.put(STATE_HELPER_CANCELED_BY_USER, Boolean.TRUE.toString()); popFromStack(state); } resetCurrentMode(state); } // doCancel
/**
 * Paste the previously copied or cut item(s) into the collection named by the
 * "collectionId" request parameter. Cut items are re-added at the destination
 * (preserving their UUID) and then removed from their original location;
 * copied items are added under a "Copy of ..." display name. Alerts are added
 * to state on failure; on success the tool returns to MODE_LIST and the target
 * collection is expanded.
 * @param data the run data carrying the request parameters and session state
 */
public void doHandlepaste ( RunData data) { ParameterParser params = data.getParameters (); SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
// get the cut items to be pasted
Vector pasteCutItems = (Vector) state.getAttribute (STATE_CUT_IDS);
// get the copied items to be pasted
Vector pasteCopiedItems = (Vector) state.getAttribute (STATE_COPIED_IDS);
String collectionId = params.getString ("collectionId"); String originalDisplayName = NULL_STRING;
// handle cut and paste
if (((String)
state.getAttribute (STATE_CUT_FLAG)).equals (Boolean.TRUE.toString())) { for (int i = 0; i < pasteCutItems.size (); i++) { String currentPasteCutItem = (String) pasteCutItems.get (i); try { ResourceProperties properties = ContentHostingService.getProperties (currentPasteCutItem); originalDisplayName = properties.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME);
/* if (Boolean.TRUE.toString().equals(properties.getProperty (ResourceProperties.PROP_IS_COLLECTION))) { String alert = (String) state.getAttribute(STATE_MESSAGE); if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1))) { addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING); } } else { */
// paste the resource
ContentResource resource = ContentHostingService.getResource (currentPasteCutItem); ResourceProperties p = ContentHostingService.getProperties(currentPasteCutItem); String id = collectionId + Validator.escapeResourceName(p.getProperty(ResourceProperties.PROP_DISPLAY_NAME));
// cut-paste to the same collection?
boolean cutPasteSameCollection = false; String displayName = p.getProperty(ResourceProperties.PROP_DISPLAY_NAME);
// till paste successfully or it fails
ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();
// add the properties of the pasted item
Iterator propertyNames = properties.getPropertyNames (); while ( propertyNames.hasNext ()) { String propertyName = (String) propertyNames.next (); if (!properties.isLiveProperty (propertyName)) { if (propertyName.equals (ResourceProperties.PROP_DISPLAY_NAME)&&(displayName.length ()>0)) { resourceProperties.addProperty (propertyName, displayName); } else { resourceProperties.addProperty (propertyName, properties.getProperty (propertyName)); } // if-else
} // if
} // while
try {
// paste the cutted resource to the new collection - no notification
ContentResource newResource = ContentHostingService.addResource (id, resource.getContentType (), resource.getContent (), resourceProperties, resource.getGroups(), NotificationService.NOTI_NONE);
// preserve the original resource's UUID on the pasted copy (cut semantics)
String uuid = ContentHostingService.getUuid(resource.getId()); ContentHostingService.setUuid(id, uuid); } catch (InconsistentException e) { addAlert(state,RESOURCE_INVALID_TITLE_STRING); } catch (OverQuotaException e) { addAlert(state, rb.getString("overquota")); } catch (IdInvalidException e) { addAlert(state, rb.getString("title") + " " + e.getMessage ()); } catch(ServerOverloadException e) {
// this represents temporary unavailability of server's filesystem
// for server configured to save resource body in filesystem
addAlert(state, rb.getString("failed")); } catch (IdUsedException e) {
// cut and paste to the same collection; stop adding new resource
if (id.equals(currentPasteCutItem)) { cutPasteSameCollection = true; } else { addAlert(state, rb.getString("notaddreso") + " " + id + rb.getString("used2"));
/* // pasted to the same folder as before; add "Copy of "/ "copy (n) of" to the id
if (countNumber==1) { displayName = DUPLICATE_STRING + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); id = collectionId + Validator.escapeResourceName(displayName); } else { displayName = "Copy (" + countNumber + ") of " + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); id = collectionId + Validator.escapeResourceName(displayName); } countNumber++; */
}
} // try-catch
catch(RuntimeException e) { logger.warn("ResourcesAction.doHandlepaste ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); }
if (!cutPasteSameCollection) {
// remove the cutted resource
ContentHostingService.removeResource (currentPasteCutItem); }
// } // if-else
} catch (InUseException e) { addAlert(state, rb.getString("someone") + " " + originalDisplayName + ". "); } catch (PermissionException e) { addAlert(state, rb.getString("notpermis7") + " " + originalDisplayName + ". "); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } catch (TypeException e) { addAlert(state, rb.getString("pasteitem") + " " + originalDisplayName + " " + rb.getString("mismatch")); } // try-catch
catch(RuntimeException e) { logger.warn("ResourcesAction.doHandlepaste ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); }
} // for
} // cut
// handling copy and paste
if (Boolean.toString(true).equalsIgnoreCase((String) state.getAttribute (STATE_COPY_FLAG))) { for (int i = 0; i < pasteCopiedItems.size (); i++) { String currentPasteCopiedItem = (String) pasteCopiedItems.get (i); try { ResourceProperties properties = ContentHostingService.getProperties (currentPasteCopiedItem); originalDisplayName = properties.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME);
// copy, cut and paste not operated on collections
if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString())) { String alert = (String) state.getAttribute(STATE_MESSAGE); if (alert == null || ((alert != null) &&
(alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1))) { addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING); } } else {
// paste the resource
ContentResource resource = ContentHostingService.getResource (currentPasteCopiedItem); ResourceProperties p = ContentHostingService.getProperties(currentPasteCopiedItem);
// copied items are pasted under a "Copy of ..." (DUPLICATE_STRING) display name
String displayName = DUPLICATE_STRING + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); String id = collectionId + Validator.escapeResourceName(displayName); ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();
// add the properties of the pasted item
Iterator propertyNames = properties.getPropertyNames (); while ( propertyNames.hasNext ()) { String propertyName = (String) propertyNames.next (); if (!properties.isLiveProperty (propertyName)) { if (propertyName.equals (ResourceProperties.PROP_DISPLAY_NAME)&&(displayName.length ()>0)) { resourceProperties.addProperty (propertyName, displayName); } else { resourceProperties.addProperty (propertyName, properties.getProperty (propertyName)); } } }
try {
// paste the copied resource to the new collection
ContentResource newResource = ContentHostingService.addResource (id, resource.getContentType (), resource.getContent (), resourceProperties, resource.getGroups(), NotificationService.NOTI_NONE); } catch (InconsistentException e) { addAlert(state,RESOURCE_INVALID_TITLE_STRING); } catch (IdInvalidException e) { addAlert(state,rb.getString("title") + " " + e.getMessage ()); } catch (OverQuotaException e) { addAlert(state, rb.getString("overquota")); } catch (ServerOverloadException e) { addAlert(state, rb.getString("failed")); } catch (IdUsedException e) { addAlert(state, rb.getString("notaddreso") + " " + id + rb.getString("used2"));
/* // copying
// pasted to the same folder as before; add "Copy of " to the id
if (countNumber > 1) { displayName = "Copy (" + countNumber + ") of " + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); } else if (countNumber == 1) { displayName = "Copy of " + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); } id = collectionId + Validator.escapeResourceName(displayName); countNumber++; */
} // try-catch
catch(RuntimeException e) { logger.warn("ResourcesAction.doHandlepaste ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); }
} // if-else
} catch (PermissionException e) { addAlert(state, rb.getString("notpermis8") + " " + originalDisplayName + ". "); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } catch (TypeException e) { addAlert(state, rb.getString("pasteitem") + " " + originalDisplayName + " " + rb.getString("mismatch")); } // try-catch
} // for
} // copy
if (state.getAttribute(STATE_MESSAGE) == null) {
// delete sucessful
state.setAttribute (STATE_MODE, MODE_LIST);
// reset the cut flag
if (((String)state.getAttribute (STATE_CUT_FLAG)).equals (Boolean.TRUE.toString())) { state.setAttribute (STATE_CUT_FLAG, Boolean.FALSE.toString()); }
// reset the copy flag
if (Boolean.toString(true).equalsIgnoreCase((String)state.getAttribute (STATE_COPY_FLAG))) { state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString()); }
// try to expand the collection
HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS); if(! expandedCollections.containsKey(collectionId)) { org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE); try { ContentCollection coll = contentService.getCollection(collectionId); expandedCollections.put(collectionId, coll); } catch(Exception ignore){} } } } // doHandlepaste
/**
 * Paste shortcut(s) (URL resources pointing at the originals) for the previously
 * copied item(s) into the collection named by the "collectionId" request
 * parameter. Only works with the copy flag; if a cut is pending the user is told
 * to choose copy instead. On success the tool returns to MODE_LIST.
 * @param data the run data carrying the request parameters and session state
 */
public void doHandlepasteshortcut ( RunData data) { ParameterParser params = data.getParameters (); SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
// get the items to be pasted
Vector pasteItems = new Vector (); if (((String) state.getAttribute (STATE_COPY_FLAG)).equals (Boolean.TRUE.toString())) { pasteItems = (Vector) ( (Vector) state.getAttribute (STATE_COPIED_IDS)).clone (); } if (((String) state.getAttribute (STATE_CUT_FLAG)).equals (Boolean.TRUE.toString())) { addAlert(state, rb.getString("choosecp")); }
if (state.getAttribute(STATE_MESSAGE) == null) { String collectionId = params.getString ("collectionId"); String originalDisplayName = NULL_STRING; for (int i = 0; i < pasteItems.size (); i++) { String currentPasteItem = (String) pasteItems.get (i); try { ResourceProperties properties = ContentHostingService.getProperties (currentPasteItem); originalDisplayName = properties.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME); if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString())) {
// paste the collection
} else {
// paste the resource
ContentResource resource = ContentHostingService.getResource (currentPasteItem); ResourceProperties p = ContentHostingService.getProperties(currentPasteItem); String displayName = SHORTCUT_STRING + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); String id = collectionId + Validator.escapeResourceName(displayName);
//int countNumber = 2;
ResourcePropertiesEdit
resourceProperties = ContentHostingService.newResourceProperties ();
// add the properties of the pasted item
Iterator propertyNames = properties.getPropertyNames (); while ( propertyNames.hasNext ()) { String propertyName = (String) propertyNames.next (); if ((!properties.isLiveProperty (propertyName)) && (!propertyName.equals (ResourceProperties.PROP_DISPLAY_NAME))) { resourceProperties.addProperty (propertyName, properties.getProperty (propertyName)); } }
// %%%%% should be _blank for items that can be displayed in browser, _self for others
// resourceProperties.addProperty (ResourceProperties.PROP_OPEN_NEWWINDOW, "_self");
resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, displayName);
try {
// the shortcut is stored as a TYPE_URL resource whose body is the referred
// resource's access URL
ContentResource referedResource= ContentHostingService.getResource (currentPasteItem); ContentResource newResource = ContentHostingService.addResource (id, ResourceProperties.TYPE_URL, referedResource.getUrl().getBytes (), resourceProperties, referedResource.getGroups(), NotificationService.NOTI_NONE); } catch (InconsistentException e) { addAlert(state, RESOURCE_INVALID_TITLE_STRING); } catch (OverQuotaException e) { addAlert(state, rb.getString("overquota")); } catch (IdInvalidException e) { addAlert(state, rb.getString("title") + " " + e.getMessage ()); } catch (ServerOverloadException e) { addAlert(state, rb.getString("failed")); } catch (IdUsedException e) { addAlert(state, rb.getString("notaddreso") + " " + id + rb.getString("used2"));
/* // pasted shortcut to the same folder as before; add countNumber to the id
displayName = "Shortcut (" + countNumber + ") to " + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); id = collectionId + Validator.escapeResourceName(displayName); countNumber++; */
} // try-catch
catch(RuntimeException e) { logger.warn("ResourcesAction.doHandlepasteshortcut ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); }
} // if-else
} catch (PermissionException e) { addAlert(state, rb.getString("notpermis9") + " " + currentPasteItem.substring (currentPasteItem.lastIndexOf (Entity.SEPARATOR)+1) + ". "); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } catch (TypeException e) { addAlert(state, rb.getString("pasteitem") + " " + currentPasteItem.substring (currentPasteItem.lastIndexOf (Entity.SEPARATOR)+1) + " " + rb.getString("mismatch")); } // try-catch
} // for
}
if (state.getAttribute(STATE_MESSAGE) == null) { if (((String) state.getAttribute (STATE_COPY_FLAG)).equals (Boolean.TRUE.toString())) {
// reset the copy flag
state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString()); }
// paste shortcut sucessful
state.setAttribute (STATE_MODE, MODE_LIST); } } // doHandlepasteshortcut
/**
 * Enter the edit view for the collection or resource named by the "id" request
 * parameter: cancels any pending copy/move, pushes a new stack frame holding the
 * edit id/collection id, populates an EditItem via getEditItem, and switches the
 * helper mode to MODE_ATTACHMENT_EDIT_ITEM_INIT. Pops the frame again on error.
 * @param data the run data carrying the request parameters and session state
 */
public static void doEdit ( RunData data ) { ParameterParser params = data.getParameters (); SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); Map current_stack_frame = pushOnStack(state); state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());
// cancel copy if there is one in progress
if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG))) { initCopyContext(state); }
// cancel move if there is one in progress
if(!
Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG))) { initMoveContext(state); }
String id = NULL_STRING; id = params.getString ("id"); if(id == null || id.length() == 0) {
// there is no resource selected, show the alert message to the user
addAlert(state, rb.getString("choosefile2")); return; }
current_stack_frame.put(STATE_STACK_EDIT_ID, id); String collectionId = (String) params.getString("collectionId"); if(collectionId == null) { collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext()); state.setAttribute(STATE_HOME_COLLECTION_ID, collectionId); } current_stack_frame.put(STATE_STACK_EDIT_COLLECTION_ID, collectionId);
EditItem item = getEditItem(id, collectionId, data);
if (state.getAttribute(STATE_MESSAGE) == null) {
// got resource and sucessfully populated item with values
// state.setAttribute (STATE_MODE, MODE_EDIT);
state.setAttribute(ResourcesAction.STATE_RESOURCES_HELPER_MODE, ResourcesAction.MODE_ATTACHMENT_EDIT_ITEM_INIT); state.setAttribute(STATE_EDIT_ALERTS, new HashSet()); current_stack_frame.put(STATE_STACK_EDIT_ITEM, item); } else { popFromStack(state); } } // doEdit
/**
 * Build an EditItem bean describing the collection or resource with the given id,
 * populating it from the entity's properties: type and content, container,
 * permissions, access mode and groups, pubview, copyright, display metadata and
 * (for site-root folders viewed by an admin) quota. Alerts are added to session
 * state on failure.
 * @param id the id of the collection or resource to describe
 * @param collectionId the containing collection id used for group/permission lookups
 * @param data run data supplying the session state and request character encoding
 * @return the populated EditItem, or null if the entity's properties could not be read
 */
public static EditItem getEditItem(String id, String collectionId, RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); Stack operations_stack = (Stack) state.getAttribute(STATE_SUSPENDED_OPERATIONS_STACK); Map current_stack_frame = peekAtStack(state); EditItem item = null;
// populate an EditItem object with values from the resource and return the EditItem
try { ResourceProperties properties = ContentHostingService.getProperties(id); boolean isCollection = false; try { isCollection = properties.getBooleanProperty(ResourceProperties.PROP_IS_COLLECTION); } catch(Exception e) {
// assume isCollection is false if property is not set
}
ContentEntity entity = null; String itemType = ""; byte[] content = null; if(isCollection) { itemType = "folder"; entity = ContentHostingService.getCollection(id); } else { entity = ContentHostingService.getResource(id); itemType = ((ContentResource) entity).getContentType(); content = ((ContentResource) entity).getContent(); }
String itemName = properties.getProperty(ResourceProperties.PROP_DISPLAY_NAME); item = new EditItem(id, itemName, itemType); item.setInDropbox(ContentHostingService.isInDropbox(id)); BasicRightsAssignment rightsObj = new BasicRightsAssignment(item.getItemNum(), properties); item.setRights(rightsObj);
String encoding = data.getRequest().getCharacterEncoding(); if(encoding != null) { item.setEncoding(encoding); }
// fall back to the server-configured default copyright when state has none
String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT); if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals("")) { defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright"); state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus); } item.setCopyrightStatus(defaultCopyrightStatus);
if(content != null) { item.setContent(content); }
String dummyId = collectionId.trim(); if(dummyId.endsWith(Entity.SEPARATOR)) { dummyId += "dummy"; } else { dummyId += Entity.SEPARATOR + "dummy"; }
String containerId = ContentHostingService.getContainingCollectionId (id); item.setContainer(containerId);
boolean canRead = ContentHostingService.allowGetCollection(id); boolean canAddFolder = ContentHostingService.allowAddCollection(id); boolean canAddItem = ContentHostingService.allowAddResource(id); boolean canDelete = ContentHostingService.allowRemoveResource(id); boolean canRevise = ContentHostingService.allowUpdateResource(id); item.setCanRead(canRead); item.setCanRevise(canRevise); item.setCanAddItem(canAddItem); item.setCanAddFolder(canAddFolder); item.setCanDelete(canDelete);
// item.setIsUrl(isUrl);
AccessMode access = ((GroupAwareEntity) entity).getAccess(); if(access == null) { item.setAccess(AccessMode.INHERITED.toString()); } else { item.setAccess(access.toString()); }
AccessMode
inherited_access = ((GroupAwareEntity) entity).getInheritedAccess();
// SITE-level inheritance is presented to the UI as INHERITED
if(inherited_access == null || inherited_access.equals(AccessMode.SITE)) { item.setInheritedAccess(AccessMode.INHERITED.toString()); } else { item.setInheritedAccess(inherited_access.toString()); }
Site site = SiteService.getSite(ToolManager.getCurrentPlacement().getContext()); Collection site_groups = site.getGroups(); item.setAllSiteGroups(site_groups);
List access_groups = new Vector(((GroupAwareEntity) entity).getGroups()); item.setEntityGroupRefs(access_groups);
// if(access_groups != null)
// {
//
// Iterator it = access_groups.iterator();
// while(it.hasNext())
// {
// String groupRef = (String) it.next();
// Group group = site.getGroup(groupRef);
// item.addGroup(group.getId());
// }
// }
List inherited_access_groups = new Vector(((GroupAwareEntity) entity).getInheritedGroups()); item.setInheritedGroupRefs(inherited_access_groups);
// if(inherited_access_groups != null)
// {
// Iterator it = inherited_access_groups.iterator();
// while(it.hasNext())
// {
// String groupRef = (String) it.next();
// Group group = site.getGroup(groupRef);
// item.addInheritedGroup(group.getId());
// }
// }
// groups the user may remove access for: depends on whether access is grouped
// on the entity itself, grouped by inheritance, or site-wide
Collection allowedRemoveGroups = null; if(AccessMode.GROUPED == access) { allowedRemoveGroups = ContentHostingService.getGroupsWithRemovePermission(id); Collection more = ContentHostingService.getGroupsWithRemovePermission(collectionId); if(more != null && ! more.isEmpty()) { allowedRemoveGroups.addAll(more); } } else if(AccessMode.GROUPED == inherited_access) { allowedRemoveGroups = ContentHostingService.getGroupsWithRemovePermission(collectionId); } else { allowedRemoveGroups = ContentHostingService.getGroupsWithRemovePermission(ContentHostingService.getSiteCollection(site.getId())); } item.setAllowedRemoveGroupRefs(allowedRemoveGroups);
// same cascade for groups the user may add access for
Collection allowedAddGroups = null; if(AccessMode.GROUPED == access) { allowedAddGroups = ContentHostingService.getGroupsWithAddPermission(id); Collection more = ContentHostingService.getGroupsWithAddPermission(collectionId); if(more != null && ! more.isEmpty()) { allowedAddGroups.addAll(more); } } else if(AccessMode.GROUPED == inherited_access) { allowedAddGroups = ContentHostingService.getGroupsWithAddPermission(collectionId); } else { allowedAddGroups = ContentHostingService.getGroupsWithAddPermission(ContentHostingService.getSiteCollection(site.getId())); } item.setAllowedAddGroupRefs(allowedAddGroups);
Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); }
if(preventPublicDisplay.booleanValue()) { item.setPubviewPossible(false); item.setPubviewInherited(false); item.setPubview(false); } else { item.setPubviewPossible(true);
// find out about pubview
boolean pubviewset = ContentHostingService.isInheritingPubView(id); item.setPubviewInherited(pubviewset); boolean pubview = pubviewset; if (!pubviewset) { pubview = ContentHostingService.isPubView(id); item.setPubview(pubview); } }
// type-specific population: URLs store the link in the body; structured
// artifacts parse their XML body into form properties
if(item.isUrl()) { String url = new String(content); item.setFilename(url); } else if(item.isStructuredArtifact()) { String formtype = properties.getProperty(ResourceProperties.PROP_STRUCTOBJ_TYPE); current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype); current_stack_frame.put(STATE_STACK_EDIT_ITEM, item); setupStructuredObjects(state); Document doc = Xml.readDocumentFromString(new String(content)); Element root = doc.getDocumentElement(); importStructuredArtifact(root, item.getForm()); List flatList = item.getForm().getFlatList(); item.setProperties(flatList); } else if(item.isHtml() || item.isPlaintext() || item.isFileUpload()) { String filename = properties.getProperty(ResourceProperties.PROP_ORIGINAL_FILENAME); if(filename == null) {
// this is a hack to deal with the fact that original filenames were not saved for some time.
if(containerId != null && item.getId().startsWith(containerId) && containerId.length() < item.getId().length()) { filename = item.getId().substring(containerId.length()); } } if(filename == null) { item.setFilename(itemName); } else { item.setFilename(filename); } }
String description = properties.getProperty(ResourceProperties.PROP_DESCRIPTION); item.setDescription(description);
// creation/modification timestamps and users: prefer the typed property and
// fall back to the raw string form when it cannot be parsed/resolved
try { Time creTime = properties.getTimeProperty(ResourceProperties.PROP_CREATION_DATE); String createdTime = creTime.toStringLocalShortDate() + " " + creTime.toStringLocalShort(); item.setCreatedTime(createdTime); } catch(Exception e) { String createdTime = properties.getProperty(ResourceProperties.PROP_CREATION_DATE); item.setCreatedTime(createdTime); }
try { String createdBy = getUserProperty(properties, ResourceProperties.PROP_CREATOR).getDisplayName(); item.setCreatedBy(createdBy); } catch(Exception e) { String createdBy = properties.getProperty(ResourceProperties.PROP_CREATOR); item.setCreatedBy(createdBy); }
try { Time modTime = properties.getTimeProperty(ResourceProperties.PROP_MODIFIED_DATE); String modifiedTime = modTime.toStringLocalShortDate() + " " + modTime.toStringLocalShort(); item.setModifiedTime(modifiedTime); } catch(Exception e) { String modifiedTime = properties.getProperty(ResourceProperties.PROP_MODIFIED_DATE); item.setModifiedTime(modifiedTime); }
try { String modifiedBy = getUserProperty(properties,
ResourceProperties.PROP_MODIFIED_BY).getDisplayName(); item.setModifiedBy(modifiedBy); } catch(Exception e) { String modifiedBy = properties.getProperty(ResourceProperties.PROP_MODIFIED_BY); item.setModifiedBy(modifiedBy); }
String url = ContentHostingService.getUrl(id); item.setUrl(url);
String size = ""; if(properties.getProperty(ResourceProperties.PROP_CONTENT_LENGTH) != null) { size = properties.getPropertyFormatted(ResourceProperties.PROP_CONTENT_LENGTH) + " (" + Validator.getFileSizeWithDividor(properties.getProperty(ResourceProperties.PROP_CONTENT_LENGTH)) +" bytes)"; } item.setSize(size);
// copyright choice/info/alert, defaulting the choice from state when unset
String copyrightStatus = properties.getProperty(properties.getNamePropCopyrightChoice()); if(copyrightStatus == null || copyrightStatus.trim().equals("")) { copyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT); } item.setCopyrightStatus(copyrightStatus);
String copyrightInfo = properties.getPropertyFormatted(properties.getNamePropCopyright()); item.setCopyrightInfo(copyrightInfo);
String copyrightAlert = properties.getProperty(properties.getNamePropCopyrightAlert()); if("true".equalsIgnoreCase(copyrightAlert)) { item.setCopyrightAlert(true); } else { item.setCopyrightAlert(false); }
// gather the display metadata declared by the configured metadata groups;
// date/time widgets read typed Time properties (defaulting to "now" on failure)
Map metadata = new Hashtable(); List groups = (List) state.getAttribute(STATE_METADATA_GROUPS); if(groups != null && ! groups.isEmpty()) { Iterator it = groups.iterator(); while(it.hasNext()) { MetadataGroup group = (MetadataGroup) it.next(); Iterator propIt = group.iterator(); while(propIt.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) propIt.next(); String name = prop.getFullname(); String widget = prop.getWidget(); if(widget.equals(ResourcesMetadata.WIDGET_DATE) || widget.equals(ResourcesMetadata.WIDGET_DATETIME) || widget.equals(ResourcesMetadata.WIDGET_TIME)) { Time time = TimeService.newTime(); try { time = properties.getTimeProperty(name); } catch(Exception ignore) {
// use "now" as default in that case
} metadata.put(name, time); } else { String value = properties.getPropertyFormatted(name); metadata.put(name, value); } } } item.setMetadata(metadata); } else { item.setMetadata(new Hashtable()); }
// for collections only
if(item.isFolder()) {
// setup for quota - ADMIN only, site-root collection only
if (SecurityService.isSuperUser()) { Reference ref = EntityManager.newReference(entity.getReference()); String context = ref.getContext(); String siteCollectionId = ContentHostingService.getSiteCollection(context); if(siteCollectionId.equals(entity.getId())) { item.setCanSetQuota(true); try { long quota = properties.getLongProperty(ResourceProperties.PROP_COLLECTION_BODY_QUOTA); item.setHasQuota(true); item.setQuota(Long.toString(quota)); } catch (Exception any) { } } } } } catch (IdUnusedException e) { addAlert(state, RESOURCE_NOT_EXIST_STRING); } catch (PermissionException e) { addAlert(state, rb.getString("notpermis2") + " " + id + ". " ); } catch(TypeException e) { addAlert(state," " + rb.getString("typeex") + " " + id); } catch(ServerOverloadException e) {
// this represents temporary unavailability of server's filesystem
// for server configured to save resource body in filesystem
addAlert(state, rb.getString("failed")); } catch(RuntimeException e) { logger.warn("ResourcesAction.doEdit ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); }
return item; }
/**
 * This method updates the session state with information needed to create or modify
 * structured artifacts in the resources tool. Among other things, it obtains a list
 * of "forms" available to the user and places that list in state indexed as
 * "STATE_STRUCTOBJ_HOMES". If the current formtype is known (in state indexed as
 * "STATE_STACK_STRUCTOBJ_TYPE"), the list of properties associated with that form type is
 * generated. If we are in a "create" context, the properties are added to each of
 * the items in the list of items indexed as "STATE_STACK_CREATE_ITEMS". If we are in an
 * "edit" context, the properties are added to the current item being edited (a state
 * attribute indexed as "STATE_STACK_EDIT_ITEM"). The metaobj SchemaBean associated with
 * the current form and its root SchemaNode object are also placed in state for later
 * reference.
*/
	public static void setupStructuredObjects(SessionState state)
	{
		Map current_stack_frame = peekAtStack(state);

		// Resolve the current form type: prefer the stack frame, fall back to the
		// tool-wide attribute, and default to "" (meaning "no form type chosen yet").
		String formtype = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE);
		if(formtype == null)
		{
			formtype = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE);
			if(formtype == null)
			{
				formtype = "";
			}
			current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype);
		}

		// Collect every structured-artifact "home" (form definition) known to metaobj.
		HomeFactory factory = (HomeFactory) ComponentManager.get("homeFactory");
		Map homes = factory.getHomes(StructuredArtifactHomeInterface.class);
		List listOfHomes = new Vector();
		Iterator it = homes.keySet().iterator();
		while(it.hasNext())
		{
			String key = (String) it.next();
			try
			{
				Object obj = homes.get(key);
				listOfHomes.add(obj);
			}
			// best-effort: a home that fails to load is simply left out of the list
			catch(Exception ignore) {}
		}
		// NOTE(review): the list is stored in the *stack frame* under the non-stack
		// key STATE_STRUCTOBJ_HOMES -- presumably intentional, but verify readers.
		current_stack_frame.put(STATE_STRUCTOBJ_HOMES, listOfHomes);

		StructuredArtifactHomeInterface home = null;
		SchemaBean rootSchema = null;
		ResourcesMetadata elements = null;

		if(formtype == null || formtype.equals(""))
		{
			formtype = "";
			current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype);
		}
		else if(listOfHomes.isEmpty())
		{
			// hmmm
		}
		else
		{
			// look up the home for the selected form type; treat lookup failure as "none"
			try
			{
				home = (StructuredArtifactHomeInterface) factory.getHome(formtype);
			}
			catch(NullPointerException ignore)
			{
				home = null;
			}
		}
		if(home != null)
		{
			// Build the hierarchical field description for this form and stash the
			// schema + root name in the stack frame for later rendering/validation.
			rootSchema = new SchemaBean(home.getRootNode(), home.getSchema(), formtype, home.getType().getDescription());
			List fields = rootSchema.getFields();
			String docRoot = rootSchema.getFieldName();
			elements = new ResourcesMetadata("", docRoot, "", "", ResourcesMetadata.WIDGET_NESTED, ResourcesMetadata.WIDGET_NESTED);
			elements.setDottedparts(docRoot);
			elements.setContainer(null);
			elements = createHierarchicalList(elements, fields, 1);
			String instruction = home.getInstruction();
			current_stack_frame.put(STATE_STACK_STRUCTOBJ_ROOTNAME, docRoot);
			current_stack_frame.put(STATE_STACK_STRUCT_OBJ_SCHEMA, rootSchema);

			List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
			if(new_items != null)
			{
				// "create" context: attach form metadata to each item being created
				Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
				if(number == null)
				{
					number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
					current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
				}
				if(number == null)
				{
					number = new Integer(1);
					current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
				}
				List flatList = elements.getFlatList();
				for(int i = 0; i < number.intValue(); i++)
				{
					//%%%%% doing this wipes out data that's been stored previously
					EditItem item = (EditItem) new_items.get(i);
					item.setRootname(docRoot);
					item.setFormtype(formtype);
					item.setInstruction(instruction);
					item.setProperties(flatList);
					item.setForm(elements);
				}
				current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
			}
			else if(current_stack_frame.get(STATE_STACK_EDIT_ITEM) != null)
			{
				// "edit" context: attach form metadata to the single item being edited
				EditItem item = (EditItem) current_stack_frame.get(STATE_STACK_EDIT_ITEM);
				item.setRootname(docRoot);
				item.setFormtype(formtype);
				item.setInstruction(instruction);
				item.setForm(elements);
			}
		}

	}	// setupStructuredArtifacts

	/**
	* This method navigates through a list of SchemaNode objects representing fields in a form,
	* creates a ResourcesMetadata object for each field and adds those as nested fields within
	* a root element. If a field contains nested fields, a recursive call adds nested fields
	* in the corresponding ResourcesMetadata object.
	* @param element The root element to which field descriptions are added.
	* @param fields A list of metaobj SchemaBean objects.
	* @param depth The depth of nesting, corresponding to the amount of indent that will be used
	* when displaying the list.
	* @return The updated root element.
*/
	private static ResourcesMetadata createHierarchicalList(ResourcesMetadata element, List fields, int depth)
	{
		List properties = new Vector();
		for(Iterator fieldIt = fields.iterator(); fieldIt.hasNext(); )
		{
			SchemaBean field = (SchemaBean) fieldIt.next();
			SchemaNode node = field.getSchema();
			Map annotations = field.getAnnotations();
			Pattern pattern = null;
			String localname = field.getFieldName();
			String description = field.getDescription();
			// display label defaults to the field description when no annotation is given
			String label = (String) annotations.get("label");
			if(label == null || label.trim().equals(""))
			{
				label = description;
			}
			String richText = (String) annotations.get("isRichText");
			boolean isRichText = richText != null && richText.equalsIgnoreCase(Boolean.TRUE.toString());

			// Map the schema node's Java type onto a display widget and length.
			Class javaclass = node.getObjectType();
			String typename = javaclass.getName();
			String widget = ResourcesMetadata.WIDGET_STRING;
			int length = 0;
			List enumerals = null;
			if(field.getFields().size() > 0)
			{
				// has children: render as a nested group
				widget = ResourcesMetadata.WIDGET_NESTED;
			}
			else if(node.hasEnumerations())
			{
				// enumerated values render as a selection widget backed by strings
				enumerals = node.getEnumeration();
				typename = String.class.getName();
				widget = ResourcesMetadata.WIDGET_ENUM;
			}
			else if(typename.equals(String.class.getName()))
			{
				length = node.getType().getMaxLength();
				String baseType = node.getType().getBaseType();
				if(isRichText)
				{
					widget = ResourcesMetadata.WIDGET_WYSIWYG;
				}
				else if(baseType.trim().equalsIgnoreCase(ResourcesMetadata.NAMESPACE_XSD_ABBREV + ResourcesMetadata.XSD_NORMALIZED_STRING))
				{
					// normalizedString: single-line text input, capped at 50 chars wide
					widget = ResourcesMetadata.WIDGET_STRING;
					if(length > 50)
					{
						length = 50;
					}
				}
				else if(length > 100 || length < 1)
				{
					// very long (or unbounded) strings get a textarea
					widget = ResourcesMetadata.WIDGET_TEXTAREA;
				}
				else if(length > 50)
				{
					length = 50;
				}
				pattern = node.getType().getPattern();
			}
			else if(typename.equals(Date.class.getName()))
			{
				widget = ResourcesMetadata.WIDGET_DATE;
			}
			else if(typename.equals(Boolean.class.getName()))
			{
				widget = ResourcesMetadata.WIDGET_BOOLEAN;
			}
			else if(typename.equals(URI.class.getName()))
			{
				widget = ResourcesMetadata.WIDGET_ANYURI;
			}
			else if(typename.equals(Number.class.getName()))
			{
				widget = ResourcesMetadata.WIDGET_INTEGER;
				//length = node.getType().getTotalDigits();
				length = INTEGER_WIDGET_LENGTH;
			}
			else if(typename.equals(Double.class.getName()))
			{
				widget = ResourcesMetadata.WIDGET_DOUBLE;
				length = DOUBLE_WIDGET_LENGTH;
			}

			// Normalize cardinality: maxOccurs < 1 means unbounded; clamp into sane range.
			int minCard = node.getMinOccurs();
			int maxCard = node.getMaxOccurs();
			if(maxCard < 1)
			{
				maxCard = Integer.MAX_VALUE;
			}
			if(minCard < 0)
			{
				minCard = 0;
			}
			minCard = java.lang.Math.max(0,minCard);
			maxCard = java.lang.Math.max(1,maxCard);
			int currentCount = java.lang.Math.min(java.lang.Math.max(1,minCard),maxCard);

			ResourcesMetadata prop = new ResourcesMetadata(element.getDottedname(), localname, label, description, typename, widget);
			List parts = new Vector(element.getDottedparts());
			parts.add(localname);
			prop.setDottedparts(parts);
			prop.setContainer(element);
			if(ResourcesMetadata.WIDGET_NESTED.equals(widget))
			{
				// recurse into child fields, one level deeper
				prop = createHierarchicalList(prop, field.getFields(), depth + 1);
			}
			prop.setMinCardinality(minCard);
			prop.setMaxCardinality(maxCard);
			prop.setCurrentCount(currentCount);
			prop.setDepth(depth);
			if(enumerals != null)
			{
				prop.setEnumeration(enumerals);
			}
			if(length > 0)
			{
				prop.setLength(length);
			}
			if(pattern != null)
			{
				prop.setPattern(pattern);
			}
			properties.add(prop);
		}
		element.setNested(properties);
		return element;

	}	// createHierarchicalList

	/**
	* This method captures property values from an org.w3c.dom.Document and inserts them
	* into a hierarchical list of ResourcesMetadata objects which describes the structure
	* of the form. The values are added by inserting nested instances into the properties.
	*
	* @param node An org.w3c.dom.Node containing values to be imported.
	* @param property A hierarchical list of ResourcesMetadata objects describing a form
	*/
	public static void importStructuredArtifact(Node node, ResourcesMetadata property)
	{
		if(property == null || node == null)
		{
			return;
		}
		String tagname = property.getLocalname();
		String nodename = node.getLocalName();
		// NOTE(review): this mismatch check is currently a no-op -- the early return
		// inside it is deliberately commented out, so mismatched nodes fall through.
		if(! tagname.equals(nodename))
		{
			// return;
		}
		if(property.getNested().size() == 0)
		{
			// Leaf property: scan child text nodes and coerce by widget type.
			// NOTE(review): value_found is never set true, so every text child is
			// visited and the *last* text node's value wins -- confirm intended.
			boolean value_found = false;
			Node child = node.getFirstChild();
			while(! value_found && child != null)
			{
				if(child.getNodeType() == Node.TEXT_NODE)
				{
					Text value = (Text) child;
					if(ResourcesMetadata.WIDGET_DATE.equals(property.getWidget()) || ResourcesMetadata.WIDGET_DATETIME.equals(property.getWidget()) || ResourcesMetadata.WIDGET_TIME.equals(property.getWidget()))
					{
						// date-ish widgets parse ISO "yyyy-MM-dd" text into a Time
						SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
						Time time = TimeService.newTime();
						try
						{
							Date date = df.parse(value.getData());
							time = TimeService.newTime(date.getTime());
						}
						catch(Exception ignore)
						{
							// use "now" as default in that case
						}
						property.setValue(0, time);
					}
					else if(ResourcesMetadata.WIDGET_ANYURI.equals(property.getWidget()))
					{
						// URI widgets store a content-hosting Reference, not a raw string
						Reference ref = EntityManager.newReference(ContentHostingService.getReference(value.getData()));
						property.setValue(0, ref);
					}
					else
					{
						property.setValue(0, value.getData());
					}
				}
				child = child.getNextSibling();
			}
		}
		else if(node instanceof Element)
		{
			// a nested element
			Iterator nestedIt = property.getNested().iterator();
			while(nestedIt.hasNext())
			{
				ResourcesMetadata prop = (ResourcesMetadata) nestedIt.next();
				NodeList nodes = ((Element) node).getElementsByTagName(prop.getLocalname());
				if(nodes == null)
				{
					continue;
				}
				// add one property instance per matching child element and recurse
				for(int i = 0; i < nodes.getLength(); i++)
				{
					Node n = nodes.item(i);
					if(n != null)
					{
						ResourcesMetadata instance = prop.addInstance();
						if(instance != null)
						{
							importStructuredArtifact(n, instance);
						}
					}
				}
			}
		}

	}	// importStructuredArtifact

	/**
	* Validate a user-supplied URL string: an empty string passes through unchanged,
	* a string without a transport gets "http://" prepended, and anything else must
	* parse as a URL (unknown protocols are accepted if the rest of the URL is valid).
	* @param url the raw URL text from the form
	* @return the (possibly http-prefixed) url
	* @throws MalformedURLException if the input cannot be interpreted as a URL
	*/
	protected static String validateURL(String url) throws MalformedURLException
	{
		if (url.equals (NULL_STRING))
		{
			// ignore the empty url field
		}
		else if (url.indexOf ("://") == -1)
		{
			// if it's missing the transport, add http://
			url = "http://" + url;
		}
		if(!url.equals(NULL_STRING))
		{
			// valid protocol?
			try
			{
				// test to see if the input validates as a URL.
				// Checks string for format only.
URL u = new URL(url); } catch (MalformedURLException e1) { try { Pattern pattern = Pattern.compile("\\s*([a-zA-Z0-9]+)://([^\\n]+)"); Matcher matcher = pattern.matcher(url); if(matcher.matches()) { // if URL has "unknown" protocol, check remaider with // "http" protocol and accept input it that validates. URL test = new URL("http://" + matcher.group(2)); } else { throw e1; } } catch (MalformedURLException e2) { throw e1; } } } return url; } /** * Retrieve values for an item from edit context. Edit context contains just one item at a time of a known type * (folder, file, text document, structured-artifact, etc). This method retrieves the data apppropriate to the * type and updates the values of the EditItem stored as the STATE_STACK_EDIT_ITEM attribute in state. * @param state * @param params * @param item */ protected static void captureValues(SessionState state, ParameterParser params) { Map current_stack_frame = peekAtStack(state); EditItem item = (EditItem) current_stack_frame.get(STATE_STACK_EDIT_ITEM); Set alerts = (Set) state.getAttribute(STATE_EDIT_ALERTS); if(alerts == null) { alerts = new HashSet(); state.setAttribute(STATE_EDIT_ALERTS, alerts); } String flow = params.getString("flow"); boolean intentChanged = "intentChanged".equals(flow); String check_fileName = params.getString("check_fileName"); boolean expectFile = "true".equals(check_fileName); String intent = params.getString("intent"); String oldintent = (String) current_stack_frame.get(STATE_STACK_EDIT_INTENT); boolean upload_file = expectFile && item.isFileUpload() || ((item.isHtml() || item.isPlaintext()) && !intentChanged && INTENT_REPLACE_FILE.equals(intent) && INTENT_REPLACE_FILE.equals(oldintent)); boolean revise_file = (item.isHtml() || item.isPlaintext()) && !intentChanged && INTENT_REVISE_FILE.equals(intent) && INTENT_REVISE_FILE.equals(oldintent); String name = params.getString("name"); if(name == null || "".equals(name.trim())) { alerts.add(rb.getString("titlenotnull")); // 
addAlert(state, rb.getString("titlenotnull")); } else { item.setName(name.trim()); } String description = params.getString("description"); if(description == null) { item.setDescription(""); } else { item.setDescription(description); } item.setContentHasChanged(false); if(upload_file) { String max_file_size_mb = (String) state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE); int max_bytes = 1096 * 1096; try { max_bytes = Integer.parseInt(max_file_size_mb) * 1096 * 1096; } catch(Exception e) { // if unable to parse an integer from the value // in the properties file, use 1 MB as a default max_file_size_mb = "1"; max_bytes = 1096 * 1096; } /* // params.getContentLength() returns m_req.getContentLength() if(params.getContentLength() >= max_bytes) { alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); } else */ { // check for file replacement FileItem fileitem = params.getFileItem("fileName"); if(fileitem == null) { // "The user submitted a file to upload but it was too big!" alerts.clear(); alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); //item.setMissing("fileName"); } else if (fileitem.getFileName() == null || fileitem.getFileName().length() == 0) { if(item.getContent() == null || item.getContent().length <= 0) { // "The user submitted the form, but didn't select a file to upload!" alerts.add(rb.getString("choosefile") + ". 
"); //item.setMissing("fileName"); } } else if (fileitem.getFileName().length() > 0) { String filename = Validator.getFileName(fileitem.getFileName()); byte[] bytes = fileitem.get(); String contenttype = fileitem.getContentType(); if(bytes.length >= max_bytes) { alerts.clear(); alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); // item.setMissing("fileName"); } else if(bytes.length > 0) { item.setContent(bytes); item.setContentHasChanged(true); item.setMimeType(contenttype); item.setFilename(filename); } } } } else if(revise_file) { // check for input from editor (textarea) String content = params.getString("content"); if(content != null) { item.setContent(content); item.setContentHasChanged(true); } } else if(item.isUrl()) { String url = params.getString("Url"); if(url == null || url.trim().equals("")) { item.setFilename(""); alerts.add(rb.getString("validurl")); } else { // valid protocol? item.setFilename(url); try { // test format of input URL u = new URL(url); } catch (MalformedURLException e1) { try { // if URL did not validate, check whether the problem was an // unrecognized protocol, and accept input if that's the case. 
Pattern pattern = Pattern.compile("\\s*([a-zA-Z0-9]+)://([^\\n]+)"); Matcher matcher = pattern.matcher(url); if(matcher.matches()) { URL test = new URL("http://" + matcher.group(2)); } else { url = "http://" + url; URL test = new URL(url); item.setFilename(url); } } catch (MalformedURLException e2) { // invalid url alerts.add(rb.getString("validurl")); } } } } else if(item.isFolder()) { if(item.canSetQuota()) { // read the quota fields String setQuota = params.getString("setQuota"); boolean hasQuota = params.getBoolean("hasQuota"); item.setHasQuota(hasQuota); if(hasQuota) { int q = params.getInt("quota"); item.setQuota(Integer.toString(q)); } } } else if(item.isStructuredArtifact()) { String formtype = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE); if(formtype == null) { formtype = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE); if(formtype == null) { formtype = ""; } current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype); } String formtype_check = params.getString("formtype"); if(formtype_check == null || formtype_check.equals("")) { alerts.add(rb.getString("type")); item.setMissing("formtype"); } else if(formtype_check.equals(formtype)) { item.setFormtype(formtype); capturePropertyValues(params, item, item.getProperties()); } } if(! item.isFolder() && ! item.isStructuredArtifact() && ! item.isUrl()) { String mime_category = params.getString("mime_category"); String mime_subtype = params.getString("mime_subtype"); if(mime_category != null && mime_subtype != null) { String mimetype = mime_category + "/" + mime_subtype; if(! 
mimetype.equals(item.getMimeType())) { item.setMimeType(mimetype); item.setContentTypeHasChanged(true); } } } if(item.isFileUpload() || item.isHtml() || item.isPlaintext()) { BasicRightsAssignment rightsObj = item.getRights(); rightsObj.captureValues(params); boolean usingCreativeCommons = state.getAttribute(STATE_USING_CREATIVE_COMMONS) != null && state.getAttribute(STATE_USING_CREATIVE_COMMONS).equals(Boolean.TRUE.toString()); if(usingCreativeCommons) { String ccOwnership = params.getString("ccOwnership"); if(ccOwnership != null) { item.setRightsownership(ccOwnership); } String ccTerms = params.getString("ccTerms"); if(ccTerms != null) { item.setLicense(ccTerms); } String ccCommercial = params.getString("ccCommercial"); if(ccCommercial != null) { item.setAllowCommercial(ccCommercial); } String ccModification = params.getString("ccModification"); if(ccCommercial != null) { item.setAllowModifications(ccModification); } String ccRightsYear = params.getString("ccRightsYear"); if(ccRightsYear != null) { item.setRightstyear(ccRightsYear); } String ccRightsOwner = params.getString("ccRightsOwner"); if(ccRightsOwner != null) { item.setRightsowner(ccRightsOwner); } /* ccValues.ccOwner = new Array(); ccValues.myRights = new Array(); ccValues.otherRights = new Array(); ccValues.ccCommercial = new Array(); ccValues.ccModifications = new Array(); ccValues.ccRightsYear = new Array(); ccValues.ccRightsOwner = new Array(); */ } else { // check for copyright status // check for copyright info // check for copyright alert String copyrightStatus = StringUtil.trimToNull(params.getString ("copyrightStatus")); String copyrightInfo = StringUtil.trimToNull(params.getCleanString ("copyrightInfo")); String copyrightAlert = StringUtil.trimToNull(params.getString("copyrightAlert")); if (copyrightStatus != null) { if (state.getAttribute(COPYRIGHT_NEW_COPYRIGHT) != null && copyrightStatus.equals(state.getAttribute(COPYRIGHT_NEW_COPYRIGHT))) { if (copyrightInfo != null) { 
item.setCopyrightInfo( copyrightInfo ); } else { alerts.add(rb.getString("specifycp2")); // addAlert(state, rb.getString("specifycp2")); } } else if (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT) != null && copyrightStatus.equals (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT))) { item.setCopyrightInfo((String) state.getAttribute (STATE_MY_COPYRIGHT)); } item.setCopyrightStatus( copyrightStatus ); } item.setCopyrightAlert(copyrightAlert != null); } } if(! RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } String access_mode = params.getString("access_mode"); if(access_mode == null || AccessMode.GROUPED.toString().equals(access_mode)) { // we inherit more than one group and must check whether group access changes at this item String[] access_groups = params.getStrings("access_groups"); SortedSet new_groups = new TreeSet(); if(access_groups != null) { new_groups.addAll(Arrays.asList(access_groups)); } new_groups = item.convertToRefs(new_groups); Collection inh_grps = item.getInheritedGroupRefs(); boolean groups_are_inherited = (new_groups.size() == inh_grps.size()) && inh_grps.containsAll(new_groups); if(groups_are_inherited) { new_groups.clear(); item.setEntityGroupRefs(new_groups); item.setAccess(AccessMode.INHERITED.toString()); } else { item.setEntityGroupRefs(new_groups); item.setAccess(AccessMode.GROUPED.toString()); } item.setPubview(false); } else if(PUBLIC_ACCESS.equals(access_mode)) { if(! preventPublicDisplay.booleanValue() && ! 
item.isPubviewInherited()) { item.setPubview(true); item.setAccess(AccessMode.INHERITED.toString()); } } else if(AccessMode.INHERITED.toString().equals(access_mode)) { item.setAccess(AccessMode.INHERITED.toString()); item.clearGroups(); item.setPubview(false); } } int noti = NotificationService.NOTI_NONE; // %%STATE_MODE_RESOURCES%% if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { // set noti to none if in dropbox mode noti = NotificationService.NOTI_NONE; } else { // read the notification options String notification = params.getString("notify"); if ("r".equals(notification)) { noti = NotificationService.NOTI_REQUIRED; } else if ("o".equals(notification)) { noti = NotificationService.NOTI_OPTIONAL; } } item.setNotification(noti); List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS); if(metadataGroups != null && ! metadataGroups.isEmpty()) { Iterator groupIt = metadataGroups.iterator(); while(groupIt.hasNext()) { MetadataGroup group = (MetadataGroup) groupIt.next(); if(group.isShowing()) { Iterator propIt = group.iterator(); while(propIt.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) propIt.next(); String propname = prop.getFullname(); if(ResourcesMetadata.WIDGET_DATE.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_DATETIME.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_TIME.equals(prop.getWidget())) { int year = 0; int month = 0; int day = 0; int hour = 0; int minute = 0; int second = 0; int millisecond = 0; String ampm = ""; if(prop.getWidget().equals(ResourcesMetadata.WIDGET_DATE) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME)) { year = params.getInt(propname + "_year", year); month = params.getInt(propname + "_month", month); day = params.getInt(propname + "_day", day); } if(prop.getWidget().equals(ResourcesMetadata.WIDGET_TIME) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME)) { hour = params.getInt(propname + "_hour", hour); minute = 
params.getInt(propname + "_minute", minute); second = params.getInt(propname + "_second", second); millisecond = params.getInt(propname + "_millisecond", millisecond); ampm = params.getString(propname + "_ampm").trim(); if("pm".equalsIgnoreCase("ampm")) { if(hour < 12) { hour += 12; } } else if(hour == 12) { hour = 0; } } if(hour > 23) { hour = hour % 24; day++; } Time value = TimeService.newTimeLocal(year, month, day, hour, minute, second, millisecond); item.setMetadataItem(propname,value); } else { String value = params.getString(propname); if(value != null) { item.setMetadataItem(propname, value); } } } } } } current_stack_frame.put(STATE_STACK_EDIT_ITEM, item); state.setAttribute(STATE_EDIT_ALERTS, alerts); } // captureValues /** * Retrieve from an html form all the values needed to create a new resource * @param item The EditItem object in which the values are temporarily stored. * @param index The index of the item (used as a suffix in the name of the form element) * @param state * @param params * @param markMissing Indicates whether to mark required elements if they are missing. 
* @return */ public static Set captureValues(EditItem item, int index, SessionState state, ParameterParser params, boolean markMissing) { Map current_stack_frame = peekAtStack(state); Set item_alerts = new HashSet(); boolean blank_entry = true; item.clearMissing(); String name = params.getString("name" + index); if(name == null || name.trim().equals("")) { if(markMissing) { item_alerts.add(rb.getString("titlenotnull")); item.setMissing("name"); } item.setName(""); // addAlert(state, rb.getString("titlenotnull")); } else { item.setName(name); blank_entry = false; } String description = params.getString("description" + index); if(description == null || description.trim().equals("")) { item.setDescription(""); } else { item.setDescription(description); blank_entry = false; } item.setContentHasChanged(false); if(item.isFileUpload()) { String max_file_size_mb = (String) state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE); int max_bytes = 1096 * 1096; try { max_bytes = Integer.parseInt(max_file_size_mb) * 1096 * 1096; } catch(Exception e) { // if unable to parse an integer from the value // in the properties file, use 1 MB as a default max_file_size_mb = "1"; max_bytes = 1096 * 1096; } /* // params.getContentLength() returns m_req.getContentLength() if(params.getContentLength() >= max_bytes) { item_alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); } else */ { // check for file replacement FileItem fileitem = null; try { fileitem = params.getFileItem("fileName" + index); } catch(Exception e) { // this is an error in Firefox, Mozilla and Netscape // "The user didn't select a file to upload!" if(item.getContent() == null || item.getContent().length <= 0) { item_alerts.add(rb.getString("choosefile") + " " + (index + 1) + ". "); item.setMissing("fileName"); } } if(fileitem == null) { // "The user submitted a file to upload but it was too big!" 
item_alerts.clear(); item_alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); item.setMissing("fileName"); } else if (fileitem.getFileName() == null || fileitem.getFileName().length() == 0) { if(item.getContent() == null || item.getContent().length <= 0) { // "The user submitted the form, but didn't select a file to upload!" item_alerts.add(rb.getString("choosefile") + " " + (index + 1) + ". "); item.setMissing("fileName"); } } else if (fileitem.getFileName().length() > 0) { String filename = Validator.getFileName(fileitem.getFileName()); byte[] bytes = fileitem.get(); String contenttype = fileitem.getContentType(); if(bytes.length >= max_bytes) { item_alerts.clear(); item_alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); item.setMissing("fileName"); } else if(bytes.length > 0) { item.setContent(bytes); item.setContentHasChanged(true); item.setMimeType(contenttype); item.setFilename(filename); blank_entry = false; } else { item_alerts.add(rb.getString("choosefile") + " " + (index + 1) + ". 
"); item.setMissing("fileName"); } } } } else if(item.isPlaintext()) { // check for input from editor (textarea) String content = params.getString("content" + index); if(content != null) { item.setContentHasChanged(true); item.setContent(content); blank_entry = false; } item.setMimeType(MIME_TYPE_DOCUMENT_PLAINTEXT); } else if(item.isHtml()) { // check for input from editor (textarea) String content = params.getCleanString("content" + index); StringBuffer alertMsg = new StringBuffer(); content = FormattedText.processHtmlDocument(content, alertMsg); if (alertMsg.length() > 0) { item_alerts.add(alertMsg.toString()); } if(content != null && !content.equals("")) { item.setContent(content); item.setContentHasChanged(true); blank_entry = false; } item.setMimeType(MIME_TYPE_DOCUMENT_HTML); } else if(item.isUrl()) { item.setMimeType(ResourceProperties.TYPE_URL); String url = params.getString("Url" + index); if(url == null || url.trim().equals("")) { item.setFilename(""); item_alerts.add(rb.getString("specifyurl")); item.setMissing("Url"); } else { item.setFilename(url); blank_entry = false; // is protocol supplied and, if so, is it recognized? try { // check format of input URL u = new URL(url); } catch (MalformedURLException e1) { try { // if URL did not validate, check whether the problem was an // unrecognized protocol, and accept input if that's the case. 
Pattern pattern = Pattern.compile("\\s*([a-zA-Z0-9]+)://([^\\n]+)"); Matcher matcher = pattern.matcher(url); if(matcher.matches()) { URL test = new URL("http://" + matcher.group(2)); } else { url = "http://" + url; URL test = new URL(url); item.setFilename(url); } } catch (MalformedURLException e2) { // invalid url item_alerts.add(rb.getString("validurl")); item.setMissing("Url"); } } } } else if(item.isStructuredArtifact()) { String formtype = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE); if(formtype == null) { formtype = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE); if(formtype == null) { formtype = ""; } current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype); } String formtype_check = params.getString("formtype"); if(formtype_check == null || formtype_check.equals("")) { item_alerts.add("Must select a form type"); item.setMissing("formtype"); } else if(formtype_check.equals(formtype)) { item.setFormtype(formtype); capturePropertyValues(params, item, item.getProperties()); // blank_entry = false; } item.setMimeType(MIME_TYPE_STRUCTOBJ); } if(item.isFileUpload() || item.isHtml() || item.isPlaintext()) { BasicRightsAssignment rightsObj = item.getRights(); rightsObj.captureValues(params); boolean usingCreativeCommons = state.getAttribute(STATE_USING_CREATIVE_COMMONS) != null && state.getAttribute(STATE_USING_CREATIVE_COMMONS).equals(Boolean.TRUE.toString()); if(usingCreativeCommons) { String ccOwnership = params.getString("ccOwnership" + index); if(ccOwnership != null) { item.setRightsownership(ccOwnership); } String ccTerms = params.getString("ccTerms" + index); if(ccTerms != null) { item.setLicense(ccTerms); } String ccCommercial = params.getString("ccCommercial" + index); if(ccCommercial != null) { item.setAllowCommercial(ccCommercial); } String ccModification = params.getString("ccModification" + index); if(ccCommercial != null) { item.setAllowModifications(ccModification); } String ccRightsYear = params.getString("ccRightsYear" + 
index); if(ccRightsYear != null) { item.setRightstyear(ccRightsYear); } String ccRightsOwner = params.getString("ccRightsOwner" + index); if(ccRightsOwner != null) { item.setRightsowner(ccRightsOwner); } /* ccValues.ccOwner = new Array(); ccValues.myRights = new Array(); ccValues.otherRights = new Array(); ccValues.ccCommercial = new Array(); ccValues.ccModifications = new Array(); ccValues.ccRightsYear = new Array(); ccValues.ccRightsOwner = new Array(); */ } else { // check for copyright status // check for copyright info // check for copyright alert String copyrightStatus = StringUtil.trimToNull(params.getString ("copyright" + index)); String copyrightInfo = StringUtil.trimToNull(params.getCleanString ("newcopyright" + index)); String copyrightAlert = StringUtil.trimToNull(params.getString("copyrightAlert" + index)); if (copyrightStatus != null) { if (state.getAttribute(COPYRIGHT_NEW_COPYRIGHT) != null && copyrightStatus.equals(state.getAttribute(COPYRIGHT_NEW_COPYRIGHT))) { if (copyrightInfo != null) { item.setCopyrightInfo( copyrightInfo ); } else { item_alerts.add(rb.getString("specifycp2")); // addAlert(state, rb.getString("specifycp2")); } } else if (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT) != null && copyrightStatus.equals (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT))) { item.setCopyrightInfo((String) state.getAttribute (STATE_MY_COPYRIGHT)); } item.setCopyrightStatus( copyrightStatus ); } item.setCopyrightAlert(copyrightAlert != null); } } if(! 
RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } String access_mode = params.getString("access_mode" + index); if(access_mode == null || AccessMode.GROUPED.toString().equals(access_mode)) { // we inherit more than one group and must check whether group access changes at this item String[] access_groups = params.getStrings("access_groups" + index); SortedSet new_groups = new TreeSet(); if(access_groups != null) { new_groups.addAll(Arrays.asList(access_groups)); } new_groups = item.convertToRefs(new_groups); Collection inh_grps = item.getInheritedGroupRefs(); boolean groups_are_inherited = (new_groups.size() == inh_grps.size()) && inh_grps.containsAll(new_groups); if(groups_are_inherited) { new_groups.clear(); item.setEntityGroupRefs(new_groups); item.setAccess(AccessMode.INHERITED.toString()); } else { item.setEntityGroupRefs(new_groups); item.setAccess(AccessMode.GROUPED.toString()); } item.setPubview(false); } else if(PUBLIC_ACCESS.equals(access_mode)) { if(! preventPublicDisplay.booleanValue() && ! 
item.isPubviewInherited()) { item.setPubview(true); item.setAccess(AccessMode.INHERITED.toString()); } } else if(AccessMode.INHERITED.toString().equals(access_mode) ) { item.setAccess(AccessMode.INHERITED.toString()); item.clearGroups(); item.setPubview(false); } } int noti = NotificationService.NOTI_NONE; // %%STATE_MODE_RESOURCES%% if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { // set noti to none if in dropbox mode noti = NotificationService.NOTI_NONE; } else { // read the notification options String notification = params.getString("notify" + index); if ("r".equals(notification)) { noti = NotificationService.NOTI_REQUIRED; } else if ("o".equals(notification)) { noti = NotificationService.NOTI_OPTIONAL; } } item.setNotification(noti); List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS); if(metadataGroups != null && ! metadataGroups.isEmpty()) { Iterator groupIt = metadataGroups.iterator(); while(groupIt.hasNext()) { MetadataGroup group = (MetadataGroup) groupIt.next(); if(item.isGroupShowing(group.getName())) { Iterator propIt = group.iterator(); while(propIt.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) propIt.next(); String propname = prop.getFullname(); if(ResourcesMetadata.WIDGET_DATE.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_DATETIME.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_TIME.equals(prop.getWidget())) { int year = 0; int month = 0; int day = 0; int hour = 0; int minute = 0; int second = 0; int millisecond = 0; String ampm = ""; if(prop.getWidget().equals(ResourcesMetadata.WIDGET_DATE) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME)) { year = params.getInt(propname + "_" + index + "_year", year); month = params.getInt(propname + "_" + index + "_month", month); day = params.getInt(propname + "_" + index + "_day", day); } if(prop.getWidget().equals(ResourcesMetadata.WIDGET_TIME) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME)) 
{ hour = params.getInt(propname + "_" + index + "_hour", hour); minute = params.getInt(propname + "_" + index + "_minute", minute); second = params.getInt(propname + "_" + index + "_second", second); millisecond = params.getInt(propname + "_" + index + "_millisecond", millisecond); ampm = params.getString(propname + "_" + index + "_ampm").trim(); if("pm".equalsIgnoreCase(ampm)) { if(hour < 12) { hour += 12; } } else if(hour == 12) { hour = 0; } } if(hour > 23) { hour = hour % 24; day++; } Time value = TimeService.newTimeLocal(year, month, day, hour, minute, second, millisecond); item.setMetadataItem(propname,value); } else { String value = params.getString(propname + "_" + index); if(value != null) { item.setMetadataItem(propname, value); } } } } } } item.markAsBlank(blank_entry); return item_alerts; } /** * Retrieve values for one or more items from create context. Create context contains up to ten items at a time * all of the same type (folder, file, text document, structured-artifact, etc). This method retrieves the data * apppropriate to the type and updates the values of the EditItem objects stored as the STATE_STACK_CREATE_ITEMS * attribute in state. If the third parameter is "true", missing/incorrect user inputs will generate error messages * and attach flags to the input elements. * @param state * @param params * @param markMissing Should this method generate error messages and add flags for missing/incorrect user inputs? 
*/
// Reads the create-context form values for up to CREATE_MAX_ITEMS items of one type,
// updating the EditItem list held in STATE_STACK_CREATE_ITEMS and collecting alerts
// in STATE_CREATE_ALERTS.  (Javadoc for this method opens on the preceding line.)
protected static void captureMultipleValues(SessionState state, ParameterParser params, boolean markMissing)
{
	Map current_stack_frame = peekAtStack(state);
	// Number of item sub-forms on the page: stack frame first, then portlet state, then 1.
	Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
	if(number == null)
	{
		number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
		current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
	}
	if(number == null)
	{
		number = new Integer(1);
		current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
	}
	List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
	if(new_items == null)
	{
		// First pass for this stack frame: build the blank EditItem list.
		String collectionId = params.getString("collectionId");
		String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
		if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
		{
			defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
			state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
		}
		String itemType = (String) current_stack_frame.get(STATE_STACK_CREATE_TYPE);
		if(itemType == null || itemType.trim().equals(""))
		{
			itemType = (String) state.getAttribute(STATE_CREATE_TYPE);
			if(itemType == null || itemType.trim().equals(""))
			{
				itemType = TYPE_UPLOAD;
			}
			current_stack_frame.put(STATE_STACK_CREATE_TYPE, itemType);
		}
		String encoding = (String) state.getAttribute(STATE_ENCODING);
		Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
		new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);
		current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
	}
	Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
	if(alerts == null)
	{
		alerts = new HashSet();
		state.setAttribute(STATE_CREATE_ALERTS, alerts);
	}
	int actualCount = 0;
	Set first_item_alerts = null;
	String max_file_size_mb = (String) state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE);
	// FIX: one megabyte is 1024 * 1024 bytes; the original multiplied by 1096 * 1096.
	// NOTE(review): max_bytes is currently only referenced by the commented-out size
	// check below, so this correction has no live behavioral effect.
	int max_bytes = 1024 * 1024;
	try
	{
		max_bytes = Integer.parseInt(max_file_size_mb) * 1024 * 1024;
	}
	catch(Exception e)
	{
		// if unable to parse an integer from the value
		// in the properties file, use 1 MB as a default
		max_file_size_mb = "1";
		max_bytes = 1024 * 1024;
	}
	/*
	// params.getContentLength() returns m_req.getContentLength()
	if(params.getContentLength() > max_bytes)
	{
		alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2"));
		state.setAttribute(STATE_CREATE_ALERTS, alerts);
		return;
	}
	*/
	// Capture each sub-form; only non-blank entries contribute alerts and count.
	for(int i = 0; i < number.intValue(); i++)
	{
		EditItem item = (EditItem) new_items.get(i);
		Set item_alerts = captureValues(item, i, state, params, markMissing);
		if(i == 0)
		{
			first_item_alerts = item_alerts;
		}
		else if(item.isBlank())
		{
			item.clearMissing();
		}
		if(! item.isBlank())
		{
			alerts.addAll(item_alerts);
			actualCount ++;
		}
	}
	if(actualCount > 0)
	{
		EditItem item = (EditItem) new_items.get(0);
		if(item.isBlank())
		{
			item.clearMissing();
		}
	}
	else if(markMissing)
	{
		// Nothing was entered at all: surface the first form's alerts so the user
		// sees what was required.
		alerts.addAll(first_item_alerts);
	}
	state.setAttribute(STATE_CREATE_ALERTS, alerts);
	current_stack_frame.put(STATE_STACK_CREATE_ACTUAL_COUNT, Integer.toString(actualCount));
}	// captureMultipleValues

// Copies widget-specific form parameters into the ResourcesMetadata properties of an
// item (boolean, date/time, anyURI, or plain string widgets).
protected static void capturePropertyValues(ParameterParser params, EditItem item, List properties)
{
	// use the item's properties if they're not supplied
	if(properties == null)
	{
		properties = item.getProperties();
	}
	// if max cardinality > 1, value is a list (Iterate over members of list)
	// else value is an object, not a list
	// if type is nested, object is a Map (iterate over name-value pairs for the properties of the nested object)
	// else object is type to store value, usually a string or a date/time
	Iterator it = properties.iterator();
	while(it.hasNext())
	{
		ResourcesMetadata prop = (ResourcesMetadata) it.next();
		String propname = prop.getDottedname();
		if(ResourcesMetadata.WIDGET_NESTED.equals(prop.getWidget()))
		{
			// do nothing
		}
		else if(ResourcesMetadata.WIDGET_BOOLEAN.equals(prop.getWidget()))
		{
			String value =
params.getString(propname);
// Unchecked checkboxes post no parameter at all, so a null value means false.
if(value == null || Boolean.FALSE.toString().equals(value))
{
	prop.setValue(0, Boolean.FALSE.toString());
}
else
{
	prop.setValue(0, Boolean.TRUE.toString());
}
}
else if(ResourcesMetadata.WIDGET_DATE.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_DATETIME.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_TIME.equals(prop.getWidget()))
{
	// Date/time widgets post separate year/month/day/hour/... parameters which are
	// reassembled into a single Time value below.
	int year = 0;
	int month = 0;
	int day = 0;
	int hour = 0;
	int minute = 0;
	int second = 0;
	int millisecond = 0;
	String ampm = "";
	if(prop.getWidget().equals(ResourcesMetadata.WIDGET_DATE) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME))
	{
		year = params.getInt(propname + "_year", year);
		month = params.getInt(propname + "_month", month);
		day = params.getInt(propname + "_day", day);
	}
	if(prop.getWidget().equals(ResourcesMetadata.WIDGET_TIME) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME))
	{
		hour = params.getInt(propname + "_hour", hour);
		minute = params.getInt(propname + "_minute", minute);
		second = params.getInt(propname + "_second", second);
		millisecond = params.getInt(propname + "_millisecond", millisecond);
		ampm = params.getString(propname + "_ampm");
		// Convert 12-hour form input to 24-hour: "pm" adds 12, "12 am" becomes 0.
		// ("pm".equalsIgnoreCase(null) is simply false, so a missing ampm is safe.)
		if("pm".equalsIgnoreCase(ampm))
		{
			if(hour < 12)
			{
				hour += 12;
			}
		}
		else if(hour == 12)
		{
			hour = 0;
		}
	}
	// Roll an out-of-range hour forward into the next day.
	if(hour > 23)
	{
		hour = hour % 24;
		day++;
	}
	Time value = TimeService.newTimeLocal(year, month, day, hour, minute, second, millisecond);
	prop.setValue(0, value);
}
else if(ResourcesMetadata.WIDGET_ANYURI.equals(prop.getWidget()))
{
	// anyURI widgets store a Reference to the content entity rather than the raw string.
	String value = params.getString(propname);
	if(value != null && !
value.trim().equals("")) { Reference ref = EntityManager.newReference(ContentHostingService.getReference(value)); prop.setValue(0, ref); } } else { String value = params.getString(propname); if(value != null) { prop.setValue(0, value); } } } } // capturePropertyValues /** * Modify the properties */ public static void doSavechanges ( RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); ParameterParser params = data.getParameters (); String flow = params.getString("flow").trim(); if(flow == null || "cancel".equals(flow)) { doCancel(data); return; } // get values from form and update STATE_STACK_EDIT_ITEM attribute in state captureValues(state, params); Map current_stack_frame = peekAtStack(state); EditItem item = (EditItem) current_stack_frame.get(STATE_STACK_EDIT_ITEM); if(flow.equals("showMetadata")) { doShow_metadata(data); return; } else if(flow.equals("hideMetadata")) { doHide_metadata(data); return; } else if(flow.equals("intentChanged")) { doToggle_intent(data); return; } else if(flow.equals("addInstance")) { String field = params.getString("field"); addInstance(field, item.getProperties()); ResourcesMetadata form = item.getForm(); List flatList = form.getFlatList(); item.setProperties(flatList); return; } else if(flow.equals("linkResource")) { // captureMultipleValues(state, params, false); createLink(data, state); //Map new_stack_frame = pushOnStack(state); //new_stack_frame.put(ResourcesAction.STATE_RESOURCES_HELPER_MODE, ResourcesAction.MODE_ATTACHMENT_SELECT); state.setAttribute(ResourcesAction.STATE_RESOURCES_HELPER_MODE, ResourcesAction.MODE_ATTACHMENT_SELECT); return; } Set alerts = (Set) state.getAttribute(STATE_EDIT_ALERTS); if(item.isStructuredArtifact()) { SchemaBean bean = (SchemaBean) current_stack_frame.get(STATE_STACK_STRUCT_OBJ_SCHEMA); SaveArtifactAttempt attempt = new SaveArtifactAttempt(item, bean.getSchema()); validateStructuredArtifact(attempt); Iterator errorIt = 
attempt.getErrors().iterator(); while(errorIt.hasNext()) { ValidationError error = (ValidationError) errorIt.next(); alerts.add(error.getDefaultMessage()); } } if(alerts.isEmpty()) { // populate the property list try { // get an edit ContentCollectionEdit cedit = null; ContentResourceEdit redit = null; GroupAwareEdit gedit = null; ResourcePropertiesEdit pedit = null; if(item.isFolder()) { cedit = ContentHostingService.editCollection(item.getId()); gedit = cedit; pedit = cedit.getPropertiesEdit(); } else { redit = ContentHostingService.editResource(item.getId()); gedit = redit; pedit = redit.getPropertiesEdit(); } try { Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } if(! preventPublicDisplay.booleanValue()) { ContentHostingService.setPubView(gedit.getId(), item.isPubview()); } if(! AccessMode.GROUPED.toString().equals(item.getAccess()) && AccessMode.GROUPED == gedit.getAccess()) { gedit.clearGroupAccess(); } else if(AccessMode.GROUPED.toString().equals(item.getAccess()) && ! item.getEntityGroupRefs().isEmpty()) { gedit.setGroupAccess(item.getEntityGroupRefs()); } else { gedit.clearGroupAccess(); } } catch(InconsistentException e) { // TODO: Should this be reported to user?? 
logger.warn("ResourcesAction.doSavechanges ***** InconsistentException changing groups ***** " + e.getMessage()); } if(item.isFolder()) { } else { if(item.isUrl()) { redit.setContent(item.getFilename().getBytes()); } else if(item.isStructuredArtifact()) { redit.setContentType(item.getMimeType()); redit.setContent(item.getContent()); } else if(item.contentHasChanged()) { redit.setContentType(item.getMimeType()); redit.setContent(item.getContent()); } else if(item.contentTypeHasChanged()) { redit.setContentType(item.getMimeType()); } BasicRightsAssignment rightsObj = item.getRights(); rightsObj.addResourceProperties(pedit); String copyright = StringUtil.trimToNull(params.getString ("copyright")); String newcopyright = StringUtil.trimToNull(params.getCleanString (NEW_COPYRIGHT)); String copyrightAlert = StringUtil.trimToNull(params.getString("copyrightAlert")); if (copyright != null) { if (state.getAttribute(COPYRIGHT_NEW_COPYRIGHT) != null && copyright.equals(state.getAttribute(COPYRIGHT_NEW_COPYRIGHT))) { if (newcopyright != null) { pedit.addProperty (ResourceProperties.PROP_COPYRIGHT, newcopyright); } else { alerts.add(rb.getString("specifycp2")); // addAlert(state, rb.getString("specifycp2")); } } else if (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT) != null && copyright.equals (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT))) { String mycopyright = (String) state.getAttribute (STATE_MY_COPYRIGHT); pedit.addProperty (ResourceProperties.PROP_COPYRIGHT, mycopyright); } pedit.addProperty(ResourceProperties.PROP_COPYRIGHT_CHOICE, copyright); } if (copyrightAlert != null) { pedit.addProperty (ResourceProperties.PROP_COPYRIGHT_ALERT, copyrightAlert); } else { pedit.removeProperty (ResourceProperties.PROP_COPYRIGHT_ALERT); } } if (!(item.isFolder() && (item.getId().equals ((String) state.getAttribute (STATE_HOME_COLLECTION_ID))))) { pedit.addProperty (ResourceProperties.PROP_DISPLAY_NAME, item.getName()); } // the home collection's title is not modificable 
pedit.addProperty (ResourceProperties.PROP_DESCRIPTION, item.getDescription()); // deal with quota (collections only) if ((cedit != null) && item.canSetQuota()) { if (item.hasQuota()) { // set the quota pedit.addProperty(ResourceProperties.PROP_COLLECTION_BODY_QUOTA, item.getQuota()); } else { // clear the quota pedit.removeProperty(ResourceProperties.PROP_COLLECTION_BODY_QUOTA); } } List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS); state.setAttribute(STATE_EDIT_ALERTS, alerts); saveMetadata(pedit, metadataGroups, item); alerts = (Set) state.getAttribute(STATE_EDIT_ALERTS); // commit the change if (cedit != null) { ContentHostingService.commitCollection(cedit); } else { ContentHostingService.commitResource(redit, item.getNotification()); } current_stack_frame.put(STATE_STACK_EDIT_INTENT, INTENT_REVISE_FILE); Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } // need to refresh collection containing current edit item make changes show up String containerId = ContentHostingService.getContainingCollectionId(item.getId()); Map expandedCollections = (Map) state.getAttribute(STATE_EXPANDED_COLLECTIONS); Object old = expandedCollections.remove(containerId); if (old != null) { try { ContentCollection container = ContentHostingService.getCollection(containerId); expandedCollections.put(containerId, container); } catch (Throwable ignore){} } if(item.isFolder()) { old = expandedCollections.remove(item.getId()); if (old != null) { try { ContentCollection folder = ContentHostingService.getCollection(item.getId()); expandedCollections.put(item.getId(), folder); } catch (Throwable ignore){} } } } catch (TypeException e) { alerts.add(rb.getString("typeex") + " " + item.getId()); // addAlert(state," " + rb.getString("typeex") + " " + item.getId()); } catch (IdUnusedException 
e) { alerts.add(RESOURCE_NOT_EXIST_STRING); // addAlert(state,RESOURCE_NOT_EXIST_STRING); } catch (PermissionException e) { alerts.add(rb.getString("notpermis10") + " " + item.getId()); // addAlert(state, rb.getString("notpermis10") + " " + item.getId() + ". " ); } catch (InUseException e) { alerts.add(rb.getString("someone") + " " + item.getId()); // addAlert(state, rb.getString("someone") + " " + item.getId() + ". "); } catch (ServerOverloadException e) { alerts.add(rb.getString("failed")); } catch (OverQuotaException e) { alerts.add(rb.getString("changing1") + " " + item.getId() + " " + rb.getString("changing2")); // addAlert(state, rb.getString("changing1") + " " + item.getId() + " " + rb.getString("changing2")); } catch(RuntimeException e) { logger.warn("ResourcesAction.doSavechanges ***** Unknown Exception ***** " + e.getMessage()); logger.warn("ResourcesAction.doSavechanges ***** Unknown Exception ***** ", e); alerts.add(rb.getString("failed")); } } // if - else if(alerts.isEmpty()) { // modify properties sucessful String mode = (String) state.getAttribute(STATE_MODE); popFromStack(state); resetCurrentMode(state); } //if-else else { Iterator alertIt = alerts.iterator(); while(alertIt.hasNext()) { String alert = (String) alertIt.next(); addAlert(state, alert); } alerts.clear(); state.setAttribute(STATE_EDIT_ALERTS, alerts); // state.setAttribute(STATE_CREATE_MISSING_ITEM, missing); } } // doSavechanges /** * @param pedit * @param metadataGroups * @param metadata */ private static void saveMetadata(ResourcePropertiesEdit pedit, List metadataGroups, EditItem item) { if(metadataGroups != null && !metadataGroups.isEmpty()) { MetadataGroup group = null; Iterator it = metadataGroups.iterator(); while(it.hasNext()) { group = (MetadataGroup) it.next(); Iterator props = group.iterator(); while(props.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) props.next(); if(ResourcesMetadata.WIDGET_DATETIME.equals(prop.getWidget()) || 
ResourcesMetadata.WIDGET_DATE.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_TIME.equals(prop.getWidget()))
{
	// Date/time metadata is stored as the Time's string form; nulls are skipped.
	Time val = (Time)item.getMetadata().get(prop.getFullname());
	if(val != null)
	{
		pedit.addProperty(prop.getFullname(), val.toString());
	}
}
else
{
	// NOTE(review): unlike the Time branch, a null string value is passed straight
	// to addProperty here — confirm addProperty tolerates null.
	String val = (String) item.getMetadata().get(prop.getFullname());
	pedit.addProperty(prop.getFullname(), val);
}
}
}
}
}

/**
 * Record the user's chosen edit intent (e.g. revise vs. replace) in the current stack frame.
 * @param data
 */
protected static void doToggle_intent(RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	ParameterParser params = data.getParameters ();
	String intent = params.getString("intent");
	Map current_stack_frame = peekAtStack(state);
	current_stack_frame.put(STATE_STACK_EDIT_INTENT, intent);
}	// doToggle_intent

/**
 * Hide resources from other sites in the list view, preserving the current selections.
 * @param data
 */
public static void doHideOtherSites(RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	state.setAttribute(STATE_SHOW_OTHER_SITES, Boolean.FALSE.toString());
	//get the ParameterParser from RunData
	ParameterParser params = data.getParameters ();
	// save the current selections
	Set selectedSet = new TreeSet();
	String[] selectedItems = params.getStrings("selectedMembers");
	if(selectedItems != null)
	{
		selectedSet.addAll(Arrays.asList(selectedItems));
	}
	state.setAttribute(STATE_LIST_SELECTIONS, selectedSet);
}

/**
 * Show resources from other sites in the list view, preserving the current selections.
 * @param data
 */
public static void doShowOtherSites(RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	//get the ParameterParser from RunData
	ParameterParser params = data.getParameters ();
	// save the current selections
	Set selectedSet = new TreeSet();
	String[] selectedItems = params.getStrings("selectedMembers");
	if(selectedItems != null)
	{
		selectedSet.addAll(Arrays.asList(selectedItems));
	}
	state.setAttribute(STATE_LIST_SELECTIONS, selectedSet);
	state.setAttribute(STATE_SHOW_OTHER_SITES, Boolean.TRUE.toString());
}

/** *
@param data */
// Collapse the named metadata group in the edit form.  The group name may arrive
// URL-escaped, so both the escaped and raw forms are compared.
public static void doHide_metadata(RunData data)
{
	ParameterParser params = data.getParameters ();
	String name = params.getString("metadataGroup");
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
	if(metadataGroups != null && ! metadataGroups.isEmpty())
	{
		boolean found = false;
		MetadataGroup group = null;
		Iterator it = metadataGroups.iterator();
		while(!found && it.hasNext())
		{
			group = (MetadataGroup) it.next();
			found = (name.equals(Validator.escapeUrl(group.getName())) || name.equals(group.getName()));
		}
		if(found)
		{
			group.setShowing(false);
		}
	}
}	// doHide_metadata

/**
 * Expand the named metadata group in the edit form (mirror of doHide_metadata).
 * @param data
 */
public static void doShow_metadata(RunData data)
{
	ParameterParser params = data.getParameters ();
	String name = params.getString("metadataGroup");
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
	if(metadataGroups != null && ! metadataGroups.isEmpty())
	{
		boolean found = false;
		MetadataGroup group = null;
		Iterator it = metadataGroups.iterator();
		while(!found && it.hasNext())
		{
			group = (MetadataGroup) it.next();
			found = (name.equals(Validator.escapeUrl(group.getName())) || name.equals(group.getName()));
		}
		if(found)
		{
			group.setShowing(true);
		}
	}
}	// doShow_metadata

/**
* Sort based on the given property
*/
public static void doSort ( RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	//get the ParameterParser from RunData
	ParameterParser params = data.getParameters ();
	// save the current selections
	Set selectedSet = new TreeSet();
	String[] selectedItems = data.getParameters ().getStrings ("selectedMembers");
	if(selectedItems != null)
	{
		selectedSet.addAll(Arrays.asList(selectedItems));
	}
	state.setAttribute(STATE_LIST_SELECTIONS, selectedSet);
	// Map the display criteria name onto the resource property used for sorting.
	String criteria = params.getString ("criteria");
	if (criteria.equals ("title"))
	{
		criteria = ResourceProperties.PROP_DISPLAY_NAME;
	}
	else if (criteria.equals ("size"))
	{
		criteria = ResourceProperties.PROP_CONTENT_LENGTH;
	}
	else if (criteria.equals ("created by"))
	{
		criteria = ResourceProperties.PROP_CREATOR;
	}
	else if (criteria.equals ("last modified"))
	{
		criteria = ResourceProperties.PROP_MODIFIED_DATE;
	}
	// current sorting sequence
	String asc = NULL_STRING;
	if (!criteria.equals (state.getAttribute (STATE_SORT_BY)))
	{
		// New sort column: default to ascending.
		state.setAttribute (STATE_SORT_BY, criteria);
		asc = Boolean.TRUE.toString();
		state.setAttribute (STATE_SORT_ASC, asc);
	}
	else
	{
		// current sorting sequence
		asc = (String) state.getAttribute (STATE_SORT_ASC);
		//toggle between the ascending and descending sequence
		if (asc.equals (Boolean.TRUE.toString()))
		{
			asc = Boolean.FALSE.toString();
		}
		else
		{
			asc = Boolean.TRUE.toString();
		}
		state.setAttribute (STATE_SORT_ASC, asc);
	}
	if (state.getAttribute(STATE_MESSAGE) == null)
	{
		// sort successful
		// state.setAttribute (STATE_MODE, MODE_LIST);
	}	// if-else
}	// doSort

/** *
set the state name to be "deletecofirm" if any item has been selected for deleting
*/
public void doDeleteconfirm ( RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	Set deleteIdSet = new TreeSet();
	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}
	// cancel move if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
	{
		initMoveContext(state);
	}
	String[] deleteIds = data.getParameters ().getStrings ("selectedMembers");
	if (deleteIds == null)
	{
		// there is no resource selected, show the alert message to the user
		addAlert(state, rb.getString("choosefile3"));
	}
	else
	{
		deleteIdSet.addAll(Arrays.asList(deleteIds));
		// Partition selected items into deletable and non-deletable, tracking
		// non-empty folders separately so the confirm view can warn about them.
		List deleteItems = new Vector();
		List notDeleteItems = new Vector();
		List nonEmptyFolders = new Vector();
		List roots = (List) state.getAttribute(STATE_COLLECTION_ROOTS);
		Iterator rootIt = roots.iterator();
		while(rootIt.hasNext())
		{
			BrowseItem root = (BrowseItem) rootIt.next();
			List members = root.getMembers();
			Iterator memberIt = members.iterator();
			while(memberIt.hasNext())
			{
				BrowseItem member = (BrowseItem) memberIt.next();
				if(deleteIdSet.contains(member.getId()))
				{
					if(member.isFolder())
					{
						if(ContentHostingService.allowRemoveCollection(member.getId()))
						{
							deleteItems.add(member);
							if(! member.isEmpty())
							{
								nonEmptyFolders.add(member);
							}
						}
						else
						{
							notDeleteItems.add(member);
						}
					}
					else if(ContentHostingService.allowRemoveResource(member.getId()))
					{
						deleteItems.add(member);
					}
					else
					{
						notDeleteItems.add(member);
					}
				}
			}
		}
		if(! notDeleteItems.isEmpty())
		{
			// Build an English list ("a, b and c") of items the user may not delete.
			String notDeleteNames = "";
			boolean first_item = true;
			Iterator notIt = notDeleteItems.iterator();
			while(notIt.hasNext())
			{
				BrowseItem item = (BrowseItem) notIt.next();
				if(first_item)
				{
					notDeleteNames = item.getName();
					first_item = false;
				}
				else if(notIt.hasNext())
				{
					notDeleteNames += ", " + item.getName();
				}
				else
				{
					notDeleteNames += " and " + item.getName();
				}
			}
			addAlert(state, rb.getString("notpermis14") + notDeleteNames);
		}
		/*
		//htripath-SAK-1712 - Set new collectionId as resources are not deleted under 'more' requirement.
		if(state.getAttribute(STATE_MESSAGE) == null)
		{
			String newCollectionId=ContentHostingService.getContainingCollectionId(currentId);
			state.setAttribute(STATE_COLLECTION_ID, newCollectionId);
		}
		*/
		// delete item
		state.setAttribute (STATE_DELETE_ITEMS, deleteItems);
		state.setAttribute (STATE_DELETE_ITEMS_NOT_EMPTY, nonEmptyFolders);
	}	// if-else
	if (state.getAttribute(STATE_MESSAGE) == null)
	{
		state.setAttribute (STATE_MODE, MODE_DELETE_CONFIRM);
		state.setAttribute(STATE_LIST_SELECTIONS, deleteIdSet);
	}
}	// doDeleteconfirm

/**
* set the state name to be "cut" if any item has been selected for cutting
*/
public void doCut ( RunData data)
{
	// get the state object
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	String[] cutItems = data.getParameters ().getStrings ("selectedMembers");
	if (cutItems == null)
	{
		// there is no resource selected, show the alert message to the user
		addAlert(state, rb.getString("choosefile5"));
		state.setAttribute (STATE_MODE, MODE_LIST);
	}
	else
	{
		Vector cutIdsVector = new Vector ();
		String nonCutIds = NULL_STRING;
		String cutId = NULL_STRING;
		for (int i = 0; i < cutItems.length; i++)
		{
			cutId = cutItems[i];
			try
			{
				ResourceProperties properties = ContentHostingService.getProperties (cutId);
				// Collections cannot be cut; alert once per request.
				if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString()))
				{
					String alert = (String)
state.getAttribute(STATE_MESSAGE);
// Only add the "invalid operation on collection" alert if it isn't already present.
if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1)))
{
	addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING);
}
}
else
{
	if (ContentHostingService.allowRemoveResource (cutId))
	{
		cutIdsVector.add (cutId);
	}
	else
	{
		// Remember display names of items the user may not cut.
		nonCutIds = nonCutIds + " " + properties.getProperty (ResourceProperties.PROP_DISPLAY_NAME) + "; ";
	}
}
}
catch (PermissionException e)
{
	addAlert(state, rb.getString("notpermis15"));
}
catch (IdUnusedException e)
{
	addAlert(state,RESOURCE_NOT_EXIST_STRING);
}	// try-catch
}
if (state.getAttribute(STATE_MESSAGE) == null)
{
	if (nonCutIds.length ()>0)
	{
		addAlert(state, rb.getString("notpermis16") +" " + nonCutIds);
	}
	if (cutIdsVector.size ()>0)
	{
		state.setAttribute (STATE_CUT_FLAG, Boolean.TRUE.toString());
		if (((String) state.getAttribute (STATE_SELECT_ALL_FLAG)).equals (Boolean.TRUE.toString()))
		{
			state.setAttribute (STATE_SELECT_ALL_FLAG, Boolean.FALSE.toString());
		}
		// An item being cut can no longer be in the copied set: remove overlaps and
		// clear the copy flag if nothing copied remains.
		Vector copiedIds = (Vector) state.getAttribute (STATE_COPIED_IDS);
		for (int i = 0; i < cutIdsVector.size (); i++)
		{
			String currentId = (String) cutIdsVector.elementAt (i);
			if ( copiedIds.contains (currentId))
			{
				copiedIds.remove (currentId);
			}
		}
		if (copiedIds.size ()==0)
		{
			state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString());
		}
		state.setAttribute (STATE_COPIED_IDS, copiedIds);
		state.setAttribute (STATE_CUT_IDS, cutIdsVector);
	}
}
}	// if-else
}	// doCut

/**
* set the state name to be "copy" if any item has been selected for copying
*/
public void doCopy ( RunData data )
{
	// get the state object
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}
	// cancel move if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
	{
		initMoveContext(state);
	}
	Vector copyItemsVector = new Vector ();
	String[] copyItems = data.getParameters ().getStrings ("selectedMembers");
	if (copyItems == null)
	{
		// there is no resource selected, show the alert message to the user
		addAlert(state, rb.getString("choosefile6"));
		state.setAttribute (STATE_MODE, MODE_LIST);
	}
	else
	{
		String copyId = NULL_STRING;
		for (int i = 0; i < copyItems.length; i++)
		{
			copyId = copyItems[i];
			try
			{
				// NOTE(review): 'properties' is only used by the commented-out
				// collection check below; this call now serves as an existence/permission probe.
				ResourceProperties properties = ContentHostingService.getProperties (copyId);
				/*
				if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString()))
				{
					String alert = (String) state.getAttribute(STATE_MESSAGE);
					if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1)))
					{
						addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING);
					}
				}
				*/
			}
			catch (PermissionException e)
			{
				addAlert(state, rb.getString("notpermis15"));
			}
			catch (IdUnusedException e)
			{
				addAlert(state,RESOURCE_NOT_EXIST_STRING);
			}	// try-catch
		}
		if (state.getAttribute(STATE_MESSAGE) == null)
		{
			state.setAttribute (STATE_COPY_FLAG, Boolean.TRUE.toString());
			copyItemsVector.addAll(Arrays.asList(copyItems));
			ContentHostingService.eliminateDuplicates(copyItemsVector);
			state.setAttribute (STATE_COPIED_IDS, copyItemsVector);
		}	// if-else
	}	// if-else
}	// doCopy

/**
* Handle user's selection of items to be moved.
*/
public void doMove ( RunData data )
{
	// get the state object
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	List moveItemsVector = new Vector();
	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}
	// cancel move if there is one in progress
	if(!
Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
	{
		initMoveContext(state);
	}

	// clear any remembered list selections
	state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());

	String[] moveItems = data.getParameters ().getStrings ("selectedMembers");
	if (moveItems == null)
	{
		// there is no resource selected, show the alert message to the user
		addAlert(state, rb.getString("choosefile6"));
		state.setAttribute (STATE_MODE, MODE_LIST);
	}
	else
	{
		String moveId = NULL_STRING;
		// validate each selected id; getProperties raises alerts for unreadable/missing items
		for (int i = 0; i < moveItems.length; i++)
		{
			moveId = moveItems[i];
			try
			{
				ResourceProperties properties = ContentHostingService.getProperties (moveId);
				/*
				if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString()))
				{
					String alert = (String) state.getAttribute(STATE_MESSAGE);
					if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1)))
					{
						addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING);
					}
				}
				*/
			}
			catch (PermissionException e)
			{
				addAlert(state, rb.getString("notpermis15"));
			}
			catch (IdUnusedException e)
			{
				addAlert(state,RESOURCE_NOT_EXIST_STRING);
			}	// try-catch
		}

		// no alerts: record the de-duplicated move selection in session state
		if (state.getAttribute(STATE_MESSAGE) == null)
		{
			state.setAttribute (STATE_MOVE_FLAG, Boolean.TRUE.toString());

			moveItemsVector.addAll(Arrays.asList(moveItems));
			ContentHostingService.eliminateDuplicates(moveItemsVector);
			state.setAttribute (STATE_MOVED_IDS, moveItemsVector);

		}	// if-else

	}	// if-else

}	// doMove

/**
* If copy-flag is set to false, erase the copied-id's list and set copied flags to false
* in all the browse items.  If copied-id's list is empty, set copy-flag to false and set
* copied flags to false in all the browse items.  If copy-flag is set to true and copied-id's
* list is not empty, update the copied flags of all browse items so copied flags for the
* copied items are set to true and all others are set to false.
*/
protected void setCopyFlags(SessionState state)
{
	String copyFlag = (String) state.getAttribute(STATE_COPY_FLAG);
	List copyItemsVector = (List) state.getAttribute(STATE_COPIED_IDS);

	// normalize a missing flag to "false"
	if(copyFlag == null)
	{
		copyFlag = Boolean.FALSE.toString();
		state.setAttribute(STATE_COPY_FLAG, copyFlag);
	}
	if(copyFlag.equals(Boolean.TRUE.toString()))
	{
		if(copyItemsVector == null)
		{
			copyItemsVector = new Vector();
			state.setAttribute(STATE_COPIED_IDS, copyItemsVector);
		}
		if(copyItemsVector.isEmpty())
		{
			// nothing is actually marked for copy: turn the flag back off
			state.setAttribute(STATE_COPY_FLAG, Boolean.FALSE.toString());
		}
	}
	else
	{
		copyItemsVector = new Vector();
		state.setAttribute(STATE_COPIED_IDS, copyItemsVector);
	}

	// push the per-item "copied" flag down into every browse root and its members
	List roots = (List) state.getAttribute(STATE_COLLECTION_ROOTS);
	Iterator rootIt = roots.iterator();
	while(rootIt.hasNext())
	{
		BrowseItem root = (BrowseItem) rootIt.next();

		boolean root_copied = copyItemsVector.contains(root.getId());
		root.setCopied(root_copied);

		List members = root.getMembers();
		Iterator memberIt = members.iterator();
		while(memberIt.hasNext())
		{
			BrowseItem member = (BrowseItem) memberIt.next();

			boolean member_copied = copyItemsVector.contains(member.getId());
			member.setCopied(member_copied);
		}
	}
	// check -- jim
	state.setAttribute(STATE_COLLECTION_ROOTS, roots);

}	// setCopyFlags

/**
* Expand all the collection resources.
*/
static public void doExpandall ( RunData data)
{
	// get the state object
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	//get the ParameterParser from RunData
	ParameterParser params = data.getParameters ();

	// save the current selections
	Set selectedSet = new TreeSet();
	String[] selectedItems = params.getStrings("selectedMembers");
	if(selectedItems != null)
	{
		selectedSet.addAll(Arrays.asList(selectedItems));
	}
	state.setAttribute(STATE_LIST_SELECTIONS, selectedSet);

	// expansion actually occurs in getBrowseItems method.
state.setAttribute(STATE_EXPAND_ALL_FLAG, Boolean.TRUE.toString());
	state.setAttribute(STATE_NEED_TO_EXPAND_ALL, Boolean.TRUE.toString());

}	// doExpandall

/**
* Unexpand all the collection resources
*/
public static void doUnexpandall ( RunData data)
{
	// get the state object
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	//get the ParameterParser from RunData
	ParameterParser params = data.getParameters ();

	// save the current selections
	Set selectedSet = new TreeSet();
	String[] selectedItems = params.getStrings ("selectedMembers");
	if(selectedItems != null)
	{
		selectedSet.addAll(Arrays.asList(selectedItems));
	}
	state.setAttribute(STATE_LIST_SELECTIONS, selectedSet);

	// collapse everything and clear the expand-all flag
	state.setAttribute(STATE_EXPANDED_COLLECTIONS, new HashMap());
	state.setAttribute(STATE_EXPAND_ALL_FLAG, Boolean.FALSE.toString());

}	// doUnexpandall

/**
* Populate the state object, if needed - override to do something!
*/
protected void initState(SessionState state, VelocityPortlet portlet, JetspeedRunData data)
{
	super.initState(state, portlet, data);

	// first-time setup for this placement: reset any copy/move context
	if(state.getAttribute(STATE_INITIALIZED) == null)
	{
		initCopyContext(state);
		initMoveContext(state);
	}

	initStateAttributes(state, portlet);

}	// initState

/**
* Remove the state variables used internally, on the way out.
*/
static private void cleanupState(SessionState state)
{
	state.removeAttribute(STATE_FROM_TEXT);
	state.removeAttribute(STATE_HAS_ATTACHMENT_BEFORE);
	state.removeAttribute(STATE_ATTACH_SHOW_DROPBOXES);
	state.removeAttribute(STATE_ATTACH_COLLECTION_ID);
	state.removeAttribute(COPYRIGHT_FAIRUSE_URL);
	state.removeAttribute(COPYRIGHT_NEW_COPYRIGHT);
	state.removeAttribute(COPYRIGHT_SELF_COPYRIGHT);
	state.removeAttribute(COPYRIGHT_TYPES);
	state.removeAttribute(DEFAULT_COPYRIGHT_ALERT);
	state.removeAttribute(DEFAULT_COPYRIGHT);
	state.removeAttribute(STATE_EXPANDED_COLLECTIONS);
	state.removeAttribute(STATE_FILE_UPLOAD_MAX_SIZE);
	state.removeAttribute(NEW_COPYRIGHT_INPUT);
	state.removeAttribute(STATE_COLLECTION_ID);
	state.removeAttribute(STATE_COLLECTION_PATH);
	state.removeAttribute(STATE_CONTENT_SERVICE);
	state.removeAttribute(STATE_CONTENT_TYPE_IMAGE_SERVICE);
	//state.removeAttribute(STATE_STACK_EDIT_INTENT);
	state.removeAttribute(STATE_EXPAND_ALL_FLAG);
	state.removeAttribute(STATE_HELPER_NEW_ITEMS);
	state.removeAttribute(STATE_HELPER_CHANGED);
	state.removeAttribute(STATE_HOME_COLLECTION_DISPLAY_NAME);
	state.removeAttribute(STATE_HOME_COLLECTION_ID);
	state.removeAttribute(STATE_LIST_SELECTIONS);
	state.removeAttribute(STATE_MY_COPYRIGHT);
	state.removeAttribute(STATE_NAVIGATION_ROOT);
	state.removeAttribute(STATE_PASTE_ALLOWED_FLAG);
	state.removeAttribute(STATE_SELECT_ALL_FLAG);
	state.removeAttribute(STATE_SHOW_ALL_SITES);
	state.removeAttribute(STATE_SITE_TITLE);
	state.removeAttribute(STATE_SORT_ASC);
	state.removeAttribute(STATE_SORT_BY);
	state.removeAttribute(STATE_STACK_STRUCTOBJ_TYPE);
	state.removeAttribute(STATE_STACK_STRUCTOBJ_TYPE_READONLY);
	state.removeAttribute(STATE_INITIALIZED);
	state.removeAttribute(VelocityPortletPaneledAction.STATE_HELPER);

}	// cleanupState

/**
* Initialize the session state attributes used by this action, the first time it runs
* for this placement (guarded by STATE_INITIALIZED).
* @param state the session state to populate
* @param portlet the portlet whose configuration supplies init parameters (page size, mode, home)
*/
public static void initStateAttributes(SessionState state, VelocityPortlet portlet)
{
	if (state.getAttribute (STATE_INITIALIZED) != null) return;

	if (state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE) == null)
	{
state.setAttribute(STATE_FILE_UPLOAD_MAX_SIZE, ServerConfigurationService.getString("content.upload.max", "1"));
	}

	PortletConfig config = portlet.getPortletConfig();

	// page size from the portlet init parameter, falling back to the default on any problem
	try
	{
		Integer size = new Integer(config.getInitParameter(PARAM_PAGESIZE));
		if(size == null || size.intValue() < 1)
		{
			size = new Integer(DEFAULT_PAGE_SIZE);
		}
		state.setAttribute(STATE_PAGESIZE, size);
	}
	catch(Exception any)
	{
		state.setAttribute(STATE_PAGESIZE, new Integer(DEFAULT_PAGE_SIZE));
	}

	// state.setAttribute(STATE_TOP_PAGE_MESSAGE, "");

	state.setAttribute (STATE_CONTENT_SERVICE, ContentHostingService.getInstance());
	state.setAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE, ContentTypeImageService.getInstance());

	// default copyright string for the current user and the current (local) year
	TimeBreakdown timeBreakdown = (TimeService.newTime()).breakdownLocal ();
	String mycopyright = COPYRIGHT_SYMBOL + " " + timeBreakdown.getYear () +", " + UserDirectoryService.getCurrentUser().getDisplayName () + ". All Rights Reserved. ";
	state.setAttribute (STATE_MY_COPYRIGHT, mycopyright);

	if(state.getAttribute(STATE_MODE) == null)
	{
		state.setAttribute (STATE_MODE, MODE_LIST);
		state.setAttribute (STATE_FROM, NULL_STRING);
	}

	// default sort: by display name, ascending
	state.setAttribute (STATE_SORT_BY, ResourceProperties.PROP_DISPLAY_NAME);
	state.setAttribute (STATE_SORT_ASC, Boolean.TRUE.toString());

	state.setAttribute (STATE_SELECT_ALL_FLAG, Boolean.FALSE.toString());
	state.setAttribute (STATE_EXPAND_ALL_FLAG, Boolean.FALSE.toString());

	state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());

	state.setAttribute (STATE_COLLECTION_PATH, new Vector ());

	// %%STATE_MODE_RESOURCES%%
	// In helper mode, calling tool should set attribute STATE_MODE_RESOURCES
	String resources_mode = (String) state.getAttribute(STATE_MODE_RESOURCES);
	if(resources_mode == null)
	{
		// get resources mode from tool registry
		resources_mode = portlet.getPortletConfig().getInitParameter("resources_mode");
		if(resources_mode != null)
		{
			state.setAttribute(STATE_MODE_RESOURCES, resources_mode);
		}
	}

	boolean show_other_sites = false;
if(RESOURCES_MODE_HELPER.equals(resources_mode))
	{
		show_other_sites = ServerConfigurationService.getBoolean("resources.show_all_collections.helper", SHOW_ALL_SITES_IN_FILE_PICKER);
	}
	else if(RESOURCES_MODE_DROPBOX.equals(resources_mode))
	{
		show_other_sites = ServerConfigurationService.getBoolean("resources.show_all_collections.dropbox", SHOW_ALL_SITES_IN_DROPBOX);
	}
	else
	{
		show_other_sites = ServerConfigurationService.getBoolean("resources.show_all_collections.tool", SHOW_ALL_SITES_IN_RESOURCES);
	}

	/** This attribute indicates whether "Other Sites" twiggle should show */
	state.setAttribute(STATE_SHOW_ALL_SITES, Boolean.toString(show_other_sites));
	/** This attribute indicates whether "Other Sites" twiggle should be open */
	state.setAttribute(STATE_SHOW_OTHER_SITES, Boolean.FALSE.toString());

	// set the home collection to the parameter, if present, or the default if not
	String home = StringUtil.trimToNull(portlet.getPortletConfig().getInitParameter("home"));
	state.setAttribute (STATE_HOME_COLLECTION_DISPLAY_NAME, home);
	if ((home == null) || (home.length() == 0))
	{
		// no home set, see if we are in dropbox mode
		if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase(resources_mode))
		{
			home = ContentHostingService.getDropboxCollection();

			// if it came back null, we will pretend not to be in dropbox mode
			if (home != null)
			{
				state.setAttribute(STATE_HOME_COLLECTION_DISPLAY_NAME, ContentHostingService.getDropboxDisplayName());

				// create/update the collection of folders in the dropbox
				ContentHostingService.createDropboxCollection();
			}
		}

		// if we still don't have a home,
		if ((home == null) || (home.length() == 0))
		{
			home = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());

			// TODO: what's the 'name' of the context? -ggolden
			// we'll need this to create the home collection if needed
			state.setAttribute (STATE_HOME_COLLECTION_DISPLAY_NAME, ToolManager.getCurrentPlacement().getContext()
					/*SiteService.getSiteDisplay(ToolManager.getCurrentPlacement().getContext()) */);
		}
	}
	state.setAttribute (STATE_HOME_COLLECTION_ID, home);
	state.setAttribute (STATE_COLLECTION_ID, home);
	state.setAttribute (STATE_NAVIGATION_ROOT, home);

	// enable the "form items" option only when structured-artifact homes are registered
	HomeFactory factory = (HomeFactory) ComponentManager.get("homeFactory");
	if(factory != null)
	{
		Map homes = factory.getHomes(StructuredArtifactHomeInterface.class);
		if(! homes.isEmpty())
		{
			state.setAttribute(STATE_SHOW_FORM_ITEMS, Boolean.TRUE.toString());
		}
	}

	// state.setAttribute (STATE_COLLECTION_ID, state.getAttribute (STATE_HOME_COLLECTION_ID));

	if (state.getAttribute(STATE_SITE_TITLE) == null)
	{
		String title = "";
		try
		{
			title = ((Site) SiteService.getSite(ToolManager.getCurrentPlacement().getContext())).getTitle();
		}
		catch (IdUnusedException e)
		{
			// ignore
		}
		state.setAttribute(STATE_SITE_TITLE, title);
	}

	HashMap expandedCollections = new HashMap();
	//expandedCollections.add (state.getAttribute (STATE_HOME_COLLECTION_ID));
	state.setAttribute(STATE_EXPANDED_COLLECTIONS, expandedCollections);

	if(state.getAttribute(STATE_USING_CREATIVE_COMMONS) == null)
	{
		String usingCreativeCommons = ServerConfigurationService.getString("copyright.use_creative_commons");
		if( usingCreativeCommons != null && usingCreativeCommons.equalsIgnoreCase(Boolean.TRUE.toString()))
		{
			state.setAttribute(STATE_USING_CREATIVE_COMMONS, Boolean.TRUE.toString());
		}
		else
		{
			state.setAttribute(STATE_USING_CREATIVE_COMMONS, Boolean.FALSE.toString());
		}
	}

	// copyright configuration, read once from ServerConfigurationService
	if (state.getAttribute(COPYRIGHT_TYPES) == null)
	{
		if (ServerConfigurationService.getStrings("copyrighttype") != null)
		{
			state.setAttribute(COPYRIGHT_TYPES, new ArrayList(Arrays.asList(ServerConfigurationService.getStrings("copyrighttype"))));
		}
	}

	if (state.getAttribute(DEFAULT_COPYRIGHT) == null)
	{
		if
(ServerConfigurationService.getString("default.copyright") != null)
		{
			state.setAttribute(DEFAULT_COPYRIGHT, ServerConfigurationService.getString("default.copyright"));
		}
	}

	if (state.getAttribute(DEFAULT_COPYRIGHT_ALERT) == null)
	{
		if (ServerConfigurationService.getString("default.copyright.alert") != null)
		{
			state.setAttribute(DEFAULT_COPYRIGHT_ALERT, ServerConfigurationService.getString("default.copyright.alert"));
		}
	}

	if (state.getAttribute(NEW_COPYRIGHT_INPUT) == null)
	{
		if (ServerConfigurationService.getString("newcopyrightinput") != null)
		{
			state.setAttribute(NEW_COPYRIGHT_INPUT, ServerConfigurationService.getString("newcopyrightinput"));
		}
	}

	if (state.getAttribute(COPYRIGHT_FAIRUSE_URL) == null)
	{
		if (ServerConfigurationService.getString("fairuse.url") != null)
		{
			state.setAttribute(COPYRIGHT_FAIRUSE_URL, ServerConfigurationService.getString("fairuse.url"));
		}
	}

	if (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT) == null)
	{
		if (ServerConfigurationService.getString("copyrighttype.own") != null)
		{
			state.setAttribute(COPYRIGHT_SELF_COPYRIGHT, ServerConfigurationService.getString("copyrighttype.own"));
		}
	}

	if (state.getAttribute(COPYRIGHT_NEW_COPYRIGHT) == null)
	{
		if (ServerConfigurationService.getString("copyrighttype.new") != null)
		{
			state.setAttribute(COPYRIGHT_NEW_COPYRIGHT, ServerConfigurationService.getString("copyrighttype.new"));
		}
	}

	// get resources mode from tool registry
	String optional_properties = portlet.getPortletConfig().getInitParameter("optional_properties");
	if(optional_properties != null && "true".equalsIgnoreCase(optional_properties))
	{
		initMetadataContext(state);
	}

	// public display is blocked for site types listed under "prevent.public.resources"
	state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, Boolean.FALSE);
	String[] siteTypes = ServerConfigurationService.getStrings("prevent.public.resources");
	if(siteTypes != null)
	{
		Site site;
		try
		{
			site = SiteService.getSite(ToolManager.getCurrentPlacement().getContext());
			for(int i = 0; i < siteTypes.length; i++)
			{
				// NOTE(review): trimToNull can return null here, which would NPE;
				// the NullPointerException catch below appears to rely on that — confirm intended
				if ((StringUtil.trimToNull(siteTypes[i])).equals(site.getType()))
				{
state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, Boolean.TRUE);
				}
			}
		}
		catch (IdUnusedException e)
		{
			// allow public display
		}
		catch(NullPointerException e)
		{
			// allow public display
		}
	}

	state.setAttribute (STATE_INITIALIZED, Boolean.TRUE.toString());
}

/**
* Setup our observer to be watching for change events for the collection
* NOTE: courier-based observation is currently disabled; the body is intentionally commented out.
*/
private void updateObservation(SessionState state, String peid)
{
//		ContentObservingCourier observer = (ContentObservingCourier) state.getAttribute(STATE_OBSERVER);
//
//		// the delivery location for this tool
//		String deliveryId = clientWindowId(state, peid);
//		observer.setDeliveryId(deliveryId);
}

/**
* Add additional resource pattern to the observer
* NOTE: courier-based observation is currently disabled; the body is intentionally commented out.
*@param pattern The pattern value to be added
*@param state The state object
*/
private static void addObservingPattern(String pattern, SessionState state)
{
//		// get the observer and add the pattern
//		ContentObservingCourier o = (ContentObservingCourier) state.getAttribute(STATE_OBSERVER);
//		o.addResourcePattern(ContentHostingService.getReference(pattern));
//
//		// add it back to state
//		state.setAttribute(STATE_OBSERVER, o);

}	// addObservingPattern

/**
* Remove a resource pattern from the observer
* NOTE: courier-based observation is currently disabled; the body is intentionally commented out.
*@param pattern The pattern value to be removed
*@param state The state object
*/
private static void removeObservingPattern(String pattern, SessionState state)
{
//		// get the observer and remove the pattern
//		ContentObservingCourier o = (ContentObservingCourier) state.getAttribute(STATE_OBSERVER);
//		o.removeResourcePattern(ContentHostingService.getReference(pattern));
//
//		// add it back to state
//		state.setAttribute(STATE_OBSERVER, o);

}	// removeObservingPattern

/**
* initialize the copy context
*/
private static void initCopyContext (SessionState state)
{
	state.setAttribute (STATE_COPIED_IDS, new Vector ());

	state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString());

}	// initCopyContent

/**
* initialize the copy context
*/
private static void initMoveContext (SessionState
state)
{
	state.setAttribute (STATE_MOVED_IDS, new Vector ());

	state.setAttribute (STATE_MOVE_FLAG, Boolean.FALSE.toString());

}	// initCopyContent

/**
* initialize the cut context
*/
private void initCutContext (SessionState state)
{
	state.setAttribute (STATE_CUT_IDS, new Vector ());

	state.setAttribute (STATE_CUT_FLAG, Boolean.FALSE.toString());

}	// initCutContent

/**
* find out whether there is a duplicate item in testVector
* Entries are compared from index 1 through testSize (index 0 is skipped —
* apparently unused by convention here; see emptyVector).
* @param testVector The Vector to be tested on
* @param testSize The integer of the test range
* @return The index value of the duplicate item, or 0 when there is none
*/
private int repeatedName (Vector testVector, int testSize)
{
	for (int i=1; i <= testSize; i++)
	{
		String currentName = (String) testVector.get (i);
		for (int j=i+1; j <= testSize; j++)
		{
			String comparedTitle = (String) testVector.get (j);
			// empty names are never treated as duplicates
			if (comparedTitle.length()>0 && currentName.length()>0 && comparedTitle.equals (currentName))
			{
				return j;
			}
		}
	}
	return 0;

}	// repeatedName

/**
* Does the id already exist in the current resource?
* @param testVector The Vector to be tested on
* @param testSize The integer of the test range
* @param collectionId the id of the collection whose members are checked
* @param isCollection Looking for collection or not
* @return The index value of the existing id, or 0 when none of the names is already present
*/
private int foundInResource (Vector testVector, int testSize, String collectionId, boolean isCollection)
{
	try
	{
		ContentCollection c = ContentHostingService.getCollection(collectionId);
		Iterator membersIterator = c.getMemberResources().iterator();
		while (membersIterator.hasNext())
		{
			ResourceProperties p = ((Entity) membersIterator.next()).getProperties();
			String displayName = p.getProperty(ResourceProperties.PROP_DISPLAY_NAME);
			if (displayName != null)
			{
				String collectionOrResource = p.getProperty(ResourceProperties.PROP_IS_COLLECTION);
				// compare each candidate name (indexes 1..testSize) against this member's
				// display name, requiring the member kind (collection vs. resource) to match
				for (int i=1; i <= testSize; i++)
				{
					String testName = (String) testVector.get(i);
					if ((testName != null) && (displayName.equals (testName))
							&& ((isCollection && collectionOrResource.equals (Boolean.TRUE.toString()))
									|| (!isCollection && collectionOrResource.equals(Boolean.FALSE.toString()))))
					{
						return i;
					}
				}	// for
			}
		}
	}
	// failed lookups fall through to "not found"
	catch (IdUnusedException e){}
	catch (TypeException e){}
	catch (PermissionException e){}

	return 0;

}	// foundInResource

/**
* empty String Vector object with the size specified
* @param size The Vector object size -1
* @return The Vector object consisting of empty Strings
*/
private static Vector emptyVector (int size)
{
	Vector v = new Vector ();
	for (int i=0; i <= size; i++)
	{
		v.add (i, "");
	}

	return v;

}	// emptyVector

/**
* Setup for customization
**/
public String buildOptionsPanelContext( VelocityPortlet portlet, Context context, RunData data, SessionState state)
{
	context.put("tlang",rb);
	String home = (String) state.getAttribute(STATE_HOME_COLLECTION_ID);
	Reference ref = EntityManager.newReference(ContentHostingService.getReference(home));
	String siteId = ref.getContext();

	context.put("form-submit", BUTTON + "doConfigure_update");
	context.put("form-cancel", BUTTON + "doCancel_options");
	context.put("description", "Setting options for Resources in worksite " + SiteService.getSiteDisplay(siteId));

	// pick the "-customize" template based on the standard template name
	String template = (String)getContext(data).get("template");
	return template + "-customize";

}	// buildOptionsPanelContext

/**
* Handle the configure context's update button
*/
public void doConfigure_update(RunData data, Context context)
{
	// access the portlet element id to find our state
	String peid = ((JetspeedRunData)data).getJs_peid();
	SessionState state = ((JetspeedRunData)data).getPortletSessionState(peid);

	// we are done with customization... back to the main (browse) mode
	state.setAttribute(STATE_MODE, MODE_LIST);

	// commit the change
	// saveOptions();
	cancelOptions();

}	// doConfigure_update

/**
* doCancel_options called for form input tags type="submit" named="eventSubmit_doCancel"
* cancel the options process
*/
public void doCancel_options(RunData data, Context context)
{
	// access the portlet element id to find our state
	String peid = ((JetspeedRunData)data).getJs_peid();
	SessionState state = ((JetspeedRunData)data).getPortletSessionState(peid);

	// cancel the options
	cancelOptions();

	// we are done with customization...
// ... back to the main (MODE_LIST) mode
	state.setAttribute(STATE_MODE, MODE_LIST);

}	// doCancel_options

/**
* Add the collection id into the expanded collection list
* @throws PermissionException
* @throws TypeException
* @throws IdUnusedException
*/
public static void doExpand_collection(RunData data) throws IdUnusedException, TypeException, PermissionException
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);

	//get the ParameterParser from RunData
	ParameterParser params = data.getParameters ();

	// save the current selections
	Set selectedSet = new TreeSet();
	String[] selectedItems = params.getStrings ("selectedMembers");
	if(selectedItems != null)
	{
		selectedSet.addAll(Arrays.asList(selectedItems));
	}
	state.setAttribute(STATE_LIST_SELECTIONS, selectedSet);

	// map the collection id to its freshly fetched ContentCollection
	String id = params.getString("collectionId");
	currentMap.put (id,ContentHostingService.getCollection (id));
	state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);

	// add this folder id into the set to be event-observed
	addObservingPattern(id, state);

}	// doExpand_collection

/**
* Remove the collection id from the expanded collection list
*/
static public void doCollapse_collection(RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);

	//get the ParameterParser from RunData
	ParameterParser params = data.getParameters ();
	String collectionId = params.getString("collectionId");

	// save the current selections
	Set selectedSet = new TreeSet();
	String[] selectedItems = data.getParameters ().getStrings ("selectedMembers");
	if(selectedItems != null)
	{
		selectedSet.addAll(Arrays.asList(selectedItems));
	}
	state.setAttribute(STATE_LIST_SELECTIONS, selectedSet);

	HashMap newSet = new HashMap();
	Iterator l = currentMap.keySet().iterator ();
while (l.hasNext ())
	{
		// remove the collection id and all of the subcollections
//			Resource collection = (Resource) l.next();
//			String id = (String) collection.getId();
		String id = (String) l.next();

		// keep only entries whose id does not contain the collapsed collection's id
		if (id.indexOf (collectionId)==-1)
		{
//				newSet.put(id,collection);
			newSet.put(id,currentMap.get(id));
		}
	}

	state.setAttribute(STATE_EXPANDED_COLLECTIONS, newSet);

	// remove this folder id into the set to be event-observed
	removeObservingPattern(collectionId, state);

}	// doCollapse_collection

/**
* Build the breadcrumb path of PathItem objects from the navigation root down to the
* current collection (or to the collection being created/edited, when one is on the stack).
* @param state the session state holding the current/home/root collection ids
* @return a List of PathItem objects, root first
*/
public static List getCollectionPath(SessionState state)
{
	org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE);
	// make sure the channedId is set
	String currentCollectionId = (String) state.getAttribute (STATE_COLLECTION_ID);

	// when a create/edit operation is on the stack, show that collection's path instead
	if(! isStackEmpty(state))
	{
		Map current_stack_frame = peekAtStack(state);
		String createCollectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID);
		if(createCollectionId == null)
		{
			createCollectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID);
		}
		if(createCollectionId != null)
		{
			currentCollectionId = createCollectionId;
		}
		else
		{
			String editCollectionId = (String) current_stack_frame.get(STATE_EDIT_COLLECTION_ID);
			if(editCollectionId == null)
			{
				editCollectionId = (String) state.getAttribute(STATE_EDIT_COLLECTION_ID);
			}
			if(editCollectionId != null)
			{
				currentCollectionId = editCollectionId;
			}
		}
	}

	String homeCollectionId = (String) state.getAttribute(STATE_HOME_COLLECTION_ID);
	String navRoot = (String) state.getAttribute(STATE_NAVIGATION_ROOT);

	LinkedList collectionPath = new LinkedList();

	String previousCollectionId = "";
	Vector pathitems = new Vector();
	// walk up the containment chain from the current collection toward the navigation root
	while ((currentCollectionId != null) && (!currentCollectionId.equals(navRoot)) && (!currentCollectionId.equals(previousCollectionId)) &&
(!contentService.isRootCollection(previousCollectionId)))
	{
		pathitems.add(currentCollectionId);
		previousCollectionId = currentCollectionId;
		currentCollectionId = contentService.getContainingCollectionId(currentCollectionId);
	}

	// the navigation root always ends the upward walk; add home as well when it differs
	pathitems.add(navRoot);
	if(!navRoot.equals(homeCollectionId))
	{
		pathitems.add(homeCollectionId);
	}

	// turn each id into a PathItem, front-inserting so the root ends up first in the list
	Iterator items = pathitems.iterator();
	while(items.hasNext())
	{
		String id = (String) items.next();
		try
		{
			ResourceProperties props = contentService.getProperties(id);
			String name = props.getPropertyFormatted(ResourceProperties.PROP_DISPLAY_NAME);
			PathItem item = new PathItem(id, name);

			boolean canRead = contentService.allowGetCollection(id) || contentService.allowGetResource(id);
			item.setCanRead(canRead);

			String url = contentService.getUrl(id);
			item.setUrl(url);

			// the first item processed (the current collection) is the "last" crumb
			item.setLast(collectionPath.isEmpty());

			if(id.equals(homeCollectionId))
			{
				item.setRoot(homeCollectionId);
			}
			else
			{
				item.setRoot(navRoot);
			}

			try
			{
				boolean isFolder = props.getBooleanProperty(ResourceProperties.PROP_IS_COLLECTION);
				item.setIsFolder(isFolder);
			}
			catch (EntityPropertyNotDefinedException e1)
			{
			}
			catch (EntityPropertyTypeException e1)
			{
			}

			collectionPath.addFirst(item);
		}
		catch (PermissionException e)
		{
		}
		catch (IdUnusedException e)
		{
		}
	}
	return collectionPath;
}

/**
* Get the items in this folder that should be seen.
* @param collectionId - String version of * @param expandedCollections - Hash of collection resources * @param sortedBy - pass through to ContentHostingComparator * @param sortedAsc - pass through to ContentHostingComparator * @param parent - The folder containing this item * @param isLocal - true if navigation root and home collection id of site are the same, false otherwise * @param state - The session state * @return a List of BrowseItem objects */ protected static List getBrowseItems(String collectionId, HashMap expandedCollections, Set highlightedItems, String sortedBy, String sortedAsc, BrowseItem parent, boolean isLocal, SessionState state) { boolean need_to_expand_all = Boolean.TRUE.toString().equals((String)state.getAttribute(STATE_NEED_TO_EXPAND_ALL)); List newItems = new LinkedList(); try { // find the ContentHosting service org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE); // get the collection // try using existing resource first ContentCollection collection = null; // get the collection if (expandedCollections.containsKey(collectionId)) { collection = (ContentCollection) expandedCollections.get(collectionId); } else { collection = ContentHostingService.getCollection(collectionId); if(need_to_expand_all) { expandedCollections.put(collectionId, collection); state.setAttribute(STATE_EXPANDED_COLLECTIONS, expandedCollections); } } String dummyId = collectionId.trim(); if(dummyId.endsWith(Entity.SEPARATOR)) { dummyId += "dummy"; } else { dummyId += Entity.SEPARATOR + "dummy"; } boolean canRead = false; boolean canDelete = false; boolean canRevise = false; boolean canAddFolder = false; boolean canAddItem = false; boolean canUpdate = false; int depth = 0; if(parent == null || ! parent.canRead()) { canRead = contentService.allowGetCollection(collectionId); } else { canRead = parent.canRead(); } if(parent == null || ! 
parent.canDelete()) { canDelete = contentService.allowRemoveResource(collectionId); } else { canDelete = parent.canDelete(); } if(parent == null || ! parent.canRevise()) { canRevise = contentService.allowUpdateResource(collectionId); } else { canRevise = parent.canRevise(); } if(parent == null || ! parent.canAddFolder()) { canAddFolder = contentService.allowAddCollection(dummyId); } else { canAddFolder = parent.canAddFolder(); } if(parent == null || ! parent.canAddItem()) { canAddItem = contentService.allowAddResource(dummyId); } else { canAddItem = parent.canAddItem(); } if(parent == null || ! parent.canUpdate()) { canUpdate = AuthzGroupService.allowUpdate(collectionId); } else { canUpdate = parent.canUpdate(); } if(parent != null) { depth = parent.getDepth() + 1; } if(canAddItem) { state.setAttribute(STATE_PASTE_ALLOWED_FLAG, Boolean.TRUE.toString()); } boolean hasDeletableChildren = canDelete; boolean hasCopyableChildren = canRead; String homeCollectionId = (String) state.getAttribute(STATE_HOME_COLLECTION_ID); ResourceProperties cProperties = collection.getProperties(); String folderName = cProperties.getProperty(ResourceProperties.PROP_DISPLAY_NAME); if(collectionId.equals(homeCollectionId)) { folderName = (String) state.getAttribute(STATE_HOME_COLLECTION_DISPLAY_NAME); } BrowseItem folder = new BrowseItem(collectionId, folderName, "folder"); if(parent == null) { folder.setRoot(collectionId); } else { folder.setRoot(parent.getRoot()); } boolean isInDropbox = ContentHostingService.isInDropbox(collectionId); folder.setInDropbox(isInDropbox); BasicRightsAssignment rightsObj = new BasicRightsAssignment(folder.getItemNum(), cProperties); folder.setRights(rightsObj); AccessMode access = collection.getAccess(); if(access == null || AccessMode.SITE == access) { folder.setAccess(AccessMode.INHERITED.toString()); } else { folder.setAccess(access.toString()); } AccessMode inherited_access = collection.getInheritedAccess(); if(inherited_access == null || AccessMode.SITE 
== inherited_access) { folder.setInheritedAccess(AccessMode.INHERITED.toString()); } else { folder.setInheritedAccess(inherited_access.toString()); } Collection access_groups = collection.getGroupObjects(); if(access_groups == null) { access_groups = new Vector(); } folder.setGroups(access_groups); Collection inherited_access_groups = collection.getInheritedGroupObjects(); if(inherited_access_groups == null) { inherited_access_groups = new Vector(); } folder.setInheritedGroups(inherited_access_groups); if(parent != null && (parent.isPubview() || parent.isPubviewInherited())) { folder.setPubviewInherited(true); folder.setPubview(false); } else if(ContentHostingService.isPubView(folder.getId())) { folder.setPubview(true); } if(highlightedItems == null || highlightedItems.isEmpty()) { // do nothing } else if(parent != null && parent.isHighlighted()) { folder.setInheritsHighlight(true); folder.setHighlighted(true); } else if(highlightedItems.contains(collectionId)) { folder.setHighlighted(true); folder.setInheritsHighlight(false); } String containerId = contentService.getContainingCollectionId (collectionId); folder.setContainer(containerId); folder.setCanRead(canRead); folder.setCanRevise(canRevise); folder.setCanAddItem(canAddItem); folder.setCanAddFolder(canAddFolder); folder.setCanDelete(canDelete); folder.setCanUpdate(canUpdate); try { Time createdTime = cProperties.getTimeProperty(ResourceProperties.PROP_CREATION_DATE); String createdTimeString = createdTime.toStringLocalShortDate(); folder.setCreatedTime(createdTimeString); } catch(Exception e) { String createdTimeString = cProperties.getProperty(ResourceProperties.PROP_CREATION_DATE); folder.setCreatedTime(createdTimeString); } try { String createdBy = getUserProperty(cProperties, ResourceProperties.PROP_CREATOR).getDisplayName(); folder.setCreatedBy(createdBy); } catch(Exception e) { String createdBy = cProperties.getProperty(ResourceProperties.PROP_CREATOR); folder.setCreatedBy(createdBy); } try { Time 
modifiedTime = cProperties.getTimeProperty(ResourceProperties.PROP_MODIFIED_DATE); String modifiedTimeString = modifiedTime.toStringLocalShortDate(); folder.setModifiedTime(modifiedTimeString); } catch(Exception e) { String modifiedTimeString = cProperties.getProperty(ResourceProperties.PROP_MODIFIED_DATE); folder.setModifiedTime(modifiedTimeString); } try { String modifiedBy = getUserProperty(cProperties, ResourceProperties.PROP_MODIFIED_BY).getDisplayName(); folder.setModifiedBy(modifiedBy); } catch(Exception e) { String modifiedBy = cProperties.getProperty(ResourceProperties.PROP_MODIFIED_BY); folder.setModifiedBy(modifiedBy); } String url = contentService.getUrl(collectionId); folder.setUrl(url); try { int collection_size = contentService.getCollectionSize(collectionId); folder.setIsEmpty(collection_size < 1); folder.setIsTooBig(collection_size > EXPANDABLE_FOLDER_SIZE_LIMIT); } catch(RuntimeException e) { folder.setIsEmpty(true); folder.setIsTooBig(false); } folder.setDepth(depth); newItems.add(folder); if(need_to_expand_all || expandedCollections.containsKey (collectionId)) { // Get the collection members from the 'new' collection List newMembers = collection.getMemberResources (); Collections.sort (newMembers, ContentHostingService.newContentHostingComparator (sortedBy, Boolean.valueOf (sortedAsc).booleanValue ())); // loop thru the (possibly) new members and add to the list Iterator it = newMembers.iterator(); while(it.hasNext()) { ContentEntity resource = (ContentEntity) it.next(); ResourceProperties props = resource.getProperties(); String itemId = resource.getId(); if(resource.isCollection()) { List offspring = getBrowseItems(itemId, expandedCollections, highlightedItems, sortedBy, sortedAsc, folder, isLocal, state); if(! 
offspring.isEmpty()) { BrowseItem child = (BrowseItem) offspring.get(0); hasDeletableChildren = hasDeletableChildren || child.hasDeletableChildren(); hasCopyableChildren = hasCopyableChildren || child.hasCopyableChildren(); } // add all the items in the subfolder to newItems newItems.addAll(offspring); } else { AccessMode access_mode = ((GroupAwareEntity) resource).getAccess(); if(access_mode == null) { access_mode = AccessMode.INHERITED; } else if(access_mode == AccessMode.GROUPED) { if(! ContentHostingService.allowGetResource(resource.getId())) { continue; } } String itemType = ((ContentResource)resource).getContentType(); String itemName = props.getProperty(ResourceProperties.PROP_DISPLAY_NAME); BrowseItem newItem = new BrowseItem(itemId, itemName, itemType); newItem.setAccess(access_mode.toString()); newItem.setInheritedAccess(folder.getEffectiveAccess()); newItem.setInDropbox(isInDropbox); BasicRightsAssignment rightsObj2 = new BasicRightsAssignment(newItem.getItemNum(), props); newItem.setRights(rightsObj2); Collection groups = ((GroupAwareEntity) resource).getGroupObjects(); if(groups == null) { groups = new Vector(); } Collection inheritedGroups = folder.getGroups(); if(inheritedGroups == null || inheritedGroups.isEmpty()) { inheritedGroups = folder.getInheritedGroups(); } newItem.setGroups(groups); newItem.setInheritedGroups(inheritedGroups); newItem.setContainer(collectionId); newItem.setRoot(folder.getRoot()); newItem.setCanDelete(canDelete); newItem.setCanRevise(canRevise); newItem.setCanRead(canRead); newItem.setCanCopy(canRead); newItem.setCanAddItem(canAddItem); // true means this user can add an item in the folder containing this item (used for "duplicate") if(highlightedItems == null || highlightedItems.isEmpty()) { // do nothing } else if(folder.isHighlighted()) { newItem.setInheritsHighlight(true); newItem.setHighlighted(true); } else if(highlightedItems.contains(itemId)) { newItem.setHighlighted(true); newItem.setInheritsHighlight(false); } try 
{ Time createdTime = props.getTimeProperty(ResourceProperties.PROP_CREATION_DATE); String createdTimeString = createdTime.toStringLocalShortDate(); newItem.setCreatedTime(createdTimeString); } catch(Exception e) { String createdTimeString = props.getProperty(ResourceProperties.PROP_CREATION_DATE); newItem.setCreatedTime(createdTimeString); } try { String createdBy = getUserProperty(props, ResourceProperties.PROP_CREATOR).getDisplayName(); newItem.setCreatedBy(createdBy); } catch(Exception e) { String createdBy = props.getProperty(ResourceProperties.PROP_CREATOR); newItem.setCreatedBy(createdBy); } try { Time modifiedTime = props.getTimeProperty(ResourceProperties.PROP_MODIFIED_DATE); String modifiedTimeString = modifiedTime.toStringLocalShortDate(); newItem.setModifiedTime(modifiedTimeString); } catch(Exception e) { String modifiedTimeString = props.getProperty(ResourceProperties.PROP_MODIFIED_DATE); newItem.setModifiedTime(modifiedTimeString); } try { String modifiedBy = getUserProperty(props, ResourceProperties.PROP_MODIFIED_BY).getDisplayName(); newItem.setModifiedBy(modifiedBy); } catch(Exception e) { String modifiedBy = props.getProperty(ResourceProperties.PROP_MODIFIED_BY); newItem.setModifiedBy(modifiedBy); } if(folder.isPubview() || folder.isPubviewInherited()) { newItem.setPubviewInherited(true); newItem.setPubview(false); } else if(ContentHostingService.isPubView(resource.getId())) { newItem.setPubview(true); } String size = props.getPropertyFormatted(ResourceProperties.PROP_CONTENT_LENGTH); newItem.setSize(size); String target = Validator.getResourceTarget(props.getProperty(ResourceProperties.PROP_CONTENT_TYPE)); newItem.setTarget(target); String newUrl = contentService.getUrl(itemId); newItem.setUrl(newUrl); try { boolean copyrightAlert = props.getBooleanProperty(ResourceProperties.PROP_COPYRIGHT_ALERT); newItem.setCopyrightAlert(copyrightAlert); } catch(Exception e) {} newItem.setDepth(depth + 1); if (checkItemFilter((ContentResource)resource, newItem, 
state))
{
	newItems.add(newItem);
}
}	// end else: member is a resource, not a collection
}	// end while: loop over the collection's members
}	// end if: collection expanded (or expand-all in effect)

// NOTE(review): "seDeletableChildren" looks like a typo for "setDeletableChildren" -- confirm against the BrowseItem API before renaming
folder.seDeletableChildren(hasDeletableChildren);
folder.setCopyableChildren(hasCopyableChildren);
// return newItems;
}
catch (IdUnusedException ignore)
{
	// this condition indicates a site that does not have a resources collection (mercury?)
}
catch (TypeException e)
{
	// NOTE(review): hard-coded, untranslated alert text -- the rest of this class uses rb.getString(...)
	addAlert(state, "TypeException.");
}
catch (PermissionException e)
{
	// ignore -- we'll just skip this collection since user lacks permission to access it.
	//addAlert(state, "PermissionException");
}

return newItems;

}	// getBrowseItems

/**
 * Apply the attachment-mode filter (if any) to a resource being listed.
 * If a ContentResourceFilter is registered in state (STATE_ATTACH_FILTER),
 * it decides both whether the item is selectable and whether it is shown
 * at all; with no filter every item is viewable and selectable.
 * @param resource the resource being tested
 * @param newItem the display item to flag as selectable (may be null when only visibility matters)
 * @param state the tool session state holding the optional filter
 * @return true if the resource should be shown in the listing
 */
protected static boolean checkItemFilter(ContentResource resource, BrowseItem newItem, SessionState state)
{
	ContentResourceFilter filter = (ContentResourceFilter) state.getAttribute(STATE_ATTACH_FILTER);

	if (filter != null)
	{
		if (newItem != null)
		{
			newItem.setCanSelect(filter.allowSelect(resource));
		}
		return filter.allowView(resource);
	}
	else if (newItem != null)
	{
		// no filter registered: everything is selectable
		newItem.setCanSelect(true);
	}
	return true;
}

/**
 * Check whether the registered attachment filter (if any) allows selecting
 * this resource; true when no filter is registered.
 * NOTE(review): method name "checkSelctItemFilter" is misspelled ("Selct"),
 * but it must stay as-is because callers elsewhere use this name.
 * @param resource the resource being tested
 * @param state the tool session state holding the optional filter
 * @return true if the resource may be selected
 */
protected static boolean checkSelctItemFilter(ContentResource resource, SessionState state)
{
	ContentResourceFilter filter = (ContentResourceFilter) state.getAttribute(STATE_ATTACH_FILTER);

	if (filter != null)
	{
		return filter.allowSelect(resource);
	}
	return true;
}

/**
 * set the state name to be "copy" if any item has been selected for copying
 */
public void doCopyitem ( RunData data )
{
	// get the state object
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	String itemId = data.getParameters ().getString ("itemId");

	if (itemId == null)
	{
		// there is no resource selected, show the alert message to the user
		addAlert(state, rb.getString("choosefile6"));
		state.setAttribute (STATE_MODE, MODE_LIST);
	}
	else
	{
		try
		{
			// fetch properties only to verify the item exists and is readable;
			// the returned value is otherwise unused (collection check below is disabled)
			ResourceProperties properties = ContentHostingService.getProperties (itemId);
			/*
			if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString()))
			{
				String alert = (String) state.getAttribute(STATE_MESSAGE);
				if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1)))
				{
					addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING);
				}
			}
			*/
		}
		catch (PermissionException e)
		{
			addAlert(state, rb.getString("notpermis15"));
		}
		catch (IdUnusedException e)
		{
			addAlert(state,RESOURCE_NOT_EXIST_STRING);
		}	// try-catch

		if (state.getAttribute(STATE_MESSAGE) == null)
		{
			// no alert was raised: remember the id and flag a copy in progress
			state.setAttribute (STATE_COPY_FLAG, Boolean.TRUE.toString());
			state.setAttribute (STATE_COPIED_ID, itemId);
		}	// if-else
	}	// if-else

}	// doCopyitem

/**
 * Paste the previously copied item(s)
 */
public static void doPasteitems ( RunData data)
{
	ParameterParser params = data.getParameters ();
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	// ids captured earlier by the copy action; destination folder comes from the request
	List items = (List) state.getAttribute(STATE_COPIED_IDS);
	String collectionId = params.getString ("collectionId");

	Iterator itemIter = items.iterator();
	while (itemIter.hasNext())
	{
		// get the copied item to be pasted
		String itemId = (String) itemIter.next();

		String originalDisplayName = NULL_STRING;

		try
		{
			String id = ContentHostingService.copyIntoFolder(itemId, collectionId);
			String mode = (String) state.getAttribute(STATE_MODE);
			if(MODE_HELPER.equals(mode))
			{
				String helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE);
				if(helper_mode != null && MODE_ATTACHMENT_NEW_ITEM.equals(helper_mode))
				{
					// add to the attachments vector
					List attachments = EntityManager.newReferenceList();
					Reference ref = EntityManager.newReference(ContentHostingService.getReference(id));
					attachments.add(ref);
					cleanupState(state);
					state.setAttribute(STATE_ATTACHMENTS, attachments);
				}
				else
				{
					// attach either the item itself or a link to it
					if(state.getAttribute(STATE_ATTACH_LINKS) == null)
					{
						attachItem(id, state);
					}
					else
					{
						attachLink(id, state);
					}
				}
			}
		}
		catch (PermissionException e)
		{
			addAlert(state, rb.getString("notpermis8") + " " + originalDisplayName + ". ");
		}
		catch (IdUnusedException e)
		{
			addAlert(state,RESOURCE_NOT_EXIST_STRING);
		}
		catch (InUseException e)
		{
			addAlert(state, rb.getString("someone") + " " + originalDisplayName);
		}
		catch (TypeException e)
		{
			addAlert(state, rb.getString("pasteitem") + " " + originalDisplayName + " " + rb.getString("mismatch"));
		}
		catch(IdUsedException e)
		{
			addAlert(state, rb.getString("toomany"));
		}
		catch(IdLengthException e)
		{
			addAlert(state, rb.getString("toolong") + " " + e.getMessage());
		}
		catch(IdUniquenessException e)
		{
			// NOTE(review): hard-coded, untranslated alert text -- siblings use rb.getString(...)
			addAlert(state, "Could not add this item to this folder");
		}
		catch(ServerOverloadException e)
		{
			addAlert(state, rb.getString("failed"));
		}
		catch(InconsistentException e)
		{
			addAlert(state, rb.getString("recursive") + " " + itemId);
		}
		catch (OverQuotaException e)
		{
			addAlert(state, rb.getString("overquota"));
		}	// try-catch
		catch(RuntimeException e)
		{
			logger.warn("ResourcesAction.doPasteitems ***** Unknown Exception ***** " + e.getMessage());
			addAlert(state, rb.getString("failed"));
		}

		if (state.getAttribute(STATE_MESSAGE) == null)
		{
			// paste succeeded: return to list mode (or attachment-select in helper mode)
			String mode = (String) state.getAttribute(STATE_MODE);
			if(MODE_HELPER.equals(mode))
			{
				state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT);
			}
			else
			{
				state.setAttribute (STATE_MODE, MODE_LIST);
			}

			// try to expand the collection
			HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
			if(! expandedCollections.containsKey(collectionId))
			{
				org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE);
				try
				{
					ContentCollection coll = contentService.getCollection(collectionId);
					expandedCollections.put(collectionId, coll);
				}
				catch(Exception ignore){}
			}

			// reset the copy flag
			if (((String)state.getAttribute (STATE_COPY_FLAG)).equals (Boolean.TRUE.toString()))
			{
				state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString());
			}
		}
	}

}	// doPasteitems

/**
 * Paste the item(s) selected to be moved
 */
public static void doMoveitems ( RunData data)
{
	ParameterParser params = data.getParameters ();
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}

	List items = (List) state.getAttribute(STATE_MOVED_IDS);

	String collectionId = params.getString ("collectionId");

	Iterator itemIter = items.iterator();
	while (itemIter.hasNext())
	{
		// get the copied item to be pasted
		String itemId = (String) itemIter.next();

		String originalDisplayName = NULL_STRING;

		try
		{
			/*
			ResourceProperties properties = ContentHostingService.getProperties (itemId);
			originalDisplayName = properties.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME);

			// copy, cut and paste not operated on collections
			if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString()))
			{
				String alert = (String) state.getAttribute(STATE_MESSAGE);
				if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1)))
				{
					addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING);
				}
			}
			else
			*/
			{
				ContentHostingService.moveIntoFolder(itemId, collectionId);
			}	// if-else
		}
		catch (PermissionException e)
		{
			addAlert(state,
rb.getString("notpermis8") + " " + originalDisplayName + ". ");
		}
		catch (IdUnusedException e)
		{
			addAlert(state,RESOURCE_NOT_EXIST_STRING);
		}
		catch (InUseException e)
		{
			addAlert(state, rb.getString("someone") + " " + originalDisplayName);
		}
		catch (TypeException e)
		{
			addAlert(state, rb.getString("pasteitem") + " " + originalDisplayName + " " + rb.getString("mismatch"));
		}
		catch (InconsistentException e)
		{
			addAlert(state, rb.getString("recursive") + " " + itemId);
		}
		catch(IdUsedException e)
		{
			addAlert(state, rb.getString("toomany"));
		}
		catch(ServerOverloadException e)
		{
			addAlert(state, rb.getString("failed"));
		}
		catch (OverQuotaException e)
		{
			addAlert(state, rb.getString("overquota"));
		}	// try-catch
		catch(RuntimeException e)
		{
			logger.warn("ResourcesAction.doMoveitems ***** Unknown Exception ***** " + e.getMessage());
			addAlert(state, rb.getString("failed"));
		}

		if (state.getAttribute(STATE_MESSAGE) == null)
		{
			// move succeeded: return to list mode (or attachment-select in helper mode)
			String mode = (String) state.getAttribute(STATE_MODE);
			if(MODE_HELPER.equals(mode))
			{
				state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT);
			}
			else
			{
				state.setAttribute (STATE_MODE, MODE_LIST);
			}

			// try to expand the collection
			HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
			if(! expandedCollections.containsKey(collectionId))
			{
				org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE);
				try
				{
					ContentCollection coll = contentService.getCollection(collectionId);
					expandedCollections.put(collectionId, coll);
				}
				catch(Exception ignore){}
			}

			// reset the move flag
			if (((String)state.getAttribute (STATE_MOVE_FLAG)).equals (Boolean.TRUE.toString()))
			{
				state.setAttribute (STATE_MOVE_FLAG, Boolean.FALSE.toString());
			}
		}
	}

}	// doMoveitems

/**
 * Paste the previously copied item(s)
 */
public static void doPasteitem ( RunData data)
{
	ParameterParser params = data.getParameters ();
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	// get the copied item to be pasted
	String itemId = params.getString("itemId");

	String collectionId = params.getString ("collectionId");

	String originalDisplayName = NULL_STRING;

	try
	{
		ResourceProperties properties = ContentHostingService.getProperties (itemId);
		originalDisplayName = properties.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME);

		// copy, cut and paste not operated on collections
		if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString()))
		{
			String alert = (String) state.getAttribute(STATE_MESSAGE);
			if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1)))
			{
				addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING);
			}
		}
		else
		{
			// paste the resource: duplicate it into the target folder under a "copy of"-style name
			ContentResource resource = ContentHostingService.getResource (itemId);
			ResourceProperties p = ContentHostingService.getProperties(itemId);
			String displayName = DUPLICATE_STRING + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME);

			String newItemId = ContentHostingService.copyIntoFolder(itemId, collectionId);

			ContentResourceEdit copy = ContentHostingService.editResource(newItemId);
ResourcePropertiesEdit pedit = copy.getPropertiesEdit();
			pedit.addProperty(ResourceProperties.PROP_DISPLAY_NAME, displayName);
			ContentHostingService.commitResource(copy, NotificationService.NOTI_NONE);
		}	// if-else
	}
	catch (PermissionException e)
	{
		addAlert(state, rb.getString("notpermis8") + " " + originalDisplayName + ". ");
	}
	catch (IdUnusedException e)
	{
		addAlert(state,RESOURCE_NOT_EXIST_STRING);
	}
	catch (IdUsedException e)
	{
		addAlert(state, rb.getString("notaddreso") + " " + originalDisplayName + " " + rb.getString("used2"));
	}
	catch(IdLengthException e)
	{
		addAlert(state, rb.getString("toolong") + " " + e.getMessage());
	}
	catch(IdUniquenessException e)
	{
		// NOTE(review): hard-coded, untranslated alert text -- siblings use rb.getString(...)
		addAlert(state, "Could not add this item to this folder");
	}
	catch (InconsistentException ee)
	{
		addAlert(state, RESOURCE_INVALID_TITLE_STRING);
	}
	catch(InUseException e)
	{
		addAlert(state, rb.getString("someone") + " " + originalDisplayName + ". ");
	}
	catch(OverQuotaException e)
	{
		addAlert(state, rb.getString("overquota"));
	}
	catch(ServerOverloadException e)
	{
		// this represents temporary unavailability of server's filesystem
		// for server configured to save resource body in filesystem
		addAlert(state, rb.getString("failed"));
	}
	catch (TypeException e)
	{
		addAlert(state, rb.getString("pasteitem") + " " + originalDisplayName + " " + rb.getString("mismatch"));
	}	// try-catch

	if (state.getAttribute(STATE_MESSAGE) == null)
	{
		// paste succeeded: return to list mode (or attachment-select in helper mode)
		String mode = (String) state.getAttribute(STATE_MODE);
		if(MODE_HELPER.equals(mode))
		{
			state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT);
		}
		else
		{
			state.setAttribute (STATE_MODE, MODE_LIST);
		}

		// try to expand the collection
		HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
		if(! expandedCollections.containsKey(collectionId))
		{
			org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE);
			try
			{
				ContentCollection coll = contentService.getCollection(collectionId);
				expandedCollections.put(collectionId, coll);
			}
			catch(Exception ignore){}
		}

		// reset the copy flag
		if (((String)state.getAttribute (STATE_COPY_FLAG)).equals (Boolean.TRUE.toString()))
		{
			state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString());
		}
	}

}	// doPasteitem

/**
 * Fire up the permissions editor for the current folder's permissions
 */
public void doFolder_permissions(RunData data, Context context)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState(((JetspeedRunData)data).getJs_peid());
	ParameterParser params = data.getParameters();

	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}

	// cancel move if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
	{
		initMoveContext(state);
	}

	// get the current collection id and the related site
	String collectionId = params.getString("collectionId"); //(String) state.getAttribute (STATE_COLLECTION_ID);
	String title = "";
	try
	{
		// folder's display name, used in the helper's description text below
		title = ContentHostingService.getProperties(collectionId).getProperty(ResourceProperties.PROP_DISPLAY_NAME);
	}
	catch (PermissionException e)
	{
		addAlert(state, rb.getString("notread"));
	}
	catch (IdUnusedException e)
	{
		addAlert(state, rb.getString("notfindfol"));
	}

	// the folder to edit
	Reference ref = EntityManager.newReference(ContentHostingService.getReference(collectionId));
	state.setAttribute(PermissionsHelper.TARGET_REF, ref.getReference());

	// use the folder's context (as a site) for roles
	String siteRef = SiteService.siteReference(ref.getContext());
	state.setAttribute(PermissionsHelper.ROLES_REF, siteRef);

	// ... with this description
	state.setAttribute(PermissionsHelper.DESCRIPTION, rb.getString("setpermis") + " " + title);

	// ... showing only locks that are prefixed with this
	state.setAttribute(PermissionsHelper.PREFIX, "content.");

	// get into helper mode with this helper tool
	startHelper(data.getRequest(), "sakai.permissions.helper");

}	// doFolder_permissions

/**
 * Fire up the permissions editor for the tool's permissions
 */
public void doPermissions(RunData data, Context context)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState(((JetspeedRunData)data).getJs_peid());

	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}

	// cancel move if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
	{
		initMoveContext(state);
	}

	// should we save here?
	state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());

	// get the current home collection id and the related site
	String collectionId = (String) state.getAttribute (STATE_HOME_COLLECTION_ID);
	Reference ref = EntityManager.newReference(ContentHostingService.getReference(collectionId));
	String siteRef = SiteService.siteReference(ref.getContext());

	// setup for editing the permissions of the site for this tool, using the roles of this site, too
	state.setAttribute(PermissionsHelper.TARGET_REF, siteRef);

	// ... with this description
	state.setAttribute(PermissionsHelper.DESCRIPTION, rb.getString("setpermis1") + SiteService.getSiteDisplay(ref.getContext()));

	// ... showing only locks that are prefixed with this
	state.setAttribute(PermissionsHelper.PREFIX, "content.");

	// get into helper mode with this helper tool
	startHelper(data.getRequest(), "sakai.permissions.helper");

}	// doPermissions

/**
 * is notification enabled?
 * Always true in this tool; subclasses/overrides could decide otherwise.
 */
protected boolean notificationEnabled(SessionState state)
{
	return true;

}	// notificationEnabled

/**
 * Processes the HTML document that is coming back from the browser
 * (from the formatted text editing widget).
 * @param state Used to pass in any user-visible alerts or errors when processing the text
 * @param strFromBrowser The string from the browser
 * @return The formatted text
 */
private String processHtmlDocumentFromBrowser(SessionState state, String strFromBrowser)
{
	StringBuffer alertMsg = new StringBuffer();
	String text = FormattedText.processHtmlDocument(strFromBrowser, alertMsg);
	// any cleanup messages produced by the formatter are surfaced as a user alert
	if (alertMsg.length() > 0) addAlert(state, alertMsg.toString());
	return text;
}

/**
 *
 * Whether a resource item can be replaced
 * Collections, URL-type resources and shortcut items are not replaceable.
 * @param p The ResourceProperties object for the resource item
 * @return true If it can be replaced; false otherwise
 */
private static boolean replaceable(ResourceProperties p)
{
	boolean rv = true;
	if (p.getPropertyFormatted (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString()))
	{
		rv = false;
	}
	else if (p.getProperty (ResourceProperties.PROP_CONTENT_TYPE).equals (ResourceProperties.TYPE_URL))
	{
		rv = false;
	}
	String displayName = p.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME);
	if (displayName.indexOf(SHORTCUT_STRING) != -1)
	{
		rv = false;
	}
	return rv;

}	// replaceable

/**
 *
 * put copyright info into context
 * When Creative Commons support is enabled (STATE_USING_CREATIVE_COMMONS),
 * populates the velocity context with the CC licensing choice lists;
 * otherwise exposes the classic copyright-type choices from state.
 */
private static void copyrightChoicesIntoContext(SessionState state, Context context)
{
	boolean usingCreativeCommons = state.getAttribute(STATE_USING_CREATIVE_COMMONS) != null && state.getAttribute(STATE_USING_CREATIVE_COMMONS).equals(Boolean.TRUE.toString());

	if(usingCreativeCommons)
	{
		// NOTE(review): these labels/options are hard-coded English -- candidates for the resource bundle
		String ccOwnershipLabel = "Who created this resource?";
		List ccOwnershipList = new Vector();
		ccOwnershipList.add("-- Select --");
		ccOwnershipList.add("I created this resource");
		ccOwnershipList.add("Someone else created this resource");

		String ccMyGrantLabel = "Terms of use";
		List ccMyGrantOptions = new Vector();
ccMyGrantOptions.add("-- Select --");
		ccMyGrantOptions.add("Use my copyright");
		ccMyGrantOptions.add("Use Creative Commons License");
		ccMyGrantOptions.add("Use Public Domain Dedication");

		String ccCommercialLabel = "Allow commercial use?";
		List ccCommercialList = new Vector();
		ccCommercialList.add("Yes");
		ccCommercialList.add("No");

		String ccModificationLabel = "Allow Modifications?";
		List ccModificationList = new Vector();
		ccModificationList.add("Yes");
		ccModificationList.add("Yes, share alike");
		ccModificationList.add("No");

		String ccOtherGrantLabel = "Terms of use";
		List ccOtherGrantList = new Vector();
		ccOtherGrantList.add("Subject to fair-use exception");
		ccOtherGrantList.add("Public domain (created before copyright law applied)");
		ccOtherGrantList.add("Public domain (copyright has expired)");
		ccOtherGrantList.add("Public domain (government document not subject to copyright)");

		String ccRightsYear = "Year";
		String ccRightsOwner = "Copyright owner";

		String ccAcknowledgeLabel = "Require users to acknowledge author's rights before access?";
		List ccAcknowledgeList = new Vector();
		ccAcknowledgeList.add("Yes");
		ccAcknowledgeList.add("No");

		String ccInfoUrl = "";

		// defaults for the rights year/owner fields: current year and current user
		int year = TimeService.newTime().breakdownLocal().getYear();
		String username = UserDirectoryService.getCurrentUser().getDisplayName();

		context.put("usingCreativeCommons", Boolean.TRUE);
		context.put("ccOwnershipLabel", ccOwnershipLabel);
		context.put("ccOwnershipList", ccOwnershipList);
		context.put("ccMyGrantLabel", ccMyGrantLabel);
		context.put("ccMyGrantOptions", ccMyGrantOptions);
		context.put("ccCommercialLabel", ccCommercialLabel);
		context.put("ccCommercialList", ccCommercialList);
		context.put("ccModificationLabel", ccModificationLabel);
		context.put("ccModificationList", ccModificationList);
		context.put("ccOtherGrantLabel", ccOtherGrantLabel);
		context.put("ccOtherGrantList", ccOtherGrantList);
		context.put("ccRightsYear", ccRightsYear);
		context.put("ccRightsOwner", ccRightsOwner);
		context.put("ccAcknowledgeLabel", ccAcknowledgeLabel);
		context.put("ccAcknowledgeList", ccAcknowledgeList);
		context.put("ccInfoUrl", ccInfoUrl);
		context.put("ccThisYear", Integer.toString(year));
		context.put("ccThisUser", username);
	}
	else
	{
		//copyright
		if (state.getAttribute(COPYRIGHT_FAIRUSE_URL) != null)
		{
			context.put("fairuseurl", state.getAttribute(COPYRIGHT_FAIRUSE_URL));
		}
		if (state.getAttribute(NEW_COPYRIGHT_INPUT) != null)
		{
			context.put("newcopyrightinput", state.getAttribute(NEW_COPYRIGHT_INPUT));
		}

		if (state.getAttribute(COPYRIGHT_TYPES) != null)
		{
			List copyrightTypes = (List) state.getAttribute(COPYRIGHT_TYPES);
			context.put("copyrightTypes", copyrightTypes);
			context.put("copyrightTypesSize", new Integer(copyrightTypes.size() - 1));
			// by convention the last entry in the list is the "use this copyright" choice
			context.put("USE_THIS_COPYRIGHT", copyrightTypes.get(copyrightTypes.size() - 1));
		}
	}

	Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
	if(preventPublicDisplay == null)
	{
		preventPublicDisplay = Boolean.FALSE;
		state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
	}
	context.put("preventPublicDisplay", preventPublicDisplay);

}	// copyrightChoicesIntoContext

/**
 * Add variables and constants to the velocity context to render an editor
 * for inputing and modifying optional metadata properties about a resource.
 */
private static void metadataGroupsIntoContext(SessionState state, Context context)
{
	// widget-type constants used by the velocity template to pick input controls
	context.put("STRING", ResourcesMetadata.WIDGET_STRING);
	context.put("TEXTAREA", ResourcesMetadata.WIDGET_TEXTAREA);
	context.put("BOOLEAN", ResourcesMetadata.WIDGET_BOOLEAN);
	context.put("INTEGER", ResourcesMetadata.WIDGET_INTEGER);
	context.put("DOUBLE", ResourcesMetadata.WIDGET_DOUBLE);
	context.put("DATE", ResourcesMetadata.WIDGET_DATE);
	context.put("TIME", ResourcesMetadata.WIDGET_TIME);
	context.put("DATETIME", ResourcesMetadata.WIDGET_DATETIME);
	context.put("ANYURI", ResourcesMetadata.WIDGET_ANYURI);
	context.put("WYSIWYG", ResourcesMetadata.WIDGET_WYSIWYG);

	context.put("today", TimeService.newTime());

	List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
	if(metadataGroups != null && !metadataGroups.isEmpty())
	{
		context.put("metadataGroups", metadataGroups);
	}

}	// metadataGroupsIntoContext

/**
 * initialize the metadata context
 * Lazily creates the STATE_METADATA_GROUPS list and registers the
 * Dublin Core optional-properties group if not already present.
 */
private static void initMetadataContext(SessionState state)
{
	// define MetadataSets map
	List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
	if(metadataGroups == null)
	{
		metadataGroups = new Vector();
		state.setAttribute(STATE_METADATA_GROUPS, metadataGroups);
	}
	// define DublinCore
	if( !metadataGroups.contains(new MetadataGroup(rb.getString("opt_props"))) )
	{
		MetadataGroup dc = new MetadataGroup( rb.getString("opt_props") );
		// dc.add(ResourcesMetadata.PROPERTY_DC_TITLE);
		// dc.add(ResourcesMetadata.PROPERTY_DC_DESCRIPTION);
		dc.add(ResourcesMetadata.PROPERTY_DC_ALTERNATIVE);
		dc.add(ResourcesMetadata.PROPERTY_DC_CREATOR);
		dc.add(ResourcesMetadata.PROPERTY_DC_PUBLISHER);
		dc.add(ResourcesMetadata.PROPERTY_DC_SUBJECT);
		dc.add(ResourcesMetadata.PROPERTY_DC_CREATED);
		dc.add(ResourcesMetadata.PROPERTY_DC_ISSUED);
		// dc.add(ResourcesMetadata.PROPERTY_DC_MODIFIED);
		// dc.add(ResourcesMetadata.PROPERTY_DC_TABLEOFCONTENTS);
		dc.add(ResourcesMetadata.PROPERTY_DC_ABSTRACT);
		dc.add(ResourcesMetadata.PROPERTY_DC_CONTRIBUTOR);
		// dc.add(ResourcesMetadata.PROPERTY_DC_TYPE);
		// dc.add(ResourcesMetadata.PROPERTY_DC_FORMAT);
		// dc.add(ResourcesMetadata.PROPERTY_DC_IDENTIFIER);
		// dc.add(ResourcesMetadata.PROPERTY_DC_SOURCE);
		// dc.add(ResourcesMetadata.PROPERTY_DC_LANGUAGE);
		// dc.add(ResourcesMetadata.PROPERTY_DC_COVERAGE);
		// dc.add(ResourcesMetadata.PROPERTY_DC_RIGHTS);
		dc.add(ResourcesMetadata.PROPERTY_DC_AUDIENCE);
		dc.add(ResourcesMetadata.PROPERTY_DC_EDULEVEL);
		metadataGroups.add(dc);
		state.setAttribute(STATE_METADATA_GROUPS, metadataGroups);
	}
	/*
	// define DublinCore
	if(!metadataGroups.contains(new MetadataGroup("Test of Datatypes")))
	{
		MetadataGroup dc = new MetadataGroup("Test of Datatypes");
		dc.add(ResourcesMetadata.PROPERTY_DC_TITLE);
		dc.add(ResourcesMetadata.PROPERTY_DC_DESCRIPTION);
		dc.add(ResourcesMetadata.PROPERTY_DC_ANYURI);
		dc.add(ResourcesMetadata.PROPERTY_DC_DOUBLE);
		dc.add(ResourcesMetadata.PROPERTY_DC_DATETIME);
		dc.add(ResourcesMetadata.PROPERTY_DC_TIME);
		dc.add(ResourcesMetadata.PROPERTY_DC_DATE);
		dc.add(ResourcesMetadata.PROPERTY_DC_BOOLEAN);
		dc.add(ResourcesMetadata.PROPERTY_DC_INTEGER);
		metadataGroups.add(dc);
		state.setAttribute(STATE_METADATA_GROUPS, metadataGroups);
	}
	*/
}

/**
 * Internal class that encapsulates all information about a resource that is needed in the browse mode
 */
public static class BrowseItem
{
	// monotonically increasing sequence used to build a unique per-item number (mod 10000)
	protected static Integer seqnum = new Integer(0);
	private String m_itemnum;

	// attributes of all resources
	protected String m_name;
	protected String m_id;
	protected String m_type;
	protected SortedSet m_allSiteGroups;
	protected SortedSet m_inheritedGroupRefs;
	protected SortedSet m_entityGroupRefs;
	protected SortedSet m_allowedRemoveGroupRefs;
	protected SortedSet m_allowedAddGroupRefs;
	protected Map m_allSiteGroupsMap;
	protected boolean m_canRead;
	protected boolean m_canRevise;
	protected boolean m_canDelete;
	protected boolean m_canCopy;
	protected boolean m_isCopied;
	protected boolean m_canAddItem;
	protected boolean m_canAddFolder;
	protected boolean m_canSelect;
protected boolean m_inDropbox; protected List m_members; protected boolean m_isEmpty; protected boolean m_isHighlighted; protected boolean m_inheritsHighlight; protected String m_createdBy; protected String m_createdTime; protected String m_modifiedBy; protected String m_modifiedTime; protected String m_size; protected String m_target; protected String m_container; protected String m_root; protected int m_depth; protected boolean m_hasDeletableChildren; protected boolean m_hasCopyableChildren; protected boolean m_copyrightAlert; protected String m_url; protected boolean m_isLocal; protected boolean m_isAttached; private boolean m_isMoved; private boolean m_canUpdate; private boolean m_toobig; protected String m_access; protected String m_inheritedAccess; protected Collection m_groups; protected Collection m_oldInheritedGroups; protected Collection m_oldPossibleGroups; protected BasicRightsAssignment m_rights; protected boolean m_pubview; protected boolean m_pubview_inherited; protected boolean m_pubview_possible; /** * @param id * @param name * @param type */ public BrowseItem(String id, String name, String type) { m_name = name; m_id = id; m_type = type; Integer snum; synchronized(seqnum) { snum = seqnum; seqnum = new Integer((seqnum.intValue() + 1) % 10000); } m_itemnum = "Item00000000".substring(0,10 - snum.toString().length()) + snum.toString(); m_allowedRemoveGroupRefs = new TreeSet(); m_allowedAddGroupRefs = new TreeSet(); m_allSiteGroups = new TreeSet(new Comparator() { protected final String DELIM = "::"; public int compare(Object arg0, Object arg1) { Group group0 = (Group) arg0; Group group1 = (Group) arg1; String string0 = group0.getTitle() + DELIM + group0.getId(); String string1 = group1.getTitle() + DELIM + group1.getId(); return string0.compareTo(string1); } }); m_entityGroupRefs = new TreeSet(); m_inheritedGroupRefs = new TreeSet(); m_allSiteGroupsMap = new Hashtable(); // set defaults m_rights = new BasicRightsAssignment(m_itemnum, false); m_members 
= new LinkedList(); m_canRead = false; m_canRevise = false; m_canDelete = false; m_canCopy = false; m_isEmpty = true; m_toobig = false; m_isCopied = false; m_isMoved = false; m_isAttached = false; m_canSelect = true; // default is true. m_hasDeletableChildren = false; m_hasCopyableChildren = false; m_createdBy = ""; m_modifiedBy = ""; // m_createdTime = TimeService.newTime().toStringLocalDate(); // m_modifiedTime = TimeService.newTime().toStringLocalDate(); m_size = ""; m_depth = 0; m_copyrightAlert = false; m_url = ""; m_target = ""; m_root = ""; m_pubview = false; m_pubview_inherited = false; m_pubview_possible = true; m_isHighlighted = false; m_inheritsHighlight = false; m_canAddItem = false; m_canAddFolder = false; m_canUpdate = false; m_access = AccessMode.INHERITED.toString(); m_groups = new Vector(); } public String getItemNum() { return m_itemnum; } public boolean isInherited(Group group) { return this.m_inheritedGroupRefs.contains(group.getReference()); } public boolean isLocal(Group group) { return this.m_entityGroupRefs.contains(group.getReference()); } public boolean isPossible(Group group) { boolean rv = false; if(AccessMode.GROUPED.toString().equals(this.m_inheritedAccess)) { rv = this.m_inheritedGroupRefs.contains(group.getReference()); } else { rv = this.m_allSiteGroupsMap.containsKey(group.getReference()); } return rv; } public boolean allowedRemove(Group group) { return this.m_allowedRemoveGroupRefs.contains(group.getReference()); } public SortedSet getAllowedRemoveGroupRefs() { return m_allowedRemoveGroupRefs; } public void setAllowedRemoveGroupRefs(Collection allowedRemoveGroupRefs) { importGroupRefs(allowedRemoveGroupRefs, this.m_allowedRemoveGroupRefs); } public void addAllowedRemoveGroupRef(String allowedRemoveGroupRef) { addGroupRefToCollection(allowedRemoveGroupRef, m_allowedRemoveGroupRefs); } public boolean allowedAdd(Group group) { return this.m_allowedAddGroupRefs.contains(group.getReference()); } public SortedSet 
getAllowedAddGroupRefs() { return m_allowedAddGroupRefs; } public void setAllowedAddGroupRefs(Collection allowedAddGroupRefs) { importGroupRefs(allowedAddGroupRefs, this.m_allowedAddGroupRefs); } public void addAllowedAddGroupRef(String allowedAddGroupRef) { addGroupRefToCollection(allowedAddGroupRef, m_allowedAddGroupRefs); } public List getAllSiteGroups() { return new Vector(m_allSiteGroups); } public void setAllSiteGroups(Collection allSiteGroups) { this.m_allSiteGroups.clear(); this.m_allSiteGroupsMap.clear(); addAllSiteGroups(allSiteGroups); } public void addAllSiteGroups(Collection allSiteGroups) { Iterator it = allSiteGroups.iterator(); while(it.hasNext()) { Group group = (Group) it.next(); if(! m_allSiteGroupsMap.containsKey(group.getReference())) { this.m_allSiteGroups.add(group); m_allSiteGroupsMap.put(group.getReference(), group); m_allSiteGroupsMap.put(group.getId(), group); } } } public SortedSet getEntityGroupRefs() { return m_entityGroupRefs; } public void setEntityGroupRefs(Collection entityGroupRefs) { importGroupRefs(entityGroupRefs, this.m_entityGroupRefs); } public void addEntityGroupRef(String entityGroupRef) { addGroupRefToCollection(entityGroupRef, m_entityGroupRefs); } public SortedSet getInheritedGroupRefs() { return m_inheritedGroupRefs; } public void setInheritedGroupRefs(Collection inheritedGroupRefs) { importGroupRefs(inheritedGroupRefs, this.m_inheritedGroupRefs); } public void addInheritedGroupRef(String inheritedGroupRef) { addGroupRefToCollection(inheritedGroupRef, m_inheritedGroupRefs); } protected void importGroupRefs(Collection groupRefs, Collection collection) { collection.clear(); Iterator it = groupRefs.iterator(); while(it.hasNext()) { Object obj = it.next(); if(obj instanceof Group) { addGroupRefToCollection(((Group) obj).getReference(), collection); } else if(obj instanceof String) { addGroupRefToCollection((String) obj, collection); } } } protected void addGroupRefToCollection(String groupRef, Collection collection) { 
Group group = (Group) m_allSiteGroupsMap.get(groupRef); if(group != null) { if(! collection.contains(group.getReference())) { collection.add(group.getReference()); } } } public void setIsTooBig(boolean toobig) { m_toobig = toobig; } public boolean isTooBig() { return m_toobig; } /** * @param name */ public void setName(String name) { m_name = name; } /** * @param root */ public void setRoot(String root) { m_root = root; } /** * @return */ public String getRoot() { return m_root; } /** * @return */ public List getMembers() { List rv = new LinkedList(); if(m_members != null) { rv.addAll(m_members); } return rv; } /** * @param members */ public void addMembers(Collection members) { if(m_members == null) { m_members = new LinkedList(); } m_members.addAll(members); } /** * @return */ public boolean canAddItem() { return m_canAddItem; } /** * @return */ public boolean canDelete() { return m_canDelete; } /** * @return */ public boolean canRead() { return m_canRead; } public boolean canSelect() { return m_canSelect; } /** * @return */ public boolean canRevise() { return m_canRevise; } /** * @return */ public String getId() { return m_id; } /** * @return */ public String getName() { return m_name; } /** * @return */ public int getDepth() { return m_depth; } /** * @param depth */ public void setDepth(int depth) { m_depth = depth; } /** * @param canCreate */ public void setCanAddItem(boolean canAddItem) { m_canAddItem = canAddItem; } /** * @param canDelete */ public void setCanDelete(boolean canDelete) { m_canDelete = canDelete; } /** * @param canRead */ public void setCanRead(boolean canRead) { m_canRead = canRead; } public void setCanSelect(boolean canSelect) { m_canSelect = canSelect; } /** * @param canRevise */ public void setCanRevise(boolean canRevise) { m_canRevise = canRevise; } /** * @return */ public boolean isFolder() { return TYPE_FOLDER.equals(m_type); } /** * @return */ public String getType() { return m_type; } /** * @return */ public boolean canAddFolder() { 
return m_canAddFolder; } /** * @param b */ public void setCanAddFolder(boolean canAddFolder) { m_canAddFolder = canAddFolder; } /** * @return */ public boolean canCopy() { return m_canCopy; } /** * @param canCopy */ public void setCanCopy(boolean canCopy) { m_canCopy = canCopy; } /** * @return */ public boolean hasCopyrightAlert() { return m_copyrightAlert; } /** * @param copyrightAlert */ public void setCopyrightAlert(boolean copyrightAlert) { m_copyrightAlert = copyrightAlert; } /** * @return */ public String getUrl() { return m_url; } /** * @param url */ public void setUrl(String url) { m_url = url; } /** * @return */ public boolean isCopied() { return m_isCopied; } /** * @param isCopied */ public void setCopied(boolean isCopied) { m_isCopied = isCopied; } /** * @return */ public boolean isMoved() { return m_isMoved; } /** * @param isCopied */ public void setMoved(boolean isMoved) { m_isMoved = isMoved; } /** * @return */ public String getCreatedBy() { return m_createdBy; } /** * @return */ public String getCreatedTime() { return m_createdTime; } /** * @return */ public String getModifiedBy() { return m_modifiedBy; } /** * @return */ public String getModifiedTime() { return m_modifiedTime; } /** * @return */ public String getSize() { if(m_size == null) { m_size = ""; } return m_size; } /** * @param creator */ public void setCreatedBy(String creator) { m_createdBy = creator; } /** * @param time */ public void setCreatedTime(String time) { m_createdTime = time; } /** * @param modifier */ public void setModifiedBy(String modifier) { m_modifiedBy = modifier; } /** * @param time */ public void setModifiedTime(String time) { m_modifiedTime = time; } /** * @param size */ public void setSize(String size) { m_size = size; } /** * @return */ public String getTarget() { return m_target; } /** * @param target */ public void setTarget(String target) { m_target = target; } /** * @return */ public boolean isEmpty() { return m_isEmpty; } /** * @param isEmpty */ public void 
setIsEmpty(boolean isEmpty) { m_isEmpty = isEmpty; } /** * @return */ public String getContainer() { return m_container; } /** * @param container */ public void setContainer(String container) { m_container = container; } public void setIsLocal(boolean isLocal) { m_isLocal = isLocal; } public boolean isLocal() { return m_isLocal; } /** * @return Returns the isAttached. */ public boolean isAttached() { return m_isAttached; } /** * @param isAttached The isAttached to set. */ public void setAttached(boolean isAttached) { this.m_isAttached = isAttached; } /** * @return Returns the hasCopyableChildren. */ public boolean hasCopyableChildren() { return m_hasCopyableChildren; } /** * @param hasCopyableChildren The hasCopyableChildren to set. */ public void setCopyableChildren(boolean hasCopyableChildren) { this.m_hasCopyableChildren = hasCopyableChildren; } /** * @return Returns the hasDeletableChildren. */ public boolean hasDeletableChildren() { return m_hasDeletableChildren; } /** * @param hasDeletableChildren The hasDeletableChildren to set. */ public void seDeletableChildren(boolean hasDeletableChildren) { this.m_hasDeletableChildren = hasDeletableChildren; } /** * @return Returns the canUpdate. */ public boolean canUpdate() { return m_canUpdate; } /** * @param canUpdate The canUpdate to set. */ public void setCanUpdate(boolean canUpdate) { m_canUpdate = canUpdate; } public void setHighlighted(boolean isHighlighted) { m_isHighlighted = isHighlighted; } public boolean isHighlighted() { return m_isHighlighted; } public void setInheritsHighlight(boolean inheritsHighlight) { m_inheritsHighlight = inheritsHighlight; } public boolean inheritsHighlighted() { return m_inheritsHighlight; } /** * Access the access mode for this item. * @return The access mode. */ public String getAccess() { return m_access; } /** * Access the access mode for this item. * @return The access mode. 
*/ public String getInheritedAccess() { return m_inheritedAccess; } public String getEntityAccess() { String rv = AccessMode.INHERITED.toString(); boolean sameGroups = true; if(AccessMode.GROUPED.toString().equals(m_access)) { Iterator it = getGroups().iterator(); while(sameGroups && it.hasNext()) { Group g = (Group) it.next(); sameGroups = inheritsGroup(g.getReference()); } it = getInheritedGroups().iterator(); while(sameGroups && it.hasNext()) { Group g = (Group) it.next(); sameGroups = hasGroup(g.getReference()); } if(!sameGroups) { rv = AccessMode.GROUPED.toString(); } } return rv; } public String getEffectiveAccess() { String rv = this.m_access; if(AccessMode.INHERITED.toString().equals(rv)) { rv = this.m_inheritedAccess; } if(AccessMode.INHERITED.toString().equals(rv)) { rv = AccessMode.SITE.toString(); } return rv; } public String getEffectiveGroups() { String rv = rb.getString("access.site1"); if(this.isPubviewInherited()) { rv = rb.getString("access.public1"); } else if(this.isPubview()) { rv = rb.getString("access.public1"); } else if(this.isInDropbox()) { rv = rb.getString("access.dropbox1"); } else if(AccessMode.GROUPED.toString().equals(getEffectiveAccess())) { rv = (String) rb.getFormattedMessage("access.group1", new Object[]{getGroupNames()}); } return rv; } public Collection getPossibleGroups() { return m_oldPossibleGroups; } public void setPossibleGroups(Collection groups) { m_oldPossibleGroups = groups; } public String getGroupNames() { String rv = ""; Collection groupRefs = this.m_entityGroupRefs; if(groupRefs == null || groupRefs.isEmpty()) { groupRefs = this.m_inheritedGroupRefs; } Iterator it = groupRefs.iterator(); while(it.hasNext()) { String groupRef = (String) it.next(); Group group = (Group) this.m_allSiteGroupsMap.get(groupRef); if(group != null) { if(rv.length() == 0) { rv += group.getTitle(); } else { rv += ", " + group.getTitle(); } } } // TODO: After updating getBrowserItems, get rid of this part if(rv.length() == 0) { Collection 
groups = getGroups(); if(groups == null || groups.isEmpty()) { groups = getInheritedGroups(); } Iterator grit = groups.iterator(); while(grit.hasNext()) { Group g = (Group) grit.next(); rv += g.getTitle(); if(grit.hasNext()) { rv += ", "; } } } return rv; } /** * Set the access mode for this item. * @param access */ public void setAccess(String access) { m_access = access; } /** * Set the access mode for this item. * @param access */ public void setInheritedAccess(String access) { m_inheritedAccess = access; } /** * Access a list of Group objects that can access this item. * @return Returns the groups. */ public List getGroups() { if(m_groups == null) { m_groups = new Vector(); } return new Vector(m_groups); } /** * Access a list of Group objects that can access this item. * @return Returns the groups. */ public List getInheritedGroups() { if(m_oldInheritedGroups == null) { m_oldInheritedGroups = new Vector(); } return new Vector(m_oldInheritedGroups); } /** * Determine whether a group has access to this item. * @param groupRef The internal reference string that uniquely identifies the group. * @return true if the group has access, false otherwise. */ public boolean hasGroup(String groupRef) { if(m_groups == null) { m_groups = new Vector(); } boolean found = false; Iterator it = m_groups.iterator(); while(it.hasNext() && !found) { Group gr = (Group) it.next(); found = gr.getReference().equals(groupRef); } return found; } /** * Determine whether a group has access to this item. * @param groupRef The internal reference string that uniquely identifies the group. * @return true if the group has access, false otherwise. 
*/ public boolean inheritsGroup(String groupRef) { if(m_oldInheritedGroups == null) { m_oldInheritedGroups = new Vector(); } boolean found = false; Iterator it = m_oldInheritedGroups.iterator(); while(it.hasNext() && !found) { Group gr = (Group) it.next(); found = gr.getReference().equals(groupRef); } return found; } /** * Replace the current list of groups with this list of Group objects representing the groups that have access to this item. * @param groups The groups to set. */ public void setGroups(Collection groups) { if(groups == null) { return; } if(m_groups == null) { m_groups = new Vector(); } m_groups.clear(); Iterator it = groups.iterator(); while(it.hasNext()) { Object obj = it.next(); if(obj instanceof Group && ! hasGroup(((Group) obj).getReference())) { m_groups.add(obj); } else if(obj instanceof String && ! hasGroup((String) obj)) { addGroup((String) obj); } } } /** * Replace the current list of groups with this list of Group objects representing the groups that have access to this item. * @param groups The groups to set. */ public void setInheritedGroups(Collection groups) { if(groups == null) { return; } if(m_oldInheritedGroups == null) { m_oldInheritedGroups = new Vector(); } m_oldInheritedGroups.clear(); Iterator it = groups.iterator(); while(it.hasNext()) { Object obj = it.next(); if(obj instanceof Group && ! inheritsGroup(((Group) obj).getReference())) { m_oldInheritedGroups.add(obj); } else if(obj instanceof String && ! hasGroup((String) obj)) { addInheritedGroup((String) obj); } } } /** * Add a string reference identifying a Group to the list of groups that have access to this item. 
* @param groupRef */ public void addGroup(String groupId) { if(m_groups == null) { m_groups = new Vector(); } if(m_container == null) { if(m_id == null) { m_container = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext()); } else { m_container = ContentHostingService.getContainingCollectionId(m_id); } if(m_container == null || m_container.trim() == "") { m_container = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext()); } } boolean found = false; Collection groups = ContentHostingService.getGroupsWithReadAccess(m_container); Iterator it = groups.iterator(); while( it.hasNext() && !found ) { Group group = (Group) it.next(); if(group.getId().equals(groupId)) { if(! hasGroup(group.getReference())) { m_groups.add(group); } found = true; } } } /** * Add a Group to the list of groups that have access to this item. * @param group The Group object to be added */ public void addGroup(Group group) { if(m_groups == null) { m_groups = new Vector(); } if(! hasGroup(group.getReference())) { m_groups.add(group); } } /** * Add a string reference identifying a Group to the list of groups that have access to this item. 
* @param groupRef */ public void addInheritedGroup(String groupId) { if(m_oldInheritedGroups == null) { m_oldInheritedGroups = new Vector(); } if(m_container == null) { if(m_id == null) { m_container = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext()); } else { m_container = ContentHostingService.getContainingCollectionId(m_id); } if(m_container == null || m_container.trim() == "") { m_container = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext()); } } boolean found = false; Collection groups = ContentHostingService.getGroupsWithReadAccess(m_container); Iterator it = groups.iterator(); while( it.hasNext() && !found ) { Group group = (Group) it.next(); String gid = group.getId(); String gref = group.getReference(); if(gid.equals(groupId) || gref.equals(groupId)) { if(! inheritsGroup(group.getReference())) { m_oldInheritedGroups.add(group); } found = true; } } } /** * Remove all groups from the item. */ public void clearGroups() { if(this.m_groups == null) { m_groups = new Vector(); } m_groups.clear(); } /** * Remove all inherited groups from the item. */ public void clearInheritedGroups() { if(m_oldInheritedGroups == null) { m_oldInheritedGroups = new Vector(); } m_oldInheritedGroups.clear(); } /** * @return Returns the pubview. */ public boolean isPubview() { return m_pubview; } /** * @param pubview The pubview to set. */ public void setPubview(boolean pubview) { m_pubview = pubview; } /** * @param pubview The pubview to set. */ public void setPubviewPossible(boolean possible) { m_pubview_possible = possible; } /** * @return Returns the pubviewset. */ public boolean isPubviewInherited() { return m_pubview_inherited; } /** * * */ public boolean isPubviewPossible() { return m_pubview_possible; } /** * @param pubviewset The pubviewset to set. */ public void setPubviewInherited(boolean pubviewset) { m_pubview_inherited = pubviewset; } /** * @return Returns the rights. 
*/ public BasicRightsAssignment getRights() { return m_rights; } /** * @param rights The rights to set. */ public void setRights(BasicRightsAssignment rights) { this.m_rights = rights; } /** * @return Returns true if the item is in a dropbox (assuming it's been initialized correctly). */ public boolean isInDropbox() { return m_inDropbox; } /** * @param inDropbox The value for inDropbox to set. */ public void setInDropbox(boolean inDropbox) { this.m_inDropbox = inDropbox; } } // inner class BrowseItem /** * Inner class encapsulates information about resources (folders and items) for editing */ public static class EditItem extends BrowseItem { protected String m_copyrightStatus; protected String m_copyrightInfo; // protected boolean m_copyrightAlert; protected String m_filename; protected byte[] m_content; protected String m_encoding; protected String m_mimetype; protected String m_description; protected Map m_metadata; protected boolean m_hasQuota; protected boolean m_canSetQuota; protected String m_quota; protected boolean m_isUrl; protected boolean m_contentHasChanged; protected boolean m_contentTypeHasChanged; protected int m_notification = NotificationService.NOTI_NONE; protected String m_formtype; protected String m_rootname; protected Map m_structuredArtifact; protected List m_properties; protected Set m_metadataGroupsShowing; protected Set m_missingInformation; protected boolean m_hasBeenAdded; protected ResourcesMetadata m_form; protected boolean m_isBlank; protected String m_instruction; protected String m_ccRightsownership; protected String m_ccLicense; protected String m_ccCommercial; protected String m_ccModification; protected String m_ccRightsOwner; protected String m_ccRightsYear; /** * @param id * @param name * @param type */ public EditItem(String id, String name, String type) { super(id, name, type); m_filename = ""; m_contentHasChanged = false; m_contentTypeHasChanged = false; m_metadata = new Hashtable(); m_structuredArtifact = new Hashtable(); 
m_metadataGroupsShowing = new HashSet(); m_mimetype = type; m_content = null; m_encoding = "UTF-8"; m_notification = NotificationService.NOTI_NONE; m_hasQuota = false; m_canSetQuota = false; m_formtype = ""; m_rootname = ""; m_missingInformation = new HashSet(); m_hasBeenAdded = false; m_properties = new Vector(); m_isBlank = true; m_instruction = ""; m_ccRightsownership = ""; m_ccLicense = ""; // m_copyrightStatus = ServerConfigurationService.getString("default.copyright"); } public SortedSet convertToRefs(Collection groupIds) { SortedSet groupRefs = new TreeSet(); Iterator it = groupIds.iterator(); while(it.hasNext()) { String groupId = (String) it.next(); Group group = (Group) this.m_allSiteGroupsMap.get(groupId); if(group != null) { groupRefs.add(group.getReference()); } } return groupRefs; } public void setRightsowner(String ccRightsOwner) { m_ccRightsOwner = ccRightsOwner; } public String getRightsowner() { return m_ccRightsOwner; } public void setRightstyear(String ccRightsYear) { m_ccRightsYear = ccRightsYear; } public String getRightsyear() { return m_ccRightsYear; } public void setAllowModifications(String ccModification) { m_ccModification = ccModification; } public String getAllowModifications() { return m_ccModification; } public void setAllowCommercial(String ccCommercial) { m_ccCommercial = ccCommercial; } public String getAllowCommercial() { return m_ccCommercial; } /** * * @param license */ public void setLicense(String license) { m_ccLicense = license; } /** * * @return */ public String getLicense() { return m_ccLicense; } /** * Record a value for instructions to be displayed to the user in the editor (for Form Items). * @param instruction The value of the instructions. */ public void setInstruction(String instruction) { if(instruction == null) { instruction = ""; } m_instruction = instruction.trim(); } /** * Access instructions to be displayed to the user in the editor (for Form Items). * @return The instructions. 
*/ public String getInstruction() { return m_instruction; } /** * Set the character encoding type that will be used when converting content body between strings and byte arrays. * Default is "UTF-8". * @param encoding A valid name for a character set encoding scheme (@see java.lang.Charset) */ public void setEncoding(String encoding) { m_encoding = encoding; } /** * Get the character encoding type that is used when converting content body between strings and byte arrays. * Default is "UTF-8". * @return The name of the character set encoding scheme (@see java.lang.Charset) */ public String getEncoding() { return m_encoding; } /** * Set marker indicating whether current item is a blank entry * @param isBlank */ public void markAsBlank(boolean isBlank) { m_isBlank = isBlank; } /** * Access marker indicating whether current item is a blank entry * @return true if current entry is blank, false otherwise */ public boolean isBlank() { return m_isBlank; } /** * Change the root ResourcesMetadata object that defines the form for a Structured Artifact. * @param form */ public void setForm(ResourcesMetadata form) { m_form = form; } /** * Access the root ResourcesMetadata object that defines the form for a Structured Artifact. * @return the form. */ public ResourcesMetadata getForm() { return m_form; } /** * @param properties */ public void setProperties(List properties) { m_properties = properties; } public List getProperties() { return m_properties; } /** * Replace current values of Structured Artifact with new values. * @param map The new values. */ public void setValues(Map map) { m_structuredArtifact = map; } /** * Access the entire set of values stored in the Structured Artifact * @return The set of values. 
*/ public Map getValues() { return m_structuredArtifact; } /** * @param id * @param name * @param type */ public EditItem(String type) { this(null, "", type); } /** * @param id */ public void setId(String id) { m_id = id; } /** * Show the indicated metadata group for the item * @param group */ public void showMetadataGroup(String group) { m_metadataGroupsShowing.add(group); } /** * Hide the indicated metadata group for the item * @param group */ public void hideMetadataGroup(String group) { m_metadataGroupsShowing.remove(group); m_metadataGroupsShowing.remove(Validator.escapeUrl(group)); } /** * Query whether the indicated metadata group is showing for the item * @param group * @return true if the metadata group is showing, false otherwise */ public boolean isGroupShowing(String group) { return m_metadataGroupsShowing.contains(group) || m_metadataGroupsShowing.contains(Validator.escapeUrl(group)); } /** * @return */ public boolean isFileUpload() { return !isFolder() && !isUrl() && !isHtml() && !isPlaintext() && !isStructuredArtifact(); } /** * @param type */ public void setType(String type) { m_type = type; } /** * @param mimetype */ public void setMimeType(String mimetype) { m_mimetype = mimetype; } public String getRightsownership() { return m_ccRightsownership; } public void setRightsownership(String owner) { m_ccRightsownership = owner; } /** * @return */ public String getMimeType() { return m_mimetype; } public String getMimeCategory() { if(this.m_mimetype == null || this.m_mimetype.equals("")) { return ""; } int index = this.m_mimetype.indexOf("/"); if(index < 0) { return this.m_mimetype; } return this.m_mimetype.substring(0, index); } public String getMimeSubtype() { if(this.m_mimetype == null || this.m_mimetype.equals("")) { return ""; } int index = this.m_mimetype.indexOf("/"); if(index < 0 || index + 1 == this.m_mimetype.length()) { return ""; } return this.m_mimetype.substring(index + 1); } /** * @param formtype */ public void setFormtype(String 
formtype) { m_formtype = formtype; } /** * @return */ public String getFormtype() { return m_formtype; } /** * @return Returns the copyrightInfo. */ public String getCopyrightInfo() { return m_copyrightInfo; } /** * @param copyrightInfo The copyrightInfo to set. */ public void setCopyrightInfo(String copyrightInfo) { m_copyrightInfo = copyrightInfo; } /** * @return Returns the copyrightStatus. */ public String getCopyrightStatus() { return m_copyrightStatus; } /** * @param copyrightStatus The copyrightStatus to set. */ public void setCopyrightStatus(String copyrightStatus) { m_copyrightStatus = copyrightStatus; } /** * @return Returns the description. */ public String getDescription() { return m_description; } /** * @param description The description to set. */ public void setDescription(String description) { m_description = description; } /** * @return Returns the filename. */ public String getFilename() { return m_filename; } /** * @param filename The filename to set. */ public void setFilename(String filename) { m_filename = filename; } /** * @return Returns the metadata. */ public Map getMetadata() { return m_metadata; } /** * @param metadata The metadata to set. */ public void setMetadata(Map metadata) { m_metadata = metadata; } /** * @param name * @param value */ public void setMetadataItem(String name, Object value) { m_metadata.put(name, value); } public boolean isSitePossible() { return !m_pubview_inherited && !isGroupInherited() && !isSingleGroupInherited(); } public boolean isGroupPossible() { // Collection groups = getPossibleGroups(); // return ! groups.isEmpty(); return this.m_allowedAddGroupRefs != null && ! this.m_allowedAddGroupRefs.isEmpty(); } public boolean isGroupInherited() { return AccessMode.INHERITED.toString().equals(this.m_access) && AccessMode.GROUPED.toString().equals(m_inheritedAccess); } /** * Does this entity inherit grouped access mode with a single group that has access? 
* @return true if this entity inherits grouped access mode with a single group that has access, and false otherwise. */ public boolean isSingleGroupInherited() { //Collection groups = getInheritedGroups(); return // AccessMode.INHERITED.toString().equals(this.m_access) && AccessMode.GROUPED.toString().equals(this.m_inheritedAccess) && this.m_inheritedGroupRefs != null && this.m_inheritedGroupRefs.size() == 1; // && this.m_oldInheritedGroups != null // && this.m_oldInheritedGroups.size() == 1; } public String getSingleGroupTitle() { return (String) rb.getFormattedMessage("access.title4", new Object[]{getGroupNames()}); } /** * Is this entity's access restricted to the site (not pubview) and are there no groups defined for the site? * @return */ public boolean isSiteOnly() { boolean isSiteOnly = false; isSiteOnly = !isGroupPossible() && !isPubviewPossible(); return isSiteOnly; } /** * @return Returns the content. */ public byte[] getContent() { return m_content; } /** * @return Returns the content as a String. */ public String getContentstring() { String rv = ""; if(m_content != null && m_content.length > 0) { try { rv = new String( m_content, m_encoding ); } catch(UnsupportedEncodingException e) { rv = new String( m_content ); } } return rv; } /** * @param content The content to set. */ public void setContent(byte[] content) { m_content = content; } /** * @param content The content to set. */ public void setContent(String content) { try { m_content = content.getBytes(m_encoding); } catch(UnsupportedEncodingException e) { m_content = content.getBytes(); } } /** * @return Returns the canSetQuota. */ public boolean canSetQuota() { return m_canSetQuota; } /** * @param canSetQuota The canSetQuota to set. */ public void setCanSetQuota(boolean canSetQuota) { m_canSetQuota = canSetQuota; } /** * @return Returns the hasQuota. */ public boolean hasQuota() { return m_hasQuota; } /** * @param hasQuota The hasQuota to set. 
*/ public void setHasQuota(boolean hasQuota) { m_hasQuota = hasQuota; } /** * @return Returns the quota. */ public String getQuota() { return m_quota; } /** * @param quota The quota to set. */ public void setQuota(String quota) { m_quota = quota; } /** * @return true if content-type of item indicates it represents a URL, false otherwise */ public boolean isUrl() { return TYPE_URL.equals(m_type) || ResourceProperties.TYPE_URL.equals(m_mimetype); } /** * @return true if content-type of item indicates it represents a URL, false otherwise */ public boolean isStructuredArtifact() { return TYPE_FORM.equals(m_type); } /** * @return true if content-type of item is "text/text" (plain text), false otherwise */ public boolean isPlaintext() { return MIME_TYPE_DOCUMENT_PLAINTEXT.equals(m_mimetype) || MIME_TYPE_DOCUMENT_PLAINTEXT.equals(m_type); } /** * @return true if content-type of item is "text/html" (an html document), false otherwise */ public boolean isHtml() { return MIME_TYPE_DOCUMENT_HTML.equals(m_mimetype) || MIME_TYPE_DOCUMENT_HTML.equals(m_type); } public boolean contentHasChanged() { return m_contentHasChanged; } public void setContentHasChanged(boolean changed) { m_contentHasChanged = changed; } public boolean contentTypeHasChanged() { return m_contentTypeHasChanged; } public void setContentTypeHasChanged(boolean changed) { m_contentTypeHasChanged = changed; } public void setNotification(int notification) { m_notification = notification; } public int getNotification() { return m_notification; } /** * @return Returns the artifact. */ public Map getStructuredArtifact() { return m_structuredArtifact; } /** * @param artifact The artifact to set. 
*/ public void setStructuredArtifact(Map artifact) { this.m_structuredArtifact = artifact; } /** * @param name * @param value */ public void setValue(String name, Object value) { setValue(name, 0, value); } /** * @param name * @param index * @param value */ public void setValue(String name, int index, Object value) { List list = getList(name); try { list.set(index, value); } catch(ArrayIndexOutOfBoundsException e) { list.add(value); } m_structuredArtifact.put(name, list); } /** * Access a value of a structured artifact field of type String. * @param name The name of the field to access. * @return the value, or null if the named field is null or not a String. */ public String getString(String name) { if(m_structuredArtifact == null) { m_structuredArtifact = new Hashtable(); } Object value = m_structuredArtifact.get(name); String rv = ""; if(value == null) { // do nothing } else if(value instanceof String) { rv = (String) value; } else { rv = value.toString(); } return rv; } public Object getValue(String name, int index) { List list = getList(name); Object rv = null; try { rv = list.get(index); } catch(ArrayIndexOutOfBoundsException e) { // return null } return rv; } public Object getPropertyValue(String name) { return getPropertyValue(name, 0); } /** * Access a particular value in a Structured Artifact, as identified by the parameter "name". This * implementation of the method assumes that the name is a series of String identifiers delimited * by the ResourcesAction.ResourcesMetadata.DOT String. * @param name The delimited identifier for the item. * @return The value identified by the name, or null if the name does not identify a valid item. 
*/ public Object getPropertyValue(String name, int index) { String[] names = name.split(ResourcesMetadata.DOT); Object rv = null; if(m_properties == null) { m_properties = new Vector(); } Iterator it = m_properties.iterator(); while(rv == null && it.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) it.next(); if(name.equals(prop.getDottedname())) { rv = prop.getValue(index); } } return rv; } public void setPropertyValue(String name, Object value) { setPropertyValue(name, 0, value); } /** * Access a particular value in a Structured Artifact, as identified by the parameter "name". This * implementation of the method assumes that the name is a series of String identifiers delimited * by the ResourcesAction.ResourcesMetadata.DOT String. * @param name The delimited identifier for the item. * @return The value identified by the name, or null if the name does not identify a valid item. */ public void setPropertyValue(String name, int index, Object value) { if(m_properties == null) { m_properties = new Vector(); } boolean found = false; Iterator it = m_properties.iterator(); while(!found && it.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) it.next(); if(name.equals(prop.getDottedname())) { found = true; prop.setValue(index, value); } } } /** * Access a particular value in a Structured Artifact, as identified by the parameter "name". This * implementation of the method assumes that the name is a series of String identifiers delimited * by the ResourcesAction.ResourcesMetadata.DOT String. * @param name The delimited identifier for the item. * @return The value identified by the name, or null if the name does not identify a valid item. 
*/ public Object getValue(String name) { String[] names = name.split(ResourcesMetadata.DOT); Object rv = m_structuredArtifact; if(rv != null && (rv instanceof Map) && ((Map) rv).isEmpty()) { rv = null; } for(int i = 1; rv != null && i < names.length; i++) { if(rv instanceof Map) { rv = ((Map) rv).get(names[i]); } else { rv = null; } } return rv; } /** * Access a list of values associated with a named property of a structured artifact. * @param name The name of the property. * @return The list of values associated with that name, or an empty list if the property is not defined. */ public List getList(String name) { if(m_structuredArtifact == null) { m_structuredArtifact = new Hashtable(); } Object value = m_structuredArtifact.get(name); List rv = new Vector(); if(value == null) { m_structuredArtifact.put(name, rv); } else if(value instanceof Collection) { rv.addAll((Collection)value); } else { rv.add(value); } return rv; } /** * @return */ /* public Element exportStructuredArtifact(List properties) { return null; } */ /** * @return Returns the name of the root of a structured artifact definition. */ public String getRootname() { return m_rootname; } /** * @param rootname The name to be assigned for the root of a structured artifact. */ public void setRootname(String rootname) { m_rootname = rootname; } /** * Add a property name to the list of properties missing from the input. * @param propname The name of the property. */ public void setMissing(String propname) { m_missingInformation.add(propname); } /** * Query whether a particular property is missing * @param propname The name of the property * @return The value "true" if the property is missing, "false" otherwise. */ public boolean isMissing(String propname) { return m_missingInformation.contains(propname) || m_missingInformation.contains(Validator.escapeUrl(propname)); } /** * Empty the list of missing properties. 
*/ public void clearMissing() { m_missingInformation.clear(); } public void setAdded(boolean added) { m_hasBeenAdded = added; } public boolean hasBeenAdded() { return m_hasBeenAdded; } } // inner class EditItem /** * Inner class encapsulates information about folders (and final item?) in a collection path (a.k.a. breadcrumb) */ public static class PathItem { protected String m_url; protected String m_name; protected String m_id; protected boolean m_canRead; protected boolean m_isFolder; protected boolean m_isLast; protected String m_root; protected boolean m_isLocal; public PathItem(String id, String name) { m_id = id; m_name = name; m_canRead = false; m_isFolder = false; m_isLast = false; m_url = ""; m_isLocal = true; } /** * @return */ public boolean canRead() { return m_canRead; } /** * @return */ public String getId() { return m_id; } /** * @return */ public boolean isFolder() { return m_isFolder; } /** * @return */ public boolean isLast() { return m_isLast; } /** * @return */ public String getName() { return m_name; } /** * @param canRead */ public void setCanRead(boolean canRead) { m_canRead = canRead; } /** * @param id */ public void setId(String id) { m_id = id; } /** * @param isFolder */ public void setIsFolder(boolean isFolder) { m_isFolder = isFolder; } /** * @param isLast */ public void setLast(boolean isLast) { m_isLast = isLast; } /** * @param name */ public void setName(String name) { m_name = name; } /** * @return */ public String getUrl() { return m_url; } /** * @param url */ public void setUrl(String url) { m_url = url; } /** * @param root */ public void setRoot(String root) { m_root = root; } /** * @return */ public String getRoot() { return m_root; } public void setIsLocal(boolean isLocal) { m_isLocal = isLocal; } public boolean isLocal() { return m_isLocal; } } // inner class PathItem /** * * inner class encapsulates information about groups of metadata tags (such as DC, LOM, etc.) 
* */ public static class MetadataGroup extends Vector { /** * */ private static final long serialVersionUID = -821054142728929236L; protected String m_name; protected boolean m_isShowing; /** * @param name */ public MetadataGroup(String name) { super(); m_name = name; m_isShowing = false; } /** * @return */ public boolean isShowing() { return m_isShowing; } /** * @param isShowing */ public void setShowing(boolean isShowing) { m_isShowing = isShowing; } /** * @return */ public String getName() { return m_name; } /** * @param name */ public void setName(String name) { m_name = name; } /* (non-Javadoc) * @see java.lang.Object#equals(java.lang.Object) * needed to determine List.contains() */ public boolean equals(Object obj) { MetadataGroup mg = (MetadataGroup) obj; boolean rv = (obj != null) && (m_name.equals(mg)); return rv; } } public static class AttachItem { protected String m_id; protected String m_displayName; protected String m_accessUrl; protected String m_collectionId; protected String m_contentType; /** * @param id * @param displayName * @param collectionId * @param accessUrl */ public AttachItem(String id, String displayName, String collectionId, String accessUrl) { m_id = id; m_displayName = displayName; m_collectionId = collectionId; m_accessUrl = accessUrl; } /** * @return Returns the accessUrl. */ public String getAccessUrl() { return m_accessUrl; } /** * @param accessUrl The accessUrl to set. */ public void setAccessUrl(String accessUrl) { m_accessUrl = accessUrl; } /** * @return Returns the collectionId. */ public String getCollectionId() { return m_collectionId; } /** * @param collectionId The collectionId to set. */ public void setCollectionId(String collectionId) { m_collectionId = collectionId; } /** * @return Returns the id. */ public String getId() { return m_id; } /** * @param id The id to set. */ public void setId(String id) { m_id = id; } /** * @return Returns the name. 
*/ public String getDisplayName() { String displayName = m_displayName; if(displayName == null || displayName.trim().equals("")) { displayName = isolateName(m_id); } return displayName; } /** * @param name The name to set. */ public void setDisplayName(String name) { m_displayName = name; } /** * @return Returns the contentType. */ public String getContentType() { return m_contentType; } /** * @param contentType */ public void setContentType(String contentType) { this.m_contentType = contentType; } } // Inner class AttachItem public static class ElementCarrier { protected Element element; protected String parent; public ElementCarrier(Element element, String parent) { this.element = element; this.parent = parent; } public Element getElement() { return element; } public void setElement(Element element) { this.element = element; } public String getParent() { return parent; } public void setParent(String parent) { this.parent = parent; } } public static class SaveArtifactAttempt { protected EditItem item; protected List errors; protected SchemaNode schema; public SaveArtifactAttempt(EditItem item, SchemaNode schema) { this.item = item; this.schema = schema; } /** * @return Returns the errors. */ public List getErrors() { return errors; } /** * @param errors The errors to set. */ public void setErrors(List errors) { this.errors = errors; } /** * @return Returns the item. */ public EditItem getItem() { return item; } /** * @param item The item to set. */ public void setItem(EditItem item) { this.item = item; } /** * @return Returns the schema. */ public SchemaNode getSchema() { return schema; } /** * @param schema The schema to set. */ public void setSchema(SchemaNode schema) { this.schema = schema; } } /** * Develop a list of all the site collections that there are to page. * Sort them as appropriate, and apply search criteria. 
*/
protected static List readAllResources(SessionState state)
{
	List other_sites = new Vector();
	// prefer the attachment-helper collection id; fall back to the tool's collection id
	String collectionId = (String) state.getAttribute (STATE_ATTACH_COLLECTION_ID);
	if(collectionId == null)
	{
		collectionId = (String) state.getAttribute (STATE_COLLECTION_ID);
	}
	HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
	// set the sort values
	String sortedBy = (String) state.getAttribute (STATE_SORT_BY);
	String sortedAsc = (String) state.getAttribute (STATE_SORT_ASC);
	Boolean showRemove = (Boolean) state.getAttribute(STATE_SHOW_REMOVE_ACTION);
	boolean showRemoveAction = showRemove != null && showRemove.booleanValue();
	Boolean showMove = (Boolean) state.getAttribute(STATE_SHOW_MOVE_ACTION);
	boolean showMoveAction = showMove != null && showMove.booleanValue();
	Boolean showCopy = (Boolean) state.getAttribute(STATE_SHOW_COPY_ACTION);
	boolean showCopyAction = showCopy != null && showCopy.booleanValue();
	Set highlightedItems = (Set) state.getAttribute(STATE_HIGHLIGHTED_ITEMS);

	// add user's personal workspace
	User user = UserDirectoryService.getCurrentUser();
	String userId = user.getId();
	String userName = user.getDisplayName();
	String wsId = SiteService.getUserSiteId(userId);
	String wsCollectionId = ContentHostingService.getSiteCollection(wsId);
	if(! collectionId.equals(wsCollectionId))
	{
		List members = getBrowseItems(wsCollectionId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, false, state);
		if(members != null && members.size() > 0)
		{
			// first member is the root of the workspace; the rest are its contents
			BrowseItem root = (BrowseItem) members.remove(0);
			// NOTE(review): these three flags are updated here but never read again
			// in this method, and are locals -- looks like dead computation; confirm
			// before removing.
			showRemoveAction = showRemoveAction || root.hasDeletableChildren();
			showMoveAction = showMoveAction || root.hasDeletableChildren();
			showCopyAction = showCopyAction || root.hasCopyableChildren();
			root.addMembers(members);
			root.setName(userName + " " + rb.getString("gen.wsreso"));
			other_sites.add(root);
		}
	}

	// add all other sites user has access to
	/*
	 * NOTE: This does not (and should not) get all sites for admin.
	 * Getting all sites for admin is too big a request and
	 * would result in too big a display to render in html.
	 */
	Map othersites = ContentHostingService.getCollectionMap();
	Iterator siteIt = othersites.keySet().iterator();
	// sort sites by display name by encoding "displayName<DELIM>collId" strings in a TreeSet
	SortedSet sort = new TreeSet();
	while(siteIt.hasNext())
	{
		String collId = (String) siteIt.next();
		String displayName = (String) othersites.get(collId);
		sort.add(displayName + DELIM + collId);
	}
	Iterator sortIt = sort.iterator();
	while(sortIt.hasNext())
	{
		String item = (String) sortIt.next();
		// decode the "displayName<DELIM>collId" pair built above
		String displayName = item.substring(0, item.lastIndexOf(DELIM));
		String collId = item.substring(item.lastIndexOf(DELIM) + 1);
		// skip the current collection and the workspace (already added above)
		if(! collectionId.equals(collId) && ! wsCollectionId.equals(collId))
		{
			List members = getBrowseItems(collId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, false, state);
			if(members != null && members.size() > 0)
			{
				BrowseItem root = (BrowseItem) members.remove(0);
				root.addMembers(members);
				root.setName(displayName);
				other_sites.add(root);
			}
		}
	}
	return other_sites;
}

/**
 * Prepare the current page of site collections to display.
 * @return List of BrowseItem objects to display on this page.
 */
protected static List prepPage(SessionState state)
{
	List rv = new Vector();

	// access the page size
	int pageSize = ((Integer) state.getAttribute(STATE_PAGESIZE)).intValue();

	// cleanup prior prep
	state.removeAttribute(STATE_NUM_MESSAGES);

	// are we going next or prev, first or last page?
	// (read the one-shot navigation flags, then clear them)
	boolean goNextPage = state.getAttribute(STATE_GO_NEXT_PAGE) != null;
	boolean goPrevPage = state.getAttribute(STATE_GO_PREV_PAGE) != null;
	boolean goFirstPage = state.getAttribute(STATE_GO_FIRST_PAGE) != null;
	boolean goLastPage = state.getAttribute(STATE_GO_LAST_PAGE) != null;
	state.removeAttribute(STATE_GO_NEXT_PAGE);
	state.removeAttribute(STATE_GO_PREV_PAGE);
	state.removeAttribute(STATE_GO_FIRST_PAGE);
	state.removeAttribute(STATE_GO_LAST_PAGE);

	// are we going next or prev message?
	boolean goNext = state.getAttribute(STATE_GO_NEXT) != null;
	boolean goPrev = state.getAttribute(STATE_GO_PREV) != null;
	state.removeAttribute(STATE_GO_NEXT);
	state.removeAttribute(STATE_GO_PREV);

	// read all channel messages
	List allMessages = readAllResources(state);
	if (allMessages == null)
	{
		return rv;
	}

	// the top-of-page marker may have been stored as an Integer or a String
	String messageIdAtTheTopOfThePage = null;
	Object topMsgId = state.getAttribute(STATE_TOP_PAGE_MESSAGE);
	if(topMsgId == null)
	{
		// do nothing
	}
	else if(topMsgId instanceof Integer)
	{
		messageIdAtTheTopOfThePage = ((Integer) topMsgId).toString();
	}
	else if(topMsgId instanceof String)
	{
		messageIdAtTheTopOfThePage = (String) topMsgId;
	}

	// if we have no prev page and do have a top message, then we will stay "pinned" to the top
	boolean pinToTop = ( (messageIdAtTheTopOfThePage != null)
					&& (state.getAttribute(STATE_PREV_PAGE_EXISTS) == null)
					&& !goNextPage && !goPrevPage && !goNext && !goPrev && !goFirstPage && !goLastPage);

	// if we have no next page and do have a top message, then we will stay "pinned" to the bottom
	boolean pinToBottom = ( (messageIdAtTheTopOfThePage != null)
					&& (state.getAttribute(STATE_NEXT_PAGE_EXISTS) == null)
					&& !goNextPage && !goPrevPage && !goNext && !goPrev && !goFirstPage && !goLastPage);

	// how many messages, total
	int numMessages = allMessages.size();
	if (numMessages == 0)
	{
		return rv;
	}

	// save the number of messges
	state.setAttribute(STATE_NUM_MESSAGES, new Integer(numMessages));

	// find the position of the message that is the top first on the page
	int posStart = 0;
	if (messageIdAtTheTopOfThePage != null)
	{
		// find the next page
		posStart = findResourceInList(allMessages, messageIdAtTheTopOfThePage);

		// if missing, start at the top
		if (posStart == -1)
		{
			posStart = 0;
		}
	}

	// if going to the next page, adjust
	if (goNextPage)
	{
		posStart += pageSize;
	}

	// if going to the prev page, adjust
	else if (goPrevPage)
	{
		posStart -= pageSize;
		if (posStart < 0) posStart = 0;
	}

	// if going to the first page, adjust
	else if (goFirstPage)
	{
		posStart = 0;
	}

	// if going to the last page, adjust
	else if (goLastPage)
	{
		posStart = numMessages - pageSize;
		if (posStart < 0) posStart = 0;
	}

	// pinning
	if (pinToTop)
	{
		posStart = 0;
	}
	else if (pinToBottom)
	{
		posStart = numMessages - pageSize;
		if (posStart < 0) posStart = 0;
	}

	// get the last page fully displayed
	if (posStart + pageSize > numMessages)
	{
		posStart = numMessages - pageSize;
		if (posStart < 0) posStart = 0;
	}

	// compute the end to a page size, adjusted for the number of messages available
	int posEnd = posStart + (pageSize-1);
	if (posEnd >= numMessages) posEnd = numMessages-1;
	// NOTE(review): numMessagesOnThisPage is computed but never used below.
	int numMessagesOnThisPage = (posEnd - posStart) + 1;

	// select the messages on this page
	for (int i = posStart; i <= posEnd; i++)
	{
		rv.add(allMessages.get(i));
	}

	// save which message is at the top of the page
	BrowseItem itemAtTheTopOfThePage = (BrowseItem) allMessages.get(posStart);
	state.setAttribute(STATE_TOP_PAGE_MESSAGE, itemAtTheTopOfThePage.getId());
	state.setAttribute(STATE_TOP_MESSAGE_INDEX, new Integer(posStart));

	// which message starts the next page (if any)
	int next = posStart + pageSize;
	if (next < numMessages)
	{
		state.setAttribute(STATE_NEXT_PAGE_EXISTS, "");
	}
	else
	{
		state.removeAttribute(STATE_NEXT_PAGE_EXISTS);
	}

	// which message ends the prior page (if any)
	int prev = posStart - 1;
	if (prev >= 0)
	{
		state.setAttribute(STATE_PREV_PAGE_EXISTS, "");
	}
	else
	{
		state.removeAttribute(STATE_PREV_PAGE_EXISTS);
	}

	if (state.getAttribute(STATE_VIEW_ID) != null)
	{
		int viewPos = findResourceInList(allMessages, (String) state.getAttribute(STATE_VIEW_ID));

		// are we moving to the next message
		if (goNext)
		{
			// advance
			viewPos++;
			if (viewPos >= numMessages) viewPos = numMessages-1;
		}

		// are we moving to the prev message
		if (goPrev)
		{
			// retreat
			viewPos--;
			if (viewPos < 0) viewPos = 0;
		}

		// update the view message
		state.setAttribute(STATE_VIEW_ID, ((BrowseItem) allMessages.get(viewPos)).getId());

		// if the view message is no longer on the current page, adjust the page
		// Note: next time through this will get processed
		if (viewPos < posStart)
		{
			state.setAttribute(STATE_GO_PREV_PAGE, "");
		}
		else if (viewPos > posEnd)
		{
			state.setAttribute(STATE_GO_NEXT_PAGE, "");
		}

		if (viewPos > 0)
		{
			state.setAttribute(STATE_PREV_EXISTS,"");
		}
		else
		{
			state.removeAttribute(STATE_PREV_EXISTS);
		}

		if (viewPos < numMessages-1)
		{
			state.setAttribute(STATE_NEXT_EXISTS,"");
		}
		else
		{
			state.removeAttribute(STATE_NEXT_EXISTS);
		}
	}

	return rv;

}	// prepPage

/**
 * Find the resource with this id in the list.
 * @param messages The list of messages.
 * @param id The message id.
 * @return The index position in the list of the message with this id, or -1 if not found.
 */
protected static int findResourceInList(List resources, String id)
{
	for (int i = 0; i < resources.size(); i++)
	{
		// if this is the one, return this index
		if (((BrowseItem) (resources.get(i))).getId().equals(id)) return i;
	}

	// not found
	return -1;

}	// findResourceInList

/**
 * Look up a User from a user-id-valued property.
 * @param props The properties to read from.
 * @param name The name of the property holding the user id.
 * @return The User, or null if the property is absent or the user is not defined.
 */
protected static User getUserProperty(ResourceProperties props, String name)
{
	String id = props.getProperty(name);
	if (id != null)
	{
		try
		{
			return UserDirectoryService.getUser(id);
		}
		catch (UserNotDefinedException e)
		{
			// unknown user id -- deliberately fall through and return null
		}
	}

	return null;
}

/**
 * Find the resource name of a given resource id or filepath.
 *
 * @param id
 *        The resource id.
 * @return the resource name.
 */
protected static String isolateName(String id)
{
	if (id == null) return null;
	if (id.length() == 0) return null;

	// take after the last resource path separator, not counting one at the very end if there
	boolean lastIsSeparator = id.charAt(id.length() - 1) == '/';
	return id.substring(id.lastIndexOf('/', id.length() - 2) + 1, (lastIsSeparator ? id.length() - 1 : id.length()));

}	// isolateName

}	// ResourcesAction
content/content-tool/tool/src/java/org/sakaiproject/content/tool/ResourcesAction.java
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2003, 2004, 2005, 2006 The Sakai Foundation. * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.content.tool; import java.io.IOException; import java.io.StringReader; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.Stack; import java.util.TreeSet; import java.util.Vector; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jdom.JDOMException; import org.jdom.input.SAXBuilder; import org.sakaiproject.authz.api.PermissionsHelper; import org.sakaiproject.authz.cover.AuthzGroupService; import org.sakaiproject.authz.cover.SecurityService; import 
org.sakaiproject.cheftool.Context; import org.sakaiproject.cheftool.JetspeedRunData; import org.sakaiproject.cheftool.PagedResourceHelperAction; import org.sakaiproject.cheftool.PortletConfig; import org.sakaiproject.cheftool.RunData; import org.sakaiproject.cheftool.VelocityPortlet; import org.sakaiproject.cheftool.VelocityPortletPaneledAction; import org.sakaiproject.component.cover.ComponentManager; import org.sakaiproject.component.cover.ServerConfigurationService; import org.sakaiproject.content.api.ContentCollection; import org.sakaiproject.content.api.ContentCollectionEdit; import org.sakaiproject.content.api.ContentEntity; import org.sakaiproject.content.api.ContentResource; import org.sakaiproject.content.api.ContentResourceEdit; import org.sakaiproject.content.api.ContentResourceFilter; import org.sakaiproject.content.api.FilePickerHelper; import org.sakaiproject.content.api.GroupAwareEdit; import org.sakaiproject.content.api.GroupAwareEntity; import org.sakaiproject.content.api.GroupAwareEntity.AccessMode; import org.sakaiproject.content.cover.ContentHostingService; import org.sakaiproject.content.cover.ContentTypeImageService; import org.sakaiproject.entity.api.Entity; import org.sakaiproject.entity.api.EntityPropertyNotDefinedException; import org.sakaiproject.entity.api.EntityPropertyTypeException; import org.sakaiproject.entity.api.Reference; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.entity.api.ResourcePropertiesEdit; import org.sakaiproject.entity.cover.EntityManager; import org.sakaiproject.event.api.SessionState; import org.sakaiproject.event.api.UsageSession; import org.sakaiproject.event.cover.NotificationService; import org.sakaiproject.event.cover.UsageSessionService; import org.sakaiproject.exception.IdInvalidException; import org.sakaiproject.exception.IdLengthException; import org.sakaiproject.exception.IdUniquenessException; import org.sakaiproject.exception.IdUnusedException; import 
org.sakaiproject.exception.IdUsedException; import org.sakaiproject.exception.InUseException; import org.sakaiproject.exception.InconsistentException; import org.sakaiproject.exception.OverQuotaException; import org.sakaiproject.exception.PermissionException; import org.sakaiproject.exception.ServerOverloadException; import org.sakaiproject.exception.TypeException; import org.sakaiproject.metaobj.shared.control.SchemaBean; import org.sakaiproject.metaobj.shared.mgt.HomeFactory; import org.sakaiproject.metaobj.shared.mgt.StructuredArtifactValidationService; import org.sakaiproject.metaobj.shared.mgt.home.StructuredArtifactHomeInterface; import org.sakaiproject.metaobj.shared.model.ElementBean; import org.sakaiproject.metaobj.shared.model.ValidationError; import org.sakaiproject.metaobj.utils.xml.SchemaNode; import org.sakaiproject.site.api.Group; import org.sakaiproject.site.api.Site; import org.sakaiproject.site.cover.SiteService; import org.sakaiproject.time.api.Time; import org.sakaiproject.time.api.TimeBreakdown; import org.sakaiproject.time.cover.TimeService; import org.sakaiproject.tool.cover.ToolManager; import org.sakaiproject.user.api.User; import org.sakaiproject.user.api.UserNotDefinedException; import org.sakaiproject.user.cover.UserDirectoryService; import org.sakaiproject.util.FileItem; import org.sakaiproject.util.FormattedText; import org.sakaiproject.util.ParameterParser; import org.sakaiproject.util.ResourceLoader; import org.sakaiproject.util.StringUtil; import org.sakaiproject.util.Validator; import org.sakaiproject.util.Xml; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.dom.Text; /** * <p>ResourceAction is a ContentHosting application</p> * * @author University of Michigan, CHEF Software Development Team * @version $Revision$ */ public class ResourcesAction extends PagedResourceHelperAction // VelocityPortletPaneledAction { /** Resource bundle using current language 
locale */ private static ResourceLoader rb = new ResourceLoader("content"); private static final Log logger = LogFactory.getLog(ResourcesAction.class); /** Name of state attribute containing a list of opened/expanded collections */ private static final String STATE_EXPANDED_COLLECTIONS = "resources.expanded_collections"; /** Name of state attribute for status of initialization. */ private static final String STATE_INITIALIZED = "resources.initialized"; /** The content hosting service in the State. */ private static final String STATE_CONTENT_SERVICE = "resources.content_service"; /** The content type image lookup service in the State. */ private static final String STATE_CONTENT_TYPE_IMAGE_SERVICE = "resources.content_type_image_service"; /** The resources, helper or dropbox mode. */ public static final String STATE_MODE_RESOURCES = "resources.resources_mode"; /** The resources, helper or dropbox mode. */ public static final String STATE_RESOURCES_HELPER_MODE = "resources.resources_helper_mode"; /** state attribute for the maximum size for file upload */ private static final String STATE_FILE_UPLOAD_MAX_SIZE = "resources.file_upload_max_size"; /** state attribute indicating whether users in current site should be denied option of making resources public */ private static final String STATE_PREVENT_PUBLIC_DISPLAY = "resources.prevent_public_display"; /** The name of a state attribute indicating whether the resources tool/helper is allowed to show all sites the user has access to */ public static final String STATE_SHOW_ALL_SITES = "resources.allow_user_to_see_all_sites"; /** The name of a state attribute indicating whether the wants to see other sites if that is enabled */ public static final String STATE_SHOW_OTHER_SITES = "resources.user_chooses_to_see_other_sites"; /** The user copyright string */ private static final String STATE_MY_COPYRIGHT = "resources.mycopyright"; /** copyright path -- MUST have same value as AccessServlet.COPYRIGHT_PATH */ public static 
final String COPYRIGHT_PATH = Entity.SEPARATOR + "copyright"; // servlet path fragment appended to the access URL to form COPYRIGHT_ALERT_URL

/** The collection id being browsed. */
private static final String STATE_COLLECTION_ID = "resources.collection_id";

/** The id of the "home" collection (can't go up from here.) */
private static final String STATE_HOME_COLLECTION_ID = "resources.collection_home";

/** The display name of the "home" collection (can't go up from here.) */
private static final String STATE_HOME_COLLECTION_DISPLAY_NAME = "resources.collection_home_display_name";

/** The unqualified input field. */
private static final String STATE_UNQUALIFIED_INPUT_FIELD = "resources.unqualified_input_field";

/** The collection id path. */
private static final String STATE_COLLECTION_PATH = "resources.collection_path";

/** The name of the state attribute containing BrowseItems for all content collections the user has access to. */
private static final String STATE_COLLECTION_ROOTS = "resources.collection_rootie_tooties";

/** The sort-by criterion. */
private static final String STATE_SORT_BY = "resources.sort_by";

/** The sort direction (ascending or descending). */
private static final String STATE_SORT_ASC = "resources.sort_asc";

/** The copy flag. */
private static final String STATE_COPY_FLAG = "resources.copy_flag";

/** The cut flag. */
private static final String STATE_CUT_FLAG = "resources.cut_flag";

/** The can-paste flag. */
private static final String STATE_PASTE_ALLOWED_FLAG = "resources.can_paste_flag";

/** The move flag. */
private static final String STATE_MOVE_FLAG = "resources.move_flag";

/** The select-all flag. */
private static final String STATE_SELECT_ALL_FLAG = "resources.select_all_flag";

/** The name of the state attribute indicating whether the hierarchical list is expanded. */
private static final String STATE_EXPAND_ALL_FLAG = "resources.expand_all_flag";

/** The name of the state attribute indicating whether the hierarchical list needs to be expanded. */
private static final String STATE_NEED_TO_EXPAND_ALL = "resources.need_to_expand_all";

/** The name of the state attribute containing a java.util.Set with the id's of selected items. */
private static final String STATE_LIST_SELECTIONS = "resources.ignore_delete_selections";

/** The root of the navigation breadcrumbs for a folder, either the home or another site the user belongs to. */
private static final String STATE_NAVIGATION_ROOT = "resources.navigation_root";

/************** the more context *****************************************/

/** The more id. */
private static final String STATE_MORE_ID = "resources.more_id";

/** The more collection id. */
private static final String STATE_MORE_COLLECTION_ID = "resources.more_collection_id";

/************** the edit context *****************************************/

/** The edit id. */
public static final String STATE_EDIT_ID = "resources.edit_id";

public static final String STATE_STACK_EDIT_ID = "resources.stack_edit_id";

// NOTE(review): STATE_EDIT_COLLECTION_ID and STATE_STACK_EDIT_COLLECTION_ID share the same
// attribute value ("resources.stack_edit_collection_id"), so the two names alias a single
// session-state entry -- confirm this is intentional before changing either one.
public static final String STATE_EDIT_COLLECTION_ID = "resources.stack_edit_collection_id";

public static final String STATE_STACK_EDIT_COLLECTION_ID = "resources.stack_edit_collection_id";

private static final String STATE_EDIT_ALERTS = "resources.edit_alerts";

private static final String STATE_STACK_EDIT_ITEM = "resources.stack_edit_item";

private static final String STATE_STACK_EDIT_INTENT = "resources.stack_edit_intent";

private static final String STATE_SHOW_FORM_ITEMS = "resources.show_form_items";

private static final String STATE_STACK_EDIT_ITEM_TITLE = "resources.stack_title";

/************** the create contexts *****************************************/

public static final String STATE_SUSPENDED_OPERATIONS_STACK = "resources.suspended_operations_stack";

public static final String STATE_SUSPENDED_OPERATIONS_STACK_DEPTH = "resources.suspended_operations_stack_depth";

public static final String STATE_CREATE_TYPE = "resources.create_type";

public static final String STATE_CREATE_COLLECTION_ID = "resources.create_collection_id";

public static final String STATE_CREATE_NUMBER = "resources.create_number";

public static final String STATE_STRUCTOBJ_TYPE = "resources.create_structured_object_type";

public static final String STATE_STRUCTOBJ_TYPE_READONLY = "resources.create_structured_object_type_readonly";

public static final String STATE_STACK_CREATE_TYPE = "resources.stack_create_type";

public static final String STATE_STACK_CREATE_COLLECTION_ID = "resources.stack_create_collection_id";

public static final String STATE_STACK_CREATE_NUMBER = "resources.stack_create_number";

public static final String STATE_STACK_STRUCTOBJ_TYPE = "resources.stack_create_structured_object_type";

public static final String STATE_STACK_STRUCTOBJ_TYPE_READONLY = "resources.stack_create_structured_object_type_readonly";

private static final String STATE_STACK_CREATE_ITEMS = "resources.stack_create_items";

private static final String STATE_STACK_CREATE_ACTUAL_COUNT = "resources.stack_create_actual_count";

private static final String STATE_STACK_STRUCTOBJ_ROOTNAME = "resources.stack_create_structured_object_root";

private static final String STATE_CREATE_ALERTS = "resources.create_alerts";

protected static final String STATE_CREATE_MESSAGE = "resources.create_message";

private static final String STATE_CREATE_MISSING_ITEM = "resources.create_missing_item";

private static final String STATE_STRUCTOBJ_HOMES = "resources.create_structured_object_home";

private static final String STATE_STACK_STRUCT_OBJ_SCHEMA = "resources.stack_create_structured_object_schema";

/** MIME types used to distinguish the kinds of documents that can be created. */
private static final String MIME_TYPE_DOCUMENT_PLAINTEXT = "text/plain";

private static final String MIME_TYPE_DOCUMENT_HTML = "text/html";

public static final String MIME_TYPE_STRUCTOBJ = "application/x-osp";

/** Item-type identifiers used by the create/edit UI. */
public static final String TYPE_FOLDER = "folder";

public static final String TYPE_UPLOAD = "file";

public static final String TYPE_URL = "Url";

public static final String TYPE_FORM = MIME_TYPE_STRUCTOBJ;

public static final String TYPE_HTML = MIME_TYPE_DOCUMENT_HTML;

public static final String TYPE_TEXT = MIME_TYPE_DOCUMENT_PLAINTEXT;

/** Maximum number of items that can be created in one create pass. */
private static final int CREATE_MAX_ITEMS = 10;

/** Widths (in characters) of the numeric form-input widgets. */
private static final int INTEGER_WIDGET_LENGTH = 12;

private static final int DOUBLE_WIDGET_LENGTH = 18;

/** Matches form-field names of the form "name.index" (group 1 = name, group 2 = numeric index). */
private static final Pattern INDEXED_FORM_FIELD_PATTERN = Pattern.compile("(.+)\\.(\\d+)");

/************** the metadata extension of edit/create contexts *****************************************/

private static final String STATE_METADATA_GROUPS = "resources.metadata.types";

private static final String INTENT_REVISE_FILE = "revise";

private static final String INTENT_REPLACE_FILE = "replace";

/** State attribute for whether there is at least one attachment before invoking the attachment tool. */
public static final String STATE_HAS_ATTACHMENT_BEFORE = "resources.has_attachment_before";

/** The name of the state attribute containing a list of new items to be attached. */
private static final String STATE_HELPER_NEW_ITEMS = "resources.helper_new_items";

/** The name of the state attribute indicating that the list of new items has changed. */
private static final String STATE_HELPER_CHANGED = "resources.helper_changed";

/** The name of the optional state attribute indicating the id of the collection that should be treated as the "home" collection. */
public static final String STATE_ATTACH_COLLECTION_ID = "resources.attach_collection_id";

/** The name of the state attribute containing the name of the tool that invoked Resources as attachment helper. */
public static final String STATE_ATTACH_TOOL_NAME = "resources.attach_tool_name";

/** The name of the state attribute for "new-item" attachment indicating the type of item. */
public static final String STATE_ATTACH_TEXT = "resources.attach_text";

/**
 * The name of the state attribute for "new-item" attachment indicating the id of the item to edit.
 * FIXME(review): this value duplicates STATE_ATTACH_COLLECTION_ID ("resources.attach_collection_id"),
 * so writing the item id clobbers the collection id and vice versa. It was probably intended to be
 * "resources.attach_item_id" -- confirm callers before changing, since they may rely on the aliasing.
 */
public static final String STATE_ATTACH_ITEM_ID = "resources.attach_collection_id";

/** The name of the state attribute for "new-item" attachment indicating the id of the form-type if item-type is TYPE_FORM (ignored otherwise). */
public static final String STATE_ATTACH_FORM_ID = "resources.attach_form_id";

/** The name of the state attribute indicating which form field a resource should be attached to. */
public static final String STATE_ATTACH_FORM_FIELD = "resources.attach_form_field";

/************** the helper context (file-picker) *****************************************/

/**
 * State attribute for the Vector of References, one for each attachment.
 * Using tools can pre-populate, and can read the results from here.
 */
public static final String STATE_ATTACHMENTS = "resources.state_attachments";

/**
 * The name of the state attribute indicating that the file picker should return links to
 * existing resources in an existing collection rather than copying it to the hidden attachments
 * area. If this value is not set, all attachments are to copies in the hidden attachments area.
 */
public static final String STATE_ATTACH_LINKS = "resources.state_attach_links";

/**
 * The name of the state attribute for the maximum number of items to attach. The attribute value
 * will be an Integer, usually CARDINALITY_SINGLE or CARDINALITY_MULTIPLE.
 */
public static final String STATE_ATTACH_CARDINALITY = "resources.state_attach_cardinality";

/** A constant indicating that a maximum of one item can be attached. */
public static final Integer CARDINALITY_SINGLE = FilePickerHelper.CARDINALITY_SINGLE;

/** A constant indicating that the number of attachments is unlimited. */
public static final Integer CARDINALITY_MULTIPLE = FilePickerHelper.CARDINALITY_MULTIPLE;

/**
 * The name of the state attribute for the title when a tool uses Resources as attachment helper
 * (for create or attach but not for edit mode).
 */
public static final String STATE_ATTACH_TITLE = "resources.state_attach_title_text";

/**
 * The name of the state attribute for the instructions when a tool uses Resources as attachment helper
 * (for create or attach but not for edit mode).
 */
public static final String STATE_ATTACH_INSTRUCTION = "resources.state_attach_instruction_text";

/**
 * State attribute for the org.sakaiproject.content.api.ContentResourceFilter
 * object that the current filter should honor. If this is set to null, then all files will
 * be selectable and viewable.
 */
public static final String STATE_ATTACH_FILTER = "resources.state_attach_filter";

/**
 * @deprecated use STATE_ATTACH_TITLE and STATE_ATTACH_INSTRUCTION instead
 */
public static final String STATE_FROM_TEXT = "attachment.from_text";

/**
 * The name of the state attribute indicating that the user canceled out of the helper.
 * Is set only if the user canceled out of the helper.
 */
public static final String STATE_HELPER_CANCELED_BY_USER = "resources.state_attach_canceled_by_user";

/**
 * The name of the state attribute indicating that dropboxes should be shown as places from which
 * to select attachments. The value should be a List of user-id's. The file picker will attempt to
 * show the dropbox for each user whose id is included in the list.
 */
public static final String STATE_ATTACH_SHOW_DROPBOXES = "resources.state_attach_show_dropboxes";

/**
 * The name of the state attribute indicating that the current user's workspace Resources collection
 * should be shown as a place from which to select attachments. The value should be "true". The file
 * picker will attempt to show the workspace if this attribute is set to "true".
 */
public static final String STATE_ATTACH_SHOW_WORKSPACE = "resources.state_attach_show_workspace";

/************** the delete context *****************************************/

/** The delete ids. */
private static final String STATE_DELETE_IDS = "resources.delete_ids";

/** The not-empty delete ids (note the singular "resource." prefix, unlike its siblings). */
private static final String STATE_NOT_EMPTY_DELETE_IDS = "resource.not_empty_delete_ids";

/** The name of the state attribute containing a list of BrowseItem objects corresponding to resources selected for deletion. */
private static final String STATE_DELETE_ITEMS = "resources.delete_items";

/** The name of the state attribute containing a list of BrowseItem objects corresponding to nonempty folders selected for deletion. */
private static final String STATE_DELETE_ITEMS_NOT_EMPTY = "resources.delete_items_not_empty";

/** The name of the state attribute containing a list of BrowseItem objects selected for deletion that cannot be deleted. */
private static final String STATE_DELETE_ITEMS_CANNOT_DELETE = "resources.delete_items_cannot_delete";

/************** the cut items context *****************************************/

/** The cut item ids. */
private static final String STATE_CUT_IDS = "resources.revise_cut_ids";

/************** the copied items context *****************************************/

/** The copied item ids. */
private static final String STATE_COPIED_IDS = "resources.revise_copied_ids";

/** The copied item id. */
private static final String STATE_COPIED_ID = "resources.revise_copied_id";

/************** the moved items context *****************************************/

/** The moved item ids. */
private static final String STATE_MOVED_IDS = "resources.revise_moved_ids";

/** Modes. */
private static final String MODE_LIST = "list";

private static final String MODE_EDIT = "edit";

private static final String MODE_DAV = "webdav";

private static final String MODE_CREATE = "create";

public static final String MODE_HELPER = "helper";

private static final String MODE_DELETE_CONFIRM = "deleteConfirm";

private static final String MODE_MORE = "more";

private static final String MODE_PROPERTIES = "properties";

/** modes for attachment helper */
public static final String MODE_ATTACHMENT_SELECT = "resources.attachment_select";

public static final String MODE_ATTACHMENT_CREATE = "resources.attachment_create";

public static final String MODE_ATTACHMENT_NEW_ITEM = "resources.attachment_new_item";

public static final String MODE_ATTACHMENT_EDIT_ITEM = "resources.attachment_edit_item";

public static final String MODE_ATTACHMENT_CONFIRM = "resources.attachment_confirm";

public static final String MODE_ATTACHMENT_SELECT_INIT = "resources.attachment_select_initialized";

public static final String MODE_ATTACHMENT_CREATE_INIT = "resources.attachment_create_initialized";

public static final String MODE_ATTACHMENT_NEW_ITEM_INIT = "resources.attachment_new_item_initialized";

public static final String MODE_ATTACHMENT_EDIT_ITEM_INIT = "resources.attachment_edit_item_initialized";

public static final String MODE_ATTACHMENT_CONFIRM_INIT = "resources.attachment_confirm_initialized";

public static final String MODE_ATTACHMENT_DONE = "resources.attachment_done";

/** vm template files for each mode. */
private static final String TEMPLATE_LIST = "content/chef_resources_list";

private static final String TEMPLATE_EDIT = "content/chef_resources_edit";

private static final String TEMPLATE_CREATE = "content/chef_resources_create";

private static final String TEMPLATE_DAV = "content/chef_resources_webdav";

private static final String TEMPLATE_ITEMTYPE = "content/chef_resources_itemtype";

private static final String TEMPLATE_SELECT = "content/chef_resources_select";

private static final String TEMPLATE_ATTACH = "content/chef_resources_attach";

private static final String TEMPLATE_MORE = "content/chef_resources_more";

private static final String TEMPLATE_DELETE_CONFIRM = "content/chef_resources_deleteConfirm";

private static final String TEMPLATE_PROPERTIES = "content/chef_resources_properties";

// private static final String TEMPLATE_REPLACE = "_replace";

/** the site title */
private static final String STATE_SITE_TITLE = "site_title";

/** copyright related info */
private static final String COPYRIGHT_TYPES = "copyright_types";

private static final String COPYRIGHT_TYPE = "copyright_type";

private static final String DEFAULT_COPYRIGHT = "default_copyright";

private static final String COPYRIGHT_ALERT = "copyright_alert";

private static final String DEFAULT_COPYRIGHT_ALERT = "default_copyright_alert";

private static final String COPYRIGHT_FAIRUSE_URL = "copyright_fairuse_url";

private static final String NEW_COPYRIGHT_INPUT = "new_copyright_input";

private static final String COPYRIGHT_SELF_COPYRIGHT = rb.getString("cpright2");

private static final String COPYRIGHT_NEW_COPYRIGHT = rb.getString("cpright3");

private static final String COPYRIGHT_ALERT_URL = ServerConfigurationService.getAccessUrl() + COPYRIGHT_PATH;

/** state attribute indicating whether we're using the Creative Commons dialog instead of the "old" copyright dialog */
protected static final String STATE_USING_CREATIVE_COMMONS = "resources.usingCreativeCommons";

/** How many times to retry making a name unique before giving up. */
private static final int MAXIMUM_ATTEMPTS_FOR_UNIQUENESS = 100;

/** The default value for whether to show all sites in file-picker (used if global value can't be read from server config service) */
public static final boolean SHOW_ALL_SITES_IN_FILE_PICKER = false;

/** The default value for whether to show all sites in resources tool (used if global value can't be read from server config service) */
private static final boolean SHOW_ALL_SITES_IN_RESOURCES = false;

/** The default value for whether to show all sites in dropbox (used if global value can't be read from server config service) */
private static final boolean SHOW_ALL_SITES_IN_DROPBOX = false;

/** The number of members for a collection at which this tool should refuse to expand the collection */
protected static final int EXPANDABLE_FOLDER_SIZE_LIMIT = 256;

protected static final String STATE_SHOW_REMOVE_ACTION = "resources.show_remove_action";

protected static final String STATE_SHOW_MOVE_ACTION = "resources.show_move_action";

protected static final String STATE_SHOW_COPY_ACTION = "resources.show_copy_action";

protected static final String STATE_HIGHLIGHTED_ITEMS = "resources.highlighted_items";

/** The default number of site collections per page. */
protected static final int DEFAULT_PAGE_SIZE = 50;

protected static final String PARAM_PAGESIZE = "collections_per_page";

protected static final String STATE_TOP_MESSAGE_INDEX = "resources.top_message_index";

protected static final String STATE_REMOVED_ATTACHMENTS = "resources.removed_attachments";

/********* Global constants *********/

/** The null/empty string */
private static final String NULL_STRING = "";

/** The string used when pasting the same resource to the same folder */
private static final String DUPLICATE_STRING = rb.getString("copyof") + " ";

/** The string used when pasting a shortcut of the same resource to the same folder */
private static final String SHORTCUT_STRING = rb.getString("shortcut");

/** The copyright character (Note: could be "\u00a9" if we supported UNICODE for specials -ggolden) */
private static final String COPYRIGHT_SYMBOL = rb.getString("cpright1");

/** The String of new copyright */
private static final String NEW_COPYRIGHT = "newcopyright";

/** The resource not exist string */
private static final String RESOURCE_NOT_EXIST_STRING = rb.getString("notexist1");

/** The title invalid string */
private static final String RESOURCE_INVALID_TITLE_STRING = rb.getString("titlecannot");

/** The copy, cut, paste not operate on collection string */
private static final String RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING = rb.getString("notsupported");

/** The maximum number of suspended operations that can be on the stack. */
private static final int MAXIMUM_SUSPENDED_OPERATIONS_STACK_DEPTH = 10;

/** portlet configuration parameter values **/
public static final String RESOURCES_MODE_RESOURCES = "resources";

public static final String RESOURCES_MODE_DROPBOX = "dropbox";

public static final String RESOURCES_MODE_HELPER = "helper";

/** The from state name */
private static final String STATE_FROM = "resources.from";

private static final String STATE_ENCODING = "resources.encoding";

private static final String DELIM = "@";

/** string used to represent "public" access mode in UI elements */
protected static final String PUBLIC_ACCESS = "public";

/**
 * Build the context for normal display: dispatches to the mode-specific
 * build method selected by the current STATE_MODE session attribute.
 *
 * @return the name of the velocity template to render for the current mode
 *         (null when no mode branch matched).
 */
public String buildMainPanelContext(VelocityPortlet portlet, Context context, RunData data, SessionState state)
{
	context.put("tlang", rb);
	// find the ContentTypeImage service
	context.put("contentTypeImageService", state.getAttribute(STATE_CONTENT_TYPE_IMAGE_SERVICE));
	context.put("copyright_alert_url", COPYRIGHT_ALERT_URL);

	String template = null;

	// place if notification is enabled and current site is not of My Workspace type
	boolean isUserSite = SiteService.isUserSite(ToolManager.getCurrentPlacement().getContext());
	// NOTE(review): new Boolean(...) is the deprecated boxing constructor; Boolean.valueOf
	// would avoid the allocation -- left unchanged here (documentation-only pass).
	context.put("notification", new Boolean(!isUserSite && notificationEnabled(state)));

	// get the mode
	String mode = (String) state.getAttribute(STATE_MODE);
	String helper_mode = (String) state.getAttribute(ResourcesAction.STATE_RESOURCES_HELPER_MODE);

	if (!MODE_HELPER.equals(mode) && helper_mode != null)
	{
		// not in helper mode, but a helper context is needed

		// if the mode is not done, defer to the helper context
		if (!mode.equals(ResourcesAction.MODE_ATTACHMENT_DONE))
		{
			template = ResourcesAction.buildHelperContext(portlet, context, data, state);
			// template = AttachmentAction.buildHelperContext(portlet, context, runData, sstate);
			return template;
		}

		// clean up the helper-mode leftovers before falling through to normal dispatch
		state.removeAttribute(ResourcesAction.STATE_RESOURCES_HELPER_MODE);
		state.removeAttribute(ResourcesAction.STATE_ATTACHMENTS);
	}

	if (mode.equals(MODE_LIST))
	{
		// build the context for add item
		template = buildListContext(portlet, context, data, state);
	}
	else if (mode.equals(MODE_HELPER))
	{
		// build the context for add item
		template = buildHelperContext(portlet, context, data, state);
	}
	else if (mode.equals(MODE_CREATE))
	{
		// build the context for add item
		template = buildCreateContext(portlet, context, data, state);
	}
	else if (mode.equals(MODE_DELETE_CONFIRM))
	{
		// build the context for the basic step of delete confirm page
		template = buildDeleteConfirmContext(portlet, context, data, state);
	}
	else if (mode.equals(MODE_MORE))
	{
		// build the context to display the property list
		template = buildMoreContext(portlet, context, data, state);
	}
	else if (mode.equals(MODE_EDIT))
	{
		// build the context to display the property list
		template = buildEditContext(portlet, context, data, state);
	}
	else if (mode.equals(MODE_OPTIONS))
	{
		template = buildOptionsPanelContext(portlet, context, data, state);
	}
	else if (mode.equals(MODE_DAV))
	{
		template = buildWebdavContext(portlet, context, data, state);
	}

	return template;

} // buildMainPanelContext

/**
 * Build the context for the list view.
 */
public String buildListContext(VelocityPortlet portlet, Context context, RunData data, SessionState state)
{
	context.put("tlang", rb);
	context.put("expandedCollections", state.getAttribute(STATE_EXPANDED_COLLECTIONS));

	// find the ContentTypeImage service
	context.put("contentTypeImageService", state.getAttribute(STATE_CONTENT_TYPE_IMAGE_SERVICE));

	context.put("TYPE_FOLDER", TYPE_FOLDER);
	context.put("TYPE_UPLOAD", TYPE_UPLOAD);

	// expose the access-mode identifiers used by the velocity template
	context.put("SITE_ACCESS", AccessMode.SITE.toString());
	context.put("GROUP_ACCESS", AccessMode.GROUPED.toString());
	context.put("INHERITED_ACCESS", AccessMode.INHERITED.toString());
	context.put("PUBLIC_ACCESS", PUBLIC_ACCESS);

	// lazily initialize the set of selected item ids
	Set selectedItems = (Set) state.getAttribute(STATE_LIST_SELECTIONS);
	if (selectedItems == null)
	{
		selectedItems = new TreeSet();
		state.setAttribute(STATE_LIST_SELECTIONS, selectedItems);
	}
	context.put("selectedItems", selectedItems);

	// find the ContentHosting service
	org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute(STATE_CONTENT_SERVICE);
	context.put("service", contentService);

	boolean inMyWorkspace = SiteService.isUserSite(ToolManager.getCurrentPlacement().getContext());
	context.put("inMyWorkspace", Boolean.toString(inMyWorkspace));

	boolean atHome = false;

	// %%STATE_MODE_RESOURCES%%
	boolean dropboxMode = RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES));
	if (dropboxMode)
	{
		// don't show the public option or notification when in dropbox mode
		context.put("dropboxMode", Boolean.TRUE);
	}
	else
	{
		//context.put("dropboxMode", Boolean.FALSE);
	}

	// make sure the channelId is set
	String collectionId = (String) state.getAttribute(STATE_COLLECTION_ID);
	context.put("collectionId", collectionId);

	String navRoot = (String) state.getAttribute(STATE_NAVIGATION_ROOT);
	String homeCollectionId = (String) state.getAttribute(STATE_HOME_COLLECTION_ID);
	String siteTitle = (String) state.getAttribute(STATE_SITE_TITLE);

	if (collectionId.equals(homeCollectionId))
	{
		atHome = true;
		context.put("collectionDisplayName", state.getAttribute(STATE_HOME_COLLECTION_DISPLAY_NAME));
	}
	else
	{
		// should be no PermissionException thrown at this time, when the user can successfully navigate to this collection
		try
		{
			context.put("collectionDisplayName", contentService.getCollection(collectionId).getProperties().getProperty(ResourceProperties.PROP_DISPLAY_NAME));
		}
		catch (IdUnusedException e) {} // ignored: display name simply not set
		catch (TypeException e) {} // ignored
		catch (PermissionException e) {} // ignored
	}

	if (!inMyWorkspace && !dropboxMode && atHome && SiteService.allowUpdateSite(ToolManager.getCurrentPlacement().getContext()))
	{
		context.put("showPermissions", Boolean.TRUE.toString());
		//buildListMenu(portlet, context, data, state);
	}

	context.put("atHome", Boolean.toString(atHome));

	List cPath = getCollectionPath(state);
	context.put("collectionPath", cPath);

	// set the sort values
	String sortedBy = (String) state.getAttribute(STATE_SORT_BY);
	String sortedAsc = (String) state.getAttribute(STATE_SORT_ASC);
	context.put("currentSortedBy", sortedBy);
	context.put("currentSortAsc", sortedAsc);
	context.put("TRUE", Boolean.TRUE.toString());

	boolean showRemoveAction = false;
	boolean showMoveAction = false;
	boolean showCopyAction = false;

	Set highlightedItems = new TreeSet();

	try
	{
		try
		{
			// verify the collection exists; if not, attempt to create it on the fly
			contentService.checkCollection(collectionId);
			context.put("collectionFlag", Boolean.TRUE.toString());
		}
		catch (IdUnusedException ex)
		{
			logger.warn(this + "IdUnusedException: " + collectionId);
			try
			{
				ContentCollectionEdit coll = contentService.addCollection(collectionId);
				contentService.commitCollection(coll);
			}
			catch (IdUsedException inner)
			{
				// how can this happen??
				logger.warn(this + "IdUsedException: " + collectionId);
				throw ex;
			}
			catch (IdInvalidException inner)
			{
				logger.warn(this + "IdInvalidException: " + collectionId);
				// what now?
				throw ex;
			}
			catch (InconsistentException inner)
			{
				logger.warn(this + "InconsistentException: " + collectionId);
				// what now?
				throw ex;
			}
		}
		catch (TypeException ex)
		{
			logger.warn(this + "TypeException.");
			throw ex;
		}
		catch (PermissionException ex)
		{
			logger.warn(this + "PermissionException.");
			throw ex;
		}

		// highlight items that are pending a paste after a copy...
		// NOTE(review): copyFlag/moveFlag appear to be assumed non-null here (would NPE on
		// .equals if the state attribute is unset) -- presumably initialized elsewhere; confirm.
		String copyFlag = (String) state.getAttribute(STATE_COPY_FLAG);
		if (copyFlag.equals(Boolean.TRUE.toString()))
		{
			context.put("copyFlag", copyFlag);
			List copiedItems = (List) state.getAttribute(STATE_COPIED_IDS);
			// context.put ("copiedItem", state.getAttribute (STATE_COPIED_ID));
			highlightedItems.addAll(copiedItems);
			// context.put("copiedItems", copiedItems);
		}

		// ... or after a cut/move
		String moveFlag = (String) state.getAttribute(STATE_MOVE_FLAG);
		if (moveFlag.equals(Boolean.TRUE.toString()))
		{
			context.put("moveFlag", moveFlag);
			List movedItems = (List) state.getAttribute(STATE_MOVED_IDS);
			highlightedItems.addAll(movedItems);
			// context.put ("copiedItem", state.getAttribute (STATE_COPIED_ID));
			// context.put("movedItems", movedItems);
		}

		// the collection being listed is always treated as expanded
		HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
		ContentCollection coll = contentService.getCollection(collectionId);
		expandedCollections.put(collectionId, coll);

		state.removeAttribute(STATE_PASTE_ALLOWED_FLAG);

		List all_roots = new Vector();
		List this_site = new Vector();

		// build the browse tree for the current collection; the first member is the root
		List members = getBrowseItems(collectionId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, navRoot.equals(homeCollectionId), state);
		if (members != null && members.size() > 0)
		{
			BrowseItem root = (BrowseItem) members.remove(0);
			showRemoveAction = showRemoveAction || root.hasDeletableChildren();
			showMoveAction = showMoveAction || root.hasDeletableChildren();
			showCopyAction = showCopyAction || root.hasCopyableChildren();

			if (atHome && dropboxMode)
			{
				root.setName(siteTitle + " " + rb.getString("gen.drop"));
			}
			else if (atHome)
			{
				root.setName(siteTitle + " " + rb.getString("gen.reso"));
			}

			context.put("site", root);
			root.addMembers(members);
			this_site.add(root);
			all_roots.add(root);
		}
		context.put("this_site", this_site);

		// optionally list collections from all other sites the user can access
		boolean show_all_sites = false;
		List other_sites = new Vector();

		String allowed_to_see_other_sites = (String) state.getAttribute(STATE_SHOW_ALL_SITES);
		String show_other_sites = (String) state.getAttribute(STATE_SHOW_OTHER_SITES);
		context.put("show_other_sites", show_other_sites);
		if (Boolean.TRUE.toString().equals(allowed_to_see_other_sites))
		{
			context.put("allowed_to_see_other_sites", Boolean.TRUE.toString());
			show_all_sites = Boolean.TRUE.toString().equals(show_other_sites);
		}

		if (atHome && show_all_sites)
		{
			state.setAttribute(STATE_HIGHLIGHTED_ITEMS, highlightedItems);
			// TODO: see call to prepPage below. That also calls readAllResources. Are both calls necessary?
			other_sites.addAll(readAllResources(state));
			all_roots.addAll(other_sites);

			// paginate the other-site roots
			List messages = prepPage(state);
			context.put("other_sites", messages);

			if (state.getAttribute(STATE_NUM_MESSAGES) != null)
			{
				context.put("allMsgNumber", state.getAttribute(STATE_NUM_MESSAGES).toString());
				context.put("allMsgNumberInt", state.getAttribute(STATE_NUM_MESSAGES));
			}

			context.put("pagesize", ((Integer) state.getAttribute(STATE_PAGESIZE)).toString());

			// find the position of the message that is the top first on the page
			if ((state.getAttribute(STATE_TOP_MESSAGE_INDEX) != null) && (state.getAttribute(STATE_PAGESIZE) != null))
			{
				int topMsgPos = ((Integer) state.getAttribute(STATE_TOP_MESSAGE_INDEX)).intValue() + 1;
				context.put("topMsgPos", Integer.toString(topMsgPos));
				int btmMsgPos = topMsgPos + ((Integer) state.getAttribute(STATE_PAGESIZE)).intValue() - 1;
				if (state.getAttribute(STATE_NUM_MESSAGES) != null)
				{
					int allMsgNumber = ((Integer) state.getAttribute(STATE_NUM_MESSAGES)).intValue();
					// clamp the bottom position to the total count
					if (btmMsgPos > allMsgNumber) btmMsgPos = allMsgNumber;
				}
				context.put("btmMsgPos", Integer.toString(btmMsgPos));
			}

			boolean goPPButton = state.getAttribute(STATE_PREV_PAGE_EXISTS) != null;
			context.put("goPPButton", Boolean.toString(goPPButton));
			boolean goNPButton = state.getAttribute(STATE_NEXT_PAGE_EXISTS) != null;
			context.put("goNPButton", Boolean.toString(goNPButton));

			/*
			boolean goFPButton = state.getAttribute(STATE_FIRST_PAGE_EXISTS) != null;
			context.put("goFPButton", Boolean.toString(goFPButton));
			boolean goLPButton = state.getAttribute(STATE_LAST_PAGE_EXISTS) != null;
			context.put("goLPButton", Boolean.toString(goLPButton));
			*/

			context.put("pagesize", state.getAttribute(STATE_PAGESIZE));
			// context.put("pagesizes", PAGESIZES);
		}

		// context.put ("other_sites", other_sites);
		state.setAttribute(STATE_COLLECTION_ROOTS, all_roots);
		// context.put ("root", root);

		if (state.getAttribute(STATE_PASTE_ALLOWED_FLAG) != null)
		{
			context.put("paste_place_showing", state.getAttribute(STATE_PASTE_ALLOWED_FLAG));
		}

		if (showRemoveAction)
		{
			context.put("showRemoveAction", Boolean.TRUE.toString());
		}

		if (showMoveAction)
		{
			context.put("showMoveAction", Boolean.TRUE.toString());
		}

		if (showCopyAction)
		{
			context.put("showCopyAction", Boolean.TRUE.toString());
		}
	}
	catch (IdUnusedException e)
	{
		addAlert(state, rb.getString("cannotfind"));
		context.put("collectionFlag", Boolean.FALSE.toString());
	}
	catch (TypeException e)
	{
		logger.warn(this + "TypeException.");
		context.put("collectionFlag", Boolean.FALSE.toString());
	}
	catch (PermissionException e)
	{
		addAlert(state, rb.getString("notpermis1"));
		context.put("collectionFlag", Boolean.FALSE.toString());
	}

	context.put("homeCollection", (String) state.getAttribute(STATE_HOME_COLLECTION_ID));
	context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE));
	context.put("resourceProperties", contentService.newResourceProperties());

	try
	{
		// TODO: why 'site' here?
		Site site = SiteService.getSite(ToolManager.getCurrentPlacement().getContext());
		context.put("siteTitle", site.getTitle());
	}
	catch (IdUnusedException e)
	{
		// logger.warn(this + e.toString());
	}

	context.put("expandallflag", state.getAttribute(STATE_EXPAND_ALL_FLAG));
	state.removeAttribute(STATE_NEED_TO_EXPAND_ALL);

	// inform the observing courier that we just updated the page...
	// if there are pending requests to do so they can be cleared
	justDelivered(state);

	// pick the "show" template based on the standard template name
	// String template = (String) getContext(data).get("template");

	return TEMPLATE_LIST;

} // buildListContext

/**
 * Build the context for the helper view.
 */
public static String buildHelperContext(VelocityPortlet portlet, Context context, RunData data, SessionState state)
{
	// first invocation: initialize helper state and clear any stale cancel marker
	if (state.getAttribute(STATE_INITIALIZED) == null)
	{
		initStateAttributes(state, portlet);
		if (state.getAttribute(ResourcesAction.STATE_HELPER_CANCELED_BY_USER) != null)
		{
			state.removeAttribute(ResourcesAction.STATE_HELPER_CANCELED_BY_USER);
		}
	}

	String mode = (String) state.getAttribute(STATE_MODE);

	if (state.getAttribute(STATE_MODE_RESOURCES) == null && MODE_HELPER.equals(mode))
	{
		state.setAttribute(ResourcesAction.STATE_MODE_RESOURCES, ResourcesAction.MODE_HELPER);
	}

	// lazily initialize the set of selected item ids
	Set selectedItems = (Set) state.getAttribute(STATE_LIST_SELECTIONS);
	if (selectedItems == null)
	{
		selectedItems = new TreeSet();
		state.setAttribute(STATE_LIST_SELECTIONS, selectedItems);
	}
	context.put("selectedItems", selectedItems);

	// translate the entry-point helper mode into its "*_INIT" counterpart;
	// entering select/create/new-item/edit-item pushes a new frame on the operations stack
	String helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE);
	boolean need_to_push = false;

	if (MODE_ATTACHMENT_SELECT.equals(helper_mode))
	{
		need_to_push = true;
		helper_mode = MODE_ATTACHMENT_SELECT_INIT;
	}
	else if (MODE_ATTACHMENT_CREATE.equals(helper_mode))
	{
		need_to_push = true;
		helper_mode = MODE_ATTACHMENT_CREATE_INIT;
	}
	else if (MODE_ATTACHMENT_NEW_ITEM.equals(helper_mode))
	{
		need_to_push = true;
		helper_mode = MODE_ATTACHMENT_NEW_ITEM_INIT;
	}
	else if (MODE_ATTACHMENT_EDIT_ITEM.equals(helper_mode))
	{
		need_to_push = true;
		helper_mode = MODE_ATTACHMENT_EDIT_ITEM_INIT;
	}

	Map current_stack_frame = null;

	if (need_to_push)
	{
		current_stack_frame = pushOnStack(state);
		current_stack_frame.put(STATE_STACK_EDIT_INTENT, INTENT_REVISE_FILE);

		state.setAttribute(VelocityPortletPaneledAction.STATE_HELPER, ResourcesAction.class.getName());
state.setAttribute(STATE_RESOURCES_HELPER_MODE, helper_mode); if(MODE_ATTACHMENT_EDIT_ITEM_INIT.equals(helper_mode)) { String attachmentId = (String) state.getAttribute(STATE_EDIT_ID); if(attachmentId != null) { current_stack_frame.put(STATE_STACK_EDIT_ID, attachmentId); String collectionId = ContentHostingService.getContainingCollectionId(attachmentId); current_stack_frame.put(STATE_STACK_EDIT_COLLECTION_ID, collectionId); EditItem item = getEditItem(attachmentId, collectionId, data); if (state.getAttribute(STATE_MESSAGE) == null) { // got resource and sucessfully populated item with values state.setAttribute(STATE_EDIT_ALERTS, new HashSet()); current_stack_frame.put(STATE_STACK_EDIT_ITEM, item); } } } else { List attachments = (List) state.getAttribute(STATE_ATTACHMENTS); if(attachments == null) { attachments = EntityManager.newReferenceList(); } List attached = new Vector(); Iterator it = attachments.iterator(); while(it.hasNext()) { try { Reference ref = (Reference) it.next(); String itemId = ref.getId(); ResourceProperties properties = ref.getProperties(); String displayName = properties.getProperty(ResourceProperties.PROP_DISPLAY_NAME); String containerId = ref.getContainer(); String accessUrl = ContentHostingService.getUrl(itemId); String contentType = properties.getProperty(ResourceProperties.PROP_CONTENT_TYPE); AttachItem item = new AttachItem(itemId, displayName, containerId, accessUrl); item.setContentType(contentType); attached.add(item); } catch(Exception ignore) {} } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, attached); } } else { current_stack_frame = peekAtStack(state); if(current_stack_frame.get(STATE_STACK_EDIT_INTENT) == null) { current_stack_frame.put(STATE_STACK_EDIT_INTENT, INTENT_REVISE_FILE); } } if(helper_mode == null) { helper_mode = (String) current_stack_frame.get(STATE_RESOURCES_HELPER_MODE); } else { current_stack_frame.put(STATE_RESOURCES_HELPER_MODE, helper_mode); } String helper_title = (String) 
current_stack_frame.get(STATE_ATTACH_TITLE); if(helper_title == null) { helper_title = (String) state.getAttribute(STATE_ATTACH_TITLE); if(helper_title != null) { current_stack_frame.put(STATE_ATTACH_TITLE, helper_title); } } if(helper_title != null) { context.put("helper_title", helper_title); } String helper_instruction = (String) current_stack_frame.get(STATE_ATTACH_INSTRUCTION); if(helper_instruction == null) { helper_instruction = (String) state.getAttribute(STATE_ATTACH_INSTRUCTION); if(helper_instruction != null) { current_stack_frame.put(STATE_ATTACH_INSTRUCTION, helper_instruction); } } if(helper_instruction != null) { context.put("helper_instruction", helper_instruction); } String title = (String) current_stack_frame.get(STATE_STACK_EDIT_ITEM_TITLE); if(title == null) { title = (String) state.getAttribute(STATE_ATTACH_TEXT); if(title != null) { current_stack_frame.put(STATE_STACK_EDIT_ITEM_TITLE, title); } } if(title != null && title.trim().length() > 0) { context.put("helper_subtitle", title); } String template = null; if(MODE_ATTACHMENT_SELECT_INIT.equals(helper_mode)) { template = buildSelectAttachmentContext(portlet, context, data, state); } else if(MODE_ATTACHMENT_CREATE_INIT.equals(helper_mode)) { template = buildCreateContext(portlet, context, data, state); } else if(MODE_ATTACHMENT_NEW_ITEM_INIT.equals(helper_mode)) { template = buildItemTypeContext(portlet, context, data, state); } else if(MODE_ATTACHMENT_EDIT_ITEM_INIT.equals(helper_mode)) { template = buildEditContext(portlet, context, data, state); } return template; } public static String buildItemTypeContext(VelocityPortlet portlet, Context context, RunData data, SessionState state) { context.put("tlang",rb); initStateAttributes(state, portlet); Map current_stack_frame = peekAtStack(state); String mode = (String) state.getAttribute(STATE_MODE); if(mode == null || mode.trim().length() == 0) { mode = MODE_HELPER; state.setAttribute(STATE_MODE, mode); } String helper_mode = null; 
if(MODE_HELPER.equals(mode)) { helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE); if(helper_mode == null || helper_mode.trim().length() == 0) { helper_mode = MODE_ATTACHMENT_NEW_ITEM; state.setAttribute(STATE_RESOURCES_HELPER_MODE, helper_mode); } current_stack_frame.put(STATE_RESOURCES_HELPER_MODE, helper_mode); if(MODE_ATTACHMENT_NEW_ITEM_INIT.equals(helper_mode)) { context.put("attaching_this_item", Boolean.TRUE.toString()); } state.setAttribute(VelocityPortletPaneledAction.STATE_HELPER, ResourcesAction.class.getName()); } String msg = (String) state.getAttribute(STATE_CREATE_MESSAGE); if (msg != null) { context.put("itemAlertMessage", msg); state.removeAttribute(STATE_CREATE_MESSAGE); } context.put("max_upload_size", state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE)); String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID); if(collectionId == null || collectionId.trim().length() == 0) { collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID); if(collectionId == null || collectionId.trim().length() == 0) { collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext()); } current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId); } context.put("collectionId", collectionId); String itemType = (String) current_stack_frame.get(STATE_STACK_CREATE_TYPE); if(itemType == null || "".equals(itemType)) { itemType = (String) state.getAttribute(STATE_CREATE_TYPE); if(itemType == null || "".equals(itemType)) { itemType = TYPE_UPLOAD; } current_stack_frame.put(STATE_STACK_CREATE_TYPE, itemType); } context.put("itemType", itemType); Integer numberOfItems = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER); if(numberOfItems == null) { numberOfItems = (Integer) state.getAttribute(STATE_CREATE_NUMBER); current_stack_frame.put(STATE_STACK_CREATE_NUMBER, numberOfItems); } if(numberOfItems == null) { numberOfItems = new Integer(1); 
current_stack_frame.put(STATE_STACK_CREATE_NUMBER, numberOfItems); } context.put("numberOfItems", numberOfItems); context.put("max_number", new Integer(1)); Collection groups = ContentHostingService.getGroupsWithReadAccess(collectionId); // TODO: does this method filter groups for this subcollection?? if(! groups.isEmpty()) { context.put("siteHasGroups", Boolean.TRUE.toString()); } List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS); if(new_items == null) { String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT); if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals("")) { defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright"); state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus); } Site site; try { site = SiteService.getSite(ToolManager.getCurrentPlacement().getContext()); } catch (IdUnusedException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } String encoding = data.getRequest().getCharacterEncoding(); List inherited_access_groups = new Vector(); AccessMode inherited_access = AccessMode.INHERITED; try { ContentCollection parent = ContentHostingService.getCollection(collectionId); inherited_access = parent.getInheritedAccess(); inherited_access_groups.addAll(parent.getInheritedGroups()); } catch (IdUnusedException e) { } catch (TypeException e) { } catch (PermissionException e) { } boolean isInDropbox = ContentHostingService.isInDropbox(collectionId); Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS); } context.put("new_items", new_items); current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items); String show_form_items = 
(String) current_stack_frame.get(STATE_SHOW_FORM_ITEMS); if(show_form_items == null) { show_form_items = (String) state.getAttribute(STATE_SHOW_FORM_ITEMS); if(show_form_items != null) { current_stack_frame.put(STATE_SHOW_FORM_ITEMS,show_form_items); } } if(show_form_items != null) { context.put("show_form_items", show_form_items); } context.put("TYPE_FOLDER", TYPE_FOLDER); context.put("TYPE_UPLOAD", TYPE_UPLOAD); context.put("TYPE_HTML", TYPE_HTML); context.put("TYPE_TEXT", TYPE_TEXT); context.put("TYPE_URL", TYPE_URL); context.put("TYPE_FORM", TYPE_FORM); // copyright copyrightChoicesIntoContext(state, context); // put schema for metadata into context metadataGroupsIntoContext(state, context); if(TYPE_FORM.equals(itemType)) { List listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES); if(listOfHomes == null) { setupStructuredObjects(state); listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES); } context.put("homes", listOfHomes); String formtype = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE); if(formtype == null) { formtype = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE); if(formtype == null) { formtype = ""; } current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype); } context.put("formtype", formtype); String formtype_readonly = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE_READONLY); if(formtype_readonly == null) { formtype_readonly = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE_READONLY); if(formtype_readonly == null) { formtype_readonly = Boolean.FALSE.toString(); } current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE_READONLY, formtype_readonly); } if(formtype_readonly != null && formtype_readonly.equals(Boolean.TRUE.toString())) { context.put("formtype_readonly", formtype_readonly); } String rootname = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_ROOTNAME); context.put("rootname", rootname); context.put("STRING", ResourcesMetadata.WIDGET_STRING); context.put("TEXTAREA", 
ResourcesMetadata.WIDGET_TEXTAREA); context.put("BOOLEAN", ResourcesMetadata.WIDGET_BOOLEAN); context.put("INTEGER", ResourcesMetadata.WIDGET_INTEGER); context.put("DOUBLE", ResourcesMetadata.WIDGET_DOUBLE); context.put("DATE", ResourcesMetadata.WIDGET_DATE); context.put("TIME", ResourcesMetadata.WIDGET_TIME); context.put("DATETIME", ResourcesMetadata.WIDGET_DATETIME); context.put("ANYURI", ResourcesMetadata.WIDGET_ANYURI); context.put("ENUM", ResourcesMetadata.WIDGET_ENUM); context.put("NESTED", ResourcesMetadata.WIDGET_NESTED); context.put("WYSIWYG", ResourcesMetadata.WIDGET_WYSIWYG); context.put("today", TimeService.newTime()); context.put("DOT", ResourcesMetadata.DOT); } return TEMPLATE_ITEMTYPE; } /** * Access the top item on the suspended-operations stack * @param state The current session state, including the STATE_SUSPENDED_OPERATIONS_STACK attribute. * @return The top item on the stack, or null if the stack is empty. */ private static Map peekAtStack(SessionState state) { Map current_stack_frame = null; Stack operations_stack = (Stack) state.getAttribute(STATE_SUSPENDED_OPERATIONS_STACK); if(operations_stack == null) { operations_stack = new Stack(); state.setAttribute(STATE_SUSPENDED_OPERATIONS_STACK, operations_stack); } if(! operations_stack.isEmpty()) { current_stack_frame = (Map) operations_stack.peek(); } return current_stack_frame; } /** * Returns true if the suspended operations stack contains no elements. * @param state The current session state, including the STATE_SUSPENDED_OPERATIONS_STACK attribute. * @return true if the suspended operations stack contains no elements */ private static boolean isStackEmpty(SessionState state) { Stack operations_stack = (Stack) state.getAttribute(STATE_SUSPENDED_OPERATIONS_STACK); if(operations_stack == null) { operations_stack = new Stack(); state.setAttribute(STATE_SUSPENDED_OPERATIONS_STACK, operations_stack); } return operations_stack.isEmpty(); } /** * Push an item of the suspended-operations stack. 
* @param state The current session state, including the STATE_SUSPENDED_OPERATIONS_STACK attribute. * @return The new item that has just been added to the stack, or null if depth limit is exceeded. */ private static Map pushOnStack(SessionState state) { Map current_stack_frame = null; Stack operations_stack = (Stack) state.getAttribute(STATE_SUSPENDED_OPERATIONS_STACK); if(operations_stack == null) { operations_stack = new Stack(); state.setAttribute(STATE_SUSPENDED_OPERATIONS_STACK, operations_stack); } if(operations_stack.size() < MAXIMUM_SUSPENDED_OPERATIONS_STACK_DEPTH) { current_stack_frame = (Map) operations_stack.push(new Hashtable()); } Object helper_mode = state.getAttribute(STATE_RESOURCES_HELPER_MODE); if(helper_mode != null) { current_stack_frame.put(STATE_RESOURCES_HELPER_MODE, helper_mode); } return current_stack_frame; } /** * Remove and return the top item from the suspended-operations stack. * @param state The current session state, including the STATE_SUSPENDED_OPERATIONS_STACK attribute. * @return The item that has just been removed from the stack, or null if the stack was empty. */ private static Map popFromStack(SessionState state) { Map current_stack_frame = null; Stack operations_stack = (Stack) state.getAttribute(STATE_SUSPENDED_OPERATIONS_STACK); if(operations_stack == null) { operations_stack = new Stack(); state.setAttribute(STATE_SUSPENDED_OPERATIONS_STACK, operations_stack); } if(! 
operations_stack.isEmpty())
		{
			current_stack_frame = (Map) operations_stack.pop();
			if(operations_stack.isEmpty())
			{
				// popping the last frame: copy the "canceled by user" flag up to
				// session scope so callers can still see it after the stack unwinds
				String canceled = (String) current_stack_frame.get(STATE_HELPER_CANCELED_BY_USER);
				if(canceled != null)
				{
					state.setAttribute(STATE_HELPER_CANCELED_BY_USER, canceled);
				}
			}
		}
		return current_stack_frame;
	}

	// Decide which mode the tool resumes after a suspended operation finishes:
	// with an empty stack, either finish helper mode (MODE_ATTACHMENT_DONE, after
	// cleanupState) or return to the list view; otherwise restore the helper mode
	// recorded in the top stack frame, if any.
	private static void resetCurrentMode(SessionState state)
	{
		String mode = (String) state.getAttribute(STATE_MODE);
		if(isStackEmpty(state))
		{
			if(MODE_HELPER.equals(mode))
			{
				cleanupState(state);
				state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_DONE);
			}
			else
			{
				state.setAttribute(STATE_MODE, MODE_LIST);
				state.removeAttribute(STATE_RESOURCES_HELPER_MODE);
			}
			return;
		}
		Map current_stack_frame = peekAtStack(state);
		String helper_mode = (String) current_stack_frame.get(STATE_RESOURCES_HELPER_MODE);
		if(helper_mode != null)
		{
			state.setAttribute(STATE_RESOURCES_HELPER_MODE, helper_mode);
		}
	}

	/**
	 * Build the context for selecting attachments
	 */
	public static String buildSelectAttachmentContext ( VelocityPortlet portlet, Context context, RunData data, SessionState state)
	{
		context.put("tlang",rb);
		initStateAttributes(state, portlet);

		// reuse the current suspended-operation frame, or start a new one
		Map current_stack_frame = peekAtStack(state);
		if(current_stack_frame == null)
		{
			current_stack_frame = pushOnStack(state);
		}

		state.setAttribute(VelocityPortletPaneledAction.STATE_HELPER, ResourcesAction.class.getName());

		Set highlightedItems = new TreeSet();

		// items already attached in this helper session: prefer the stack frame's
		// list, fall back to session state, else start with an empty list, and
		// cache whatever was found back into the stack frame
		List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS);
		if(new_items == null)
		{
			new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS);
			if(new_items == null)
			{
				new_items = new Vector();
			}
			current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items);
		}
		context.put("attached", new_items);
		context.put("last", new Integer(new_items.size() - 1));

		// maximum number of attachments allowed (stack frame first, then session state)
		Integer max_cardinality = (Integer) current_stack_frame.get(STATE_ATTACH_CARDINALITY);
		if(max_cardinality == null)
		{
			max_cardinality = (Integer) state.getAttribute(STATE_ATTACH_CARDINALITY);
			if(max_cardinality
== null) { max_cardinality = CARDINALITY_MULTIPLE; } current_stack_frame.put(STATE_ATTACH_CARDINALITY, max_cardinality); } context.put("max_cardinality", max_cardinality); if(new_items.size() >= max_cardinality.intValue()) { context.put("disable_attach_links", Boolean.TRUE.toString()); } if(state.getAttribute(STATE_HELPER_CHANGED) != null) { context.put("list_has_changed", "true"); } String form_field = (String) current_stack_frame.get(ResourcesAction.STATE_ATTACH_FORM_FIELD); if(form_field == null) { form_field = (String) state.getAttribute(ResourcesAction.STATE_ATTACH_FORM_FIELD); if(form_field != null) { current_stack_frame.put(ResourcesAction.STATE_ATTACH_FORM_FIELD, form_field); state.removeAttribute(ResourcesAction.STATE_ATTACH_FORM_FIELD); } } // find the ContentTypeImage service context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); context.put("TYPE_FOLDER", TYPE_FOLDER); context.put("TYPE_UPLOAD", TYPE_UPLOAD); // find the ContentHosting service org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE); // context.put ("service", contentService); boolean inMyWorkspace = SiteService.isUserSite(ToolManager.getCurrentPlacement().getContext()); // context.put("inMyWorkspace", Boolean.toString(inMyWorkspace)); boolean atHome = false; // %%STATE_MODE_RESOURCES%% boolean dropboxMode = RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES)); // make sure the channedId is set String collectionId = (String) state.getAttribute(STATE_ATTACH_COLLECTION_ID); if(collectionId == null) { collectionId = (String) state.getAttribute (STATE_COLLECTION_ID); } context.put ("collectionId", collectionId); String navRoot = (String) state.getAttribute(STATE_NAVIGATION_ROOT); String homeCollectionId = (String) state.getAttribute(STATE_HOME_COLLECTION_ID); String siteTitle = (String) state.getAttribute 
(STATE_SITE_TITLE); if (collectionId.equals(homeCollectionId)) { atHome = true; //context.put ("collectionDisplayName", state.getAttribute (STATE_HOME_COLLECTION_DISPLAY_NAME)); } else { /* // should be not PermissionException thrown at this time, when the user can successfully navigate to this collection try { context.put("collectionDisplayName", contentService.getCollection(collectionId).getProperties().getProperty(ResourceProperties.PROP_DISPLAY_NAME)); } catch (IdUnusedException e){} catch (TypeException e) {} catch (PermissionException e) {} */ } List cPath = getCollectionPath(state); context.put ("collectionPath", cPath); // set the sort values String sortedBy = (String) state.getAttribute (STATE_SORT_BY); String sortedAsc = (String) state.getAttribute (STATE_SORT_ASC); context.put ("currentSortedBy", sortedBy); context.put ("currentSortAsc", sortedAsc); context.put("TRUE", Boolean.TRUE.toString()); // String current_user_id = UserDirectoryService.getCurrentUser().getId(); try { try { contentService.checkCollection (collectionId); context.put ("collectionFlag", Boolean.TRUE.toString()); } catch(IdUnusedException ex) { logger.warn("ResourcesAction.buildSelectAttachment (static) : IdUnusedException: " + collectionId); try { ContentCollectionEdit coll = contentService.addCollection(collectionId); contentService.commitCollection(coll); } catch(IdUsedException inner) { // how can this happen?? logger.warn("ResourcesAction.buildSelectAttachment (static) : IdUsedException: " + collectionId); throw ex; } catch(IdInvalidException inner) { logger.warn("ResourcesAction.buildSelectAttachment (static) : IdInvalidException: " + collectionId); // what now? throw ex; } catch(InconsistentException inner) { logger.warn("ResourcesAction.buildSelectAttachment (static) : InconsistentException: " + collectionId); // what now? 
throw ex; } } catch(TypeException ex) { logger.warn("ResourcesAction.buildSelectAttachment (static) : TypeException."); throw ex; } catch(PermissionException ex) { logger.warn("ResourcesAction.buildSelectAttachment (static) : PermissionException."); throw ex; } HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS); ContentCollection coll = contentService.getCollection(collectionId); expandedCollections.put(collectionId, coll); state.removeAttribute(STATE_PASTE_ALLOWED_FLAG); List this_site = new Vector(); User[] submitters = (User[]) state.getAttribute(STATE_ATTACH_SHOW_DROPBOXES); if(submitters != null) { String dropboxId = ContentHostingService.getDropboxCollection(); if(dropboxId == null) { ContentHostingService.createDropboxCollection(); dropboxId = ContentHostingService.getDropboxCollection(); } if(dropboxId == null) { // do nothing } else if(ContentHostingService.isDropboxMaintainer()) { for(int i = 0; i < submitters.length; i++) { User submitter = submitters[i]; String dbId = dropboxId + StringUtil.trimToZero(submitter.getId()) + "/"; try { ContentCollection db = ContentHostingService.getCollection(dbId); expandedCollections.put(dbId, db); List dbox = getBrowseItems(dbId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, false, state); if(dbox != null && dbox.size() > 0) { BrowseItem root = (BrowseItem) dbox.remove(0); // context.put("site", root); root.setName(submitter.getDisplayName() + " " + rb.getString("gen.drop")); root.addMembers(dbox); this_site.add(root); } } catch(IdUnusedException e) { // ignore a user's dropbox if it's not defined } } } else { try { ContentCollection db = ContentHostingService.getCollection(dropboxId); expandedCollections.put(dropboxId, db); List dbox = getBrowseItems(dropboxId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, false, state); if(dbox != null && dbox.size() > 0) { BrowseItem root = (BrowseItem) dbox.remove(0); // 
context.put("site", root); root.setName(ContentHostingService.getDropboxDisplayName()); root.addMembers(dbox); this_site.add(root); } } catch(IdUnusedException e) { // if an id is unused, ignore it } } } List members = getBrowseItems(collectionId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, navRoot.equals(homeCollectionId), state); if(members != null && members.size() > 0) { BrowseItem root = (BrowseItem) members.remove(0); if(atHome && dropboxMode) { root.setName(siteTitle + " " + rb.getString("gen.drop")); } else if(atHome) { root.setName(siteTitle + " " + rb.getString("gen.reso")); } context.put("site", root); root.addMembers(members); this_site.add(root); } context.put ("this_site", this_site); List other_sites = new Vector(); boolean show_all_sites = false; String allowed_to_see_other_sites = (String) state.getAttribute(STATE_SHOW_ALL_SITES); String show_other_sites = (String) state.getAttribute(STATE_SHOW_OTHER_SITES); context.put("show_other_sites", show_other_sites); if(Boolean.TRUE.toString().equals(allowed_to_see_other_sites)) { context.put("allowed_to_see_other_sites", Boolean.TRUE.toString()); show_all_sites = Boolean.TRUE.toString().equals(show_other_sites); } if(show_all_sites) { state.setAttribute(STATE_HIGHLIGHTED_ITEMS, highlightedItems); other_sites.addAll(readAllResources(state)); List messages = prepPage(state); context.put("other_sites", messages); if (state.getAttribute(STATE_NUM_MESSAGES) != null) { context.put("allMsgNumber", state.getAttribute(STATE_NUM_MESSAGES).toString()); context.put("allMsgNumberInt", state.getAttribute(STATE_NUM_MESSAGES)); } context.put("pagesize", ((Integer) state.getAttribute(STATE_PAGESIZE)).toString()); // find the position of the message that is the top first on the page if ((state.getAttribute(STATE_TOP_MESSAGE_INDEX) != null) && (state.getAttribute(STATE_PAGESIZE) != null)) { int topMsgPos = ((Integer)state.getAttribute(STATE_TOP_MESSAGE_INDEX)).intValue() + 1; 
context.put("topMsgPos", Integer.toString(topMsgPos)); int btmMsgPos = topMsgPos + ((Integer)state.getAttribute(STATE_PAGESIZE)).intValue() - 1; if (state.getAttribute(STATE_NUM_MESSAGES) != null) { int allMsgNumber = ((Integer)state.getAttribute(STATE_NUM_MESSAGES)).intValue(); if (btmMsgPos > allMsgNumber) btmMsgPos = allMsgNumber; } context.put("btmMsgPos", Integer.toString(btmMsgPos)); } boolean goPPButton = state.getAttribute(STATE_PREV_PAGE_EXISTS) != null; context.put("goPPButton", Boolean.toString(goPPButton)); boolean goNPButton = state.getAttribute(STATE_NEXT_PAGE_EXISTS) != null; context.put("goNPButton", Boolean.toString(goNPButton)); /* boolean goFPButton = state.getAttribute(STATE_FIRST_PAGE_EXISTS) != null; context.put("goFPButton", Boolean.toString(goFPButton)); boolean goLPButton = state.getAttribute(STATE_LAST_PAGE_EXISTS) != null; context.put("goLPButton", Boolean.toString(goLPButton)); */ context.put("pagesize", state.getAttribute(STATE_PAGESIZE)); // context.put("pagesizes", PAGESIZES); // List other_sites = new Vector(); /* * NOTE: This does not (and should not) get all sites for admin. * Getting all sites for admin is too big a request and * would result in too big a display to render in html. */ /* Map othersites = ContentHostingService.getCollectionMap(); Iterator siteIt = othersites.keySet().iterator(); while(siteIt.hasNext()) { String displayName = (String) siteIt.next(); String collId = (String) othersites.get(displayName); if(! 
collectionId.equals(collId)) { members = getBrowseItems(collId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, false, state); if(members != null && members.size() > 0) { BrowseItem root = (BrowseItem) members.remove(0); root.addMembers(members); root.setName(displayName); other_sites.add(root); } } } context.put ("other_sites", other_sites); */ } // context.put ("root", root); context.put("expandedCollections", expandedCollections); state.setAttribute(STATE_EXPANDED_COLLECTIONS, expandedCollections); } catch (IdUnusedException e) { addAlert(state, rb.getString("cannotfind")); context.put ("collectionFlag", Boolean.FALSE.toString()); } catch(TypeException e) { // logger.warn(this + "TypeException."); context.put ("collectionFlag", Boolean.FALSE.toString()); } catch(PermissionException e) { addAlert(state, rb.getString("notpermis1")); context.put ("collectionFlag", Boolean.FALSE.toString()); } context.put("homeCollection", (String) state.getAttribute (STATE_HOME_COLLECTION_ID)); context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE)); context.put ("resourceProperties", contentService.newResourceProperties ()); try { // TODO: why 'site' here? Site site = SiteService.getSite(ToolManager.getCurrentPlacement().getContext()); context.put("siteTitle", site.getTitle()); } catch (IdUnusedException e) { // logger.warn(this + e.toString()); } context.put("expandallflag", state.getAttribute(STATE_EXPAND_ALL_FLAG)); state.removeAttribute(STATE_NEED_TO_EXPAND_ALL); // inform the observing courier that we just updated the page... 
// if there are pending requests to do so they can be cleared // justDelivered(state); // pick the template based on whether client wants links or copies String template = TEMPLATE_SELECT; Object attach_links = current_stack_frame.get(STATE_ATTACH_LINKS); if(attach_links == null) { attach_links = state.getAttribute(STATE_ATTACH_LINKS); if(attach_links != null) { current_stack_frame.put(STATE_ATTACH_LINKS, attach_links); } } if(attach_links == null) { // user wants copies in hidden attachments area template = TEMPLATE_ATTACH; } return template; } // buildSelectAttachmentContext /** * Expand all the collection resources and put in EXPANDED_COLLECTIONS attribute. */ public void doList ( RunData data) { // get the state object SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); state.setAttribute (STATE_MODE, MODE_LIST); } // doList /** * Build the context for add display */ public String buildWebdavContext ( VelocityPortlet portlet, Context context, RunData data, SessionState state) { context.put("tlang",rb); // find the ContentTypeImage service context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); boolean inMyWorkspace = SiteService.isUserSite(ToolManager.getCurrentPlacement().getContext()); context.put("inMyWorkspace", Boolean.toString(inMyWorkspace)); context.put("server_url", ServerConfigurationService.getServerUrl()); context.put("site_id", ToolManager.getCurrentPlacement().getContext()); context.put("site_title", state.getAttribute(STATE_SITE_TITLE)); context.put("user_id", UserDirectoryService.getCurrentUser().getId()); context.put ("dav_group", "/dav/group/"); context.put ("dav_user", "/dav/user/"); String webdav_instructions = ServerConfigurationService.getString("webdav.instructions.url"); context.put("webdav_instructions" ,webdav_instructions); // TODO: get browser id from somewhere. 
//Session session = SessionManager.getCurrentSession(); //String browserId = session.; String browserID = UsageSessionService.getSession().getBrowserId(); if(browserID.equals(UsageSession.WIN_IE)) { context.put("isWinIEBrowser", Boolean.TRUE.toString()); } return TEMPLATE_DAV; } // buildWebdavContext /** * Build the context for delete confirmation page */ public String buildDeleteConfirmContext ( VelocityPortlet portlet, Context context, RunData data, SessionState state) { context.put("tlang",rb); // find the ContentTypeImage service context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); context.put ("collectionId", state.getAttribute (STATE_COLLECTION_ID) ); //%%%% FIXME context.put ("collectionPath", state.getAttribute (STATE_COLLECTION_PATH)); List deleteItems = (List) state.getAttribute(STATE_DELETE_ITEMS); List nonEmptyFolders = (List) state.getAttribute(STATE_DELETE_ITEMS_NOT_EMPTY); context.put ("deleteItems", deleteItems); Iterator it = nonEmptyFolders.iterator(); while(it.hasNext()) { BrowseItem folder = (BrowseItem) it.next(); addAlert(state, rb.getString("folder2") + " " + folder.getName() + " " + rb.getString("contain2") + " "); } // find the ContentTypeImage service context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); context.put ("service", state.getAttribute (STATE_CONTENT_SERVICE)); // %%STATE_MODE_RESOURCES%% //not show the public option when in dropbox mode if (RESOURCES_MODE_RESOURCES.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { context.put("dropboxMode", Boolean.FALSE); } else if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { // not show the public option or notification when in dropbox mode context.put("dropboxMode", Boolean.TRUE); } context.put("homeCollection", (String) state.getAttribute (STATE_HOME_COLLECTION_ID)); context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE)); 
context.put ("resourceProperties", ContentHostingService.newResourceProperties ()); // String template = (String) getContext(data).get("template"); return TEMPLATE_DELETE_CONFIRM; } // buildDeleteConfirmContext /** * Build the context to show the list of resource properties */ public static String buildMoreContext ( VelocityPortlet portlet, Context context, RunData data, SessionState state) { context.put("tlang",rb); // find the ContentTypeImage service context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); // find the ContentHosting service org.sakaiproject.content.api.ContentHostingService service = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE); context.put ("service", service); Map current_stack_frame = peekAtStack(state); String id = (String) current_stack_frame.get(STATE_MORE_ID); context.put ("id", id); String collectionId = (String) current_stack_frame.get(STATE_MORE_COLLECTION_ID); context.put ("collectionId", collectionId); String homeCollectionId = (String) (String) state.getAttribute (STATE_HOME_COLLECTION_ID); context.put("homeCollectionId", homeCollectionId); List cPath = getCollectionPath(state); context.put ("collectionPath", cPath); EditItem item = getEditItem(id, collectionId, data); context.put("item", item); // for the resources of type URL or plain text, show the content also try { ResourceProperties properties = service.getProperties (id); context.put ("properties", properties); String isCollection = properties.getProperty (ResourceProperties.PROP_IS_COLLECTION); if ((isCollection != null) && isCollection.equals (Boolean.FALSE.toString())) { String copyrightAlert = properties.getProperty(properties.getNamePropCopyrightAlert()); context.put("hasCopyrightAlert", copyrightAlert); String type = properties.getProperty (ResourceProperties.PROP_CONTENT_TYPE); if (type.equalsIgnoreCase (MIME_TYPE_DOCUMENT_PLAINTEXT) || type.equalsIgnoreCase 
(MIME_TYPE_DOCUMENT_HTML) || type.equalsIgnoreCase (ResourceProperties.TYPE_URL)) { ContentResource moreResource = service.getResource (id); // read the body String body = ""; byte[] content = null; try { content = moreResource.getContent(); if (content != null) { body = new String(content); } } catch(ServerOverloadException e) { // this represents server's file system is temporarily unavailable // report problem to user? log problem? } context.put ("content", body); } // if } // if else { // setup for quota - ADMIN only, collection only if (SecurityService.isSuperUser()) { try { // Getting the quota as a long validates the property long quota = properties.getLongProperty(ResourceProperties.PROP_COLLECTION_BODY_QUOTA); context.put("hasQuota", Boolean.TRUE); context.put("quota", properties.getProperty(ResourceProperties.PROP_COLLECTION_BODY_QUOTA)); } catch (Exception any) {} } } } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); context.put("notExistFlag", new Boolean(true)); } catch (TypeException e) { addAlert(state, rb.getString("typeex") + " "); } catch (PermissionException e) { addAlert(state," " + rb.getString("notpermis2") + " " + id + ". 
"); } // try-catch if (state.getAttribute(STATE_MESSAGE) == null) { context.put("notExistFlag", new Boolean(false)); } if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { // notshow the public option or notification when in dropbox mode context.put("dropboxMode", Boolean.TRUE); } else { context.put("dropboxMode", Boolean.FALSE); Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } context.put("preventPublicDisplay", preventPublicDisplay); if(preventPublicDisplay.equals(Boolean.FALSE)) { // find out about pubview boolean pubview = ContentHostingService.isInheritingPubView(id); if (!pubview) pubview = ContentHostingService.isPubView(id); context.put("pubview", new Boolean(pubview)); } } context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE)); if (state.getAttribute(COPYRIGHT_TYPES) != null) { List copyrightTypes = (List) state.getAttribute(COPYRIGHT_TYPES); context.put("copyrightTypes", copyrightTypes); } metadataGroupsIntoContext(state, context); // String template = (String) getContext(data).get("template"); return TEMPLATE_MORE; } // buildMoreContext /** * Build the context to edit the editable list of resource properties */ public static String buildEditContext (VelocityPortlet portlet, Context context, RunData data, SessionState state) { context.put("tlang",rb); // find the ContentTypeImage service Map current_stack_frame = peekAtStack(state); context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE)); context.put ("from", state.getAttribute (STATE_FROM)); context.put ("mycopyright", (String) state.getAttribute (STATE_MY_COPYRIGHT)); context.put("SITE_ACCESS", AccessMode.SITE.toString()); context.put("GROUP_ACCESS", AccessMode.GROUPED.toString()); context.put("INHERITED_ACCESS", 
AccessMode.INHERITED.toString()); context.put("PUBLIC_ACCESS", PUBLIC_ACCESS); String collectionId = (String) current_stack_frame.get(STATE_STACK_EDIT_COLLECTION_ID); context.put ("collectionId", collectionId); String id = (String) current_stack_frame.get(STATE_STACK_EDIT_ID); if(id == null) { id = (String) state.getAttribute(STATE_EDIT_ID); if(id == null) { id = ""; } current_stack_frame.put(STATE_STACK_EDIT_ID, id); } context.put ("id", id); String homeCollectionId = (String) state.getAttribute (STATE_HOME_COLLECTION_ID); if(homeCollectionId == null) { homeCollectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext()); state.setAttribute(STATE_HOME_COLLECTION_ID, homeCollectionId); } context.put("homeCollectionId", homeCollectionId); List collectionPath = getCollectionPath(state); context.put ("collectionPath", collectionPath); if(homeCollectionId.equals(id)) { context.put("atHome", Boolean.TRUE.toString()); } String intent = (String) current_stack_frame.get(STATE_STACK_EDIT_INTENT); if(intent == null) { intent = INTENT_REVISE_FILE; current_stack_frame.put(STATE_STACK_EDIT_INTENT, intent); } context.put("intent", intent); context.put("REVISE", INTENT_REVISE_FILE); context.put("REPLACE", INTENT_REPLACE_FILE); String show_form_items = (String) state.getAttribute(STATE_SHOW_FORM_ITEMS); if(show_form_items == null) { show_form_items = (String) state.getAttribute(STATE_SHOW_FORM_ITEMS); if(show_form_items != null) { current_stack_frame.put(STATE_SHOW_FORM_ITEMS,show_form_items); } } if(show_form_items != null) { context.put("show_form_items", show_form_items); } Collection groups = ContentHostingService.getGroupsWithReadAccess(collectionId); // TODO: does this method filter groups for this subcollection?? if(! 
groups.isEmpty()) { context.put("siteHasGroups", Boolean.TRUE.toString()); context.put("theGroupsInThisSite", groups); } // put the item into context EditItem item = (EditItem) current_stack_frame.get(STATE_STACK_EDIT_ITEM); if(item == null) { item = getEditItem(id, collectionId, data); if(item == null) { // what?? } if (state.getAttribute(STATE_MESSAGE) == null) { // got resource and sucessfully populated item with values state.setAttribute(STATE_EDIT_ALERTS, new HashSet()); current_stack_frame.put(STATE_STACK_EDIT_ITEM, item); } } item.setPossibleGroups(groups); context.put("item", item); if(item.isStructuredArtifact()) { context.put("formtype", item.getFormtype()); current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, item.getFormtype()); List listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES); if(listOfHomes == null) { ResourcesAction.setupStructuredObjects(state); listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES); } context.put("homes", listOfHomes); String formtype_readonly = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE_READONLY); if(formtype_readonly == null) { formtype_readonly = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE_READONLY); if(formtype_readonly == null) { formtype_readonly = Boolean.FALSE.toString(); } current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE_READONLY, formtype_readonly); } if(formtype_readonly != null && formtype_readonly.equals(Boolean.TRUE.toString())) { context.put("formtype_readonly", formtype_readonly); } String rootname = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_ROOTNAME); context.put("rootname", rootname); context.put("STRING", ResourcesMetadata.WIDGET_STRING); context.put("TEXTAREA", ResourcesMetadata.WIDGET_TEXTAREA); context.put("BOOLEAN", ResourcesMetadata.WIDGET_BOOLEAN); context.put("INTEGER", ResourcesMetadata.WIDGET_INTEGER); context.put("DOUBLE", ResourcesMetadata.WIDGET_DOUBLE); context.put("DATE", ResourcesMetadata.WIDGET_DATE); context.put("TIME", 
			ResourcesMetadata.WIDGET_TIME);
			context.put("DATETIME", ResourcesMetadata.WIDGET_DATETIME);
			context.put("ANYURI", ResourcesMetadata.WIDGET_ANYURI);
			context.put("ENUM", ResourcesMetadata.WIDGET_ENUM);
			context.put("NESTED", ResourcesMetadata.WIDGET_NESTED);
			context.put("WYSIWYG", ResourcesMetadata.WIDGET_WYSIWYG);

			context.put("today", TimeService.newTime());
			context.put("TRUE", Boolean.TRUE.toString());
		}

		// copyright
		copyrightChoicesIntoContext(state, context);

		// put schema for metadata into context
		metadataGroupsIntoContext(state, context);

		// %%STATE_MODE_RESOURCES%%
		if (RESOURCES_MODE_RESOURCES.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES)))
		{
			context.put("dropboxMode", Boolean.FALSE);
		}
		else if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES)))
		{
			// do not show the public option or notification when in dropbox mode
			context.put("dropboxMode", Boolean.TRUE);
		}

		context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE));

		// String template = (String) getContext(data).get("template");
		return TEMPLATE_EDIT;

	}	// buildEditContext

	/**
	* Navigate in the resource hierarchy: save the user's current selections,
	* verify access to the target collection (alerting on permission / missing /
	* type errors), and on success make it the current collection, updating the
	* event-observation pattern and collapsing the expanded-collections map.
	*/
	public static void doNavigate ( RunData data )
	{
		SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

		// navigation cancels any pending "select all"
		if (state.getAttribute (STATE_SELECT_ALL_FLAG)!=null && state.getAttribute (STATE_SELECT_ALL_FLAG).equals (Boolean.TRUE.toString()))
		{
			state.setAttribute (STATE_SELECT_ALL_FLAG, Boolean.FALSE.toString());
		}

		// navigation cancels any pending "expand all"
		if (state.getAttribute (STATE_EXPAND_ALL_FLAG)!=null && state.getAttribute (STATE_EXPAND_ALL_FLAG).equals (Boolean.TRUE.toString()))
		{
			state.setAttribute (STATE_EXPAND_ALL_FLAG, Boolean.FALSE.toString());
		}

		// save the current selections
		Set selectedSet = new TreeSet();
		String[] selectedItems = data.getParameters ().getStrings ("selectedMembers");
		if(selectedItems != null)
		{
			selectedSet.addAll(Arrays.asList(selectedItems));
		}
		state.setAttribute(STATE_LIST_SELECTIONS, selectedSet);

		String collectionId = data.getParameters().getString ("collectionId");
		String navRoot = data.getParameters().getString("navRoot");
		state.setAttribute(STATE_NAVIGATION_ROOT, navRoot);

		// the exception message
		// probe the collection first so failures alert without changing state
		try
		{
			ContentHostingService.checkCollection(collectionId);
		}
		catch(PermissionException e)
		{
			addAlert(state, " " + rb.getString("notpermis3") + " " );
		}
		catch (IdUnusedException e)
		{
			addAlert(state, " " + rb.getString("notexist2") + " ");
		}
		catch (TypeException e)
		{
			addAlert(state," " + rb.getString("notexist2") + " ");
		}

		// only commit the navigation if the probe raised no alert
		if (state.getAttribute(STATE_MESSAGE) == null)
		{
			String oldCollectionId = (String) state.getAttribute(STATE_COLLECTION_ID);
			// update this folder id in the set to be event-observed
			removeObservingPattern(oldCollectionId, state);
			addObservingPattern(collectionId, state);

			state.setAttribute(STATE_COLLECTION_ID, collectionId);
			// reset expansion state for the new collection
			state.setAttribute(STATE_EXPANDED_COLLECTIONS, new HashMap());
		}

	}	// doNavigate

	/**
	* Show information about WebDAV: switch to MODE_DAV, clearing selections
	* and cancelling any copy/move operation in progress.
	*/
	public void doShow_webdav ( RunData data )
	{
		SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

		state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());

		state.setAttribute (STATE_MODE, MODE_DAV);

		// cancel copy if there is one in progress
		if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
		{
			initCopyContext(state);
		}

		// cancel move if there is one in progress
		if(!
		Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
		{
			initMoveContext(state);
		}

	}	// doShow_webdav

	/**
	* initiate creation of one or more resource items (folders, file uploads,
	* html docs, text docs, or urls)
	* default type is folder
	*/
	public static void doCreate(RunData data)
	{
		SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
		ParameterParser params = data.getParameters ();

		// make sure the create-alerts set exists so later steps can append to it
		Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
		if(alerts == null)
		{
			alerts = new HashSet();
			state.setAttribute(STATE_CREATE_ALERTS, alerts);
		}

		// cancel copy if there is one in progress
		if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
		{
			initCopyContext(state);
		}

		// cancel move if there is one in progress
		if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
		{
			initMoveContext(state);
		}

		state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());

		// despite the javadoc above, the fallback item type here is TYPE_UPLOAD
		String itemType = params.getString("itemType");
		if(itemType == null || "".equals(itemType))
		{
			itemType = TYPE_UPLOAD;
		}

		// "peek" reuses the current suspended-operations frame; anything else pushes a new one
		String stackOp = params.getString("suspended-operations-stack");
		Map current_stack_frame = null;
		if(stackOp != null && stackOp.equals("peek"))
		{
			current_stack_frame = peekAtStack(state);
		}
		else
		{
			current_stack_frame = pushOnStack(state);
		}

		String encoding = data.getRequest().getCharacterEncoding();

		// lazily resolve the default copyright status from sakai.properties
		String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
		if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
		{
			defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
			state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
		}

		Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);

		String collectionId = params.getString ("collectionId");
		current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId);

		List new_items = newEditItems(collectionId, itemType, encoding,
defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS); current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items); current_stack_frame.put(STATE_STACK_CREATE_TYPE, itemType); current_stack_frame.put(STATE_STACK_CREATE_NUMBER, new Integer(1)); state.setAttribute(STATE_CREATE_ALERTS, new HashSet()); current_stack_frame.put(STATE_CREATE_MISSING_ITEM, new HashSet()); current_stack_frame.remove(STATE_STACK_STRUCTOBJ_TYPE); current_stack_frame.put(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_CREATE_INIT); state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_CREATE_INIT); } // doCreate protected static List newEditItems(String collectionId, String itemtype, String encoding, String defaultCopyrightStatus, boolean preventPublicDisplay, int number) { List new_items = new Vector(); ContentCollection collection = null; AccessMode inheritedAccess = AccessMode.INHERITED; // Collection inheritedGroups = new Vector(); try { collection = ContentHostingService.getCollection(collectionId); inheritedAccess = collection.getAccess(); // inheritedGroups = collection.getGroups(); if(AccessMode.INHERITED == inheritedAccess) { inheritedAccess = collection.getInheritedAccess(); // inheritedGroups = collection.getInheritedGroups(); } } catch(PermissionException e) { //alerts.add(rb.getString("notpermis4")); e.printStackTrace(); } catch (IdUnusedException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (TypeException e) { // TODO Auto-generated catch block e.printStackTrace(); } boolean pubviewset = ContentHostingService.isInheritingPubView(collectionId) || ContentHostingService.isPubView(collectionId); //Collection possibleGroups = ContentHostingService.getGroupsWithReadAccess(collectionId); boolean isInDropbox = ContentHostingService.isInDropbox(collectionId); Collection possibleGroups = ContentHostingService.getGroupsWithAddPermission(collectionId); Site site = null; Collection site_groups = null; try { site = 
SiteService.getSite(ToolManager.getCurrentPlacement().getContext()); } catch (IdUnusedException e) { // TODO Auto-generated catch block e.printStackTrace(); } if(site != null) { site_groups = site.getGroups(); } else { site_groups = new Vector(); } Collection inherited_access_groups = collection.getGroups(); if(inherited_access_groups == null || inherited_access_groups.isEmpty()) { inherited_access_groups = collection.getInheritedGroups(); } if(inherited_access_groups == null) { inherited_access_groups = new Vector(); } Collection allowedAddGroups = null; if(AccessMode.GROUPED == inheritedAccess) { allowedAddGroups = ContentHostingService.getGroupsWithAddPermission(collectionId); } else { allowedAddGroups = ContentHostingService.getGroupsWithAddPermission(ContentHostingService.getSiteCollection(site.getId())); } if(allowedAddGroups == null) { allowedAddGroups = new Vector(); } for(int i = 0; i < CREATE_MAX_ITEMS; i++) { EditItem item = new EditItem(itemtype); if(encoding != null) { item.setEncoding(encoding); } item.setInDropbox(isInDropbox); if(inheritedAccess == null || AccessMode.SITE == inheritedAccess) { item.setInheritedAccess(AccessMode.INHERITED.toString()); } else { item.setInheritedAccess(inheritedAccess.toString()); } item.setAllSiteGroups(site_groups); item.setInheritedGroupRefs(inherited_access_groups); item.setAllowedAddGroupRefs(allowedAddGroups); item.setCopyrightStatus(defaultCopyrightStatus); new_items.add(item); // item.setPossibleGroups(new Vector(possibleGroups)); // if(inheritedGroups != null) // { // item.setInheritedGroups(inheritedGroups); // } if(preventPublicDisplay) { item.setPubviewPossible(false); item.setPubviewInherited(false); item.setPubview(false); } else { item.setPubviewPossible(true); item.setPubviewInherited(pubviewset); //item.setPubview(pubviewset); } } return new_items; } public static void addCreateContextAlert(SessionState state, String message) { String soFar = (String) state.getAttribute(STATE_CREATE_MESSAGE); if (soFar 
!= null) { soFar = soFar + " " + message; } else { soFar = message; } state.setAttribute(STATE_CREATE_MESSAGE, soFar); } // addItemTypeContextAlert /** * initiate creation of one or more resource items (file uploads, html docs, text docs, or urls -- not folders) * default type is file upload */ /** * @param data */ public static void doCreateitem(RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); ParameterParser params = data.getParameters (); state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet()); Map current_stack_frame = peekAtStack(state); boolean pop = false; String collectionId = params.getString("collectionId"); String itemType = params.getString("itemType"); String flow = params.getString("flow"); Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts == null) { alerts = new HashSet(); state.setAttribute(STATE_CREATE_ALERTS, alerts); } Set missing = new HashSet(); if(flow == null || flow.equals("cancel")) { pop = true; } else if(flow.equals("updateNumber")) { captureMultipleValues(state, params, false); int number = params.getInt("numberOfItems"); Integer numberOfItems = new Integer(number); current_stack_frame.put(ResourcesAction.STATE_STACK_CREATE_NUMBER, numberOfItems); // clear display of error messages state.setAttribute(STATE_CREATE_ALERTS, new HashSet()); List items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS); if(items == null) { String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT); if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals("")) { defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright"); state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus); } String encoding = data.getRequest().getCharacterEncoding(); items = newEditItems(collectionId, itemType, encoding, 
defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS); } current_stack_frame.put(STATE_STACK_CREATE_ITEMS, items); Iterator it = items.iterator(); while(it.hasNext()) { EditItem item = (EditItem) it.next(); item.clearMissing(); } state.removeAttribute(STATE_MESSAGE); } else if(flow.equals("create") && TYPE_FOLDER.equals(itemType)) { // Get the items captureMultipleValues(state, params, true); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts.isEmpty()) { // Save the items createFolders(state); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts.isEmpty()) { pop = true; } } } else if(flow.equals("create") && TYPE_UPLOAD.equals(itemType)) { captureMultipleValues(state, params, true); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts.isEmpty()) { createFiles(state); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts.isEmpty()) { pop = true; } } } else if(flow.equals("create") && MIME_TYPE_DOCUMENT_HTML.equals(itemType)) { captureMultipleValues(state, params, true); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts.isEmpty()) { createFiles(state); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts.isEmpty()) { pop = true; } } } else if(flow.equals("create") && MIME_TYPE_DOCUMENT_PLAINTEXT.equals(itemType)) { captureMultipleValues(state, params, true); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts.isEmpty()) { createFiles(state); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts.isEmpty()) { pop =true; } } } else if(flow.equals("create") && TYPE_URL.equals(itemType)) { captureMultipleValues(state, params, true); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts.isEmpty()) { createUrls(state); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts.isEmpty()) { pop = true; } } } else if(flow.equals("create") && TYPE_FORM.equals(itemType)) { captureMultipleValues(state, params, true); alerts = 
(Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts == null) { alerts = new HashSet(); state.setAttribute(STATE_CREATE_ALERTS, alerts); } if(alerts.isEmpty()) { createStructuredArtifacts(state); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts.isEmpty()) { pop = true; } } } else if(flow.equals("create")) { captureMultipleValues(state, params, true); alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts == null) { alerts = new HashSet(); state.setAttribute(STATE_CREATE_ALERTS, alerts); } alerts.add("Invalid item type"); state.setAttribute(STATE_CREATE_ALERTS, alerts); } else if(flow.equals("updateDocType")) { // captureMultipleValues(state, params, false); String formtype = params.getString("formtype"); if(formtype == null || formtype.equals("")) { alerts.add("Must select a form type"); missing.add("formtype"); } current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype); setupStructuredObjects(state); } else if(flow.equals("addInstance")) { captureMultipleValues(state, params, false); String field = params.getString("field"); List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS); if(new_items == null) { String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT); if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals("")) { defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright"); state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus); } String encoding = data.getRequest().getCharacterEncoding(); new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS); current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items); } EditItem item = (EditItem) new_items.get(0); addInstance(field, item.getProperties()); ResourcesMetadata form = item.getForm(); List flatList = form.getFlatList(); item.setProperties(flatList); } else if(flow.equals("linkResource") && 
TYPE_FORM.equals(itemType)) { captureMultipleValues(state, params, false); createLink(data, state); } else if(flow.equals("showOptional")) { captureMultipleValues(state, params, false); int twiggleNumber = params.getInt("twiggleNumber", 0); String metadataGroup = params.getString("metadataGroup"); List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS); if(new_items == null) { String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT); if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals("")) { defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright"); state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus); } String encoding = data.getRequest().getCharacterEncoding(); new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS); current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items); } if(new_items != null && new_items.size() > twiggleNumber) { EditItem item = (EditItem) new_items.get(twiggleNumber); if(item != null) { item.showMetadataGroup(metadataGroup); } } // clear display of error messages state.setAttribute(STATE_CREATE_ALERTS, new HashSet()); Iterator it = new_items.iterator(); while(it.hasNext()) { EditItem item = (EditItem) it.next(); item.clearMissing(); } } else if(flow.equals("hideOptional")) { captureMultipleValues(state, params, false); int twiggleNumber = params.getInt("twiggleNumber", 0); String metadataGroup = params.getString("metadataGroup"); List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS); if(new_items == null) { String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT); if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals("")) { defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright"); state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus); } String encoding = 
data.getRequest().getCharacterEncoding(); new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS); current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items); } if(new_items != null && new_items.size() > twiggleNumber) { EditItem item = (EditItem) new_items.get(twiggleNumber); if(item != null) { item.hideMetadataGroup(metadataGroup); } } // clear display of error messages state.setAttribute(STATE_CREATE_ALERTS, new HashSet()); Iterator it = new_items.iterator(); while(it.hasNext()) { EditItem item = (EditItem) it.next(); item.clearMissing(); } } alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS); if(alerts == null) { alerts = new HashSet(); state.setAttribute(STATE_CREATE_ALERTS, alerts); } Iterator alertIt = alerts.iterator(); while(alertIt.hasNext()) { String alert = (String) alertIt.next(); addCreateContextAlert(state, alert); //addAlert(state, alert); } alerts.clear(); current_stack_frame.put(STATE_CREATE_MISSING_ITEM, missing); if(pop) { List new_items = (List) current_stack_frame.get(ResourcesAction.STATE_HELPER_NEW_ITEMS); String helper_changed = (String) state.getAttribute(STATE_HELPER_CHANGED); if(Boolean.TRUE.toString().equals(helper_changed)) { // get list of attachments? 
if(new_items != null) { List attachments = (List) state.getAttribute(STATE_ATTACHMENTS); if(attachments == null) { attachments = EntityManager.newReferenceList(); state.setAttribute(STATE_ATTACHMENTS, attachments); } Iterator it = new_items.iterator(); while(it.hasNext()) { AttachItem item = (AttachItem) it.next(); try { ContentResource resource = ContentHostingService.getResource(item.getId()); if (checkSelctItemFilter(resource, state)) { attachments.add(resource.getReference()); } else { it.remove(); addAlert(state, (String) rb.getFormattedMessage("filter", new Object[]{item.getDisplayName()})); } } catch (PermissionException e) { addAlert(state, (String) rb.getFormattedMessage("filter", new Object[]{item.getDisplayName()})); } catch (IdUnusedException e) { addAlert(state, (String) rb.getFormattedMessage("filter", new Object[]{item.getDisplayName()})); } catch (TypeException e) { addAlert(state, (String) rb.getFormattedMessage("filter", new Object[]{item.getDisplayName()})); } Reference ref = EntityManager.newReference(ContentHostingService.getReference(item.getId())); } } } popFromStack(state); resetCurrentMode(state); if(!ResourcesAction.isStackEmpty(state) && new_items != null) { current_stack_frame = peekAtStack(state); List old_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS); if(old_items == null) { old_items = new Vector(); current_stack_frame.put(STATE_HELPER_NEW_ITEMS, old_items); } old_items.addAll(new_items); } } } // doCreateitem private static void createLink(RunData data, SessionState state) { ParameterParser params = data.getParameters (); Map current_stack_frame = peekAtStack(state); String field = params.getString("field"); if(field == null) { } else { current_stack_frame.put(ResourcesAction.STATE_ATTACH_FORM_FIELD, field); } //state.setAttribute(ResourcesAction.STATE_MODE, ResourcesAction.MODE_HELPER); state.setAttribute(ResourcesAction.STATE_RESOURCES_HELPER_MODE, ResourcesAction.MODE_ATTACHMENT_SELECT); 
state.setAttribute(ResourcesAction.STATE_ATTACH_CARDINALITY, ResourcesAction.CARDINALITY_SINGLE); // put a copy of the attachments into the state // state.setAttribute(ResourcesAction.STATE_ATTACHMENTS, EntityManager.newReferenceList()); // whether there is already an attachment /* if (attachments.size() > 0) { sstate.setAttribute(ResourcesAction.STATE_HAS_ATTACHMENT_BEFORE, Boolean.TRUE); } else { sstate.setAttribute(ResourcesAction.STATE_HAS_ATTACHMENT_BEFORE, Boolean.FALSE); } */ // cancel copy if there is one in progress if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG))) { initCopyContext(state); } // cancel move if there is one in progress if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG))) { initMoveContext(state); } } /** * Add a new StructuredArtifact to ContentHosting for each EditItem in the state attribute named STATE_STACK_CREATE_ITEMS. * The number of items to be added is indicated by the state attribute named STATE_STACK_CREATE_NUMBER, and * the items are added to the collection identified by the state attribute named STATE_STACK_CREATE_COLLECTION_ID. 
 * @param state the tool session state
 */
private static void createStructuredArtifacts(SessionState state)
{
    Map current_stack_frame = peekAtStack(state);
    // resolve the destination collection: stack frame -> tool state -> site collection
    String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID);
    if(collectionId == null || collectionId.trim().length() == 0)
    {
        collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID);
        if(collectionId == null || collectionId.trim().length() == 0)
        {
            collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
        }
        current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId);
    }
    List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
    if(new_items == null)
    {
        // no pending items in this frame: build fresh EditItems with default copyright/type
        String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
        if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
        {
            defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
            state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
        }
        String itemType = (String) current_stack_frame.get(STATE_STACK_CREATE_TYPE);
        if(itemType == null)
        {
            itemType = (String) state.getAttribute(STATE_CREATE_TYPE);
            if(itemType == null)
            {
                itemType = ResourcesAction.TYPE_FORM;
            }
            current_stack_frame.put(STATE_STACK_CREATE_TYPE, itemType);
        }
        String encoding = (String) state.getAttribute(STATE_ENCODING);
        new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, true, CREATE_MAX_ITEMS);
        current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
    }
    Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
    if(alerts == null)
    {
        alerts = new HashSet();
        state.setAttribute(STATE_CREATE_ALERTS, alerts);
    }
    Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
    if(number == null)
    {
        number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
        if(number == null)
        {
            number = new Integer(1);
        }
        current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
    }
    int numberOfItems = number.intValue();
    SchemaBean rootSchema = (SchemaBean) current_stack_frame.get(STATE_STACK_STRUCT_OBJ_SCHEMA);
    SchemaNode rootNode = rootSchema.getSchema();
    outerloop: for(int i = 0; i < numberOfItems; i++)
    {
        EditItem item = (EditItem) new_items.get(i);
        if(item.isBlank())
        {
            continue;
        }
        // validate the form data against the schema before saving anything
        SaveArtifactAttempt attempt = new SaveArtifactAttempt(item, rootNode);
        validateStructuredArtifact(attempt);
        List errors = attempt.getErrors();
        if(errors.isEmpty())
        {
            try
            {
                ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();
                resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, item.getName());
                resourceProperties.addProperty (ResourceProperties.PROP_DESCRIPTION, item.getDescription());
                resourceProperties.addProperty(ResourceProperties.PROP_CONTENT_ENCODING, "UTF-8");
                resourceProperties.addProperty(ResourceProperties.PROP_STRUCTOBJ_TYPE, item.getFormtype());
                resourceProperties.addProperty(ContentHostingService.PROP_ALTERNATE_REFERENCE, org.sakaiproject.metaobj.shared.mgt.MetaobjEntityManager.METAOBJ_ENTITY_PREFIX);
                List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
                saveMetadata(resourceProperties, metadataGroups, item);
                String filename = Validator.escapeResourceName(item.getName()).trim();
                String extension = ".xml";
                // NOTE(review): attemptNum/attemptStr are never updated -- appear to be leftovers from a
                // retry-for-uniqueness loop now handled by addResource(..., MAXIMUM_ATTEMPTS_FOR_UNIQUENESS, ...)
                int attemptNum = 0;
                String attemptStr = "";
                String newResourceId = collectionId + filename + attemptStr + extension;
                if(newResourceId.length() > ContentHostingService.MAXIMUM_RESOURCE_ID_LENGTH)
                {
                    alerts.add(rb.getString("toolong") + " " + newResourceId);
                    continue outerloop;
                }
                // restrict group assignment to groups this user may actually add to
                SortedSet groups = new TreeSet(item.getEntityGroupRefs());
                groups.retainAll(item.getAllowedAddGroupRefs());
                try
                {
                    ContentResource resource = ContentHostingService.addResource (filename + extension, collectionId, MAXIMUM_ATTEMPTS_FOR_UNIQUENESS, MIME_TYPE_STRUCTOBJ, item.getContent(), resourceProperties, groups, item.getNotification());
                    Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
                    if(preventPublicDisplay == null)
                    {
                        preventPublicDisplay = Boolean.FALSE;
                        state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
                    }
                    if(! preventPublicDisplay.booleanValue() && item.isPubview())
                    {
                        ContentHostingService.setPubView(resource.getId(), true);
                    }
                    String mode = (String) state.getAttribute(STATE_MODE);
                    if(MODE_HELPER.equals(mode))
                    {
                        String helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE);
                        if(helper_mode != null && MODE_ATTACHMENT_NEW_ITEM_INIT.equals(helper_mode))
                        {
                            // add to the attachments vector
                            List attachments = EntityManager.newReferenceList();
                            Reference ref = EntityManager.newReference(ContentHostingService.getReference(resource.getId()));
                            attachments.add(ref);
                            cleanupState(state);
                            state.setAttribute(STATE_ATTACHMENTS, attachments);
                        }
                        else
                        {
                            // attach either the item itself or a link to it, depending on STATE_ATTACH_LINKS
                            Object attach_links = current_stack_frame.get(STATE_ATTACH_LINKS);
                            if(attach_links == null)
                            {
                                attach_links = state.getAttribute(STATE_ATTACH_LINKS);
                                if(attach_links != null)
                                {
                                    current_stack_frame.put(STATE_ATTACH_LINKS, attach_links);
                                }
                            }
                            if(attach_links == null)
                            {
                                attachItem(resource.getId(), state);
                            }
                            else
                            {
                                attachLink(resource.getId(), state);
                            }
                        }
                    }
                }
                catch(PermissionException e)
                {
                    alerts.add(rb.getString("notpermis12"));
                    continue outerloop;
                }
                catch(IdInvalidException e)
                {
                    alerts.add(rb.getString("title") + " " + e.getMessage ());
                    continue outerloop;
                }
                catch(IdLengthException e)
                {
                    alerts.add(rb.getString("toolong") + " " + e.getMessage());
                    continue outerloop;
                }
                catch(IdUniquenessException e)
                {
                    alerts.add("Could not add this item to this folder");
                    continue outerloop;
                }
                catch(InconsistentException e)
                {
                    alerts.add(RESOURCE_INVALID_TITLE_STRING);
                    continue outerloop;
                }
                catch(OverQuotaException e)
                {
                    alerts.add(rb.getString("overquota"));
                    continue outerloop;
                }
                catch(ServerOverloadException e)
                {
                    alerts.add(rb.getString("failed"));
                    continue outerloop;
                }
            }
            catch(RuntimeException e)
            {
                logger.warn("ResourcesAction.createStructuredArtifacts ***** Unknown Exception ***** " + e.getMessage());
                alerts.add(rb.getString("failed"));
            }
            // make sure the destination folder shows as expanded in the UI
            HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
            if(currentMap == null)
            {
                // do nothing
            }
            else if(!currentMap.containsKey(collectionId))
            {
                try
                {
                    currentMap.put (collectionId,ContentHostingService.getCollection (collectionId));
                    state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
                    // add this folder id into the set to be event-observed
                    addObservingPattern(collectionId, state);
                }
                catch (IdUnusedException ignore)
                {
                }
                catch (TypeException ignore)
                {
                }
                catch (PermissionException ignore)
                {
                }
            }
            state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
        }
        else
        {
            // validation failed: surface each error message to the user
            Iterator errorIt = errors.iterator();
            while(errorIt.hasNext())
            {
                ValidationError error = (ValidationError) errorIt.next();
                alerts.add(error.getDefaultMessage());
            }
        }
    }
    state.setAttribute(STATE_CREATE_ALERTS, alerts);
}

/**
 * Convert from a hierarchical list of ResourcesMetadata objects to an org.w3.dom.Document,
 * then to a string representation, then to a metaobj ElementBean. Validate the ElementBean
 * against a SchemaBean. If it validates, save the string representation. Otherwise, on
 * return, the parameter contains a non-empty list of ValidationError objects describing the
 * problems.
 * @param attempt A wrapper for the EditItem object which contains the hierarchical list of
 * ResourcesMetadata objects for this form. Also contains an initially empty list of
 * ValidationError objects that describes any of the problems found in validating the form.
 */
private static void validateStructuredArtifact(SaveArtifactAttempt attempt)
{
    EditItem item = attempt.getItem();
    ResourcesMetadata form = item.getForm();
    // depth-first walk of the metadata tree, building a DOM document as we go;
    // 'parents' maps a child's dotted name to the DOM node it should attach under
    Stack processStack = new Stack();
    processStack.push(form);
    Map parents = new Hashtable();
    Document doc = Xml.createDocument();
    int count = 0;
    while(!processStack.isEmpty())
    {
        Object object = processStack.pop();
        if(object instanceof ResourcesMetadata)
        {
            ResourcesMetadata element = (ResourcesMetadata) object;
            Element node = doc.createElement(element.getLocalname());
            if(element.isNested())
            {
                // push a carrier so the container node is attached after its children are queued
                processStack.push(new ElementCarrier(node, element.getDottedname()));
                List children = element.getNestedInstances();
                //List children = element.getNested();
                for(int k = children.size() - 1; k >= 0; k--)
                {
                    ResourcesMetadata child = (ResourcesMetadata) children.get(k);
                    processStack.push(child);
                    parents.put(child.getDottedname(), node);
                }
            }
            else
            {
                // leaf element: serialize each instance value to a text node
                List values = element.getInstanceValues();
                Iterator valueIt = values.iterator();
                while(valueIt.hasNext())
                {
                    Object value = valueIt.next();
                    if(value == null)
                    {
                        // do nothing
                    }
                    else if(value instanceof String)
                    {
                        node.appendChild(doc.createTextNode((String)value));
                    }
                    else if(value instanceof Time)
                    {
                        // render Sakai Time as yyyy-MM-dd with zero-padded month/day
                        Time time = (Time) value;
                        TimeBreakdown breakdown = time.breakdownLocal();
                        int year = breakdown.getYear();
                        int month = breakdown.getMonth();
                        int day = breakdown.getDay();
                        String date = "" + year + (month < 10 ? "-0" : "-") + month + (day < 10 ? "-0" : "-") + day;
                        node.appendChild(doc.createTextNode(date));
                    }
                    else if(value instanceof Date)
                    {
                        Date date = (Date) value;
                        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
                        String formatted = df.format(date);
                        node.appendChild(doc.createTextNode(formatted));
                    }
                    else if(value instanceof Reference)
                    {
                        node.appendChild(doc.createTextNode(((Reference)value).getId()));
                    }
                    else
                    {
                        node.appendChild(doc.createTextNode(value.toString()));
                    }
                }
                Element parent = (Element) parents.get(element.getDottedname());
                if(parent == null)
                {
                    doc.appendChild(node);
                    count++;
                }
                else
                {
                    parent.appendChild(node);
                }
            }
        }
        else if(object instanceof ElementCarrier)
        {
            // container node whose children have already been processed: attach it now
            ElementCarrier carrier = (ElementCarrier) object;
            Element node = carrier.getElement();
            Element parent = (Element) parents.get(carrier.getParent());
            if(parent == null)
            {
                doc.appendChild(node);
                count++;
            }
            else
            {
                parent.appendChild(node);
            }
        }
    }
    String content = Xml.writeDocumentToString(doc);
    item.setContent(content);
    StructuredArtifactValidationService validator = (StructuredArtifactValidationService) ComponentManager.get("org.sakaiproject.metaobj.shared.mgt.StructuredArtifactValidationService");
    List errors = new ArrayList();
    // convert the String representation to an ElementBean object.  If that fails,
    // add an error and return.
    ElementBean bean = null;
    SAXBuilder builder = new SAXBuilder();
    StringReader reader = new StringReader(content);
    try
    {
        org.jdom.Document jdoc = builder.build(reader);
        bean = new ElementBean(jdoc.getRootElement(), attempt.getSchema(), true);
    }
    catch (JDOMException e)
    {
        // add message to list of errors
        errors.add(new ValidationError("","",null,"JDOMException"));
    }
    catch (IOException e)
    {
        // add message to list of errors
        errors.add(new ValidationError("","",null,"IOException"));
    }
    // call this.validate(bean, rootSchema, errors) and add results to errors list.
    if(bean == null)
    {
        // add message to list of errors
        errors.add(new ValidationError("","",null,"Bean is null"));
    }
    else
    {
        errors.addAll(validator.validate(bean));
    }
    attempt.setErrors(errors);
}   // validateStructuredArtifact

/**
 * Add a new folder to ContentHosting for each EditItem in the state attribute named STATE_STACK_CREATE_ITEMS.
 * The number of items to be added is indicated by the state attribute named STATE_STACK_CREATE_NUMBER, and
 * the items are added to the collection identified by the state attribute named STATE_STACK_CREATE_COLLECTION_ID.
 * @param state the tool session state
 */
protected static void createFolders(SessionState state)
{
    Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
    if(alerts == null)
    {
        alerts = new HashSet();
        state.setAttribute(STATE_CREATE_ALERTS, alerts);
    }
    Map current_stack_frame = peekAtStack(state);
    // resolve the destination collection: stack frame -> tool state -> site collection
    String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID);
    if(collectionId == null || collectionId.trim().length() == 0)
    {
        collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID);
        if(collectionId == null || collectionId.trim().length() == 0)
        {
            collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
        }
        current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId);
    }
    List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
    if(new_items == null)
    {
        String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
        if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
        {
            defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
            state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
        }
        String encoding = (String) state.getAttribute(STATE_ENCODING);
        Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
        new_items = newEditItems(collectionId, TYPE_FOLDER, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);
        current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
    }
    Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
    if(number == null)
    {
        number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
        current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
    }
    if(number == null)
    {
        number = new Integer(1);
        current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
    }
    int numberOfFolders = 1;
    numberOfFolders = number.intValue();
    outerloop: for(int i = 0; i < numberOfFolders; i++)
    {
        EditItem item = (EditItem) new_items.get(i);
        if(item.isBlank())
        {
            continue;
        }
        String newCollectionId = collectionId + Validator.escapeResourceName(item.getName()) + Entity.SEPARATOR;
        if(newCollectionId.length() > ContentHostingService.MAXIMUM_RESOURCE_ID_LENGTH)
        {
            alerts.add(rb.getString("toolong") + " " + newCollectionId);
            continue outerloop;
        }
        ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();
        try
        {
            resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, item.getName());
            resourceProperties.addProperty (ResourceProperties.PROP_DESCRIPTION, item.getDescription());
            List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
            saveMetadata(resourceProperties, metadataGroups, item);
            // restrict group assignment to groups this user may actually add to
            SortedSet groups = new TreeSet(item.getEntityGroupRefs());
            groups.retainAll(item.getAllowedAddGroupRefs());
            ContentCollection collection = ContentHostingService.addCollection (newCollectionId, resourceProperties, groups);
            Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
            if(preventPublicDisplay == null)
            {
                preventPublicDisplay = Boolean.FALSE;
                state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
            }
            if(!preventPublicDisplay.booleanValue() && item.isPubview())
            {
                ContentHostingService.setPubView(collection.getId(), true);
            }
        }
        catch (IdUsedException e)
        {
            alerts.add(rb.getString("resotitle") + " " + item.getName() + " " + rb.getString("used4"));
        }
        catch (IdInvalidException e)
        {
            alerts.add(rb.getString("title") + " " + e.getMessage ());
        }
        catch (PermissionException e)
        {
            alerts.add(rb.getString("notpermis5") + " " + item.getName());
        }
        catch (InconsistentException e)
        {
            alerts.add(RESOURCE_INVALID_TITLE_STRING);
        }   // try-catch
    }
    // make sure the destination folder shows as expanded in the UI
    HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
    if(!currentMap.containsKey(collectionId))
    {
        try
        {
            currentMap.put (collectionId,ContentHostingService.getCollection (collectionId));
            state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
            // add this folder id into the set to be event-observed
            addObservingPattern(collectionId, state);
        }
        catch (IdUnusedException ignore)
        {
        }
        catch (TypeException ignore)
        {
        }
        catch (PermissionException ignore)
        {
        }
    }
    state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
    state.setAttribute(STATE_CREATE_ALERTS, alerts);
}   // createFolders

/**
 * Add a new file to ContentHosting for each EditItem in the state attribute named STATE_STACK_CREATE_ITEMS.
 * The number of items to be added is indicated by the state attribute named STATE_STACK_CREATE_NUMBER, and
 * the items are added to the collection identified by the state attribute named STATE_STACK_CREATE_COLLECTION_ID.
 * @param state the tool session state
 */
protected static void createFiles(SessionState state)
{
    Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
    if(alerts == null)
    {
        alerts = new HashSet();
        state.setAttribute(STATE_CREATE_ALERTS, alerts);
    }
    Map current_stack_frame = peekAtStack(state);
    // resolve the destination collection: stack frame -> tool state -> site collection
    String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID);
    if(collectionId == null || collectionId.trim().length() == 0)
    {
        collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID);
        if(collectionId == null || collectionId.trim().length() == 0)
        {
            collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
        }
        current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId);
    }
    List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
    if(new_items == null)
    {
        String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
        if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
        {
            defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
            state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
        }
        String encoding = (String) state.getAttribute(STATE_ENCODING);
        Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
        // NOTE(review): TYPE_FOLDER is passed here even though this method creates files --
        // looks like a copy-paste from createFolders; confirm intended item type before changing
        new_items = newEditItems(collectionId, TYPE_FOLDER, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);
        current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
    }
    Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
    if(number == null)
    {
        number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
        current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
    }
    if(number == null)
    {
        number = new Integer(1);
        current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
    }
    int numberOfItems = 1;
    numberOfItems = number.intValue();
    outerloop: for(int i = 0; i < numberOfItems; i++)
    {
        EditItem item = (EditItem) new_items.get(i);
        if(item.isBlank())
        {
            continue;
        }
        ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();
        resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, item.getName());
        resourceProperties.addProperty (ResourceProperties.PROP_DESCRIPTION, item.getDescription());
        resourceProperties.addProperty (ResourceProperties.PROP_COPYRIGHT, item.getCopyrightInfo());
        resourceProperties.addProperty(ResourceProperties.PROP_COPYRIGHT_CHOICE, item.getCopyrightStatus());
        if (item.hasCopyrightAlert())
        {
            resourceProperties.addProperty (ResourceProperties.PROP_COPYRIGHT_ALERT, Boolean.toString(item.hasCopyrightAlert()));
        }
        else
        {
            resourceProperties.removeProperty (ResourceProperties.PROP_COPYRIGHT_ALERT);
        }
        BasicRightsAssignment rightsObj = item.getRights();
        rightsObj.addResourceProperties(resourceProperties);
        resourceProperties.addProperty(ResourceProperties.PROP_IS_COLLECTION, Boolean.FALSE.toString());
        if(item.isHtml())
        {
            resourceProperties.addProperty(ResourceProperties.PROP_CONTENT_ENCODING, "UTF-8");
        }
        List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
        saveMetadata(resourceProperties, metadataGroups, item);
        // prefer the uploaded filename; fall back to the display name
        String filename = Validator.escapeResourceName(item.getFilename().trim());
        if("".equals(filename))
        {
            filename = Validator.escapeResourceName(item.getName().trim());
        }
        resourceProperties.addProperty(ResourceProperties.PROP_ORIGINAL_FILENAME, filename);
        // restrict group assignment to groups this user may actually add to
        SortedSet groups = new TreeSet(item.getEntityGroupRefs());
        groups.retainAll(item.getAllowedAddGroupRefs());
        try
        {
            ContentResource resource = ContentHostingService.addResource (filename, collectionId, MAXIMUM_ATTEMPTS_FOR_UNIQUENESS, item.getMimeType(), item.getContent(), resourceProperties, groups, item.getNotification());
            item.setAdded(true);
            Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
            if(preventPublicDisplay == null)
            {
                preventPublicDisplay = Boolean.FALSE;
                state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
            }
            if(!preventPublicDisplay.booleanValue() && item.isPubview())
            {
                ContentHostingService.setPubView(resource.getId(), true);
            }
            String mode = (String) state.getAttribute(STATE_MODE);
            if(MODE_HELPER.equals(mode))
            {
                String helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE);
                if(helper_mode != null && MODE_ATTACHMENT_NEW_ITEM_INIT.equals(helper_mode))
                {
                    // add to the attachments vector
                    List attachments = EntityManager.newReferenceList();
                    Reference ref = EntityManager.newReference(ContentHostingService.getReference(resource.getId()));
                    attachments.add(ref);
                    cleanupState(state);
                    state.setAttribute(STATE_ATTACHMENTS, attachments);
                }
                else
                {
                    // attach either the item itself or a link to it, depending on STATE_ATTACH_LINKS
                    Object attach_links = current_stack_frame.get(STATE_ATTACH_LINKS);
                    if(attach_links == null)
                    {
                        attach_links = state.getAttribute(STATE_ATTACH_LINKS);
                        if(attach_links != null)
                        {
                            current_stack_frame.put(STATE_ATTACH_LINKS, attach_links);
                        }
                    }
                    if(attach_links == null)
                    {
                        attachItem(resource.getId(), state);
                    }
                    else
                    {
                        attachLink(resource.getId(), state);
                    }
                }
            }
        }
        catch(PermissionException e)
        {
            alerts.add(rb.getString("notpermis12"));
            continue outerloop;
        }
        catch(IdInvalidException e)
        {
            alerts.add(rb.getString("title") + " " + e.getMessage ());
            continue outerloop;
        }
        catch(IdLengthException e)
        {
            alerts.add(rb.getString("toolong") + " " + e.getMessage());
            continue outerloop;
        }
        catch(IdUniquenessException e)
        {
            alerts.add("Could not add this item to this folder");
            continue outerloop;
        }
        catch(InconsistentException e)
        {
            alerts.add(RESOURCE_INVALID_TITLE_STRING);
            continue outerloop;
        }
        catch(OverQuotaException e)
        {
            alerts.add(rb.getString("overquota"));
            continue outerloop;
        }
        catch(ServerOverloadException e)
        {
            alerts.add(rb.getString("failed"));
            continue outerloop;
        }
        catch(RuntimeException e)
        {
            logger.warn("ResourcesAction.createFiles ***** Unknown Exception ***** " + e.getMessage());
            alerts.add(rb.getString("failed"));
            continue outerloop;
        }
    }
    // make sure the destination folder shows as expanded in the UI
    HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
    if(currentMap == null)
    {
        // do nothing
    }
    else
    {
        if(!currentMap.containsKey(collectionId))
        {
            try
            {
                currentMap.put (collectionId,ContentHostingService.getCollection (collectionId));
                state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
                // add this folder id into the set to be event-observed
                addObservingPattern(collectionId, state);
            }
            catch (IdUnusedException ignore)
            {
            }
            catch (TypeException ignore)
            {
            }
            catch (PermissionException ignore)
            {
            }
        }
        state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
    }
    state.setAttribute(STATE_CREATE_ALERTS, alerts);
}   // createFiles

/**
 * Process user's request to add an instance of a particular field to a structured object.
 * Works in both create mode (indexed item from the create list) and edit mode.
 * @param data the current request
 */
public static void doInsertValue(RunData data)
{
    SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
    ParameterParser params = data.getParameters ();
    // capture any values the user has already typed before mutating the form structure
    captureMultipleValues(state, params, false);
    Map current_stack_frame = peekAtStack(state);
    String field = params.getString("field");
    EditItem item = null;
    String mode = (String) state.getAttribute(STATE_MODE);
    if (MODE_CREATE.equals(mode))
    {
        int index = params.getInt("index");
        List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
        if(new_items != null)
        {
            item = (EditItem) new_items.get(index);
        }
    }
    else if(MODE_EDIT.equals(mode))
    {
        item = (EditItem) current_stack_frame.get(STATE_STACK_EDIT_ITEM);
    }
    if(item != null)
    {
        addInstance(field, item.getProperties());
    }
}   // doInsertValue

/**
 * Search a flat list of ResourcesMetadata properties for one whose localname matches "field".
 * If found and the field can have additional instances, increment the count for that item.
 * @param field
 * @param properties
 * @return true if the field is found, false otherwise.
 */
protected static boolean addInstance(String field, List properties)
{
    Iterator propIt = properties.iterator();
    boolean found = false;
    while(!found && propIt.hasNext())
    {
        ResourcesMetadata property = (ResourcesMetadata) propIt.next();
        if(field.equals(property.getDottedname()))
        {
            found = true;
            property.incrementCount();
        }
    }
    return found;
}

/**
 * Attach the resource identified by the "itemId" request parameter, either as
 * the item itself or as a link depending on STATE_ATTACH_LINKS, then return
 * the helper to attachment-select mode.
 * @param data the current request
 */
public static void doAttachitem(RunData data)
{
    SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
    ParameterParser params = data.getParameters ();
    state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());
    String itemId = params.getString("itemId");
    Map current_stack_frame = peekAtStack(state);
    Object attach_links = current_stack_frame.get(STATE_ATTACH_LINKS);
    if(attach_links == null)
    {
        attach_links = state.getAttribute(STATE_ATTACH_LINKS);
        if(attach_links != null)
        {
            current_stack_frame.put(STATE_ATTACH_LINKS, attach_links);
        }
    }
    if(attach_links == null)
    {
        attachItem(itemId, state);
    }
    else
    {
        attachLink(itemId, state);
    }
    state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT_INIT);
    // popFromStack(state);
    // resetCurrentMode(state);
}

/**
 * Handle a file upload in the attachment helper: size-check the file against
 * STATE_FILE_UPLOAD_MAX_SIZE, create an attachment resource for it, and record
 * it in the helper's new-items list.
 * @param data the current request
 */
public static void doAttachupload(RunData data)
{
    SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
    ParameterParser params = data.getParameters ();
    Map current_stack_frame = peekAtStack(state);
    String max_file_size_mb = (String) state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE);
    int max_bytes = 1024 * 1024;
    try
    {
        max_bytes = Integer.parseInt(max_file_size_mb) * 1024 * 1024;
    }
    catch(Exception e)
    {
        // if unable to parse an integer from the value
        // in the properties file, use 1 MB as a default
        max_file_size_mb = "1";
        max_bytes = 1024 * 1024;
    }
    FileItem fileitem = null;
    try
    {
        fileitem = params.getFileItem("upload");
    }
    catch(Exception e)
    {
    }
    if(fileitem == null)
    {
        // "The user submitted a file to upload but it was too big!"
        addAlert(state, rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2"));
    }
    else if (fileitem.getFileName() == null || fileitem.getFileName().length() == 0)
    {
        addAlert(state, rb.getString("choosefile7"));
    }
    else if (fileitem.getFileName().length() > 0)
    {
        String filename = Validator.getFileName(fileitem.getFileName());
        byte[] bytes = fileitem.get();
        String contentType = fileitem.getContentType();
        if(bytes.length >= max_bytes)
        {
            addAlert(state, rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2"));
        }
        else if(bytes.length > 0)
        {
            // we just want the file name part - strip off any drive and path stuff
            String name = Validator.getFileName(filename);
            String resourceId = Validator.escapeResourceName(name);

            // make a set of properties to add for the new resource
            ResourcePropertiesEdit props = ContentHostingService.newResourceProperties();
            props.addProperty(ResourceProperties.PROP_DISPLAY_NAME, name);
            props.addProperty(ResourceProperties.PROP_DESCRIPTION, filename);

            // make an attachment resource for this URL
            try
            {
                String siteId = ToolManager.getCurrentPlacement().getContext();
                String toolName = (String) current_stack_frame.get(STATE_ATTACH_TOOL_NAME);
                if(toolName == null)
                {
                    toolName = (String) state.getAttribute(STATE_ATTACH_TOOL_NAME);
                    if(toolName == null)
                    {
                        toolName = ToolManager.getCurrentTool().getTitle();
                    }
                    current_stack_frame.put(STATE_ATTACH_TOOL_NAME, toolName);
                }
                ContentResource attachment = ContentHostingService.addAttachmentResource(resourceId, siteId, toolName, contentType, bytes, props);
                List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS);
                if(new_items == null)
                {
                    new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS);
                    if(new_items == null)
                    {
                        new_items = new Vector();
                    }
                    current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items);
                }
                String containerId = ContentHostingService.getContainingCollectionId (attachment.getId());
                String accessUrl = attachment.getUrl();
                AttachItem item = new AttachItem(attachment.getId(), filename, containerId, accessUrl);
                item.setContentType(contentType);
                new_items.add(item);
                //check -- jim
                state.setAttribute(STATE_HELPER_CHANGED, Boolean.TRUE.toString());
                current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items);
            }
            catch (PermissionException e)
            {
                addAlert(state, rb.getString("notpermis4"));
            }
            catch(OverQuotaException e)
            {
                addAlert(state, rb.getString("overquota"));
            }
            catch(ServerOverloadException e)
            {
                addAlert(state, rb.getString("failed"));
            }
            catch(IdInvalidException ignore)
            {
                // other exceptions should be caught earlier
            }
            catch(InconsistentException ignore)
            {
                // other exceptions should be caught earlier
            }
            catch(IdUsedException ignore)
            {
                // other exceptions should be caught earlier
            }
            catch(RuntimeException e)
            {
                logger.warn("ResourcesAction.doAttachupload ***** Unknown Exception ***** " + e.getMessage());
                addAlert(state, rb.getString("failed"));
            }
        }
        else
        {
            addAlert(state, rb.getString("choosefile7"));
        }
    }
    state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT_INIT);
    //popFromStack(state);
    //resetCurrentMode(state);
}   // doAttachupload

/**
 * Handle a URL attachment in the attachment helper: validate the URL, store it
 * as a TYPE_URL attachment resource, and record it in the helper's new-items list.
 * @param data the current request
 */
public static void doAttachurl(RunData data)
{
    SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
    ParameterParser params = data.getParameters ();
    Map current_stack_frame = peekAtStack(state);
    String url = params.getCleanString("url");
    ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();
    resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, url);
    resourceProperties.addProperty (ResourceProperties.PROP_DESCRIPTION, url);
    resourceProperties.addProperty(ResourceProperties.PROP_IS_COLLECTION, Boolean.FALSE.toString());
    try
    {
        url = validateURL(url);
        byte[] newUrl = url.getBytes();
        String newResourceId = Validator.escapeResourceName(url);
        String siteId = ToolManager.getCurrentPlacement().getContext();
        String toolName = (String) current_stack_frame.get(STATE_ATTACH_TOOL_NAME);
        if(toolName == null)
        {
            toolName = (String) state.getAttribute(STATE_ATTACH_TOOL_NAME);
            if(toolName == null)
            {
                toolName = ToolManager.getCurrentTool().getTitle();
            }
            current_stack_frame.put(STATE_ATTACH_TOOL_NAME, toolName);
        }
        ContentResource attachment = ContentHostingService.addAttachmentResource(newResourceId, siteId, toolName, ResourceProperties.TYPE_URL, newUrl, resourceProperties);
        List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS);
        if(new_items == null)
        {
            new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS);
            if(new_items == null)
            {
                new_items = new Vector();
            }
            current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items);
        }
        String containerId = ContentHostingService.getContainingCollectionId (attachment.getId());
        String accessUrl = attachment.getUrl();
        AttachItem item = new AttachItem(attachment.getId(), url, containerId, accessUrl);
        item.setContentType(ResourceProperties.TYPE_URL);
        new_items.add(item);
        state.setAttribute(STATE_HELPER_CHANGED, Boolean.TRUE.toString());
        current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items);
    }
    catch(MalformedURLException e)
    {
        // invalid url
        addAlert(state, rb.getString("validurl") + " \"" + url + "\" " + rb.getString("invalid"));
    }
    catch (PermissionException e)
    {
        addAlert(state, rb.getString("notpermis4"));
    }
    catch(OverQuotaException e)
    {
        addAlert(state, rb.getString("overquota"));
    }
    catch(ServerOverloadException e)
    {
        addAlert(state, rb.getString("failed"));
    }
    catch(IdInvalidException ignore)
    {
        // other exceptions should be caught earlier
    }
    catch(IdUsedException ignore)
    {
        // other exceptions should be caught earlier
    }
    catch(InconsistentException ignore)
    {
        // other exceptions should be caught earlier
    }
    catch(RuntimeException e)
    {
        logger.warn("ResourcesAction.doAttachurl ***** Unknown Exception ***** " + e.getMessage());
        addAlert(state, rb.getString("failed"));
    }
    state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT_INIT);
    // popFromStack(state);
    // resetCurrentMode(state);
}

/**
 * Remove the item identified by the "itemId" request parameter from the helper's
 * new-items list, remembering it in STATE_REMOVED_ATTACHMENTS so its backing
 * resource can be cleaned up when the attachments are committed.
 * @param data the current request
 */
public static void doRemoveitem(RunData data)
{
    SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
    ParameterParser params = data.getParameters ();
    Map current_stack_frame = peekAtStack(state);
    state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());
    String itemId = params.getString("itemId");
    List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS);
    if(new_items == null)
    {
        new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS);
        if(new_items == null)
        {
            new_items = new Vector();
        }
        current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items);
    }
    AttachItem item = null;
    boolean found = false;
    Iterator it = new_items.iterator();
    while(!found && it.hasNext())
    {
        item = (AttachItem) it.next();
        if(item.getId().equals(itemId))
        {
            found = true;
        }
    }
    if(found && item != null)
    {
        new_items.remove(item);
        List removed = (List) state.getAttribute(STATE_REMOVED_ATTACHMENTS);
        if(removed == null)
        {
            removed = new Vector();
            state.setAttribute(STATE_REMOVED_ATTACHMENTS, removed);
        }
        removed.add(item);
        state.setAttribute(STATE_HELPER_CHANGED, Boolean.TRUE.toString());
    }
}   // doRemoveitem

/**
 * Commit the attachment helper: delete resources backing removed items, turn the
 * remaining new items into a reference list in STATE_ATTACHMENTS, pop the helper
 * stack frame, and (if a form field was targeted) bind the first attachment to it.
 * @param data the current request
 */
public static void doAddattachments(RunData data)
{
    SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
    ParameterParser params = data.getParameters ();

    // cancel copy if there is one in progress
    if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
    {
        initCopyContext(state);
    }

    // cancel move if there is one in progress
    if(!
Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG))) { initMoveContext(state); } state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet()); Map current_stack_frame = peekAtStack(state); List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = new Vector(); } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } List removed = (List) current_stack_frame.get(STATE_REMOVED_ATTACHMENTS); if(removed == null) { removed = (List) state.getAttribute(STATE_REMOVED_ATTACHMENTS); if(removed == null) { removed = new Vector(); } current_stack_frame.put(STATE_REMOVED_ATTACHMENTS, removed); } Iterator removeIt = removed.iterator(); while(removeIt.hasNext()) { AttachItem item = (AttachItem) removeIt.next(); try { if(ContentHostingService.isAttachmentResource(item.getId())) { ContentResourceEdit edit = ContentHostingService.editResource(item.getId()); ContentHostingService.removeResource(edit); ContentCollectionEdit coll = ContentHostingService.editCollection(item.getCollectionId()); ContentHostingService.removeCollection(coll); } } catch(Exception ignore) { // log failure } } state.removeAttribute(STATE_REMOVED_ATTACHMENTS); // add to the attachments vector List attachments = EntityManager.newReferenceList(); Iterator it = new_items.iterator(); while(it.hasNext()) { AttachItem item = (AttachItem) it.next(); try { Reference ref = EntityManager.newReference(ContentHostingService.getReference(item.getId())); attachments.add(ref); } catch(Exception e) { } } cleanupState(state); state.setAttribute(STATE_ATTACHMENTS, attachments); // end up in main mode popFromStack(state); resetCurrentMode(state); current_stack_frame = peekAtStack(state); String field = null; // if there is at least one attachment if (attachments.size() > 0) { //check -- jim state.setAttribute(AttachmentAction.STATE_HAS_ATTACHMENT_BEFORE, Boolean.TRUE); 
if(current_stack_frame == null) { } else { field = (String) current_stack_frame.get(STATE_ATTACH_FORM_FIELD); } } if(field != null) { int index = 0; String fieldname = field; Matcher matcher = INDEXED_FORM_FIELD_PATTERN.matcher(field.trim()); if(matcher.matches()) { fieldname = matcher.group(0); index = Integer.parseInt(matcher.group(1)); } // we are trying to attach a link to a form field and there is at least one attachment if(new_items == null) { new_items = (List) current_stack_frame.get(ResourcesAction.STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = (List) state.getAttribute(ResourcesAction.STATE_HELPER_NEW_ITEMS); } } EditItem edit_item = null; List edit_items = (List) current_stack_frame.get(ResourcesAction.STATE_STACK_CREATE_ITEMS); if(edit_items == null) { edit_item = (EditItem) current_stack_frame.get(ResourcesAction.STATE_STACK_EDIT_ITEM); } else { edit_item = (EditItem) edit_items.get(0); } if(edit_item != null) { Reference ref = (Reference) attachments.get(0); edit_item.setPropertyValue(fieldname, index, ref); } } } public static void attachItem(String itemId, SessionState state) { org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE); Map current_stack_frame = peekAtStack(state); List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS); if(new_items == null) { new_items = new Vector(); } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } boolean found = false; Iterator it = new_items.iterator(); while(!found && it.hasNext()) { AttachItem item = (AttachItem) it.next(); if(item.getId().equals(itemId)) { found = true; } } if(!found) { try { ContentResource res = contentService.getResource(itemId); ResourceProperties props = res.getProperties(); ResourcePropertiesEdit newprops = contentService.newResourceProperties(); 
newprops.set(props); byte[] bytes = res.getContent(); String contentType = res.getContentType(); String filename = Validator.getFileName(itemId); String resourceId = Validator.escapeResourceName(filename); String siteId = ToolManager.getCurrentPlacement().getContext(); String toolName = (String) current_stack_frame.get(STATE_ATTACH_TOOL_NAME); if(toolName == null) { toolName = (String) state.getAttribute(STATE_ATTACH_TOOL_NAME); if(toolName == null) { toolName = ToolManager.getCurrentTool().getTitle(); } current_stack_frame.put(STATE_ATTACH_TOOL_NAME, toolName); } ContentResource attachment = ContentHostingService.addAttachmentResource(resourceId, siteId, toolName, contentType, bytes, props); String displayName = newprops.getPropertyFormatted(ResourceProperties.PROP_DISPLAY_NAME); String containerId = contentService.getContainingCollectionId (attachment.getId()); String accessUrl = attachment.getUrl(); AttachItem item = new AttachItem(attachment.getId(), displayName, containerId, accessUrl); item.setContentType(contentType); new_items.add(item); state.setAttribute(STATE_HELPER_CHANGED, Boolean.TRUE.toString()); } catch (PermissionException e) { addAlert(state, rb.getString("notpermis4")); } catch(OverQuotaException e) { addAlert(state, rb.getString("overquota")); } catch(ServerOverloadException e) { addAlert(state, rb.getString("failed")); } catch(IdInvalidException ignore) { // other exceptions should be caught earlier } catch(TypeException ignore) { // other exceptions should be caught earlier } catch(IdUnusedException ignore) { // other exceptions should be caught earlier } catch(IdUsedException ignore) { // other exceptions should be caught earlier } catch(InconsistentException ignore) { // other exceptions should be caught earlier } catch(RuntimeException e) { logger.warn("ResourcesAction.attachItem ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); } } current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items); } 
/**
* Attach a LINK to an existing resource (no copy is made): an AttachItem
* pointing at itemId itself is added to the current stack frame's list of
* new items, unless an item with that id is already in the list.
* @param itemId id of the resource to link to
* @param state the session state
*/
public static void attachLink(String itemId, SessionState state)
{
	org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE);

	Map current_stack_frame = peekAtStack(state);

	List new_items = (List) current_stack_frame.get(STATE_HELPER_NEW_ITEMS);
	if(new_items == null)
	{
		new_items = (List) state.getAttribute(STATE_HELPER_NEW_ITEMS);
		if(new_items == null)
		{
			new_items = new Vector();
		}
		current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items);
	}

	// cache the attach cardinality in the stack frame (not otherwise
	// consulted in this method)
	Integer max_cardinality = (Integer) current_stack_frame.get(STATE_ATTACH_CARDINALITY);
	if(max_cardinality == null)
	{
		max_cardinality = (Integer) state.getAttribute(STATE_ATTACH_CARDINALITY);
		if(max_cardinality == null)
		{
			max_cardinality = CARDINALITY_MULTIPLE;
		}
		current_stack_frame.put(STATE_ATTACH_CARDINALITY, max_cardinality);
	}

	// skip if the item is already attached
	boolean found = false;
	Iterator it = new_items.iterator();
	while(!found && it.hasNext())
	{
		AttachItem item = (AttachItem) it.next();
		if(item.getId().equals(itemId))
		{
			found = true;
		}
	}

	if(!found)
	{
		try
		{
			ContentResource res = contentService.getResource(itemId);
			ResourceProperties props = res.getProperties();

			String contentType = res.getContentType();
			String filename = Validator.getFileName(itemId);
			String resourceId = Validator.escapeResourceName(filename);

			String siteId = ToolManager.getCurrentPlacement().getContext();

			// the attaching tool's name, cached in the stack frame
			String toolName = (String) current_stack_frame.get(STATE_ATTACH_TOOL_NAME);
			if(toolName == null)
			{
				toolName = (String) state.getAttribute(STATE_ATTACH_TOOL_NAME);
				if(toolName == null)
				{
					toolName = ToolManager.getCurrentTool().getTitle();
				}
				current_stack_frame.put(STATE_ATTACH_TOOL_NAME, toolName);
			}

			String displayName = props.getPropertyFormatted(ResourceProperties.PROP_DISPLAY_NAME);
			String containerId = contentService.getContainingCollectionId (itemId);
			String accessUrl = res.getUrl();

			// link directly to the existing resource
			AttachItem item = new AttachItem(itemId, displayName, containerId, accessUrl);
			item.setContentType(contentType);
			new_items.add(item);
			state.setAttribute(STATE_HELPER_CHANGED, Boolean.TRUE.toString());
		}
		catch (PermissionException e)
		{
			addAlert(state, rb.getString("notpermis4"));
		}
		catch(TypeException ignore)
		{
			// other exceptions should be caught earlier
		}
		catch(IdUnusedException ignore)
		{
			// other exceptions should be caught earlier
		}
		catch(RuntimeException e)
		{
			// NOTE(review): log tag says "attachItem" but this method is
			// attachLink (copy-paste); message text left untouched
			logger.warn("ResourcesAction.attachItem ***** Unknown Exception ***** " + e.getMessage());
			addAlert(state, rb.getString("failed"));
		}
	}
	current_stack_frame.put(STATE_HELPER_NEW_ITEMS, new_items);
}

/**
* Add a new URL to ContentHosting for each EditItem in the state attribute named STATE_STACK_CREATE_ITEMS.
* The number of items to be added is indicated by the state attribute named STATE_STACK_CREATE_NUMBER, and
* the items are added to the collection identified by the state attribute named STATE_STACK_CREATE_COLLECTION_ID.
* @param state
*/
protected static void createUrls(SessionState state)
{
	// accumulate user-facing alert messages for this create pass
	Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
	if(alerts == null)
	{
		alerts = new HashSet();
		state.setAttribute(STATE_CREATE_ALERTS, alerts);
	}

	Map current_stack_frame = peekAtStack(state);

	List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);

	// number of items to create: stack frame, then state, then 1
	Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
	if(number == null)
	{
		number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
		current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
	}
	if(number == null)
	{
		number = new Integer(1);
		current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
	}

	// target collection: stack frame, then state, then the site collection
	String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID);
	if(collectionId == null || collectionId.trim().length() == 0)
	{
		collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID);
		if(collectionId == null || collectionId.trim().length() == 0)
		{
			collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
		}
		current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId);
	}

	int numberOfItems = 1;
	numberOfItems = number.intValue();

	// create a URL resource for each non-blank item
	outerloop: for(int i = 0; i < numberOfItems; i++)
	{
		EditItem item = (EditItem) new_items.get(i);
		if(item.isBlank())
		{
			continue;
		}

		ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();
		resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, item.getName());
		resourceProperties.addProperty (ResourceProperties.PROP_DESCRIPTION, item.getDescription());
		resourceProperties.addProperty(ResourceProperties.PROP_IS_COLLECTION, Boolean.FALSE.toString());

		List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
		saveMetadata(resourceProperties, metadataGroups, item);

		// the URL text itself is stored as the resource body
		// (NOTE(review): getBytes() uses the platform default charset)
		byte[] newUrl = item.getFilename().getBytes();
		String name = Validator.escapeResourceName(item.getName());

		// restrict grouping to groups the user is allowed to add to
		SortedSet groups = new TreeSet(item.getEntityGroupRefs());
		groups.retainAll(item.getAllowedAddGroupRefs());

		try
		{
			ContentResource resource = ContentHostingService.addResource (name, collectionId, MAXIMUM_ATTEMPTS_FOR_UNIQUENESS, item.getMimeType(), newUrl, resourceProperties, groups, item.getNotification());

			item.setAdded(true);

			Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
			if(preventPublicDisplay == null)
			{
				preventPublicDisplay = Boolean.FALSE;
				state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
			}

			if(!preventPublicDisplay.booleanValue() && item.isPubview())
			{
				ContentHostingService.setPubView(resource.getId(), true);
			}

			String mode = (String) state.getAttribute(STATE_MODE);
			if(MODE_HELPER.equals(mode))
			{
				String helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE);
				if(helper_mode != null && MODE_ATTACHMENT_NEW_ITEM.equals(helper_mode))
				{
					// add to the attachments vector
					List attachments = EntityManager.newReferenceList();
					Reference ref = EntityManager.newReference(ContentHostingService.getReference(resource.getId()));
					attachments.add(ref);
					cleanupState(state);
					state.setAttribute(STATE_ATTACHMENTS, attachments);
				}
				else
				{
					// attach the new resource, either as a copy or a link,
					// depending on the STATE_ATTACH_LINKS flag
					Object attach_links = current_stack_frame.get(STATE_ATTACH_LINKS);
					if(attach_links == null)
					{
						attach_links = state.getAttribute(STATE_ATTACH_LINKS);
						if(attach_links != null)
						{
							current_stack_frame.put(STATE_ATTACH_LINKS, attach_links);
						}
					}
					if(attach_links == null)
					{
						attachItem(resource.getId(), state);
					}
					else
					{
						attachLink(resource.getId(), state);
					}
				}
			}
		}
		catch(PermissionException e)
		{
			alerts.add(rb.getString("notpermis12"));
			continue outerloop;
		}
		catch(IdInvalidException e)
		{
			alerts.add(rb.getString("title") + " " + e.getMessage ());
			continue outerloop;
		}
		catch(IdLengthException e)
		{
			alerts.add(rb.getString("toolong") + " " + e.getMessage());
			continue outerloop;
		}
		catch(IdUniquenessException e)
		{
			alerts.add("Could not add this item to this folder");
			continue outerloop;
		}
		catch(InconsistentException e)
		{
			alerts.add(RESOURCE_INVALID_TITLE_STRING);
			continue outerloop;
		}
		catch(OverQuotaException e)
		{
			alerts.add(rb.getString("overquota"));
			continue outerloop;
		}
		catch(ServerOverloadException e)
		{
			alerts.add(rb.getString("failed"));
			continue outerloop;
		}
		catch(RuntimeException e)
		{
			// NOTE(review): log tag says "createFiles" but this method is
			// createUrls (copy-paste); message text left untouched
			logger.warn("ResourcesAction.createFiles ***** Unknown Exception ***** " + e.getMessage());
			alerts.add(rb.getString("failed"));
			continue outerloop;
		}
	}

	// make sure the destination folder shows expanded in the list view
	HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
	if(!currentMap.containsKey(collectionId))
	{
		try
		{
			currentMap.put (collectionId,ContentHostingService.getCollection (collectionId));
			state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);

			// add this folder id into the set to be event-observed
			addObservingPattern(collectionId, state);
		}
		catch (IdUnusedException ignore)
		{
		}
		catch (TypeException ignore)
		{
		}
		catch (PermissionException ignore)
		{
		}
	}
	state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);

	state.setAttribute(STATE_CREATE_ALERTS, alerts);

}	// createUrls

/**
* Build the context for creating folders
and items
*/
public static String buildCreateContext (VelocityPortlet portlet, Context context, RunData data, SessionState state)
{
	context.put("tlang",rb);
	// find the ContentTypeImage service
	context.put ("contentTypeImageService", state.getAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE));

	// item-type and access-mode constants used by the velocity template
	context.put("TYPE_FOLDER", TYPE_FOLDER);
	context.put("TYPE_UPLOAD", TYPE_UPLOAD);
	context.put("TYPE_HTML", TYPE_HTML);
	context.put("TYPE_TEXT", TYPE_TEXT);
	context.put("TYPE_URL", TYPE_URL);
	context.put("TYPE_FORM", TYPE_FORM);

	context.put("SITE_ACCESS", AccessMode.SITE.toString());
	context.put("GROUP_ACCESS", AccessMode.GROUPED.toString());
	context.put("INHERITED_ACCESS", AccessMode.INHERITED.toString());
	context.put("PUBLIC_ACCESS", PUBLIC_ACCESS);

	context.put("max_upload_size", state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE));

	Map current_stack_frame = peekAtStack(state);

	// type of item being created: stack frame, then state, then upload
	String itemType = (String) current_stack_frame.get(STATE_STACK_CREATE_TYPE);
	if(itemType == null || itemType.trim().equals(""))
	{
		itemType = (String) state.getAttribute(STATE_CREATE_TYPE);
		if(itemType == null || itemType.trim().equals(""))
		{
			itemType = TYPE_UPLOAD;
		}
		current_stack_frame.put(STATE_STACK_CREATE_TYPE, itemType);
	}
	context.put("itemType", itemType);

	// the collection the new item(s) will be created in
	String collectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID);
	if(collectionId == null || collectionId.trim().length() == 0)
	{
		collectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID);
		if(collectionId == null || collectionId.trim().length() == 0)
		{
			collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
		}
		current_stack_frame.put(STATE_STACK_CREATE_COLLECTION_ID, collectionId);
	}
	context.put("collectionId", collectionId);

	// form field being attached to, if any; moved from session state into
	// the stack frame so it is consumed exactly once
	String field = (String) current_stack_frame.get(STATE_ATTACH_FORM_FIELD);
	if(field == null)
	{
		field = (String) state.getAttribute(STATE_ATTACH_FORM_FIELD);
		if(field != null)
		{
			current_stack_frame.put(STATE_ATTACH_FORM_FIELD, field);
			state.removeAttribute(STATE_ATTACH_FORM_FIELD);
		}
	}

	// one-shot alert message for the create page
	String msg = (String) state.getAttribute(STATE_CREATE_MESSAGE);
	if (msg != null)
	{
		context.put("createAlertMessage", msg);
		state.removeAttribute(STATE_CREATE_MESSAGE);
	}

	Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
	if(preventPublicDisplay == null)
	{
		preventPublicDisplay = Boolean.FALSE;
		state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
	}

	// lazily create the list of blank edit items backing the create form
	List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
	if(new_items == null)
	{
		String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
		if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
		{
			defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
			state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
		}

		String encoding = data.getRequest().getCharacterEncoding();

		new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);

		current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
	}
	context.put("new_items", new_items);

	// how many of the items are shown on the form
	Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
	if(number == null)
	{
		number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
		current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
	}
	context.put("numberOfItems", number);
	context.put("max_number", new Integer(CREATE_MAX_ITEMS));

	String homeCollectionId = (String) state.getAttribute (STATE_HOME_COLLECTION_ID);
	context.put("homeCollectionId", homeCollectionId);

	List collectionPath = getCollectionPath(state);
	context.put ("collectionPath", collectionPath);

	if(homeCollectionId.equals(collectionId))
	{
		context.put("atHome", Boolean.TRUE.toString());
	}

	Collection groups = ContentHostingService.getGroupsWithReadAccess(collectionId);
	if(! groups.isEmpty())
	{
		context.put("siteHasGroups", Boolean.TRUE.toString());

		// NOTE(review): this adds an Iterator per form row rather than the
		// groups themselves; each iterator can be consumed only once --
		// verify the template really expects iterators here
		List theGroupsInThisSite = new Vector();
		for(int i = 0; i < CREATE_MAX_ITEMS; i++)
		{
			theGroupsInThisSite.add(groups.iterator());
		}
		context.put("theGroupsInThisSite", theGroupsInThisSite);
	}

	String show_form_items = (String) state.getAttribute(STATE_SHOW_FORM_ITEMS);
	if(show_form_items == null)
	{
		// NOTE(review): this re-reads the same session-state attribute as
		// above; the first lookup was possibly meant to check the stack
		// frame instead -- confirm intent before changing
		show_form_items = (String) state.getAttribute(STATE_SHOW_FORM_ITEMS);
		if(show_form_items != null)
		{
			current_stack_frame.put(STATE_SHOW_FORM_ITEMS,show_form_items);
		}
	}
	if(show_form_items != null)
	{
		context.put("show_form_items", show_form_items);
	}

	// copyright
	copyrightChoicesIntoContext(state, context);

	// put schema for metadata into context
	metadataGroupsIntoContext(state, context);

	// %%STATE_MODE_RESOURCES%%
	if (RESOURCES_MODE_RESOURCES.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES)))
	{
		context.put("dropboxMode", Boolean.FALSE);
	}
	else if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES)))
	{
		// notshow the public option or notification when in dropbox mode
		context.put("dropboxMode", Boolean.TRUE);
	}
	context.put("siteTitle", state.getAttribute(STATE_SITE_TITLE));

	/*
	Collection groups = ContentHostingService.getGroupsWithReadAccess(collectionId);
	if(! groups.isEmpty())
	{
		context.put("siteHasGroups", Boolean.TRUE.toString());
		context.put("theGroupsInThisSite", groups);
	}
	*/

	if(TYPE_FORM.equals(itemType))
	{
		// structured-object (form) creation: expose form homes, form type,
		// and the widget-name constants to the template
		List listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES);
		if(listOfHomes == null)
		{
			setupStructuredObjects(state);
			listOfHomes = (List) current_stack_frame.get(STATE_STRUCTOBJ_HOMES);
		}
		context.put("homes", listOfHomes);

		String formtype = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE);
		if(formtype == null)
		{
			formtype = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE);
			if(formtype == null)
			{
				formtype = "";
			}
			current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype);
		}
		context.put("formtype", formtype);

		String rootname = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_ROOTNAME);
		context.put("rootname", rootname);

		context.put("STRING", ResourcesMetadata.WIDGET_STRING);
		context.put("TEXTAREA", ResourcesMetadata.WIDGET_TEXTAREA);
		context.put("BOOLEAN", ResourcesMetadata.WIDGET_BOOLEAN);
		context.put("INTEGER", ResourcesMetadata.WIDGET_INTEGER);
		context.put("DOUBLE", ResourcesMetadata.WIDGET_DOUBLE);
		context.put("DATE", ResourcesMetadata.WIDGET_DATE);
		context.put("TIME", ResourcesMetadata.WIDGET_TIME);
		context.put("DATETIME", ResourcesMetadata.WIDGET_DATETIME);
		context.put("ANYURI", ResourcesMetadata.WIDGET_ANYURI);
		context.put("ENUM", ResourcesMetadata.WIDGET_ENUM);
		context.put("NESTED", ResourcesMetadata.WIDGET_NESTED);
		context.put("WYSIWYG", ResourcesMetadata.WIDGET_WYSIWYG);

		context.put("today", TimeService.newTime());

		context.put("DOT", ResourcesMetadata.DOT);
	}

	// fields flagged as missing by a previous validation pass (one-shot)
	Set missing = (Set) current_stack_frame.remove(STATE_CREATE_MISSING_ITEM);
	context.put("missing", missing);

	// String template = (String) getContext(data).get("template");
	return TEMPLATE_CREATE;
}

/**
* show the resource properties
*/
public static void doMore ( RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
	ParameterParser params = data.getParameters ();

	Map current_stack_frame = pushOnStack(state);

	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}

	// cancel move if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
	{
		initMoveContext(state);
	}

	state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());

	// the hosted item ID
	String id = NULL_STRING;

	// the collection id
	String collectionId = NULL_STRING;

	try
	{
		id = params.getString ("id");
		if (id!=null)
		{
			// set the collection/resource id for more context
			current_stack_frame.put(STATE_MORE_ID, id);
		}
		else
		{
			// get collection/resource id from the state object
			id =(String) current_stack_frame.get(STATE_MORE_ID);
		}
		collectionId = params.getString ("collectionId");
		current_stack_frame.put(STATE_MORE_COLLECTION_ID, collectionId);

		if (collectionId.equals ((String) state.getAttribute(STATE_HOME_COLLECTION_ID)))
		{
			try
			{
				// this is a test to see if the collection exists. If not, it is created.
				ContentCollection collection = ContentHostingService.getCollection (collectionId);
			}
			catch (IdUnusedException e )
			{
				try
				{
					// default copyright
					String mycopyright = (String) state.getAttribute (STATE_MY_COPYRIGHT);

					ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();

					String homeCollectionId = (String) state.getAttribute (STATE_HOME_COLLECTION_ID);
					resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, ContentHostingService.getProperties (homeCollectionId).getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME));

					ContentCollection collection = ContentHostingService.addCollection (homeCollectionId, resourceProperties);
				}
				catch (IdUsedException ee)
				{
					addAlert(state, rb.getString("idused"));
				}
				catch (IdUnusedException ee)
				{
					addAlert(state,RESOURCE_NOT_EXIST_STRING);
				}
				catch (IdInvalidException ee)
				{
					addAlert(state, rb.getString("title") + " " + ee.getMessage ());
				}
				catch (PermissionException ee)
				{
					addAlert(state, rb.getString("permisex"));
				}
				catch (InconsistentException ee)
				{
					addAlert(state, RESOURCE_INVALID_TITLE_STRING);
				}
			}
			catch (TypeException e )
			{
				addAlert(state, rb.getString("typeex"));
			}
			catch (PermissionException e )
			{
				addAlert(state, rb.getString("permisex"));
			}
		}
	}
	catch (NullPointerException eE)
	{
		// NOTE(review): relies on an NPE (e.g. a null collectionId) for
		// control flow instead of an explicit null check
		addAlert(state," " + rb.getString("nullex") + " " + id + ". ");
	}

	// NOTE(review): the returned EditItem is not used here; getEditItem is
	// presumably called for its side effects on the stack frame -- confirm
	EditItem item = getEditItem(id, collectionId, data);

	// is there no error?
	if (state.getAttribute(STATE_MESSAGE) == null)
	{
		// go to the more state
		state.setAttribute(STATE_MODE, MODE_MORE);
	}	// if-else

}	// doMore

/**
* doDelete to delete the selected collection or resource items
*/
public void doDelete ( RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}

	// cancel move if there is one in progress
	if(!
Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
	{
		initMoveContext(state);
	}

	ParameterParser params = data.getParameters ();

	List Items = (List) state.getAttribute(STATE_DELETE_ITEMS);
	// Vector deleteIds = (Vector) state.getAttribute (STATE_DELETE_IDS);

	// delete the lowest item in the hireachy first
	Hashtable deleteItems = new Hashtable();
	// String collectionId = (String) state.getAttribute (STATE_COLLECTION_ID);
	int maxDepth = 0;
	int depth = 0;

	// bucket the selected items by their depth in the hierarchy
	Iterator it = Items.iterator();
	while(it.hasNext())
	{
		BrowseItem item = (BrowseItem) it.next();
		depth = ContentHostingService.getDepth(item.getId(), item.getRoot());
		if (depth > maxDepth)
		{
			maxDepth = depth;
		}
		List v = (List) deleteItems.get(new Integer(depth));
		if(v == null)
		{
			v = new Vector();
		}
		v.add(item);
		deleteItems.put(new Integer(depth), v);
	}

	// NOTE(review): isCollection is never read in this method
	boolean isCollection = false;
	// delete deepest items first so folders are empty before removal
	for (int j=maxDepth; j>0; j--)
	{
		List v = (List) deleteItems.get(new Integer(j));
		if (v==null)
		{
			v = new Vector();
		}
		Iterator itemIt = v.iterator();
		while(itemIt.hasNext())
		{
			BrowseItem item = (BrowseItem) itemIt.next();
			try
			{
				if (item.isFolder())
				{
					ContentHostingService.removeCollection(item.getId());
				}
				else
				{
					ContentHostingService.removeResource(item.getId());
				}
			}
			catch (PermissionException e)
			{
				addAlert(state, rb.getString("notpermis6") + " " + item.getName() + ". ");
			}
			catch (IdUnusedException e)
			{
				addAlert(state,RESOURCE_NOT_EXIST_STRING);
			}
			catch (TypeException e)
			{
				addAlert(state, rb.getString("deleteres") + " " + item.getName() + " " + rb.getString("wrongtype"));
			}
			catch (ServerOverloadException e)
			{
				addAlert(state, rb.getString("failed"));
			}
			catch (InUseException e)
			{
				addAlert(state, rb.getString("deleteres") + " " + item.getName() + " " + rb.getString("locked"));
			}// try - catch
			catch(RuntimeException e)
			{
				logger.warn("ResourcesAction.doDelete ***** Unknown Exception ***** " + e.getMessage());
				addAlert(state, rb.getString("failed"));
			}
		}	// for
	}	// for

	if (state.getAttribute(STATE_MESSAGE) == null)
	{
		// delete sucessful
		state.setAttribute (STATE_MODE, MODE_LIST);

		if (((String) state.getAttribute (STATE_SELECT_ALL_FLAG)).equals (Boolean.TRUE.toString()))
		{
			state.setAttribute (STATE_SELECT_ALL_FLAG, Boolean.FALSE.toString());
		}

	}	// if-else

}	// doDelete

/**
* doCancel to return to the previous state
*/
public static void doCancel ( RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());

	if(!isStackEmpty(state))
	{
		// mark the frame as canceled so the caller can tell, then pop it
		Map current_stack_frame = peekAtStack(state);
		current_stack_frame.put(STATE_HELPER_CANCELED_BY_USER, Boolean.TRUE.toString());

		popFromStack(state);
	}

	resetCurrentMode(state);

}	// doCancel

/**
* Paste the previously copied/cutted item(s)
*/
public void doHandlepaste ( RunData data)
{
	ParameterParser params = data.getParameters ();
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	// get the cut items to be pasted
	Vector pasteCutItems = (Vector) state.getAttribute (STATE_CUT_IDS);

	// get the copied items to be pasted
	Vector pasteCopiedItems = (Vector) state.getAttribute (STATE_COPIED_IDS);

	String collectionId = params.getString ("collectionId");
	String originalDisplayName = NULL_STRING;

	// handle cut and paste (method continues beyond this chunk)
	if (((String)
state.getAttribute (STATE_CUT_FLAG)).equals (Boolean.TRUE.toString())) { for (int i = 0; i < pasteCutItems.size (); i++) { String currentPasteCutItem = (String) pasteCutItems.get (i); try { ResourceProperties properties = ContentHostingService.getProperties (currentPasteCutItem); originalDisplayName = properties.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME); /* if (Boolean.TRUE.toString().equals(properties.getProperty (ResourceProperties.PROP_IS_COLLECTION))) { String alert = (String) state.getAttribute(STATE_MESSAGE); if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1))) { addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING); } } else { */ // paste the resource ContentResource resource = ContentHostingService.getResource (currentPasteCutItem); ResourceProperties p = ContentHostingService.getProperties(currentPasteCutItem); String id = collectionId + Validator.escapeResourceName(p.getProperty(ResourceProperties.PROP_DISPLAY_NAME)); // cut-paste to the same collection? 
boolean cutPasteSameCollection = false; String displayName = p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); // till paste successfully or it fails ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties (); // add the properties of the pasted item Iterator propertyNames = properties.getPropertyNames (); while ( propertyNames.hasNext ()) { String propertyName = (String) propertyNames.next (); if (!properties.isLiveProperty (propertyName)) { if (propertyName.equals (ResourceProperties.PROP_DISPLAY_NAME)&&(displayName.length ()>0)) { resourceProperties.addProperty (propertyName, displayName); } else { resourceProperties.addProperty (propertyName, properties.getProperty (propertyName)); } // if-else } // if } // while try { // paste the cutted resource to the new collection - no notification ContentResource newResource = ContentHostingService.addResource (id, resource.getContentType (), resource.getContent (), resourceProperties, resource.getGroups(), NotificationService.NOTI_NONE); String uuid = ContentHostingService.getUuid(resource.getId()); ContentHostingService.setUuid(id, uuid); } catch (InconsistentException e) { addAlert(state,RESOURCE_INVALID_TITLE_STRING); } catch (OverQuotaException e) { addAlert(state, rb.getString("overquota")); } catch (IdInvalidException e) { addAlert(state, rb.getString("title") + " " + e.getMessage ()); } catch(ServerOverloadException e) { // this represents temporary unavailability of server's filesystem // for server configured to save resource body in filesystem addAlert(state, rb.getString("failed")); } catch (IdUsedException e) { // cut and paste to the same collection; stop adding new resource if (id.equals(currentPasteCutItem)) { cutPasteSameCollection = true; } else { addAlert(state, rb.getString("notaddreso") + " " + id + rb.getString("used2")); /* // pasted to the same folder as before; add "Copy of "/ "copy (n) of" to the id if (countNumber==1) { displayName = DUPLICATE_STRING + 
p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); id = collectionId + Validator.escapeResourceName(displayName); } else { displayName = "Copy (" + countNumber + ") of " + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); id = collectionId + Validator.escapeResourceName(displayName); } countNumber++; */ } } // try-catch catch(RuntimeException e) { logger.warn("ResourcesAction.doHandlepaste ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); } if (!cutPasteSameCollection) { // remove the cutted resource ContentHostingService.removeResource (currentPasteCutItem); } // } // if-else } catch (InUseException e) { addAlert(state, rb.getString("someone") + " " + originalDisplayName + ". "); } catch (PermissionException e) { addAlert(state, rb.getString("notpermis7") + " " + originalDisplayName + ". "); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } catch (TypeException e) { addAlert(state, rb.getString("pasteitem") + " " + originalDisplayName + " " + rb.getString("mismatch")); } // try-catch catch(RuntimeException e) { logger.warn("ResourcesAction.doHandlepaste ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); } } // for } // cut // handling copy and paste if (Boolean.toString(true).equalsIgnoreCase((String) state.getAttribute (STATE_COPY_FLAG))) { for (int i = 0; i < pasteCopiedItems.size (); i++) { String currentPasteCopiedItem = (String) pasteCopiedItems.get (i); try { ResourceProperties properties = ContentHostingService.getProperties (currentPasteCopiedItem); originalDisplayName = properties.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME); // copy, cut and paste not operated on collections if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString())) { String alert = (String) state.getAttribute(STATE_MESSAGE); if (alert == null || ((alert != null) && 
(alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1))) { addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING); } } else { // paste the resource ContentResource resource = ContentHostingService.getResource (currentPasteCopiedItem); ResourceProperties p = ContentHostingService.getProperties(currentPasteCopiedItem); String displayName = DUPLICATE_STRING + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); String id = collectionId + Validator.escapeResourceName(displayName); ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties (); // add the properties of the pasted item Iterator propertyNames = properties.getPropertyNames (); while ( propertyNames.hasNext ()) { String propertyName = (String) propertyNames.next (); if (!properties.isLiveProperty (propertyName)) { if (propertyName.equals (ResourceProperties.PROP_DISPLAY_NAME)&&(displayName.length ()>0)) { resourceProperties.addProperty (propertyName, displayName); } else { resourceProperties.addProperty (propertyName, properties.getProperty (propertyName)); } } } try { // paste the copied resource to the new collection ContentResource newResource = ContentHostingService.addResource (id, resource.getContentType (), resource.getContent (), resourceProperties, resource.getGroups(), NotificationService.NOTI_NONE); } catch (InconsistentException e) { addAlert(state,RESOURCE_INVALID_TITLE_STRING); } catch (IdInvalidException e) { addAlert(state,rb.getString("title") + " " + e.getMessage ()); } catch (OverQuotaException e) { addAlert(state, rb.getString("overquota")); } catch (ServerOverloadException e) { addAlert(state, rb.getString("failed")); } catch (IdUsedException e) { addAlert(state, rb.getString("notaddreso") + " " + id + rb.getString("used2")); /* // copying // pasted to the same folder as before; add "Copy of " to the id if (countNumber > 1) { displayName = "Copy (" + countNumber + ") of " + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); } else 
if (countNumber == 1) { displayName = "Copy of " + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); } id = collectionId + Validator.escapeResourceName(displayName); countNumber++; */ } // try-catch catch(RuntimeException e) { logger.warn("ResourcesAction.doHandlepaste ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); } } // if-else } catch (PermissionException e) { addAlert(state, rb.getString("notpermis8") + " " + originalDisplayName + ". "); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } catch (TypeException e) { addAlert(state, rb.getString("pasteitem") + " " + originalDisplayName + " " + rb.getString("mismatch")); } // try-catch } // for } // copy if (state.getAttribute(STATE_MESSAGE) == null) { // delete sucessful state.setAttribute (STATE_MODE, MODE_LIST); // reset the cut flag if (((String)state.getAttribute (STATE_CUT_FLAG)).equals (Boolean.TRUE.toString())) { state.setAttribute (STATE_CUT_FLAG, Boolean.FALSE.toString()); } // reset the copy flag if (Boolean.toString(true).equalsIgnoreCase((String)state.getAttribute (STATE_COPY_FLAG))) { state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString()); } // try to expand the collection HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS); if(! 
expandedCollections.containsKey(collectionId))
{
	// expand the destination collection in the list view so the pasted items are visible
	org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE);
	try
	{
		ContentCollection coll = contentService.getCollection(collectionId);
		expandedCollections.put(collectionId, coll);
	}
	catch(Exception ignore){}
}
}

}	// doHandlepaste

/**
 * Paste the shortcut(s) of previously copied item(s).
 * Each shortcut is stored as a new TYPE_URL resource in the target collection whose
 * body is the URL of the copied resource; display name is prefixed with SHORTCUT_STRING.
 * @param data the portlet run data carrying request parameters and session state
 */
public void doHandlepasteshortcut ( RunData data)
{
	ParameterParser params = data.getParameters ();

	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	// get the items to be pasted
	Vector pasteItems = new Vector ();
	if (((String) state.getAttribute (STATE_COPY_FLAG)).equals (Boolean.TRUE.toString()))
	{
		pasteItems = (Vector) ( (Vector) state.getAttribute (STATE_COPIED_IDS)).clone ();
	}
	if (((String) state.getAttribute (STATE_CUT_FLAG)).equals (Boolean.TRUE.toString()))
	{
		// shortcuts can only be made from copied items, not cut items
		addAlert(state, rb.getString("choosecp"));
	}

	if (state.getAttribute(STATE_MESSAGE) == null)
	{
		String collectionId = params.getString ("collectionId");
		String originalDisplayName = NULL_STRING;

		for (int i = 0; i < pasteItems.size (); i++)
		{
			String currentPasteItem = (String) pasteItems.get (i);

			try
			{
				ResourceProperties properties = ContentHostingService.getProperties (currentPasteItem);

				originalDisplayName = properties.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME);

				if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString()))
				{
					// paste the collection
					// (not supported: collections are silently skipped here)
				}
				else
				{
					// paste the resource
					ContentResource resource = ContentHostingService.getResource (currentPasteItem);
					ResourceProperties p = ContentHostingService.getProperties(currentPasteItem);
					String displayName = SHORTCUT_STRING + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME);
					String id = collectionId + Validator.escapeResourceName(displayName);

					//int countNumber = 2;

					ResourcePropertiesEdit resourceProperties = ContentHostingService.newResourceProperties ();
					// add the properties of the pasted item, except live properties and the display name
					Iterator propertyNames = properties.getPropertyNames ();
					while ( propertyNames.hasNext ())
					{
						String propertyName = (String) propertyNames.next ();
						if ((!properties.isLiveProperty (propertyName)) && (!propertyName.equals (ResourceProperties.PROP_DISPLAY_NAME)))
						{
							resourceProperties.addProperty (propertyName, properties.getProperty (propertyName));
						}
					}
					// %%%%% should be _blank for items that can be displayed in browser, _self for others
					// resourceProperties.addProperty (ResourceProperties.PROP_OPEN_NEWWINDOW, "_self");
					resourceProperties.addProperty (ResourceProperties.PROP_DISPLAY_NAME, displayName);

					try
					{
						// the shortcut is a URL resource pointing at the copied resource's URL
						ContentResource referedResource= ContentHostingService.getResource (currentPasteItem);
						ContentResource newResource = ContentHostingService.addResource (id, ResourceProperties.TYPE_URL, referedResource.getUrl().getBytes (), resourceProperties, referedResource.getGroups(), NotificationService.NOTI_NONE);
					}
					catch (InconsistentException e)
					{
						addAlert(state, RESOURCE_INVALID_TITLE_STRING);
					}
					catch (OverQuotaException e)
					{
						addAlert(state, rb.getString("overquota"));
					}
					catch (IdInvalidException e)
					{
						addAlert(state, rb.getString("title") + " " + e.getMessage ());
					}
					catch (ServerOverloadException e)
					{
						addAlert(state, rb.getString("failed"));
					}
					catch (IdUsedException e)
					{
						addAlert(state, rb.getString("notaddreso") + " " + id + rb.getString("used2"));
						/*
						// pasted shortcut to the same folder as before; add countNumber to the id
						displayName = "Shortcut (" + countNumber + ") to " + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME);
						id = collectionId + Validator.escapeResourceName(displayName);
						countNumber++;
						*/
					}	// try-catch
					catch(RuntimeException e)
					{
						logger.warn("ResourcesAction.doHandlepasteshortcut ***** Unknown Exception ***** " + e.getMessage());
						addAlert(state, rb.getString("failed"));
					}
				}	// if-else
			}
			catch (PermissionException e)
			{
				addAlert(state, rb.getString("notpermis9") + " " + currentPasteItem.substring (currentPasteItem.lastIndexOf (Entity.SEPARATOR)+1) + ". ");
			}
			catch (IdUnusedException e)
			{
				addAlert(state,RESOURCE_NOT_EXIST_STRING);
			}
			catch (TypeException e)
			{
				addAlert(state, rb.getString("pasteitem") + " " + currentPasteItem.substring (currentPasteItem.lastIndexOf (Entity.SEPARATOR)+1) + " " + rb.getString("mismatch"));
			}	// try-catch
		}	// for
	}

	if (state.getAttribute(STATE_MESSAGE) == null)
	{
		if (((String) state.getAttribute (STATE_COPY_FLAG)).equals (Boolean.TRUE.toString()))
		{
			// reset the copy flag
			state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString());
		}

		// paste shortcut successful
		state.setAttribute (STATE_MODE, MODE_LIST);
	}

}	// doHandlepasteshortcut

/**
 * Edit the editable collection/resource properties
 */
public static void doEdit ( RunData data )
{
	ParameterParser params = data.getParameters ();

	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	Map current_stack_frame = pushOnStack(state);

	state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());

	// cancel copy if there is one in progress
	if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
	{
		initCopyContext(state);
	}

	// cancel move if there is one in progress
	if(!
Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
{
	initMoveContext(state);
}

String id = NULL_STRING;
id = params.getString ("id");
if(id == null || id.length() == 0)
{
	// there is no resource selected, show the alert message to the user
	addAlert(state, rb.getString("choosefile2"));
	return;
}
current_stack_frame.put(STATE_STACK_EDIT_ID, id);

String collectionId = (String) params.getString("collectionId");
if(collectionId == null)
{
	// default to the site's root collection when none was supplied
	collectionId = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
	state.setAttribute(STATE_HOME_COLLECTION_ID, collectionId);
}
current_stack_frame.put(STATE_STACK_EDIT_COLLECTION_ID, collectionId);

EditItem item = getEditItem(id, collectionId, data);

if (state.getAttribute(STATE_MESSAGE) == null)
{
	// got resource and successfully populated item with values
	// state.setAttribute (STATE_MODE, MODE_EDIT);
	state.setAttribute(ResourcesAction.STATE_RESOURCES_HELPER_MODE, ResourcesAction.MODE_ATTACHMENT_EDIT_ITEM_INIT);
	state.setAttribute(STATE_EDIT_ALERTS, new HashSet());
	current_stack_frame.put(STATE_STACK_EDIT_ITEM, item);
}
else
{
	// population failed; undo the stack frame pushed for this edit
	popFromStack(state);
}

}	// doEdit

/**
 * Populate an EditItem bean describing the resource or collection with the given id,
 * gathering its properties, content, access/group settings, permissions, copyright and
 * metadata so it can be displayed and revised in the properties-editing view.
 * Alerts are added to state (and null may be returned) if the entity cannot be read.
 * @param id the id of the resource or collection to edit
 * @param collectionId the id of the containing (or context) collection
 * @param data the portlet run data, used for session state and request encoding
 * @return the populated EditItem, or null if an exception prevented population
 */
public static EditItem getEditItem(String id, String collectionId, RunData data)
{
	SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());

	// NOTE(review): operations_stack appears unused in this method — confirm before removing
	Stack operations_stack = (Stack) state.getAttribute(STATE_SUSPENDED_OPERATIONS_STACK);

	Map current_stack_frame = peekAtStack(state);

	EditItem item = null;

	// populate an EditItem object with values from the resource and return the EditItem
	try
	{
		ResourceProperties properties = ContentHostingService.getProperties(id);

		boolean isCollection = false;
		try
		{
			isCollection = properties.getBooleanProperty(ResourceProperties.PROP_IS_COLLECTION);
		}
		catch(Exception e)
		{
			// assume isCollection is false if property is not set
		}

		ContentEntity entity = null;
		String itemType = "";
		byte[] content = null;
		if(isCollection)
		{
			itemType = "folder";
			entity = ContentHostingService.getCollection(id);
		}
		else
		{
			entity = ContentHostingService.getResource(id);
			itemType = ((ContentResource) entity).getContentType();
			content = ((ContentResource) entity).getContent();
		}

		String itemName = properties.getProperty(ResourceProperties.PROP_DISPLAY_NAME);
		item = new EditItem(id, itemName, itemType);
		item.setInDropbox(ContentHostingService.isInDropbox(id));

		BasicRightsAssignment rightsObj = new BasicRightsAssignment(item.getItemNum(), properties);
		item.setRights(rightsObj);

		String encoding = data.getRequest().getCharacterEncoding();
		if(encoding != null)
		{
			item.setEncoding(encoding);
		}

		// fall back to the server-wide default copyright if none is in state
		String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
		if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
		{
			defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
			state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
		}

		item.setCopyrightStatus(defaultCopyrightStatus);

		if(content != null)
		{
			item.setContent(content);
		}

		// NOTE(review): dummyId is built but never used below — confirm before removing
		String dummyId = collectionId.trim();
		if(dummyId.endsWith(Entity.SEPARATOR))
		{
			dummyId += "dummy";
		}
		else
		{
			dummyId += Entity.SEPARATOR + "dummy";
		}

		String containerId = ContentHostingService.getContainingCollectionId (id);
		item.setContainer(containerId);

		// NOTE(review): canRead is derived from allowGetCollection even for non-collections — confirm intended
		boolean canRead = ContentHostingService.allowGetCollection(id);
		boolean canAddFolder = ContentHostingService.allowAddCollection(id);
		boolean canAddItem = ContentHostingService.allowAddResource(id);
		boolean canDelete = ContentHostingService.allowRemoveResource(id);
		boolean canRevise = ContentHostingService.allowUpdateResource(id);
		item.setCanRead(canRead);
		item.setCanRevise(canRevise);
		item.setCanAddItem(canAddItem);
		item.setCanAddFolder(canAddFolder);
		item.setCanDelete(canDelete);
		// item.setIsUrl(isUrl);

		// access mode of the entity itself (null means inherited)
		AccessMode access = ((GroupAwareEntity) entity).getAccess();
		if(access == null)
		{
			item.setAccess(AccessMode.INHERITED.toString());
		}
		else
		{
			item.setAccess(access.toString());
		}

		AccessMode inherited_access = ((GroupAwareEntity) entity).getInheritedAccess();
		if(inherited_access == null || inherited_access.equals(AccessMode.SITE))
		{
			item.setInheritedAccess(AccessMode.INHERITED.toString());
		}
		else
		{
			item.setInheritedAccess(inherited_access.toString());
		}

		Site site = SiteService.getSite(ToolManager.getCurrentPlacement().getContext());
		Collection site_groups = site.getGroups();
		item.setAllSiteGroups(site_groups);

		List access_groups = new Vector(((GroupAwareEntity) entity).getGroups());
		item.setEntityGroupRefs(access_groups);
		// if(access_groups != null)
		// {
		//
		// Iterator it = access_groups.iterator();
		// while(it.hasNext())
		// {
		// String groupRef = (String) it.next();
		// Group group = site.getGroup(groupRef);
		// item.addGroup(group.getId());
		// }
		// }

		List inherited_access_groups = new Vector(((GroupAwareEntity) entity).getInheritedGroups());
		item.setInheritedGroupRefs(inherited_access_groups);
		// if(inherited_access_groups != null)
		// {
		// Iterator it = inherited_access_groups.iterator();
		// while(it.hasNext())
		// {
		// String groupRef = (String) it.next();
		// Group group = site.getGroup(groupRef);
		// item.addInheritedGroup(group.getId());
		// }
		// }

		// groups the current user could remove this entity from, depending on where access is defined
		Collection allowedRemoveGroups = null;
		if(AccessMode.GROUPED == access)
		{
			allowedRemoveGroups = ContentHostingService.getGroupsWithRemovePermission(id);
			Collection more = ContentHostingService.getGroupsWithRemovePermission(collectionId);
			if(more != null && ! more.isEmpty())
			{
				allowedRemoveGroups.addAll(more);
			}
		}
		else if(AccessMode.GROUPED == inherited_access)
		{
			allowedRemoveGroups = ContentHostingService.getGroupsWithRemovePermission(collectionId);
		}
		else
		{
			allowedRemoveGroups = ContentHostingService.getGroupsWithRemovePermission(ContentHostingService.getSiteCollection(site.getId()));
		}
		item.setAllowedRemoveGroupRefs(allowedRemoveGroups);

		// groups the current user could add this entity to, depending on where access is defined
		Collection allowedAddGroups = null;
		if(AccessMode.GROUPED == access)
		{
			allowedAddGroups = ContentHostingService.getGroupsWithAddPermission(id);
			Collection more = ContentHostingService.getGroupsWithAddPermission(collectionId);
			if(more != null && ! more.isEmpty())
			{
				allowedAddGroups.addAll(more);
			}
		}
		else if(AccessMode.GROUPED == inherited_access)
		{
			allowedAddGroups = ContentHostingService.getGroupsWithAddPermission(collectionId);
		}
		else
		{
			allowedAddGroups = ContentHostingService.getGroupsWithAddPermission(ContentHostingService.getSiteCollection(site.getId()));
		}
		item.setAllowedAddGroupRefs(allowedAddGroups);

		Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
		if(preventPublicDisplay == null)
		{
			preventPublicDisplay = Boolean.FALSE;
			state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
		}

		if(preventPublicDisplay.booleanValue())
		{
			item.setPubviewPossible(false);
			item.setPubviewInherited(false);
			item.setPubview(false);
		}
		else
		{
			item.setPubviewPossible(true);
			// find out about pubview
			boolean pubviewset = ContentHostingService.isInheritingPubView(id);
			item.setPubviewInherited(pubviewset);
			boolean pubview = pubviewset;
			if (!pubviewset)
			{
				pubview = ContentHostingService.isPubView(id);
				item.setPubview(pubview);
			}
		}

		if(item.isUrl())
		{
			// for URL resources the body bytes are the URL itself
			String url = new String(content);
			item.setFilename(url);
		}
		else if(item.isStructuredArtifact())
		{
			// parse the structured-artifact XML body into the item's form
			String formtype = properties.getProperty(ResourceProperties.PROP_STRUCTOBJ_TYPE);
			current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype);
			current_stack_frame.put(STATE_STACK_EDIT_ITEM, item);
			setupStructuredObjects(state);
			Document doc = Xml.readDocumentFromString(new String(content));
			Element root = doc.getDocumentElement();
			importStructuredArtifact(root, item.getForm());
			List flatList = item.getForm().getFlatList();
			item.setProperties(flatList);
		}
		else if(item.isHtml() || item.isPlaintext() || item.isFileUpload())
		{
			String filename = properties.getProperty(ResourceProperties.PROP_ORIGINAL_FILENAME);
			if(filename == null)
			{
				// this is a hack to deal with the fact that original filenames were not saved for some time.
				if(containerId != null && item.getId().startsWith(containerId) && containerId.length() < item.getId().length())
				{
					filename = item.getId().substring(containerId.length());
				}
			}

			if(filename == null)
			{
				item.setFilename(itemName);
			}
			else
			{
				item.setFilename(filename);
			}
		}

		String description = properties.getProperty(ResourceProperties.PROP_DESCRIPTION);
		item.setDescription(description);

		try
		{
			Time creTime = properties.getTimeProperty(ResourceProperties.PROP_CREATION_DATE);
			String createdTime = creTime.toStringLocalShortDate() + " " + creTime.toStringLocalShort();
			item.setCreatedTime(createdTime);
		}
		catch(Exception e)
		{
			// fall back to the raw property string if it does not parse as a Time
			String createdTime = properties.getProperty(ResourceProperties.PROP_CREATION_DATE);
			item.setCreatedTime(createdTime);
		}
		try
		{
			String createdBy = getUserProperty(properties, ResourceProperties.PROP_CREATOR).getDisplayName();
			item.setCreatedBy(createdBy);
		}
		catch(Exception e)
		{
			// fall back to the raw user-id string if the user cannot be resolved
			String createdBy = properties.getProperty(ResourceProperties.PROP_CREATOR);
			item.setCreatedBy(createdBy);
		}
		try
		{
			Time modTime = properties.getTimeProperty(ResourceProperties.PROP_MODIFIED_DATE);
			String modifiedTime = modTime.toStringLocalShortDate() + " " + modTime.toStringLocalShort();
			item.setModifiedTime(modifiedTime);
		}
		catch(Exception e)
		{
			String modifiedTime = properties.getProperty(ResourceProperties.PROP_MODIFIED_DATE);
			item.setModifiedTime(modifiedTime);
		}
		try
		{
			String modifiedBy = getUserProperty(properties, ResourceProperties.PROP_MODIFIED_BY).getDisplayName();
			item.setModifiedBy(modifiedBy);
		}
		catch(Exception e)
		{
			String modifiedBy = properties.getProperty(ResourceProperties.PROP_MODIFIED_BY);
			item.setModifiedBy(modifiedBy);
		}

		String url = ContentHostingService.getUrl(id);
		item.setUrl(url);

		String size = "";
		if(properties.getProperty(ResourceProperties.PROP_CONTENT_LENGTH) != null)
		{
			size = properties.getPropertyFormatted(ResourceProperties.PROP_CONTENT_LENGTH) + " (" + Validator.getFileSizeWithDividor(properties.getProperty(ResourceProperties.PROP_CONTENT_LENGTH)) +" bytes)";
		}
		item.setSize(size);

		String copyrightStatus = properties.getProperty(properties.getNamePropCopyrightChoice());
		if(copyrightStatus == null || copyrightStatus.trim().equals(""))
		{
			copyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
		}
		item.setCopyrightStatus(copyrightStatus);
		String copyrightInfo = properties.getPropertyFormatted(properties.getNamePropCopyright());
		item.setCopyrightInfo(copyrightInfo);
		String copyrightAlert = properties.getProperty(properties.getNamePropCopyrightAlert());
		if("true".equalsIgnoreCase(copyrightAlert))
		{
			item.setCopyrightAlert(true);
		}
		else
		{
			item.setCopyrightAlert(false);
		}

		// collect values for each configured metadata group; date-like widgets become Time objects
		Map metadata = new Hashtable();
		List groups = (List) state.getAttribute(STATE_METADATA_GROUPS);
		if(groups != null && ! groups.isEmpty())
		{
			Iterator it = groups.iterator();
			while(it.hasNext())
			{
				MetadataGroup group = (MetadataGroup) it.next();
				Iterator propIt = group.iterator();
				while(propIt.hasNext())
				{
					ResourcesMetadata prop = (ResourcesMetadata) propIt.next();
					String name = prop.getFullname();
					String widget = prop.getWidget();
					if(widget.equals(ResourcesMetadata.WIDGET_DATE) || widget.equals(ResourcesMetadata.WIDGET_DATETIME) || widget.equals(ResourcesMetadata.WIDGET_TIME))
					{
						Time time = TimeService.newTime();
						try
						{
							time = properties.getTimeProperty(name);
						}
						catch(Exception ignore)
						{
							// use "now" as default in that case
						}
						metadata.put(name, time);
					}
					else
					{
						String value = properties.getPropertyFormatted(name);
						metadata.put(name, value);
					}
				}
			}
			item.setMetadata(metadata);
		}
		else
		{
			item.setMetadata(new Hashtable());
		}

		// for collections only
		if(item.isFolder())
		{
			// setup for quota - ADMIN only, site-root collection only
			if (SecurityService.isSuperUser())
			{
				Reference ref = EntityManager.newReference(entity.getReference());
				String context = ref.getContext();
				String siteCollectionId = ContentHostingService.getSiteCollection(context);
				if(siteCollectionId.equals(entity.getId()))
				{
					item.setCanSetQuota(true);
					try
					{
						long quota = properties.getLongProperty(ResourceProperties.PROP_COLLECTION_BODY_QUOTA);
						item.setHasQuota(true);
						item.setQuota(Long.toString(quota));
					}
					catch (Exception any)
					{
						// no quota property set on this collection
					}
				}
			}
		}
	}
	catch (IdUnusedException e)
	{
		addAlert(state, RESOURCE_NOT_EXIST_STRING);
	}
	catch (PermissionException e)
	{
		addAlert(state, rb.getString("notpermis2") + " " + id + ". " );
	}
	catch(TypeException e)
	{
		addAlert(state," " + rb.getString("typeex") + " " + id);
	}
	catch(ServerOverloadException e)
	{
		// this represents temporary unavailability of server's filesystem
		// for server configured to save resource body in filesystem
		addAlert(state, rb.getString("failed"));
	}
	catch(RuntimeException e)
	{
		logger.warn("ResourcesAction.doEdit ***** Unknown Exception ***** " + e.getMessage());
		addAlert(state, rb.getString("failed"));
	}

	return item;

}

/**
 * This method updates the session state with information needed to create or modify
 * structured artifacts in the resources tool. Among other things, it obtains a list
 * of "forms" available to the user and places that list in state indexed as
 * "STATE_STRUCTOBJ_HOMES". If the current formtype is known (in state indexed as
 * "STATE_STACK_STRUCTOBJ_TYPE"), the list of properties associated with that form type is
 * generated. If we are in a "create" context, the properties are added to each of
 * the items in the list of items indexed as "STATE_STACK_CREATE_ITEMS". If we are in an
 * "edit" context, the properties are added to the current item being edited (a state
 * attribute indexed as "STATE_STACK_EDIT_ITEM"). The metaobj SchemaBean associated with
 * the current form and its root SchemaNode object are also placed in state for later
 * reference.
*/
public static void setupStructuredObjects(SessionState state)
{
	Map current_stack_frame = peekAtStack(state);

	// determine the current form type, falling back from the stack frame to global state
	String formtype = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE);
	if(formtype == null)
	{
		formtype = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE);
		if(formtype == null)
		{
			formtype = "";
		}
		current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype);
	}

	// collect the structured-artifact "homes" (form definitions) available to the user
	HomeFactory factory = (HomeFactory) ComponentManager.get("homeFactory");
	Map homes = factory.getHomes(StructuredArtifactHomeInterface.class);

	List listOfHomes = new Vector();
	Iterator it = homes.keySet().iterator();
	while(it.hasNext())
	{
		String key = (String) it.next();
		try
		{
			Object obj = homes.get(key);
			listOfHomes.add(obj);
		}
		catch(Exception ignore)
		{
		}
	}
	current_stack_frame.put(STATE_STRUCTOBJ_HOMES, listOfHomes);

	StructuredArtifactHomeInterface home = null;
	SchemaBean rootSchema = null;
	ResourcesMetadata elements = null;

	if(formtype == null || formtype.equals(""))
	{
		formtype = "";
		current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype);
	}
	else if(listOfHomes.isEmpty())
	{
		// hmmm
	}
	else
	{
		try
		{
			home = (StructuredArtifactHomeInterface) factory.getHome(formtype);
		}
		catch(NullPointerException ignore)
		{
			home = null;
		}
	}

	if(home != null)
	{
		// build the hierarchical description of the form's fields from its schema
		rootSchema = new SchemaBean(home.getRootNode(), home.getSchema(), formtype, home.getType().getDescription());
		List fields = rootSchema.getFields();
		String docRoot = rootSchema.getFieldName();
		elements = new ResourcesMetadata("", docRoot, "", "", ResourcesMetadata.WIDGET_NESTED, ResourcesMetadata.WIDGET_NESTED);
		elements.setDottedparts(docRoot);
		elements.setContainer(null);

		elements = createHierarchicalList(elements, fields, 1);

		String instruction = home.getInstruction();

		current_stack_frame.put(STATE_STACK_STRUCTOBJ_ROOTNAME, docRoot);
		current_stack_frame.put(STATE_STACK_STRUCT_OBJ_SCHEMA, rootSchema);

		List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
		if(new_items != null)
		{
			// "create" context: apply the form description to each item being created
			Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
			if(number == null)
			{
				number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
				current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
			}
			if(number == null)
			{
				number = new Integer(1);
				current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
			}
			List flatList = elements.getFlatList();
			for(int i = 0; i < number.intValue(); i++)
			{
				//%%%%% doing this wipes out data that's been stored previously
				EditItem item = (EditItem) new_items.get(i);
				item.setRootname(docRoot);
				item.setFormtype(formtype);
				item.setInstruction(instruction);
				item.setProperties(flatList);
				item.setForm(elements);
			}
			current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
		}
		else if(current_stack_frame.get(STATE_STACK_EDIT_ITEM) != null)
		{
			// "edit" context: apply the form description to the item being edited
			EditItem item = (EditItem) current_stack_frame.get(STATE_STACK_EDIT_ITEM);
			item.setRootname(docRoot);
			item.setFormtype(formtype);
			item.setInstruction(instruction);
			item.setForm(elements);
		}
	}

}	// setupStructuredObjects

/**
 * This method navigates through a list of SchemaNode objects representing fields in a form,
 * creates a ResourcesMetadata object for each field and adds those as nested fields within
 * a root element. If a field contains nested fields, a recursive call adds nested fields
 * in the corresponding ResourcesMetadata object.
 * @param element The root element to which field descriptions are added.
 * @param fields A list of metaobj SchemaNode objects.
 * @param depth The depth of nesting, corresponding to the amount of indent that will be used
 * when displaying the list.
 * @return The updated root element.
*/
private static ResourcesMetadata createHierarchicalList(ResourcesMetadata element, List fields, int depth)
{
	List properties = new Vector();
	for(Iterator fieldIt = fields.iterator(); fieldIt.hasNext(); )
	{
		SchemaBean field = (SchemaBean) fieldIt.next();
		SchemaNode node = field.getSchema();
		Map annotations = field.getAnnotations();
		Pattern pattern = null;
		String localname = field.getFieldName();
		String description = field.getDescription();
		String label = (String) annotations.get("label");
		if(label == null || label.trim().equals(""))
		{
			label = description;
		}
		String richText = (String) annotations.get("isRichText");
		boolean isRichText = richText != null && richText.equalsIgnoreCase(Boolean.TRUE.toString());

		// choose a widget type based on the field's Java type and schema facets
		Class javaclass = node.getObjectType();
		String typename = javaclass.getName();
		String widget = ResourcesMetadata.WIDGET_STRING;
		int length = 0;
		List enumerals = null;

		if(field.getFields().size() > 0)
		{
			// field has children: render as a nested group
			widget = ResourcesMetadata.WIDGET_NESTED;
		}
		else if(node.hasEnumerations())
		{
			enumerals = node.getEnumeration();
			typename = String.class.getName();
			widget = ResourcesMetadata.WIDGET_ENUM;
		}
		else if(typename.equals(String.class.getName()))
		{
			length = node.getType().getMaxLength();
			String baseType = node.getType().getBaseType();
			if(isRichText)
			{
				widget = ResourcesMetadata.WIDGET_WYSIWYG;
			}
			else if(baseType.trim().equalsIgnoreCase(ResourcesMetadata.NAMESPACE_XSD_ABBREV + ResourcesMetadata.XSD_NORMALIZED_STRING))
			{
				widget = ResourcesMetadata.WIDGET_STRING;
				if(length > 50)
				{
					length = 50;
				}
			}
			else if(length > 100 || length < 1)
			{
				// very long (or unbounded) strings get a textarea
				widget = ResourcesMetadata.WIDGET_TEXTAREA;
			}
			else if(length > 50)
			{
				length = 50;
			}
			pattern = node.getType().getPattern();
		}
		else if(typename.equals(Date.class.getName()))
		{
			widget = ResourcesMetadata.WIDGET_DATE;
		}
		else if(typename.equals(Boolean.class.getName()))
		{
			widget = ResourcesMetadata.WIDGET_BOOLEAN;
		}
		else if(typename.equals(URI.class.getName()))
		{
			widget = ResourcesMetadata.WIDGET_ANYURI;
		}
		else if(typename.equals(Number.class.getName()))
		{
			widget = ResourcesMetadata.WIDGET_INTEGER;
			//length = node.getType().getTotalDigits();
			length = INTEGER_WIDGET_LENGTH;
		}
		else if(typename.equals(Double.class.getName()))
		{
			widget = ResourcesMetadata.WIDGET_DOUBLE;
			length = DOUBLE_WIDGET_LENGTH;
		}

		// normalize cardinality bounds (maxOccurs < 1 means unbounded)
		int minCard = node.getMinOccurs();
		int maxCard = node.getMaxOccurs();
		if(maxCard < 1)
		{
			maxCard = Integer.MAX_VALUE;
		}
		if(minCard < 0)
		{
			minCard = 0;
		}
		minCard = java.lang.Math.max(0,minCard);
		maxCard = java.lang.Math.max(1,maxCard);
		int currentCount = java.lang.Math.min(java.lang.Math.max(1,minCard),maxCard);

		ResourcesMetadata prop = new ResourcesMetadata(element.getDottedname(), localname, label, description, typename, widget);
		List parts = new Vector(element.getDottedparts());
		parts.add(localname);
		prop.setDottedparts(parts);
		prop.setContainer(element);

		if(ResourcesMetadata.WIDGET_NESTED.equals(widget))
		{
			// recurse into the child fields of a nested group
			prop = createHierarchicalList(prop, field.getFields(), depth + 1);
		}

		prop.setMinCardinality(minCard);
		prop.setMaxCardinality(maxCard);
		prop.setCurrentCount(currentCount);
		prop.setDepth(depth);

		if(enumerals != null)
		{
			prop.setEnumeration(enumerals);
		}

		if(length > 0)
		{
			prop.setLength(length);
		}
		if(pattern != null)
		{
			prop.setPattern(pattern);
		}
		properties.add(prop);
	}
	element.setNested(properties);

	return element;

}	// createHierarchicalList

/**
 * This method captures property values from an org.w3c.dom.Document and inserts them
 * into a hierarchical list of ResourcesMetadata objects which describes the structure
 * of the form. The values are added by inserting nested instances into the properties.
 *
 * @param node An org.w3c.dom.Node containing values to be imported.
 * @param property A hierarchical list of ResourcesMetadata objects describing a form
 */
public static void importStructuredArtifact(Node node, ResourcesMetadata property)
{
	if(property == null || node == null)
	{
		return;
	}
	String tagname = property.getLocalname();
	String nodename = node.getLocalName();
	if(!
tagname.equals(nodename))
{
	// NOTE(review): mismatched tag names are deliberately not rejected — the return is commented out
	// return;
}
if(property.getNested().size() == 0)
{
	// leaf property: take the first text child as its value
	boolean value_found = false;
	Node child = node.getFirstChild();
	while(! value_found && child != null)
	{
		if(child.getNodeType() == Node.TEXT_NODE)
		{
			Text value = (Text) child;
			if(ResourcesMetadata.WIDGET_DATE.equals(property.getWidget()) || ResourcesMetadata.WIDGET_DATETIME.equals(property.getWidget()) || ResourcesMetadata.WIDGET_TIME.equals(property.getWidget()))
			{
				SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
				Time time = TimeService.newTime();
				try
				{
					Date date = df.parse(value.getData());
					time = TimeService.newTime(date.getTime());
				}
				catch(Exception ignore)
				{
					// use "now" as default in that case
				}
				property.setValue(0, time);
			}
			else if(ResourcesMetadata.WIDGET_ANYURI.equals(property.getWidget()))
			{
				Reference ref = EntityManager.newReference(ContentHostingService.getReference(value.getData()));
				property.setValue(0, ref);
			}
			else
			{
				property.setValue(0, value.getData());
			}
		}
		child = child.getNextSibling();
	}
}
else if(node instanceof Element)
{
	// a nested element: recurse into each matching child element, adding an instance per occurrence
	Iterator nestedIt = property.getNested().iterator();
	while(nestedIt.hasNext())
	{
		ResourcesMetadata prop = (ResourcesMetadata) nestedIt.next();
		NodeList nodes = ((Element) node).getElementsByTagName(prop.getLocalname());
		if(nodes == null)
		{
			continue;
		}
		for(int i = 0; i < nodes.getLength(); i++)
		{
			Node n = nodes.item(i);
			if(n != null)
			{
				ResourcesMetadata instance = prop.addInstance();
				if(instance != null)
				{
					importStructuredArtifact(n, instance);
				}
			}
		}
	}
}

}	// importStructuredArtifact

/**
 * Normalize and validate a user-entered URL, prefixing "http://" when no transport is
 * present; throws MalformedURLException when the input cannot be made into a valid URL.
 */
protected static String validateURL(String url) throws MalformedURLException
{
	if (url.equals (NULL_STRING))
	{
		// ignore the empty url field
	}
	else if (url.indexOf ("://") == -1)
	{
		// if it's missing the transport, add http://
		url = "http://" + url;
	}

	if(!url.equals(NULL_STRING))
	{
		// valid protocol?
		try
		{
			// test to see if the input validates as a URL.
			// Checks string for format only.
URL u = new URL(url); } catch (MalformedURLException e1) { try { Pattern pattern = Pattern.compile("\\s*([a-zA-Z0-9]+)://([^\\n]+)"); Matcher matcher = pattern.matcher(url); if(matcher.matches()) { // if URL has "unknown" protocol, check remaider with // "http" protocol and accept input it that validates. URL test = new URL("http://" + matcher.group(2)); } else { throw e1; } } catch (MalformedURLException e2) { throw e1; } } } return url; } /** * Retrieve values for an item from edit context. Edit context contains just one item at a time of a known type * (folder, file, text document, structured-artifact, etc). This method retrieves the data apppropriate to the * type and updates the values of the EditItem stored as the STATE_STACK_EDIT_ITEM attribute in state. * @param state * @param params * @param item */ protected static void captureValues(SessionState state, ParameterParser params) { Map current_stack_frame = peekAtStack(state); EditItem item = (EditItem) current_stack_frame.get(STATE_STACK_EDIT_ITEM); Set alerts = (Set) state.getAttribute(STATE_EDIT_ALERTS); if(alerts == null) { alerts = new HashSet(); state.setAttribute(STATE_EDIT_ALERTS, alerts); } String flow = params.getString("flow"); boolean intentChanged = "intentChanged".equals(flow); String check_fileName = params.getString("check_fileName"); boolean expectFile = "true".equals(check_fileName); String intent = params.getString("intent"); String oldintent = (String) current_stack_frame.get(STATE_STACK_EDIT_INTENT); boolean upload_file = expectFile && item.isFileUpload() || ((item.isHtml() || item.isPlaintext()) && !intentChanged && INTENT_REPLACE_FILE.equals(intent) && INTENT_REPLACE_FILE.equals(oldintent)); boolean revise_file = (item.isHtml() || item.isPlaintext()) && !intentChanged && INTENT_REVISE_FILE.equals(intent) && INTENT_REVISE_FILE.equals(oldintent); String name = params.getString("name"); if(name == null || "".equals(name.trim())) { alerts.add(rb.getString("titlenotnull")); // 
addAlert(state, rb.getString("titlenotnull")); } else { item.setName(name.trim()); } String description = params.getString("description"); if(description == null) { item.setDescription(""); } else { item.setDescription(description); } item.setContentHasChanged(false); if(upload_file) { String max_file_size_mb = (String) state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE); int max_bytes = 1096 * 1096; try { max_bytes = Integer.parseInt(max_file_size_mb) * 1096 * 1096; } catch(Exception e) { // if unable to parse an integer from the value // in the properties file, use 1 MB as a default max_file_size_mb = "1"; max_bytes = 1096 * 1096; } /* // params.getContentLength() returns m_req.getContentLength() if(params.getContentLength() >= max_bytes) { alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); } else */ { // check for file replacement FileItem fileitem = params.getFileItem("fileName"); if(fileitem == null) { // "The user submitted a file to upload but it was too big!" alerts.clear(); alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); //item.setMissing("fileName"); } else if (fileitem.getFileName() == null || fileitem.getFileName().length() == 0) { if(item.getContent() == null || item.getContent().length <= 0) { // "The user submitted the form, but didn't select a file to upload!" alerts.add(rb.getString("choosefile") + ". 
"); //item.setMissing("fileName"); } } else if (fileitem.getFileName().length() > 0) { String filename = Validator.getFileName(fileitem.getFileName()); byte[] bytes = fileitem.get(); String contenttype = fileitem.getContentType(); if(bytes.length >= max_bytes) { alerts.clear(); alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); // item.setMissing("fileName"); } else if(bytes.length > 0) { item.setContent(bytes); item.setContentHasChanged(true); item.setMimeType(contenttype); item.setFilename(filename); } } } } else if(revise_file) { // check for input from editor (textarea) String content = params.getString("content"); if(content != null) { item.setContent(content); item.setContentHasChanged(true); } } else if(item.isUrl()) { String url = params.getString("Url"); if(url == null || url.trim().equals("")) { item.setFilename(""); alerts.add(rb.getString("validurl")); } else { // valid protocol? item.setFilename(url); try { // test format of input URL u = new URL(url); } catch (MalformedURLException e1) { try { // if URL did not validate, check whether the problem was an // unrecognized protocol, and accept input if that's the case. 
Pattern pattern = Pattern.compile("\\s*([a-zA-Z0-9]+)://([^\\n]+)"); Matcher matcher = pattern.matcher(url); if(matcher.matches()) { URL test = new URL("http://" + matcher.group(2)); } else { url = "http://" + url; URL test = new URL(url); item.setFilename(url); } } catch (MalformedURLException e2) { // invalid url alerts.add(rb.getString("validurl")); } } } } else if(item.isFolder()) { if(item.canSetQuota()) { // read the quota fields String setQuota = params.getString("setQuota"); boolean hasQuota = params.getBoolean("hasQuota"); item.setHasQuota(hasQuota); if(hasQuota) { int q = params.getInt("quota"); item.setQuota(Integer.toString(q)); } } } else if(item.isStructuredArtifact()) { String formtype = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE); if(formtype == null) { formtype = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE); if(formtype == null) { formtype = ""; } current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype); } String formtype_check = params.getString("formtype"); if(formtype_check == null || formtype_check.equals("")) { alerts.add(rb.getString("type")); item.setMissing("formtype"); } else if(formtype_check.equals(formtype)) { item.setFormtype(formtype); capturePropertyValues(params, item, item.getProperties()); } } if(! item.isFolder() && ! item.isStructuredArtifact() && ! item.isUrl()) { String mime_category = params.getString("mime_category"); String mime_subtype = params.getString("mime_subtype"); if(mime_category != null && mime_subtype != null) { String mimetype = mime_category + "/" + mime_subtype; if(! 
mimetype.equals(item.getMimeType())) { item.setMimeType(mimetype); item.setContentTypeHasChanged(true); } } } if(item.isFileUpload() || item.isHtml() || item.isPlaintext()) { BasicRightsAssignment rightsObj = item.getRights(); rightsObj.captureValues(params); boolean usingCreativeCommons = state.getAttribute(STATE_USING_CREATIVE_COMMONS) != null && state.getAttribute(STATE_USING_CREATIVE_COMMONS).equals(Boolean.TRUE.toString()); if(usingCreativeCommons) { String ccOwnership = params.getString("ccOwnership"); if(ccOwnership != null) { item.setRightsownership(ccOwnership); } String ccTerms = params.getString("ccTerms"); if(ccTerms != null) { item.setLicense(ccTerms); } String ccCommercial = params.getString("ccCommercial"); if(ccCommercial != null) { item.setAllowCommercial(ccCommercial); } String ccModification = params.getString("ccModification"); if(ccCommercial != null) { item.setAllowModifications(ccModification); } String ccRightsYear = params.getString("ccRightsYear"); if(ccRightsYear != null) { item.setRightstyear(ccRightsYear); } String ccRightsOwner = params.getString("ccRightsOwner"); if(ccRightsOwner != null) { item.setRightsowner(ccRightsOwner); } /* ccValues.ccOwner = new Array(); ccValues.myRights = new Array(); ccValues.otherRights = new Array(); ccValues.ccCommercial = new Array(); ccValues.ccModifications = new Array(); ccValues.ccRightsYear = new Array(); ccValues.ccRightsOwner = new Array(); */ } else { // check for copyright status // check for copyright info // check for copyright alert String copyrightStatus = StringUtil.trimToNull(params.getString ("copyrightStatus")); String copyrightInfo = StringUtil.trimToNull(params.getCleanString ("copyrightInfo")); String copyrightAlert = StringUtil.trimToNull(params.getString("copyrightAlert")); if (copyrightStatus != null) { if (state.getAttribute(COPYRIGHT_NEW_COPYRIGHT) != null && copyrightStatus.equals(state.getAttribute(COPYRIGHT_NEW_COPYRIGHT))) { if (copyrightInfo != null) { 
item.setCopyrightInfo( copyrightInfo ); } else { alerts.add(rb.getString("specifycp2")); // addAlert(state, rb.getString("specifycp2")); } } else if (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT) != null && copyrightStatus.equals (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT))) { item.setCopyrightInfo((String) state.getAttribute (STATE_MY_COPYRIGHT)); } item.setCopyrightStatus( copyrightStatus ); } item.setCopyrightAlert(copyrightAlert != null); } } if(! RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } String access_mode = params.getString("access_mode"); if(access_mode == null || AccessMode.GROUPED.toString().equals(access_mode)) { // we inherit more than one group and must check whether group access changes at this item String[] access_groups = params.getStrings("access_groups"); SortedSet new_groups = new TreeSet(); if(access_groups != null) { new_groups.addAll(Arrays.asList(access_groups)); } new_groups = item.convertToRefs(new_groups); Collection inh_grps = item.getInheritedGroupRefs(); boolean groups_are_inherited = (new_groups.size() == inh_grps.size()) && inh_grps.containsAll(new_groups); if(groups_are_inherited) { new_groups.clear(); item.setEntityGroupRefs(new_groups); item.setAccess(AccessMode.INHERITED.toString()); } else { item.setEntityGroupRefs(new_groups); item.setAccess(AccessMode.GROUPED.toString()); } item.setPubview(false); } else if(PUBLIC_ACCESS.equals(access_mode)) { if(! preventPublicDisplay.booleanValue() && ! 
item.isPubviewInherited()) { item.setPubview(true); item.setAccess(AccessMode.INHERITED.toString()); } } else if(AccessMode.INHERITED.toString().equals(access_mode)) { item.setAccess(AccessMode.INHERITED.toString()); item.clearGroups(); item.setPubview(false); } } int noti = NotificationService.NOTI_NONE; // %%STATE_MODE_RESOURCES%% if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { // set noti to none if in dropbox mode noti = NotificationService.NOTI_NONE; } else { // read the notification options String notification = params.getString("notify"); if ("r".equals(notification)) { noti = NotificationService.NOTI_REQUIRED; } else if ("o".equals(notification)) { noti = NotificationService.NOTI_OPTIONAL; } } item.setNotification(noti); List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS); if(metadataGroups != null && ! metadataGroups.isEmpty()) { Iterator groupIt = metadataGroups.iterator(); while(groupIt.hasNext()) { MetadataGroup group = (MetadataGroup) groupIt.next(); if(group.isShowing()) { Iterator propIt = group.iterator(); while(propIt.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) propIt.next(); String propname = prop.getFullname(); if(ResourcesMetadata.WIDGET_DATE.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_DATETIME.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_TIME.equals(prop.getWidget())) { int year = 0; int month = 0; int day = 0; int hour = 0; int minute = 0; int second = 0; int millisecond = 0; String ampm = ""; if(prop.getWidget().equals(ResourcesMetadata.WIDGET_DATE) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME)) { year = params.getInt(propname + "_year", year); month = params.getInt(propname + "_month", month); day = params.getInt(propname + "_day", day); } if(prop.getWidget().equals(ResourcesMetadata.WIDGET_TIME) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME)) { hour = params.getInt(propname + "_hour", hour); minute = 
params.getInt(propname + "_minute", minute); second = params.getInt(propname + "_second", second); millisecond = params.getInt(propname + "_millisecond", millisecond); ampm = params.getString(propname + "_ampm").trim(); if("pm".equalsIgnoreCase("ampm")) { if(hour < 12) { hour += 12; } } else if(hour == 12) { hour = 0; } } if(hour > 23) { hour = hour % 24; day++; } Time value = TimeService.newTimeLocal(year, month, day, hour, minute, second, millisecond); item.setMetadataItem(propname,value); } else { String value = params.getString(propname); if(value != null) { item.setMetadataItem(propname, value); } } } } } } current_stack_frame.put(STATE_STACK_EDIT_ITEM, item); state.setAttribute(STATE_EDIT_ALERTS, alerts); } // captureValues /** * Retrieve from an html form all the values needed to create a new resource * @param item The EditItem object in which the values are temporarily stored. * @param index The index of the item (used as a suffix in the name of the form element) * @param state * @param params * @param markMissing Indicates whether to mark required elements if they are missing. 
* @return */ public static Set captureValues(EditItem item, int index, SessionState state, ParameterParser params, boolean markMissing) { Map current_stack_frame = peekAtStack(state); Set item_alerts = new HashSet(); boolean blank_entry = true; item.clearMissing(); String name = params.getString("name" + index); if(name == null || name.trim().equals("")) { if(markMissing) { item_alerts.add(rb.getString("titlenotnull")); item.setMissing("name"); } item.setName(""); // addAlert(state, rb.getString("titlenotnull")); } else { item.setName(name); blank_entry = false; } String description = params.getString("description" + index); if(description == null || description.trim().equals("")) { item.setDescription(""); } else { item.setDescription(description); blank_entry = false; } item.setContentHasChanged(false); if(item.isFileUpload()) { String max_file_size_mb = (String) state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE); int max_bytes = 1096 * 1096; try { max_bytes = Integer.parseInt(max_file_size_mb) * 1096 * 1096; } catch(Exception e) { // if unable to parse an integer from the value // in the properties file, use 1 MB as a default max_file_size_mb = "1"; max_bytes = 1096 * 1096; } /* // params.getContentLength() returns m_req.getContentLength() if(params.getContentLength() >= max_bytes) { item_alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); } else */ { // check for file replacement FileItem fileitem = null; try { fileitem = params.getFileItem("fileName" + index); } catch(Exception e) { // this is an error in Firefox, Mozilla and Netscape // "The user didn't select a file to upload!" if(item.getContent() == null || item.getContent().length <= 0) { item_alerts.add(rb.getString("choosefile") + " " + (index + 1) + ". "); item.setMissing("fileName"); } } if(fileitem == null) { // "The user submitted a file to upload but it was too big!" 
item_alerts.clear(); item_alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); item.setMissing("fileName"); } else if (fileitem.getFileName() == null || fileitem.getFileName().length() == 0) { if(item.getContent() == null || item.getContent().length <= 0) { // "The user submitted the form, but didn't select a file to upload!" item_alerts.add(rb.getString("choosefile") + " " + (index + 1) + ". "); item.setMissing("fileName"); } } else if (fileitem.getFileName().length() > 0) { String filename = Validator.getFileName(fileitem.getFileName()); byte[] bytes = fileitem.get(); String contenttype = fileitem.getContentType(); if(bytes.length >= max_bytes) { item_alerts.clear(); item_alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); item.setMissing("fileName"); } else if(bytes.length > 0) { item.setContent(bytes); item.setContentHasChanged(true); item.setMimeType(contenttype); item.setFilename(filename); blank_entry = false; } else { item_alerts.add(rb.getString("choosefile") + " " + (index + 1) + ". 
"); item.setMissing("fileName"); } } } } else if(item.isPlaintext()) { // check for input from editor (textarea) String content = params.getString("content" + index); if(content != null) { item.setContentHasChanged(true); item.setContent(content); blank_entry = false; } item.setMimeType(MIME_TYPE_DOCUMENT_PLAINTEXT); } else if(item.isHtml()) { // check for input from editor (textarea) String content = params.getCleanString("content" + index); StringBuffer alertMsg = new StringBuffer(); content = FormattedText.processHtmlDocument(content, alertMsg); if (alertMsg.length() > 0) { item_alerts.add(alertMsg.toString()); } if(content != null && !content.equals("")) { item.setContent(content); item.setContentHasChanged(true); blank_entry = false; } item.setMimeType(MIME_TYPE_DOCUMENT_HTML); } else if(item.isUrl()) { item.setMimeType(ResourceProperties.TYPE_URL); String url = params.getString("Url" + index); if(url == null || url.trim().equals("")) { item.setFilename(""); item_alerts.add(rb.getString("specifyurl")); item.setMissing("Url"); } else { item.setFilename(url); blank_entry = false; // is protocol supplied and, if so, is it recognized? try { // check format of input URL u = new URL(url); } catch (MalformedURLException e1) { try { // if URL did not validate, check whether the problem was an // unrecognized protocol, and accept input if that's the case. 
Pattern pattern = Pattern.compile("\\s*([a-zA-Z0-9]+)://([^\\n]+)");
Matcher matcher = pattern.matcher(url);
if(matcher.matches())
{
    // unrecognized but well-formed protocol: accept the input as-is
    URL test = new URL("http://" + matcher.group(2));
}
else
{
    // no protocol at all: default to http:// and re-validate
    url = "http://" + url;
    URL test = new URL(url);
    item.setFilename(url);
}
}
catch (MalformedURLException e2)
{
    // invalid url
    item_alerts.add(rb.getString("validurl"));
    item.setMissing("Url");
}
}
}
else if(item.isStructuredArtifact())
{
    // form type lives on the current stack frame, falling back to session state
    String formtype = (String) current_stack_frame.get(STATE_STACK_STRUCTOBJ_TYPE);
    if(formtype == null)
    {
        formtype = (String) state.getAttribute(STATE_STRUCTOBJ_TYPE);
        if(formtype == null)
        {
            formtype = "";
        }
        current_stack_frame.put(STATE_STACK_STRUCTOBJ_TYPE, formtype);
    }
    String formtype_check = params.getString("formtype");
    if(formtype_check == null || formtype_check.equals(""))
    {
        item_alerts.add("Must select a form type");
        item.setMissing("formtype");
    }
    else if(formtype_check.equals(formtype))
    {
        // only capture property values when the posted type matches the expected one
        item.setFormtype(formtype);
        capturePropertyValues(params, item, item.getProperties());
        // blank_entry = false;
    }
    item.setMimeType(MIME_TYPE_STRUCTOBJ);
}
if(item.isFileUpload() || item.isHtml() || item.isPlaintext())
{
    // capture rights/copyright values for content-bearing items
    BasicRightsAssignment rightsObj = item.getRights();
    rightsObj.captureValues(params);
    boolean usingCreativeCommons = state.getAttribute(STATE_USING_CREATIVE_COMMONS) != null && state.getAttribute(STATE_USING_CREATIVE_COMMONS).equals(Boolean.TRUE.toString());
    if(usingCreativeCommons)
    {
        // each Creative Commons field is optional; only overwrite when posted
        String ccOwnership = params.getString("ccOwnership" + index);
        if(ccOwnership != null)
        {
            item.setRightsownership(ccOwnership);
        }
        String ccTerms = params.getString("ccTerms" + index);
        if(ccTerms != null)
        {
            item.setLicense(ccTerms);
        }
        String ccCommercial = params.getString("ccCommercial" + index);
        if(ccCommercial != null)
        {
            item.setAllowCommercial(ccCommercial);
        }
        String ccModification = params.getString("ccModification" + index);
        // BUGFIX: guard on ccModification, not ccCommercial (copy-paste typo
        // that could pass a null modification value to the item)
        if(ccModification != null)
        {
            item.setAllowModifications(ccModification);
        }
        String ccRightsYear = params.getString("ccRightsYear" +
index); if(ccRightsYear != null) { item.setRightstyear(ccRightsYear); } String ccRightsOwner = params.getString("ccRightsOwner" + index); if(ccRightsOwner != null) { item.setRightsowner(ccRightsOwner); } /* ccValues.ccOwner = new Array(); ccValues.myRights = new Array(); ccValues.otherRights = new Array(); ccValues.ccCommercial = new Array(); ccValues.ccModifications = new Array(); ccValues.ccRightsYear = new Array(); ccValues.ccRightsOwner = new Array(); */ } else { // check for copyright status // check for copyright info // check for copyright alert String copyrightStatus = StringUtil.trimToNull(params.getString ("copyright" + index)); String copyrightInfo = StringUtil.trimToNull(params.getCleanString ("newcopyright" + index)); String copyrightAlert = StringUtil.trimToNull(params.getString("copyrightAlert" + index)); if (copyrightStatus != null) { if (state.getAttribute(COPYRIGHT_NEW_COPYRIGHT) != null && copyrightStatus.equals(state.getAttribute(COPYRIGHT_NEW_COPYRIGHT))) { if (copyrightInfo != null) { item.setCopyrightInfo( copyrightInfo ); } else { item_alerts.add(rb.getString("specifycp2")); // addAlert(state, rb.getString("specifycp2")); } } else if (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT) != null && copyrightStatus.equals (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT))) { item.setCopyrightInfo((String) state.getAttribute (STATE_MY_COPYRIGHT)); } item.setCopyrightStatus( copyrightStatus ); } item.setCopyrightAlert(copyrightAlert != null); } } if(! 
RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } String access_mode = params.getString("access_mode" + index); if(access_mode == null || AccessMode.GROUPED.toString().equals(access_mode)) { // we inherit more than one group and must check whether group access changes at this item String[] access_groups = params.getStrings("access_groups" + index); SortedSet new_groups = new TreeSet(); if(access_groups != null) { new_groups.addAll(Arrays.asList(access_groups)); } new_groups = item.convertToRefs(new_groups); Collection inh_grps = item.getInheritedGroupRefs(); boolean groups_are_inherited = (new_groups.size() == inh_grps.size()) && inh_grps.containsAll(new_groups); if(groups_are_inherited) { new_groups.clear(); item.setEntityGroupRefs(new_groups); item.setAccess(AccessMode.INHERITED.toString()); } else { item.setEntityGroupRefs(new_groups); item.setAccess(AccessMode.GROUPED.toString()); } item.setPubview(false); } else if(PUBLIC_ACCESS.equals(access_mode)) { if(! preventPublicDisplay.booleanValue() && ! 
item.isPubviewInherited()) { item.setPubview(true); item.setAccess(AccessMode.INHERITED.toString()); } } else if(AccessMode.INHERITED.toString().equals(access_mode) ) { item.setAccess(AccessMode.INHERITED.toString()); item.clearGroups(); item.setPubview(false); } } int noti = NotificationService.NOTI_NONE; // %%STATE_MODE_RESOURCES%% if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase((String) state.getAttribute(STATE_MODE_RESOURCES))) { // set noti to none if in dropbox mode noti = NotificationService.NOTI_NONE; } else { // read the notification options String notification = params.getString("notify" + index); if ("r".equals(notification)) { noti = NotificationService.NOTI_REQUIRED; } else if ("o".equals(notification)) { noti = NotificationService.NOTI_OPTIONAL; } } item.setNotification(noti); List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS); if(metadataGroups != null && ! metadataGroups.isEmpty()) { Iterator groupIt = metadataGroups.iterator(); while(groupIt.hasNext()) { MetadataGroup group = (MetadataGroup) groupIt.next(); if(item.isGroupShowing(group.getName())) { Iterator propIt = group.iterator(); while(propIt.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) propIt.next(); String propname = prop.getFullname(); if(ResourcesMetadata.WIDGET_DATE.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_DATETIME.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_TIME.equals(prop.getWidget())) { int year = 0; int month = 0; int day = 0; int hour = 0; int minute = 0; int second = 0; int millisecond = 0; String ampm = ""; if(prop.getWidget().equals(ResourcesMetadata.WIDGET_DATE) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME)) { year = params.getInt(propname + "_" + index + "_year", year); month = params.getInt(propname + "_" + index + "_month", month); day = params.getInt(propname + "_" + index + "_day", day); } if(prop.getWidget().equals(ResourcesMetadata.WIDGET_TIME) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME)) 
{ hour = params.getInt(propname + "_" + index + "_hour", hour); minute = params.getInt(propname + "_" + index + "_minute", minute); second = params.getInt(propname + "_" + index + "_second", second); millisecond = params.getInt(propname + "_" + index + "_millisecond", millisecond); ampm = params.getString(propname + "_" + index + "_ampm").trim(); if("pm".equalsIgnoreCase(ampm)) { if(hour < 12) { hour += 12; } } else if(hour == 12) { hour = 0; } } if(hour > 23) { hour = hour % 24; day++; } Time value = TimeService.newTimeLocal(year, month, day, hour, minute, second, millisecond); item.setMetadataItem(propname,value); } else { String value = params.getString(propname + "_" + index); if(value != null) { item.setMetadataItem(propname, value); } } } } } } item.markAsBlank(blank_entry); return item_alerts; } /** * Retrieve values for one or more items from create context. Create context contains up to ten items at a time * all of the same type (folder, file, text document, structured-artifact, etc). This method retrieves the data * apppropriate to the type and updates the values of the EditItem objects stored as the STATE_STACK_CREATE_ITEMS * attribute in state. If the third parameter is "true", missing/incorrect user inputs will generate error messages * and attach flags to the input elements. * @param state * @param params * @param markMissing Should this method generate error messages and add flags for missing/incorrect user inputs? 
*/
protected static void captureMultipleValues(SessionState state, ParameterParser params, boolean markMissing)
{
    Map current_stack_frame = peekAtStack(state);
    // number of creation rows the form displayed; read from the current
    // stack frame, falling back to the session-wide value, then to 1
    Integer number = (Integer) current_stack_frame.get(STATE_STACK_CREATE_NUMBER);
    if(number == null)
    {
        number = (Integer) state.getAttribute(STATE_CREATE_NUMBER);
        current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
    }
    if(number == null)
    {
        number = new Integer(1);
        current_stack_frame.put(STATE_STACK_CREATE_NUMBER, number);
    }
    List new_items = (List) current_stack_frame.get(STATE_STACK_CREATE_ITEMS);
    if(new_items == null)
    {
        // no items on this stack frame yet: build the list of empty EditItems
        String collectionId = params.getString("collectionId");
        String defaultCopyrightStatus = (String) state.getAttribute(DEFAULT_COPYRIGHT);
        if(defaultCopyrightStatus == null || defaultCopyrightStatus.trim().equals(""))
        {
            // fall back to the server-wide default copyright setting
            defaultCopyrightStatus = ServerConfigurationService.getString("default.copyright");
            state.setAttribute(DEFAULT_COPYRIGHT, defaultCopyrightStatus);
        }
        String itemType = (String) current_stack_frame.get(STATE_STACK_CREATE_TYPE);
        if(itemType == null || itemType.trim().equals(""))
        {
            itemType = (String) state.getAttribute(STATE_CREATE_TYPE);
            if(itemType == null || itemType.trim().equals(""))
            {
                itemType = TYPE_UPLOAD;
            }
            current_stack_frame.put(STATE_STACK_CREATE_TYPE, itemType);
        }
        String encoding = (String) state.getAttribute(STATE_ENCODING);
        Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
        new_items = newEditItems(collectionId, itemType, encoding, defaultCopyrightStatus, preventPublicDisplay.booleanValue(), CREATE_MAX_ITEMS);
        current_stack_frame.put(STATE_STACK_CREATE_ITEMS, new_items);
    }
    // alerts accumulate across all items captured in this pass
    Set alerts = (Set) state.getAttribute(STATE_CREATE_ALERTS);
    if(alerts == null)
    {
        alerts = new HashSet();
        state.setAttribute(STATE_CREATE_ALERTS, alerts);
    }
    int actualCount = 0;
    Set first_item_alerts = null;
    String max_file_size_mb = (String) state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE);
    // NOTE(review): 1096 * 1096 looks like a typo for 1024 * 1024 (1 MB),
    // but the same constant is used consistently throughout this file, so
    // it is deliberately left unchanged here
    int max_bytes = 1096 * 1096;
    try
    {
        max_bytes =
Integer.parseInt(max_file_size_mb) * 1096 * 1096; } catch(Exception e) { // if unable to parse an integer from the value // in the properties file, use 1 MB as a default max_file_size_mb = "1"; max_bytes = 1096 * 1096; } /* // params.getContentLength() returns m_req.getContentLength() if(params.getContentLength() > max_bytes) { alerts.add(rb.getString("size") + " " + max_file_size_mb + "MB " + rb.getString("exceeded2")); state.setAttribute(STATE_CREATE_ALERTS, alerts); return; } */ for(int i = 0; i < number.intValue(); i++) { EditItem item = (EditItem) new_items.get(i); Set item_alerts = captureValues(item, i, state, params, markMissing); if(i == 0) { first_item_alerts = item_alerts; } else if(item.isBlank()) { item.clearMissing(); } if(! item.isBlank()) { alerts.addAll(item_alerts); actualCount ++; } } if(actualCount > 0) { EditItem item = (EditItem) new_items.get(0); if(item.isBlank()) { item.clearMissing(); } } else if(markMissing) { alerts.addAll(first_item_alerts); } state.setAttribute(STATE_CREATE_ALERTS, alerts); current_stack_frame.put(STATE_STACK_CREATE_ACTUAL_COUNT, Integer.toString(actualCount)); } // captureMultipleValues protected static void capturePropertyValues(ParameterParser params, EditItem item, List properties) { // use the item's properties if they're not supplied if(properties == null) { properties = item.getProperties(); } // if max cardinality > 1, value is a list (Iterate over members of list) // else value is an object, not a list // if type is nested, object is a Map (iterate over name-value pairs for the properties of the nested object) // else object is type to store value, usually a string or a date/time Iterator it = properties.iterator(); while(it.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) it.next(); String propname = prop.getDottedname(); if(ResourcesMetadata.WIDGET_NESTED.equals(prop.getWidget())) { // do nothing } else if(ResourcesMetadata.WIDGET_BOOLEAN.equals(prop.getWidget())) { String value = 
params.getString(propname); if(value == null || Boolean.FALSE.toString().equals(value)) { prop.setValue(0, Boolean.FALSE.toString()); } else { prop.setValue(0, Boolean.TRUE.toString()); } } else if(ResourcesMetadata.WIDGET_DATE.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_DATETIME.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_TIME.equals(prop.getWidget())) { int year = 0; int month = 0; int day = 0; int hour = 0; int minute = 0; int second = 0; int millisecond = 0; String ampm = ""; if(prop.getWidget().equals(ResourcesMetadata.WIDGET_DATE) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME)) { year = params.getInt(propname + "_year", year); month = params.getInt(propname + "_month", month); day = params.getInt(propname + "_day", day); } if(prop.getWidget().equals(ResourcesMetadata.WIDGET_TIME) || prop.getWidget().equals(ResourcesMetadata.WIDGET_DATETIME)) { hour = params.getInt(propname + "_hour", hour); minute = params.getInt(propname + "_minute", minute); second = params.getInt(propname + "_second", second); millisecond = params.getInt(propname + "_millisecond", millisecond); ampm = params.getString(propname + "_ampm"); if("pm".equalsIgnoreCase(ampm)) { if(hour < 12) { hour += 12; } } else if(hour == 12) { hour = 0; } } if(hour > 23) { hour = hour % 24; day++; } Time value = TimeService.newTimeLocal(year, month, day, hour, minute, second, millisecond); prop.setValue(0, value); } else if(ResourcesMetadata.WIDGET_ANYURI.equals(prop.getWidget())) { String value = params.getString(propname); if(value != null && ! 
value.trim().equals("")) { Reference ref = EntityManager.newReference(ContentHostingService.getReference(value)); prop.setValue(0, ref); } } else { String value = params.getString(propname); if(value != null) { prop.setValue(0, value); } } } } // capturePropertyValues /** * Modify the properties */ public static void doSavechanges ( RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); ParameterParser params = data.getParameters (); String flow = params.getString("flow").trim(); if(flow == null || "cancel".equals(flow)) { doCancel(data); return; } // get values from form and update STATE_STACK_EDIT_ITEM attribute in state captureValues(state, params); Map current_stack_frame = peekAtStack(state); EditItem item = (EditItem) current_stack_frame.get(STATE_STACK_EDIT_ITEM); if(flow.equals("showMetadata")) { doShow_metadata(data); return; } else if(flow.equals("hideMetadata")) { doHide_metadata(data); return; } else if(flow.equals("intentChanged")) { doToggle_intent(data); return; } else if(flow.equals("addInstance")) { String field = params.getString("field"); addInstance(field, item.getProperties()); ResourcesMetadata form = item.getForm(); List flatList = form.getFlatList(); item.setProperties(flatList); return; } else if(flow.equals("linkResource")) { // captureMultipleValues(state, params, false); createLink(data, state); //Map new_stack_frame = pushOnStack(state); //new_stack_frame.put(ResourcesAction.STATE_RESOURCES_HELPER_MODE, ResourcesAction.MODE_ATTACHMENT_SELECT); state.setAttribute(ResourcesAction.STATE_RESOURCES_HELPER_MODE, ResourcesAction.MODE_ATTACHMENT_SELECT); return; } Set alerts = (Set) state.getAttribute(STATE_EDIT_ALERTS); if(item.isStructuredArtifact()) { SchemaBean bean = (SchemaBean) current_stack_frame.get(STATE_STACK_STRUCT_OBJ_SCHEMA); SaveArtifactAttempt attempt = new SaveArtifactAttempt(item, bean.getSchema()); validateStructuredArtifact(attempt); Iterator errorIt = 
attempt.getErrors().iterator(); while(errorIt.hasNext()) { ValidationError error = (ValidationError) errorIt.next(); alerts.add(error.getDefaultMessage()); } } if(alerts.isEmpty()) { // populate the property list try { // get an edit ContentCollectionEdit cedit = null; ContentResourceEdit redit = null; GroupAwareEdit gedit = null; ResourcePropertiesEdit pedit = null; if(item.isFolder()) { cedit = ContentHostingService.editCollection(item.getId()); gedit = cedit; pedit = cedit.getPropertiesEdit(); } else { redit = ContentHostingService.editResource(item.getId()); gedit = redit; pedit = redit.getPropertiesEdit(); } try { Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY); if(preventPublicDisplay == null) { preventPublicDisplay = Boolean.FALSE; state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay); } if(! preventPublicDisplay.booleanValue()) { ContentHostingService.setPubView(gedit.getId(), item.isPubview()); } if(! AccessMode.GROUPED.toString().equals(item.getAccess()) && AccessMode.GROUPED == gedit.getAccess()) { gedit.clearGroupAccess(); } else if(AccessMode.GROUPED.toString().equals(item.getAccess()) && ! item.getEntityGroupRefs().isEmpty()) { gedit.setGroupAccess(item.getEntityGroupRefs()); } else { gedit.clearGroupAccess(); } } catch(InconsistentException e) { // TODO: Should this be reported to user?? 
				logger.warn("ResourcesAction.doSavechanges ***** InconsistentException changing groups ***** " + e.getMessage());
			}

			if(item.isFolder())
			{
				// Folders have no body content to update.
			}
			else
			{
				// Update the resource body/type depending on what kind of item
				// this is and what actually changed in the form.
				if(item.isUrl())
				{
					redit.setContent(item.getFilename().getBytes());
				}
				else if(item.isStructuredArtifact())
				{
					redit.setContentType(item.getMimeType());
					redit.setContent(item.getContent());
				}
				else if(item.contentHasChanged())
				{
					redit.setContentType(item.getMimeType());
					redit.setContent(item.getContent());
				}
				else if(item.contentTypeHasChanged())
				{
					redit.setContentType(item.getMimeType());
				}

				BasicRightsAssignment rightsObj = item.getRights();
				rightsObj.addResourceProperties(pedit);

				// Copyright handling: "new" copyright requires user-entered text;
				// "self" copyright uses the canned string built at init time.
				String copyright = StringUtil.trimToNull(params.getString ("copyright"));
				String newcopyright = StringUtil.trimToNull(params.getCleanString (NEW_COPYRIGHT));
				String copyrightAlert = StringUtil.trimToNull(params.getString("copyrightAlert"));
				if (copyright != null)
				{
					if (state.getAttribute(COPYRIGHT_NEW_COPYRIGHT) != null && copyright.equals(state.getAttribute(COPYRIGHT_NEW_COPYRIGHT)))
					{
						if (newcopyright != null)
						{
							pedit.addProperty (ResourceProperties.PROP_COPYRIGHT, newcopyright);
						}
						else
						{
							alerts.add(rb.getString("specifycp2"));
							// addAlert(state, rb.getString("specifycp2"));
						}
					}
					else if (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT) != null && copyright.equals (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT)))
					{
						String mycopyright = (String) state.getAttribute (STATE_MY_COPYRIGHT);
						pedit.addProperty (ResourceProperties.PROP_COPYRIGHT, mycopyright);
					}
					pedit.addProperty(ResourceProperties.PROP_COPYRIGHT_CHOICE, copyright);
				}
				if (copyrightAlert != null)
				{
					pedit.addProperty (ResourceProperties.PROP_COPYRIGHT_ALERT, copyrightAlert);
				}
				else
				{
					pedit.removeProperty (ResourceProperties.PROP_COPYRIGHT_ALERT);
				}
			}

			if (!(item.isFolder() && (item.getId().equals ((String) state.getAttribute (STATE_HOME_COLLECTION_ID)))))
			{
				pedit.addProperty (ResourceProperties.PROP_DISPLAY_NAME, item.getName());
			}	// the home collection's title is not modificable
			pedit.addProperty (ResourceProperties.PROP_DESCRIPTION, item.getDescription());

			// deal with quota (collections only)
			if ((cedit != null) && item.canSetQuota())
			{
				if (item.hasQuota())
				{
					// set the quota
					pedit.addProperty(ResourceProperties.PROP_COLLECTION_BODY_QUOTA, item.getQuota());
				}
				else
				{
					// clear the quota
					pedit.removeProperty(ResourceProperties.PROP_COLLECTION_BODY_QUOTA);
				}
			}

			// saveMetadata may add alerts, so sync the alert set through state
			// around the call.
			List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
			state.setAttribute(STATE_EDIT_ALERTS, alerts);
			saveMetadata(pedit, metadataGroups, item);
			alerts = (Set) state.getAttribute(STATE_EDIT_ALERTS);

			// commit the change
			if (cedit != null)
			{
				ContentHostingService.commitCollection(cedit);
			}
			else
			{
				ContentHostingService.commitResource(redit, item.getNotification());
			}
			current_stack_frame.put(STATE_STACK_EDIT_INTENT, INTENT_REVISE_FILE);

			Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
			if(preventPublicDisplay == null)
			{
				preventPublicDisplay = Boolean.FALSE;
				state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
			}

			// need to refresh collection containing current edit item make changes show up
			// (re-fetch the cached expanded collection so the listing reflects the edit)
			String containerId = ContentHostingService.getContainingCollectionId(item.getId());
			Map expandedCollections = (Map) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
			Object old = expandedCollections.remove(containerId);
			if (old != null)
			{
				try
				{
					ContentCollection container = ContentHostingService.getCollection(containerId);
					expandedCollections.put(containerId, container);
				}
				catch (Throwable ignore){}
			}
			if(item.isFolder())
			{
				// Also refresh the edited folder itself if it was expanded.
				old = expandedCollections.remove(item.getId());
				if (old != null)
				{
					try
					{
						ContentCollection folder = ContentHostingService.getCollection(item.getId());
						expandedCollections.put(item.getId(), folder);
					}
					catch (Throwable ignore){}
				}
			}
		}
		catch (TypeException e)
		{
			alerts.add(rb.getString("typeex") + " " + item.getId());
			// addAlert(state," " + rb.getString("typeex") + " " + item.getId());
		}
		catch (IdUnusedException
e) { alerts.add(RESOURCE_NOT_EXIST_STRING); // addAlert(state,RESOURCE_NOT_EXIST_STRING); } catch (PermissionException e) { alerts.add(rb.getString("notpermis10") + " " + item.getId()); // addAlert(state, rb.getString("notpermis10") + " " + item.getId() + ". " ); } catch (InUseException e) { alerts.add(rb.getString("someone") + " " + item.getId()); // addAlert(state, rb.getString("someone") + " " + item.getId() + ". "); } catch (ServerOverloadException e) { alerts.add(rb.getString("failed")); } catch (OverQuotaException e) { alerts.add(rb.getString("changing1") + " " + item.getId() + " " + rb.getString("changing2")); // addAlert(state, rb.getString("changing1") + " " + item.getId() + " " + rb.getString("changing2")); } catch(RuntimeException e) { logger.warn("ResourcesAction.doSavechanges ***** Unknown Exception ***** " + e.getMessage()); logger.warn("ResourcesAction.doSavechanges ***** Unknown Exception ***** ", e); alerts.add(rb.getString("failed")); } } // if - else if(alerts.isEmpty()) { // modify properties sucessful String mode = (String) state.getAttribute(STATE_MODE); popFromStack(state); resetCurrentMode(state); } //if-else else { Iterator alertIt = alerts.iterator(); while(alertIt.hasNext()) { String alert = (String) alertIt.next(); addAlert(state, alert); } alerts.clear(); state.setAttribute(STATE_EDIT_ALERTS, alerts); // state.setAttribute(STATE_CREATE_MISSING_ITEM, missing); } } // doSavechanges /** * @param pedit * @param metadataGroups * @param metadata */ private static void saveMetadata(ResourcePropertiesEdit pedit, List metadataGroups, EditItem item) { if(metadataGroups != null && !metadataGroups.isEmpty()) { MetadataGroup group = null; Iterator it = metadataGroups.iterator(); while(it.hasNext()) { group = (MetadataGroup) it.next(); Iterator props = group.iterator(); while(props.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) props.next(); if(ResourcesMetadata.WIDGET_DATETIME.equals(prop.getWidget()) || 
ResourcesMetadata.WIDGET_DATE.equals(prop.getWidget()) || ResourcesMetadata.WIDGET_TIME.equals(prop.getWidget())) { Time val = (Time)item.getMetadata().get(prop.getFullname()); if(val != null) { pedit.addProperty(prop.getFullname(), val.toString()); } } else { String val = (String) item.getMetadata().get(prop.getFullname()); pedit.addProperty(prop.getFullname(), val); } } } } } /** * @param data */ protected static void doToggle_intent(RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); ParameterParser params = data.getParameters (); String intent = params.getString("intent"); Map current_stack_frame = peekAtStack(state); current_stack_frame.put(STATE_STACK_EDIT_INTENT, intent); } // doToggle_intent /** * @param data */ public static void doHideOtherSites(RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); state.setAttribute(STATE_SHOW_OTHER_SITES, Boolean.FALSE.toString()); //get the ParameterParser from RunData ParameterParser params = data.getParameters (); // save the current selections Set selectedSet = new TreeSet(); String[] selectedItems = params.getStrings("selectedMembers"); if(selectedItems != null) { selectedSet.addAll(Arrays.asList(selectedItems)); } state.setAttribute(STATE_LIST_SELECTIONS, selectedSet); } /** * @param data */ public static void doShowOtherSites(RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); //get the ParameterParser from RunData ParameterParser params = data.getParameters (); // save the current selections Set selectedSet = new TreeSet(); String[] selectedItems = params.getStrings("selectedMembers"); if(selectedItems != null) { selectedSet.addAll(Arrays.asList(selectedItems)); } state.setAttribute(STATE_LIST_SELECTIONS, selectedSet); state.setAttribute(STATE_SHOW_OTHER_SITES, Boolean.TRUE.toString()); } /** * 
@param data */ public static void doHide_metadata(RunData data) { ParameterParser params = data.getParameters (); String name = params.getString("metadataGroup"); SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS); if(metadataGroups != null && ! metadataGroups.isEmpty()) { boolean found = false; MetadataGroup group = null; Iterator it = metadataGroups.iterator(); while(!found && it.hasNext()) { group = (MetadataGroup) it.next(); found = (name.equals(Validator.escapeUrl(group.getName())) || name.equals(group.getName())); } if(found) { group.setShowing(false); } } } // doHide_metadata /** * @param data */ public static void doShow_metadata(RunData data) { ParameterParser params = data.getParameters (); String name = params.getString("metadataGroup"); SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS); if(metadataGroups != null && ! 
metadataGroups.isEmpty()) { boolean found = false; MetadataGroup group = null; Iterator it = metadataGroups.iterator(); while(!found && it.hasNext()) { group = (MetadataGroup) it.next(); found = (name.equals(Validator.escapeUrl(group.getName())) || name.equals(group.getName())); } if(found) { group.setShowing(true); } } } // doShow_metadata /** * Sort based on the given property */ public static void doSort ( RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); //get the ParameterParser from RunData ParameterParser params = data.getParameters (); // save the current selections Set selectedSet = new TreeSet(); String[] selectedItems = data.getParameters ().getStrings ("selectedMembers"); if(selectedItems != null) { selectedSet.addAll(Arrays.asList(selectedItems)); } state.setAttribute(STATE_LIST_SELECTIONS, selectedSet); String criteria = params.getString ("criteria"); if (criteria.equals ("title")) { criteria = ResourceProperties.PROP_DISPLAY_NAME; } else if (criteria.equals ("size")) { criteria = ResourceProperties.PROP_CONTENT_LENGTH; } else if (criteria.equals ("created by")) { criteria = ResourceProperties.PROP_CREATOR; } else if (criteria.equals ("last modified")) { criteria = ResourceProperties.PROP_MODIFIED_DATE; } // current sorting sequence String asc = NULL_STRING; if (!criteria.equals (state.getAttribute (STATE_SORT_BY))) { state.setAttribute (STATE_SORT_BY, criteria); asc = Boolean.TRUE.toString(); state.setAttribute (STATE_SORT_ASC, asc); } else { // current sorting sequence asc = (String) state.getAttribute (STATE_SORT_ASC); //toggle between the ascending and descending sequence if (asc.equals (Boolean.TRUE.toString())) { asc = Boolean.FALSE.toString(); } else { asc = Boolean.TRUE.toString(); } state.setAttribute (STATE_SORT_ASC, asc); } if (state.getAttribute(STATE_MESSAGE) == null) { // sort sucessful // state.setAttribute (STATE_MODE, MODE_LIST); } // if-else } // doSort /** * 
set the state name to be "deletecofirm" if any item has been selected for deleting */ public void doDeleteconfirm ( RunData data) { SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); Set deleteIdSet = new TreeSet(); // cancel copy if there is one in progress if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG))) { initCopyContext(state); } // cancel move if there is one in progress if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG))) { initMoveContext(state); } String[] deleteIds = data.getParameters ().getStrings ("selectedMembers"); if (deleteIds == null) { // there is no resource selected, show the alert message to the user addAlert(state, rb.getString("choosefile3")); } else { deleteIdSet.addAll(Arrays.asList(deleteIds)); List deleteItems = new Vector(); List notDeleteItems = new Vector(); List nonEmptyFolders = new Vector(); List roots = (List) state.getAttribute(STATE_COLLECTION_ROOTS); Iterator rootIt = roots.iterator(); while(rootIt.hasNext()) { BrowseItem root = (BrowseItem) rootIt.next(); List members = root.getMembers(); Iterator memberIt = members.iterator(); while(memberIt.hasNext()) { BrowseItem member = (BrowseItem) memberIt.next(); if(deleteIdSet.contains(member.getId())) { if(member.isFolder()) { if(ContentHostingService.allowRemoveCollection(member.getId())) { deleteItems.add(member); if(! member.isEmpty()) { nonEmptyFolders.add(member); } } else { notDeleteItems.add(member); } } else if(ContentHostingService.allowRemoveResource(member.getId())) { deleteItems.add(member); } else { notDeleteItems.add(member); } } } } if(! 
notDeleteItems.isEmpty()) { String notDeleteNames = ""; boolean first_item = true; Iterator notIt = notDeleteItems.iterator(); while(notIt.hasNext()) { BrowseItem item = (BrowseItem) notIt.next(); if(first_item) { notDeleteNames = item.getName(); first_item = false; } else if(notIt.hasNext()) { notDeleteNames += ", " + item.getName(); } else { notDeleteNames += " and " + item.getName(); } } addAlert(state, rb.getString("notpermis14") + notDeleteNames); } /* //htripath-SAK-1712 - Set new collectionId as resources are not deleted under 'more' requirement. if(state.getAttribute(STATE_MESSAGE) == null){ String newCollectionId=ContentHostingService.getContainingCollectionId(currentId); state.setAttribute(STATE_COLLECTION_ID, newCollectionId); } */ // delete item state.setAttribute (STATE_DELETE_ITEMS, deleteItems); state.setAttribute (STATE_DELETE_ITEMS_NOT_EMPTY, nonEmptyFolders); } // if-else if (state.getAttribute(STATE_MESSAGE) == null) { state.setAttribute (STATE_MODE, MODE_DELETE_CONFIRM); state.setAttribute(STATE_LIST_SELECTIONS, deleteIdSet); } } // doDeleteconfirm /** * set the state name to be "cut" if any item has been selected for cutting */ public void doCut ( RunData data) { // get the state object SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); String[] cutItems = data.getParameters ().getStrings ("selectedMembers"); if (cutItems == null) { // there is no resource selected, show the alert message to the user addAlert(state, rb.getString("choosefile5")); state.setAttribute (STATE_MODE, MODE_LIST); } else { Vector cutIdsVector = new Vector (); String nonCutIds = NULL_STRING; String cutId = NULL_STRING; for (int i = 0; i < cutItems.length; i++) { cutId = cutItems[i]; try { ResourceProperties properties = ContentHostingService.getProperties (cutId); if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString())) { String alert = (String) 
state.getAttribute(STATE_MESSAGE); if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1))) { addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING); } } else { if (ContentHostingService.allowRemoveResource (cutId)) { cutIdsVector.add (cutId); } else { nonCutIds = nonCutIds + " " + properties.getProperty (ResourceProperties.PROP_DISPLAY_NAME) + "; "; } } } catch (PermissionException e) { addAlert(state, rb.getString("notpermis15")); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } // try-catch } if (state.getAttribute(STATE_MESSAGE) == null) { if (nonCutIds.length ()>0) { addAlert(state, rb.getString("notpermis16") +" " + nonCutIds); } if (cutIdsVector.size ()>0) { state.setAttribute (STATE_CUT_FLAG, Boolean.TRUE.toString()); if (((String) state.getAttribute (STATE_SELECT_ALL_FLAG)).equals (Boolean.TRUE.toString())) { state.setAttribute (STATE_SELECT_ALL_FLAG, Boolean.FALSE.toString()); } Vector copiedIds = (Vector) state.getAttribute (STATE_COPIED_IDS); for (int i = 0; i < cutIdsVector.size (); i++) { String currentId = (String) cutIdsVector.elementAt (i); if ( copiedIds.contains (currentId)) { copiedIds.remove (currentId); } } if (copiedIds.size ()==0) { state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString()); } state.setAttribute (STATE_COPIED_IDS, copiedIds); state.setAttribute (STATE_CUT_IDS, cutIdsVector); } } } // if-else } // doCut /** * set the state name to be "copy" if any item has been selected for copying */ public void doCopy ( RunData data ) { // get the state object SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); // cancel copy if there is one in progress if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG))) { initCopyContext(state); } // cancel move if there is one in progress if(! 
Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG))) { initMoveContext(state); } Vector copyItemsVector = new Vector (); String[] copyItems = data.getParameters ().getStrings ("selectedMembers"); if (copyItems == null) { // there is no resource selected, show the alert message to the user addAlert(state, rb.getString("choosefile6")); state.setAttribute (STATE_MODE, MODE_LIST); } else { String copyId = NULL_STRING; for (int i = 0; i < copyItems.length; i++) { copyId = copyItems[i]; try { ResourceProperties properties = ContentHostingService.getProperties (copyId); /* if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString())) { String alert = (String) state.getAttribute(STATE_MESSAGE); if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1))) { addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING); } } */ } catch (PermissionException e) { addAlert(state, rb.getString("notpermis15")); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } // try-catch } if (state.getAttribute(STATE_MESSAGE) == null) { state.setAttribute (STATE_COPY_FLAG, Boolean.TRUE.toString()); copyItemsVector.addAll(Arrays.asList(copyItems)); ContentHostingService.eliminateDuplicates(copyItemsVector); state.setAttribute (STATE_COPIED_IDS, copyItemsVector); } // if-else } // if-else } // doCopy /** * Handle user's selection of items to be moved. */ public void doMove ( RunData data ) { // get the state object SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); List moveItemsVector = new Vector(); // cancel copy if there is one in progress if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG))) { initCopyContext(state); } // cancel move if there is one in progress if(! 
Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG))) { initMoveContext(state); } state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet()); String[] moveItems = data.getParameters ().getStrings ("selectedMembers"); if (moveItems == null) { // there is no resource selected, show the alert message to the user addAlert(state, rb.getString("choosefile6")); state.setAttribute (STATE_MODE, MODE_LIST); } else { String moveId = NULL_STRING; for (int i = 0; i < moveItems.length; i++) { moveId = moveItems[i]; try { ResourceProperties properties = ContentHostingService.getProperties (moveId); /* if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString())) { String alert = (String) state.getAttribute(STATE_MESSAGE); if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1))) { addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING); } } */ } catch (PermissionException e) { addAlert(state, rb.getString("notpermis15")); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } // try-catch } if (state.getAttribute(STATE_MESSAGE) == null) { state.setAttribute (STATE_MOVE_FLAG, Boolean.TRUE.toString()); moveItemsVector.addAll(Arrays.asList(moveItems)); ContentHostingService.eliminateDuplicates(moveItemsVector); state.setAttribute (STATE_MOVED_IDS, moveItemsVector); } // if-else } // if-else } // doMove /** * If copy-flag is set to false, erase the copied-id's list and set copied flags to false * in all the browse items. If copied-id's list is empty, set copy-flag to false and set * copied flags to false in all the browse items. If copy-flag is set to true and copied-id's * list is not empty, update the copied flags of all browse items so copied flags for the * copied items are set to true and all others are set to false. 
*/ protected void setCopyFlags(SessionState state) { String copyFlag = (String) state.getAttribute(STATE_COPY_FLAG); List copyItemsVector = (List) state.getAttribute(STATE_COPIED_IDS); if(copyFlag == null) { copyFlag = Boolean.FALSE.toString(); state.setAttribute(STATE_COPY_FLAG, copyFlag); } if(copyFlag.equals(Boolean.TRUE.toString())) { if(copyItemsVector == null) { copyItemsVector = new Vector(); state.setAttribute(STATE_COPIED_IDS, copyItemsVector); } if(copyItemsVector.isEmpty()) { state.setAttribute(STATE_COPY_FLAG, Boolean.FALSE.toString()); } } else { copyItemsVector = new Vector(); state.setAttribute(STATE_COPIED_IDS, copyItemsVector); } List roots = (List) state.getAttribute(STATE_COLLECTION_ROOTS); Iterator rootIt = roots.iterator(); while(rootIt.hasNext()) { BrowseItem root = (BrowseItem) rootIt.next(); boolean root_copied = copyItemsVector.contains(root.getId()); root.setCopied(root_copied); List members = root.getMembers(); Iterator memberIt = members.iterator(); while(memberIt.hasNext()) { BrowseItem member = (BrowseItem) memberIt.next(); boolean member_copied = copyItemsVector.contains(member.getId()); member.setCopied(member_copied); } } // check -- jim state.setAttribute(STATE_COLLECTION_ROOTS, roots); } // setCopyFlags /** * Expand all the collection resources. */ static public void doExpandall ( RunData data) { // get the state object SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); //get the ParameterParser from RunData ParameterParser params = data.getParameters (); // save the current selections Set selectedSet = new TreeSet(); String[] selectedItems = params.getStrings("selectedMembers"); if(selectedItems != null) { selectedSet.addAll(Arrays.asList(selectedItems)); } state.setAttribute(STATE_LIST_SELECTIONS, selectedSet); // expansion actually occurs in getBrowseItems method. 
state.setAttribute(STATE_EXPAND_ALL_FLAG, Boolean.TRUE.toString()); state.setAttribute(STATE_NEED_TO_EXPAND_ALL, Boolean.TRUE.toString()); } // doExpandall /** * Unexpand all the collection resources */ public static void doUnexpandall ( RunData data) { // get the state object SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); //get the ParameterParser from RunData ParameterParser params = data.getParameters (); // save the current selections Set selectedSet = new TreeSet(); String[] selectedItems = params.getStrings ("selectedMembers"); if(selectedItems != null) { selectedSet.addAll(Arrays.asList(selectedItems)); } state.setAttribute(STATE_LIST_SELECTIONS, selectedSet); state.setAttribute(STATE_EXPANDED_COLLECTIONS, new HashMap()); state.setAttribute(STATE_EXPAND_ALL_FLAG, Boolean.FALSE.toString()); } // doUnexpandall /** * Populate the state object, if needed - override to do something! */ protected void initState(SessionState state, VelocityPortlet portlet, JetspeedRunData data) { super.initState(state, portlet, data); if(state.getAttribute(STATE_INITIALIZED) == null) { initCopyContext(state); initMoveContext(state); } initStateAttributes(state, portlet); } // initState /** * Remove the state variables used internally, on the way out. 
*/ static private void cleanupState(SessionState state) { state.removeAttribute(STATE_FROM_TEXT); state.removeAttribute(STATE_HAS_ATTACHMENT_BEFORE); state.removeAttribute(STATE_ATTACH_SHOW_DROPBOXES); state.removeAttribute(STATE_ATTACH_COLLECTION_ID); state.removeAttribute(COPYRIGHT_FAIRUSE_URL); state.removeAttribute(COPYRIGHT_NEW_COPYRIGHT); state.removeAttribute(COPYRIGHT_SELF_COPYRIGHT); state.removeAttribute(COPYRIGHT_TYPES); state.removeAttribute(DEFAULT_COPYRIGHT_ALERT); state.removeAttribute(DEFAULT_COPYRIGHT); state.removeAttribute(STATE_EXPANDED_COLLECTIONS); state.removeAttribute(STATE_FILE_UPLOAD_MAX_SIZE); state.removeAttribute(NEW_COPYRIGHT_INPUT); state.removeAttribute(STATE_COLLECTION_ID); state.removeAttribute(STATE_COLLECTION_PATH); state.removeAttribute(STATE_CONTENT_SERVICE); state.removeAttribute(STATE_CONTENT_TYPE_IMAGE_SERVICE); //state.removeAttribute(STATE_STACK_EDIT_INTENT); state.removeAttribute(STATE_EXPAND_ALL_FLAG); state.removeAttribute(STATE_HELPER_NEW_ITEMS); state.removeAttribute(STATE_HELPER_CHANGED); state.removeAttribute(STATE_HOME_COLLECTION_DISPLAY_NAME); state.removeAttribute(STATE_HOME_COLLECTION_ID); state.removeAttribute(STATE_LIST_SELECTIONS); state.removeAttribute(STATE_MY_COPYRIGHT); state.removeAttribute(STATE_NAVIGATION_ROOT); state.removeAttribute(STATE_PASTE_ALLOWED_FLAG); state.removeAttribute(STATE_SELECT_ALL_FLAG); state.removeAttribute(STATE_SHOW_ALL_SITES); state.removeAttribute(STATE_SITE_TITLE); state.removeAttribute(STATE_SORT_ASC); state.removeAttribute(STATE_SORT_BY); state.removeAttribute(STATE_STACK_STRUCTOBJ_TYPE); state.removeAttribute(STATE_STACK_STRUCTOBJ_TYPE_READONLY); state.removeAttribute(STATE_INITIALIZED); state.removeAttribute(VelocityPortletPaneledAction.STATE_HELPER); } // cleanupState public static void initStateAttributes(SessionState state, VelocityPortlet portlet) { if (state.getAttribute (STATE_INITIALIZED) != null) return; if (state.getAttribute(STATE_FILE_UPLOAD_MAX_SIZE) == null) { 
state.setAttribute(STATE_FILE_UPLOAD_MAX_SIZE, ServerConfigurationService.getString("content.upload.max", "1")); } PortletConfig config = portlet.getPortletConfig(); try { Integer size = new Integer(config.getInitParameter(PARAM_PAGESIZE)); if(size == null || size.intValue() < 1) { size = new Integer(DEFAULT_PAGE_SIZE); } state.setAttribute(STATE_PAGESIZE, size); } catch(Exception any) { state.setAttribute(STATE_PAGESIZE, new Integer(DEFAULT_PAGE_SIZE)); } // state.setAttribute(STATE_TOP_PAGE_MESSAGE, ""); state.setAttribute (STATE_CONTENT_SERVICE, ContentHostingService.getInstance()); state.setAttribute (STATE_CONTENT_TYPE_IMAGE_SERVICE, ContentTypeImageService.getInstance()); TimeBreakdown timeBreakdown = (TimeService.newTime()).breakdownLocal (); String mycopyright = COPYRIGHT_SYMBOL + " " + timeBreakdown.getYear () +", " + UserDirectoryService.getCurrentUser().getDisplayName () + ". All Rights Reserved. "; state.setAttribute (STATE_MY_COPYRIGHT, mycopyright); if(state.getAttribute(STATE_MODE) == null) { state.setAttribute (STATE_MODE, MODE_LIST); state.setAttribute (STATE_FROM, NULL_STRING); } state.setAttribute (STATE_SORT_BY, ResourceProperties.PROP_DISPLAY_NAME); state.setAttribute (STATE_SORT_ASC, Boolean.TRUE.toString()); state.setAttribute (STATE_SELECT_ALL_FLAG, Boolean.FALSE.toString()); state.setAttribute (STATE_EXPAND_ALL_FLAG, Boolean.FALSE.toString()); state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet()); state.setAttribute (STATE_COLLECTION_PATH, new Vector ()); // %%STATE_MODE_RESOURCES%% // In helper mode, calling tool should set attribute STATE_MODE_RESOURCES String resources_mode = (String) state.getAttribute(STATE_MODE_RESOURCES); if(resources_mode == null) { // get resources mode from tool registry resources_mode = portlet.getPortletConfig().getInitParameter("resources_mode"); if(resources_mode != null) { state.setAttribute(STATE_MODE_RESOURCES, resources_mode); } } boolean show_other_sites = false; 
	// Whether the "Other Sites" section is offered depends on which mode the
	// tool is running in (file-picker helper, dropbox, or the resources tool).
	if(RESOURCES_MODE_HELPER.equals(resources_mode))
	{
		show_other_sites = ServerConfigurationService.getBoolean("resources.show_all_collections.helper", SHOW_ALL_SITES_IN_FILE_PICKER);
	}
	else if(RESOURCES_MODE_DROPBOX.equals(resources_mode))
	{
		show_other_sites = ServerConfigurationService.getBoolean("resources.show_all_collections.dropbox", SHOW_ALL_SITES_IN_DROPBOX);
	}
	else
	{
		show_other_sites = ServerConfigurationService.getBoolean("resources.show_all_collections.tool", SHOW_ALL_SITES_IN_RESOURCES);
	}

	/** This attribute indicates whether "Other Sites" twiggle should show */
	state.setAttribute(STATE_SHOW_ALL_SITES, Boolean.toString(show_other_sites));
	/** This attribute indicates whether "Other Sites" twiggle should be open */
	state.setAttribute(STATE_SHOW_OTHER_SITES, Boolean.FALSE.toString());

	// set the home collection to the parameter, if present, or the default if not
	String home = StringUtil.trimToNull(portlet.getPortletConfig().getInitParameter("home"));
	state.setAttribute (STATE_HOME_COLLECTION_DISPLAY_NAME, home);
	if ((home == null) || (home.length() == 0))
	{
		// no home set, see if we are in dropbox mode
		if (RESOURCES_MODE_DROPBOX.equalsIgnoreCase(resources_mode))
		{
			home = ContentHostingService.getDropboxCollection();

			// if it came back null, we will pretend not to be in dropbox mode
			if (home != null)
			{
				state.setAttribute(STATE_HOME_COLLECTION_DISPLAY_NAME, ContentHostingService.getDropboxDisplayName());

				// create/update the collection of folders in the dropbox
				ContentHostingService.createDropboxCollection();
			}
		}

		// if we still don't have a home,
		if ((home == null) || (home.length() == 0))
		{
			home = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());

			// TODO: what's the 'name' of the context? -ggolden
			// we'll need this to create the home collection if needed
			state.setAttribute (STATE_HOME_COLLECTION_DISPLAY_NAME, ToolManager.getCurrentPlacement().getContext()
					/*SiteService.getSiteDisplay(ToolManager.getCurrentPlacement().getContext()) */);
		}
	}
	state.setAttribute (STATE_HOME_COLLECTION_ID, home);
	state.setAttribute (STATE_COLLECTION_ID, home);
	state.setAttribute (STATE_NAVIGATION_ROOT, home);

	// Offer the "create form item" option only when structured-artifact home
	// definitions are available from the component manager.
	HomeFactory factory = (HomeFactory) ComponentManager.get("homeFactory");
	if(factory != null)
	{
		Map homes = factory.getHomes(StructuredArtifactHomeInterface.class);
		if(! homes.isEmpty())
		{
			state.setAttribute(STATE_SHOW_FORM_ITEMS, Boolean.TRUE.toString());
		}
	}

	// state.setAttribute (STATE_COLLECTION_ID, state.getAttribute (STATE_HOME_COLLECTION_ID));

	if (state.getAttribute(STATE_SITE_TITLE) == null)
	{
		String title = "";
		try
		{
			title = ((Site) SiteService.getSite(ToolManager.getCurrentPlacement().getContext())).getTitle();
		}
		catch (IdUnusedException e)
		{	// ignore
		}
		state.setAttribute(STATE_SITE_TITLE, title);
	}

	HashMap expandedCollections = new HashMap();
	//expandedCollections.add (state.getAttribute (STATE_HOME_COLLECTION_ID));
	state.setAttribute(STATE_EXPANDED_COLLECTIONS, expandedCollections);

	if(state.getAttribute(STATE_USING_CREATIVE_COMMONS) == null)
	{
		String usingCreativeCommons = ServerConfigurationService.getString("copyright.use_creative_commons");
		if( usingCreativeCommons != null && usingCreativeCommons.equalsIgnoreCase(Boolean.TRUE.toString()))
		{
			state.setAttribute(STATE_USING_CREATIVE_COMMONS, Boolean.TRUE.toString());
		}
		else
		{
			state.setAttribute(STATE_USING_CREATIVE_COMMONS, Boolean.FALSE.toString());
		}
	}

	// Copyright configuration: each attribute is copied from server config
	// only once and only if the config value is present.
	if (state.getAttribute(COPYRIGHT_TYPES) == null)
	{
		if (ServerConfigurationService.getStrings("copyrighttype") != null)
		{
			state.setAttribute(COPYRIGHT_TYPES, new ArrayList(Arrays.asList(ServerConfigurationService.getStrings("copyrighttype"))));
		}
	}

	if (state.getAttribute(DEFAULT_COPYRIGHT) == null)
	{
		if
(ServerConfigurationService.getString("default.copyright") != null) { state.setAttribute(DEFAULT_COPYRIGHT, ServerConfigurationService.getString("default.copyright")); } } if (state.getAttribute(DEFAULT_COPYRIGHT_ALERT) == null) { if (ServerConfigurationService.getString("default.copyright.alert") != null) { state.setAttribute(DEFAULT_COPYRIGHT_ALERT, ServerConfigurationService.getString("default.copyright.alert")); } } if (state.getAttribute(NEW_COPYRIGHT_INPUT) == null) { if (ServerConfigurationService.getString("newcopyrightinput") != null) { state.setAttribute(NEW_COPYRIGHT_INPUT, ServerConfigurationService.getString("newcopyrightinput")); } } if (state.getAttribute(COPYRIGHT_FAIRUSE_URL) == null) { if (ServerConfigurationService.getString("fairuse.url") != null) { state.setAttribute(COPYRIGHT_FAIRUSE_URL, ServerConfigurationService.getString("fairuse.url")); } } if (state.getAttribute(COPYRIGHT_SELF_COPYRIGHT) == null) { if (ServerConfigurationService.getString("copyrighttype.own") != null) { state.setAttribute(COPYRIGHT_SELF_COPYRIGHT, ServerConfigurationService.getString("copyrighttype.own")); } } if (state.getAttribute(COPYRIGHT_NEW_COPYRIGHT) == null) { if (ServerConfigurationService.getString("copyrighttype.new") != null) { state.setAttribute(COPYRIGHT_NEW_COPYRIGHT, ServerConfigurationService.getString("copyrighttype.new")); } } // get resources mode from tool registry String optional_properties = portlet.getPortletConfig().getInitParameter("optional_properties"); if(optional_properties != null && "true".equalsIgnoreCase(optional_properties)) { initMetadataContext(state); } state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, Boolean.FALSE); String[] siteTypes = ServerConfigurationService.getStrings("prevent.public.resources"); if(siteTypes != null) { Site site; try { site = SiteService.getSite(ToolManager.getCurrentPlacement().getContext()); for(int i = 0; i < siteTypes.length; i++) { if ((StringUtil.trimToNull(siteTypes[i])).equals(site.getType())) { 
// (tail of the state-initialization method; its beginning is earlier in the file)
					state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, Boolean.TRUE);
				}
			}
		}
		catch (IdUnusedException e)
		{
			// allow public display
		}
		catch(NullPointerException e)
		{
			// allow public display
		}
	}

	state.setAttribute (STATE_INITIALIZED, Boolean.TRUE.toString());
	}

	/**
	 * Setup our observer to be watching for change events for the collection.
	 * NOTE(review): the entire body is commented out, so this is currently a no-op.
	 */
	private void updateObservation(SessionState state, String peid)
	{
//		ContentObservingCourier observer = (ContentObservingCourier) state.getAttribute(STATE_OBSERVER);
//
//		// the delivery location for this tool
//		String deliveryId = clientWindowId(state, peid);
//		observer.setDeliveryId(deliveryId);
	}

	/**
	 * Add additional resource pattern to the observer.
	 * NOTE(review): the body is commented out, so this is currently a no-op kept for call-site symmetry.
	 * @param pattern The pattern value to be added
	 * @param state The state object
	 */
	private static void addObservingPattern(String pattern, SessionState state)
	{
//		// get the observer and add the pattern
//		ContentObservingCourier o = (ContentObservingCourier) state.getAttribute(STATE_OBSERVER);
//		o.addResourcePattern(ContentHostingService.getReference(pattern));
//
//		// add it back to state
//		state.setAttribute(STATE_OBSERVER, o);
	}	// addObservingPattern

	/**
	 * Remove a resource pattern from the observer.
	 * NOTE(review): the body is commented out, so this is currently a no-op kept for call-site symmetry.
	 * @param pattern The pattern value to be removed
	 * @param state The state object
	 */
	private static void removeObservingPattern(String pattern, SessionState state)
	{
//		// get the observer and remove the pattern
//		ContentObservingCourier o = (ContentObservingCourier) state.getAttribute(STATE_OBSERVER);
//		o.removeResourcePattern(ContentHostingService.getReference(pattern));
//
//		// add it back to state
//		state.setAttribute(STATE_OBSERVER, o);
	}	// removeObservingPattern

	/**
	 * initialize the copy context: clears the list of copied ids and lowers the copy flag
	 */
	private static void initCopyContext (SessionState state)
	{
		state.setAttribute (STATE_COPIED_IDS, new Vector ());
		state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString());
	}	// initCopyContext

	/**
	 * initialize the move context: clears the list of moved ids and lowers the move flag
	 */
	private static void initMoveContext (SessionState state)
	{
		state.setAttribute (STATE_MOVED_IDS, new Vector ());
		state.setAttribute (STATE_MOVE_FLAG, Boolean.FALSE.toString());
	}	// initMoveContext

	/**
	 * initialize the cut context: clears the list of cut ids and lowers the cut flag
	 */
	private void initCutContext (SessionState state)
	{
		state.setAttribute (STATE_CUT_IDS, new Vector ());
		state.setAttribute (STATE_CUT_FLAG, Boolean.FALSE.toString());
	}	// initCutContext

	/**
	 * find out whether there is a duplicate item in testVector
	 * NOTE: the scan is 1-based — index 0 of the vector is deliberately ignored.
	 * @param testVector The Vector to be tested on
	 * @param testSize The integer of the test range
	 * @return The index value of the duplicate item, or 0 if no duplicate was found
	 */
	private int repeatedName (Vector testVector, int testSize)
	{
		for (int i=1; i <= testSize; i++)
		{
			String currentName = (String) testVector.get (i);
			for (int j=i+1; j <= testSize; j++)
			{
				String comparedTitle = (String) testVector.get (j);
				if (comparedTitle.length()>0 && currentName.length()>0 && comparedTitle.equals (currentName))
				{
					return j;
				}
			}
		}
		return 0;
	}	// repeatedName

	/**
	 * Does the id already exist in the current resource collection?
	 * NOTE: permission/type/missing-id exceptions are silently swallowed and treated as "not found" (returns 0).
	 * @param testVector The Vector to be tested on (1-based; index 0 ignored)
	 * @param testSize The integer of the test range
	 * @param collectionId The collection whose members are compared by display name
	 * @param isCollection Looking for collection or not
	 * @return The index value of the existing id, or 0 if none matched
	 */
	private int foundInResource (Vector testVector, int testSize, String collectionId, boolean isCollection)
	{
		try
		{
			ContentCollection c = ContentHostingService.getCollection(collectionId);
			Iterator membersIterator = c.getMemberResources().iterator();
			while (membersIterator.hasNext())
			{
				ResourceProperties p = ((Entity) membersIterator.next()).getProperties();
				String displayName = p.getProperty(ResourceProperties.PROP_DISPLAY_NAME);
				if (displayName != null)
				{
					String collectionOrResource = p.getProperty(ResourceProperties.PROP_IS_COLLECTION);
					for (int i=1; i <= testSize; i++)
					{
						String testName = (String) testVector.get(i);
						if ((testName != null) && (displayName.equals (testName)) && ((isCollection && collectionOrResource.equals (Boolean.TRUE.toString())) || (!isCollection && collectionOrResource.equals(Boolean.FALSE.toString()))))
						{
							return i;
						}
					}	// for
				}
			}
		}
		catch (IdUnusedException e){}
		catch (TypeException e){}
		catch (PermissionException e){}
		return 0;
	}	// foundInResource

	/**
	 * empty String Vector object with the size specified
	 * @param size The Vector object size -1 (the vector gets size+1 entries)
	 * @return The Vector object consisting of empty Strings
	 */
	private static Vector emptyVector (int size)
	{
		Vector v = new Vector ();
		for (int i=0; i <= size; i++)
		{
			v.add (i, "");
		}
		return v;
	}	// emptyVector

	/**
	 * Setup for customization: fills the Velocity context for the options ("-customize") view.
	 **/
	public String buildOptionsPanelContext( VelocityPortlet portlet, Context context, RunData data, SessionState state)
	{
		context.put("tlang",rb);
		String home = (String) state.getAttribute(STATE_HOME_COLLECTION_ID);
		Reference ref = EntityManager.newReference(ContentHostingService.getReference(home));
		String siteId = ref.getContext();
		context.put("form-submit", BUTTON + "doConfigure_update");
		context.put("form-cancel", BUTTON + "doCancel_options");
		context.put("description", "Setting options for Resources in worksite " + SiteService.getSiteDisplay(siteId));
		// pick the "-customize" template based on the standard template name
		String template = (String)getContext(data).get("template");
		return template + "-customize";
	}	// buildOptionsPanelContext

	/**
	 * Handle the configure context's update button.
	 * NOTE(review): the save path is commented out, so "update" currently behaves like cancel — confirm intended.
	 */
	public void doConfigure_update(RunData data, Context context)
	{
		// access the portlet element id to find our state
		String peid = ((JetspeedRunData)data).getJs_peid();
		SessionState state = ((JetspeedRunData)data).getPortletSessionState(peid);
		// we are done with customization... back to the main (browse) mode
		state.setAttribute(STATE_MODE, MODE_LIST);
		// commit the change
		// saveOptions();
		cancelOptions();
	}	// doConfigure_update

	/**
	 * doCancel_options called for form input tags type="submit" named="eventSubmit_doCancel"
	 * cancel the options process
	 */
	public void doCancel_options(RunData data, Context context)
	{
		// access the portlet element id to find our state
		String peid = ((JetspeedRunData)data).getJs_peid();
		SessionState state = ((JetspeedRunData)data).getPortletSessionState(peid);
		// cancel the options
		cancelOptions();
		// we are done with customization... back to the main (MODE_LIST) mode
		state.setAttribute(STATE_MODE, MODE_LIST);
	}	// doCancel_options

	/**
	 * Add the collection id into the expanded collection list
	 * (also preserves the user's current checkbox selections in state).
	 * @throws PermissionException
	 * @throws TypeException
	 * @throws IdUnusedException
	 */
	public static void doExpand_collection(RunData data) throws IdUnusedException, TypeException, PermissionException
	{
		SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
		HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
		//get the ParameterParser from RunData
		ParameterParser params = data.getParameters ();
		// save the current selections
		Set selectedSet = new TreeSet();
		String[] selectedItems = params.getStrings ("selectedMembers");
		if(selectedItems != null)
		{
			selectedSet.addAll(Arrays.asList(selectedItems));
		}
		state.setAttribute(STATE_LIST_SELECTIONS, selectedSet);
		String id = params.getString("collectionId");
		currentMap.put (id,ContentHostingService.getCollection (id));
		state.setAttribute(STATE_EXPANDED_COLLECTIONS, currentMap);
		// add this folder id into the set to be event-observed
		addObservingPattern(id, state);
	}	// doExpand_collection

	/**
	 * Remove the collection id from the expanded collection list.
	 * Every expanded key that contains the collapsed id as a substring (i.e. the folder and its
	 * subfolders) is dropped from the expanded map.
	 */
	static public void doCollapse_collection(RunData data)
	{
		SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
		HashMap currentMap = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
		//get the ParameterParser from RunData
		ParameterParser params = data.getParameters ();
		String collectionId = params.getString("collectionId");
		// save the current selections
		Set selectedSet = new TreeSet();
		String[] selectedItems = data.getParameters ().getStrings ("selectedMembers");
		if(selectedItems != null)
		{
			selectedSet.addAll(Arrays.asList(selectedItems));
		}
		state.setAttribute(STATE_LIST_SELECTIONS, selectedSet);
		HashMap newSet = new HashMap();
		Iterator l = currentMap.keySet().iterator ();
		while (l.hasNext ())
		{
			// remove the collection id and all of the subcollections
//			Resource collection = (Resource) l.next();
//			String id = (String) collection.getId();
			String id = (String) l.next();
			if (id.indexOf (collectionId)==-1)
			{
//				newSet.put(id,collection);
				newSet.put(id,currentMap.get(id));
			}
		}
		state.setAttribute(STATE_EXPANDED_COLLECTIONS, newSet);
		// remove this folder id from the set to be event-observed
		removeObservingPattern(collectionId, state);
	}	// doCollapse_collection

	/**
	 * Build the breadcrumb path (list of PathItem) from the navigation root down to the
	 * collection currently being browsed/created/edited, as recorded in the session state.
	 * Unreadable or missing ancestors are silently skipped.
	 * @param state the tool session state (supplies content service, collection ids, nav root)
	 * @return a List of PathItem from root to current collection
	 */
	public static List getCollectionPath(SessionState state)
	{
		org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE);
		// make sure the collection id is set; prefer an in-progress create/edit target from the stack
		String currentCollectionId = (String) state.getAttribute (STATE_COLLECTION_ID);
		if(! isStackEmpty(state))
		{
			Map current_stack_frame = peekAtStack(state);
			String createCollectionId = (String) current_stack_frame.get(STATE_STACK_CREATE_COLLECTION_ID);
			if(createCollectionId == null)
			{
				createCollectionId = (String) state.getAttribute(STATE_CREATE_COLLECTION_ID);
			}
			if(createCollectionId != null)
			{
				currentCollectionId = createCollectionId;
			}
			else
			{
				String editCollectionId = (String) current_stack_frame.get(STATE_EDIT_COLLECTION_ID);
				if(editCollectionId == null)
				{
					editCollectionId = (String) state.getAttribute(STATE_EDIT_COLLECTION_ID);
				}
				if(editCollectionId != null)
				{
					currentCollectionId = editCollectionId;
				}
			}
		}
		String homeCollectionId = (String) state.getAttribute(STATE_HOME_COLLECTION_ID);
		String navRoot = (String) state.getAttribute(STATE_NAVIGATION_ROOT);
		LinkedList collectionPath = new LinkedList();
		String previousCollectionId = "";
		Vector pathitems = new Vector();
		// walk up the containment chain from the current collection to the navigation root
		// (previousCollectionId guards against a non-advancing getContainingCollectionId loop)
		while(currentCollectionId != null && ! currentCollectionId.equals(navRoot) && ! currentCollectionId.equals(previousCollectionId))
		{
			pathitems.add(currentCollectionId);
			previousCollectionId = currentCollectionId;
			currentCollectionId = contentService.getContainingCollectionId(currentCollectionId);
		}
		pathitems.add(navRoot);
		if(!navRoot.equals(homeCollectionId))
		{
			pathitems.add(homeCollectionId);
		}
		Iterator items = pathitems.iterator();
		while(items.hasNext())
		{
			String id = (String) items.next();
			try
			{
				ResourceProperties props = contentService.getProperties(id);
				String name = props.getPropertyFormatted(ResourceProperties.PROP_DISPLAY_NAME);
				PathItem item = new PathItem(id, name);
				boolean canRead = contentService.allowGetCollection(id) || contentService.allowGetResource(id);
				item.setCanRead(canRead);
				String url = contentService.getUrl(id);
				item.setUrl(url);
				item.setLast(collectionPath.isEmpty());
				if(id.equals(homeCollectionId))
				{
					item.setRoot(homeCollectionId);
				}
				else
				{
					item.setRoot(navRoot);
				}
				try
				{
					boolean isFolder = props.getBooleanProperty(ResourceProperties.PROP_IS_COLLECTION);
					item.setIsFolder(isFolder);
				}
				catch (EntityPropertyNotDefinedException e1)
				{
				}
				catch (EntityPropertyTypeException e1)
				{
				}
				// addFirst reverses the leaf-to-root walk into root-to-leaf display order
				collectionPath.addFirst(item);
			}
			catch (PermissionException e)
			{
			}
			catch (IdUnusedException e)
			{
			}
		}
		return collectionPath;
	}

	/**
	 * Get the items in this folder that should be seen.
* @param collectionId - String version of
	 * @param expandedCollections - Hash of collection resources
	 * @param sortedBy - pass through to ContentHostingComparator
	 * @param sortedAsc - pass through to ContentHostingComparator
	 * @param parent - The folder containing this item
	 * @param isLocal - true if navigation root and home collection id of site are the same, false otherwise
	 * @param state - The session state
	 * @return a List of BrowseItem objects
	 */
	// NOTE(review): recursive over subfolders; mutates expandedCollections and several state
	// attributes (STATE_EXPANDED_COLLECTIONS, STATE_PASTE_ALLOWED_FLAG) as a side effect.
	protected static List getBrowseItems(String collectionId, HashMap expandedCollections, Set highlightedItems, String sortedBy, String sortedAsc, BrowseItem parent, boolean isLocal, SessionState state)
	{
		boolean need_to_expand_all = Boolean.TRUE.toString().equals((String)state.getAttribute(STATE_NEED_TO_EXPAND_ALL));
		List newItems = new LinkedList();
		try
		{
			// find the ContentHosting service
			org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE);
			// get the collection
			// try using existing (already expanded) resource first
			ContentCollection collection = null;
			// get the collection
			if (expandedCollections.containsKey(collectionId))
			{
				collection = (ContentCollection) expandedCollections.get(collectionId);
			}
			else
			{
				collection = ContentHostingService.getCollection(collectionId);
				if(need_to_expand_all)
				{
					expandedCollections.put(collectionId, collection);
					state.setAttribute(STATE_EXPANDED_COLLECTIONS, expandedCollections);
				}
			}
			// "dummy" child id used only to probe add-permission inside this collection
			String dummyId = collectionId.trim();
			if(dummyId.endsWith(Entity.SEPARATOR))
			{
				dummyId += "dummy";
			}
			else
			{
				dummyId += Entity.SEPARATOR + "dummy";
			}
			// permissions are inherited from the parent BrowseItem when already granted there,
			// otherwise checked against the service for this collection
			boolean canRead = false;
			boolean canDelete = false;
			boolean canRevise = false;
			boolean canAddFolder = false;
			boolean canAddItem = false;
			boolean canUpdate = false;
			int depth = 0;
			if(parent == null || ! parent.canRead())
			{
				canRead = contentService.allowGetCollection(collectionId);
			}
			else
			{
				canRead = parent.canRead();
			}
			if(parent == null || ! parent.canDelete())
			{
				canDelete = contentService.allowRemoveResource(collectionId);
			}
			else
			{
				canDelete = parent.canDelete();
			}
			if(parent == null || ! parent.canRevise())
			{
				canRevise = contentService.allowUpdateResource(collectionId);
			}
			else
			{
				canRevise = parent.canRevise();
			}
			if(parent == null || ! parent.canAddFolder())
			{
				canAddFolder = contentService.allowAddCollection(dummyId);
			}
			else
			{
				canAddFolder = parent.canAddFolder();
			}
			if(parent == null || ! parent.canAddItem())
			{
				canAddItem = contentService.allowAddResource(dummyId);
			}
			else
			{
				canAddItem = parent.canAddItem();
			}
			if(parent == null || ! parent.canUpdate())
			{
				canUpdate = AuthzGroupService.allowUpdate(collectionId);
			}
			else
			{
				canUpdate = parent.canUpdate();
			}
			if(parent != null)
			{
				depth = parent.getDepth() + 1;
			}
			if(canAddItem)
			{
				state.setAttribute(STATE_PASTE_ALLOWED_FLAG, Boolean.TRUE.toString());
			}
			boolean hasDeletableChildren = canDelete;
			boolean hasCopyableChildren = canRead;
			String homeCollectionId = (String) state.getAttribute(STATE_HOME_COLLECTION_ID);
			ResourceProperties cProperties = collection.getProperties();
			String folderName = cProperties.getProperty(ResourceProperties.PROP_DISPLAY_NAME);
			if(collectionId.equals(homeCollectionId))
			{
				folderName = (String) state.getAttribute(STATE_HOME_COLLECTION_DISPLAY_NAME);
			}
			BrowseItem folder = new BrowseItem(collectionId, folderName, "folder");
			if(parent == null)
			{
				folder.setRoot(collectionId);
			}
			else
			{
				folder.setRoot(parent.getRoot());
			}
			boolean isInDropbox = ContentHostingService.isInDropbox(collectionId);
			folder.setInDropbox(isInDropbox);
			BasicRightsAssignment rightsObj = new BasicRightsAssignment(folder.getItemNum(), cProperties);
			folder.setRights(rightsObj);
			// SITE-wide access is presented to the UI as "inherited"
			AccessMode access = collection.getAccess();
			if(access == null || AccessMode.SITE == access)
			{
				folder.setAccess(AccessMode.INHERITED.toString());
			}
			else
			{
				folder.setAccess(access.toString());
			}
			AccessMode inherited_access = collection.getInheritedAccess();
			if(inherited_access == null || AccessMode.SITE == inherited_access)
			{
				folder.setInheritedAccess(AccessMode.INHERITED.toString());
			}
			else
			{
				folder.setInheritedAccess(inherited_access.toString());
			}
			Collection access_groups = collection.getGroupObjects();
			if(access_groups == null)
			{
				access_groups = new Vector();
			}
			folder.setGroups(access_groups);
			Collection inherited_access_groups = collection.getInheritedGroupObjects();
			if(inherited_access_groups == null)
			{
				inherited_access_groups = new Vector();
			}
			folder.setInheritedGroups(inherited_access_groups);
			if(parent != null && (parent.isPubview() || parent.isPubviewInherited()))
			{
				folder.setPubviewInherited(true);
				folder.setPubview(false);
			}
			else if(ContentHostingService.isPubView(folder.getId()))
			{
				folder.setPubview(true);
			}
			if(highlightedItems == null || highlightedItems.isEmpty())
			{
				// do nothing
			}
			else if(parent != null && parent.isHighlighted())
			{
				folder.setInheritsHighlight(true);
				folder.setHighlighted(true);
			}
			else if(highlightedItems.contains(collectionId))
			{
				folder.setHighlighted(true);
				folder.setInheritsHighlight(false);
			}
			String containerId = contentService.getContainingCollectionId (collectionId);
			folder.setContainer(containerId);
			folder.setCanRead(canRead);
			folder.setCanRevise(canRevise);
			folder.setCanAddItem(canAddItem);
			folder.setCanAddFolder(canAddFolder);
			folder.setCanDelete(canDelete);
			folder.setCanUpdate(canUpdate);
			// created/modified metadata: fall back to the raw property string when the
			// typed Time/User lookup fails
			try
			{
				Time createdTime = cProperties.getTimeProperty(ResourceProperties.PROP_CREATION_DATE);
				String createdTimeString = createdTime.toStringLocalShortDate();
				folder.setCreatedTime(createdTimeString);
			}
			catch(Exception e)
			{
				String createdTimeString = cProperties.getProperty(ResourceProperties.PROP_CREATION_DATE);
				folder.setCreatedTime(createdTimeString);
			}
			try
			{
				String createdBy = getUserProperty(cProperties, ResourceProperties.PROP_CREATOR).getDisplayName();
				folder.setCreatedBy(createdBy);
			}
			catch(Exception e)
			{
				String createdBy = cProperties.getProperty(ResourceProperties.PROP_CREATOR);
				folder.setCreatedBy(createdBy);
			}
			try
			{
				Time modifiedTime = cProperties.getTimeProperty(ResourceProperties.PROP_MODIFIED_DATE);
				String modifiedTimeString = modifiedTime.toStringLocalShortDate();
				folder.setModifiedTime(modifiedTimeString);
			}
			catch(Exception e)
			{
				String modifiedTimeString = cProperties.getProperty(ResourceProperties.PROP_MODIFIED_DATE);
				folder.setModifiedTime(modifiedTimeString);
			}
			try
			{
				String modifiedBy = getUserProperty(cProperties, ResourceProperties.PROP_MODIFIED_BY).getDisplayName();
				folder.setModifiedBy(modifiedBy);
			}
			catch(Exception e)
			{
				String modifiedBy = cProperties.getProperty(ResourceProperties.PROP_MODIFIED_BY);
				folder.setModifiedBy(modifiedBy);
			}
			String url = contentService.getUrl(collectionId);
			folder.setUrl(url);
			try
			{
				int collection_size = contentService.getCollectionSize(collectionId);
				folder.setIsEmpty(collection_size < 1);
				folder.setIsTooBig(collection_size > EXPANDABLE_FOLDER_SIZE_LIMIT);
			}
			catch(RuntimeException e)
			{
				folder.setIsEmpty(true);
				folder.setIsTooBig(false);
			}
			folder.setDepth(depth);
			newItems.add(folder);
			if(need_to_expand_all || expandedCollections.containsKey (collectionId))
			{
				// Get the collection members from the 'new' collection
				List newMembers = collection.getMemberResources ();
				Collections.sort (newMembers, ContentHostingService.newContentHostingComparator (sortedBy, Boolean.valueOf (sortedAsc).booleanValue ()));
				// loop thru the (possibly) new members and add to the list
				Iterator it = newMembers.iterator();
				while(it.hasNext())
				{
					ContentEntity resource = (ContentEntity) it.next();
					ResourceProperties props = resource.getProperties();
					String itemId = resource.getId();
					if(resource.isCollection())
					{
						// recurse into the subfolder; its BrowseItem is first in the returned list
						List offspring = getBrowseItems(itemId, expandedCollections, highlightedItems, sortedBy, sortedAsc, folder, isLocal, state);
						if(! offspring.isEmpty())
						{
							BrowseItem child = (BrowseItem) offspring.get(0);
							hasDeletableChildren = hasDeletableChildren || child.hasDeletableChildren();
							hasCopyableChildren = hasCopyableChildren || child.hasCopyableChildren();
						}
						// add all the items in the subfolder to newItems
						newItems.addAll(offspring);
					}
					else
					{
						// skip group-restricted resources the current user may not read
						AccessMode access_mode = ((GroupAwareEntity) resource).getAccess();
						if(access_mode == null)
						{
							access_mode = AccessMode.INHERITED;
						}
						else if(access_mode == AccessMode.GROUPED)
						{
							if(! ContentHostingService.allowGetResource(resource.getId()))
							{
								continue;
							}
						}
						String itemType = ((ContentResource)resource).getContentType();
						String itemName = props.getProperty(ResourceProperties.PROP_DISPLAY_NAME);
						BrowseItem newItem = new BrowseItem(itemId, itemName, itemType);
						newItem.setAccess(access_mode.toString());
						newItem.setInheritedAccess(folder.getEffectiveAccess());
						newItem.setInDropbox(isInDropbox);
						BasicRightsAssignment rightsObj2 = new BasicRightsAssignment(newItem.getItemNum(), props);
						newItem.setRights(rightsObj2);
						Collection groups = ((GroupAwareEntity) resource).getGroupObjects();
						if(groups == null)
						{
							groups = new Vector();
						}
						Collection inheritedGroups = folder.getGroups();
						if(inheritedGroups == null || inheritedGroups.isEmpty())
						{
							inheritedGroups = folder.getInheritedGroups();
						}
						newItem.setGroups(groups);
						newItem.setInheritedGroups(inheritedGroups);
						newItem.setContainer(collectionId);
						newItem.setRoot(folder.getRoot());
						newItem.setCanDelete(canDelete);
						newItem.setCanRevise(canRevise);
						newItem.setCanRead(canRead);
						newItem.setCanCopy(canRead);
						newItem.setCanAddItem(canAddItem); // true means this user can add an item in the folder containing this item (used for "duplicate")
						if(highlightedItems == null || highlightedItems.isEmpty())
						{
							// do nothing
						}
						else if(folder.isHighlighted())
						{
							newItem.setInheritsHighlight(true);
							newItem.setHighlighted(true);
						}
						else if(highlightedItems.contains(itemId))
						{
							newItem.setHighlighted(true);
							newItem.setInheritsHighlight(false);
						}
						// created/modified metadata with raw-property fallback, as for the folder above
						try
						{
							Time createdTime = props.getTimeProperty(ResourceProperties.PROP_CREATION_DATE);
							String createdTimeString = createdTime.toStringLocalShortDate();
							newItem.setCreatedTime(createdTimeString);
						}
						catch(Exception e)
						{
							String createdTimeString = props.getProperty(ResourceProperties.PROP_CREATION_DATE);
							newItem.setCreatedTime(createdTimeString);
						}
						try
						{
							String createdBy = getUserProperty(props, ResourceProperties.PROP_CREATOR).getDisplayName();
							newItem.setCreatedBy(createdBy);
						}
						catch(Exception e)
						{
							String createdBy = props.getProperty(ResourceProperties.PROP_CREATOR);
							newItem.setCreatedBy(createdBy);
						}
						try
						{
							Time modifiedTime = props.getTimeProperty(ResourceProperties.PROP_MODIFIED_DATE);
							String modifiedTimeString = modifiedTime.toStringLocalShortDate();
							newItem.setModifiedTime(modifiedTimeString);
						}
						catch(Exception e)
						{
							String modifiedTimeString = props.getProperty(ResourceProperties.PROP_MODIFIED_DATE);
							newItem.setModifiedTime(modifiedTimeString);
						}
						try
						{
							String modifiedBy = getUserProperty(props, ResourceProperties.PROP_MODIFIED_BY).getDisplayName();
							newItem.setModifiedBy(modifiedBy);
						}
						catch(Exception e)
						{
							String modifiedBy = props.getProperty(ResourceProperties.PROP_MODIFIED_BY);
							newItem.setModifiedBy(modifiedBy);
						}
						if(folder.isPubview() || folder.isPubviewInherited())
						{
							newItem.setPubviewInherited(true);
							newItem.setPubview(false);
						}
						else if(ContentHostingService.isPubView(resource.getId()))
						{
							newItem.setPubview(true);
						}
						String size = props.getPropertyFormatted(ResourceProperties.PROP_CONTENT_LENGTH);
						newItem.setSize(size);
						String target = Validator.getResourceTarget(props.getProperty(ResourceProperties.PROP_CONTENT_TYPE));
						newItem.setTarget(target);
						String newUrl = contentService.getUrl(itemId);
						newItem.setUrl(newUrl);
						try
						{
							boolean copyrightAlert = props.getBooleanProperty(ResourceProperties.PROP_COPYRIGHT_ALERT);
							newItem.setCopyrightAlert(copyrightAlert);
						}
						catch(Exception e)
						{}
						newItem.setDepth(depth + 1);
						if (checkItemFilter((ContentResource)resource, newItem, state))
						{
							newItems.add(newItem);
						}
					}
				}
			}
			// NOTE(review): "seDeletableChildren" looks like a typo'd setter name on BrowseItem
			folder.seDeletableChildren(hasDeletableChildren);
			folder.setCopyableChildren(hasCopyableChildren);
			// return newItems;
		}
		catch (IdUnusedException ignore)
		{
			// this condition indicates a site that does not have a resources collection (mercury?)
		}
		catch (TypeException e)
		{
			addAlert(state, "TypeException.");
		}
		catch (PermissionException e)
		{
			// ignore -- we'll just skip this collection since user lacks permission to access it.
			//addAlert(state, "PermissionException");
		}
		return newItems;
	}	// getBrowseItems

	/**
	 * Apply the attachment filter (if any) from state to a resource:
	 * sets newItem's "can select" flag and returns whether the item may be shown at all.
	 * With no filter configured everything is viewable and selectable.
	 */
	protected static boolean checkItemFilter(ContentResource resource, BrowseItem newItem, SessionState state)
	{
		ContentResourceFilter filter = (ContentResourceFilter)state.getAttribute(STATE_ATTACH_FILTER);
		if (filter != null)
		{
			if (newItem != null)
			{
				newItem.setCanSelect(filter.allowSelect(resource));
			}
			return filter.allowView(resource);
		}
		else if (newItem != null)
		{
			newItem.setCanSelect(true);
		}
		return true;
	}

	/**
	 * May this resource be selected under the current attachment filter (true if no filter)?
	 * NOTE(review): method name is a typo for "checkSelectItemFilter"; kept for compatibility with callers.
	 */
	protected static boolean checkSelctItemFilter(ContentResource resource, SessionState state)
	{
		ContentResourceFilter filter = (ContentResourceFilter)state.getAttribute(STATE_ATTACH_FILTER);
		if (filter != null)
		{
			return filter.allowSelect(resource);
		}
		return true;
	}

	/**
	 * set the state name to be "copy" if any item has been selected for copying
	 */
	public void doCopyitem ( RunData data )
	{
		// get the state object
		SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ());
		String itemId = data.getParameters ().getString ("itemId");
		if (itemId == null)
		{
			// there is no resource selected, show the alert message to the user
			addAlert(state, rb.getString("choosefile6"));
			state.setAttribute (STATE_MODE, MODE_LIST);
		}
		else
		{
			try
			{
				ResourceProperties properties = ContentHostingService.getProperties (itemId);
				/* if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString()))
				{
					String alert = (String) state.getAttribute(STATE_MESSAGE);
					if (alert ==
null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1))) { addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING); } } */ } catch (PermissionException e) { addAlert(state, rb.getString("notpermis15")); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } // try-catch if (state.getAttribute(STATE_MESSAGE) == null) { state.setAttribute (STATE_COPY_FLAG, Boolean.TRUE.toString()); state.setAttribute (STATE_COPIED_ID, itemId); } // if-else } // if-else } // doCopyitem /** * Paste the previously copied item(s) */ public static void doPasteitems ( RunData data) { ParameterParser params = data.getParameters (); SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); List items = (List) state.getAttribute(STATE_COPIED_IDS); String collectionId = params.getString ("collectionId"); Iterator itemIter = items.iterator(); while (itemIter.hasNext()) { // get the copied item to be pasted String itemId = (String) itemIter.next(); String originalDisplayName = NULL_STRING; try { String id = ContentHostingService.copyIntoFolder(itemId, collectionId); String mode = (String) state.getAttribute(STATE_MODE); if(MODE_HELPER.equals(mode)) { String helper_mode = (String) state.getAttribute(STATE_RESOURCES_HELPER_MODE); if(helper_mode != null && MODE_ATTACHMENT_NEW_ITEM.equals(helper_mode)) { // add to the attachments vector List attachments = EntityManager.newReferenceList(); Reference ref = EntityManager.newReference(ContentHostingService.getReference(id)); attachments.add(ref); cleanupState(state); state.setAttribute(STATE_ATTACHMENTS, attachments); } else { if(state.getAttribute(STATE_ATTACH_LINKS) == null) { attachItem(id, state); } else { attachLink(id, state); } } } } catch (PermissionException e) { addAlert(state, rb.getString("notpermis8") + " " + originalDisplayName + ". 
"); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } catch (InUseException e) { addAlert(state, rb.getString("someone") + " " + originalDisplayName); } catch (TypeException e) { addAlert(state, rb.getString("pasteitem") + " " + originalDisplayName + " " + rb.getString("mismatch")); } catch(IdUsedException e) { addAlert(state, rb.getString("toomany")); } catch(IdLengthException e) { addAlert(state, rb.getString("toolong") + " " + e.getMessage()); } catch(IdUniquenessException e) { addAlert(state, "Could not add this item to this folder"); } catch(ServerOverloadException e) { addAlert(state, rb.getString("failed")); } catch(InconsistentException e) { addAlert(state, rb.getString("recursive") + " " + itemId); } catch (OverQuotaException e) { addAlert(state, rb.getString("overquota")); } // try-catch catch(RuntimeException e) { logger.warn("ResourcesAction.doPasteitems ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); } if (state.getAttribute(STATE_MESSAGE) == null) { // delete sucessful String mode = (String) state.getAttribute(STATE_MODE); if(MODE_HELPER.equals(mode)) { state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT); } else { state.setAttribute (STATE_MODE, MODE_LIST); } // try to expand the collection HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS); if(! 
expandedCollections.containsKey(collectionId)) { org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE); try { ContentCollection coll = contentService.getCollection(collectionId); expandedCollections.put(collectionId, coll); } catch(Exception ignore){} } // reset the copy flag if (((String)state.getAttribute (STATE_COPY_FLAG)).equals (Boolean.TRUE.toString())) { state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString()); } } } } // doPasteitems /** * Paste the item(s) selected to be moved */ public static void doMoveitems ( RunData data) { ParameterParser params = data.getParameters (); SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); // cancel copy if there is one in progress if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG))) { initCopyContext(state); } List items = (List) state.getAttribute(STATE_MOVED_IDS); String collectionId = params.getString ("collectionId"); Iterator itemIter = items.iterator(); while (itemIter.hasNext()) { // get the copied item to be pasted String itemId = (String) itemIter.next(); String originalDisplayName = NULL_STRING; try { /* ResourceProperties properties = ContentHostingService.getProperties (itemId); originalDisplayName = properties.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME); // copy, cut and paste not operated on collections if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString())) { String alert = (String) state.getAttribute(STATE_MESSAGE); if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1))) { addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING); } } else */ { ContentHostingService.moveIntoFolder(itemId, collectionId); } // if-else } catch (PermissionException e) { addAlert(state, 
rb.getString("notpermis8") + " " + originalDisplayName + ". "); } catch (IdUnusedException e) { addAlert(state,RESOURCE_NOT_EXIST_STRING); } catch (InUseException e) { addAlert(state, rb.getString("someone") + " " + originalDisplayName); } catch (TypeException e) { addAlert(state, rb.getString("pasteitem") + " " + originalDisplayName + " " + rb.getString("mismatch")); } catch (InconsistentException e) { addAlert(state, rb.getString("recursive") + " " + itemId); } catch(IdUsedException e) { addAlert(state, rb.getString("toomany")); } catch(ServerOverloadException e) { addAlert(state, rb.getString("failed")); } catch (OverQuotaException e) { addAlert(state, rb.getString("overquota")); } // try-catch catch(RuntimeException e) { logger.warn("ResourcesAction.doMoveitems ***** Unknown Exception ***** " + e.getMessage()); addAlert(state, rb.getString("failed")); } if (state.getAttribute(STATE_MESSAGE) == null) { // delete sucessful String mode = (String) state.getAttribute(STATE_MODE); if(MODE_HELPER.equals(mode)) { state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT); } else { state.setAttribute (STATE_MODE, MODE_LIST); } // try to expand the collection HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS); if(! 
expandedCollections.containsKey(collectionId)) { org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE); try { ContentCollection coll = contentService.getCollection(collectionId); expandedCollections.put(collectionId, coll); } catch(Exception ignore){} } // reset the copy flag if (((String)state.getAttribute (STATE_MOVE_FLAG)).equals (Boolean.TRUE.toString())) { state.setAttribute (STATE_MOVE_FLAG, Boolean.FALSE.toString()); } } } } // doMoveitems /** * Paste the previously copied item(s) */ public static void doPasteitem ( RunData data) { ParameterParser params = data.getParameters (); SessionState state = ((JetspeedRunData)data).getPortletSessionState (((JetspeedRunData)data).getJs_peid ()); // get the copied item to be pasted String itemId = params.getString("itemId"); String collectionId = params.getString ("collectionId"); String originalDisplayName = NULL_STRING; try { ResourceProperties properties = ContentHostingService.getProperties (itemId); originalDisplayName = properties.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME); // copy, cut and paste not operated on collections if (properties.getProperty (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString())) { String alert = (String) state.getAttribute(STATE_MESSAGE); if (alert == null || ((alert != null) && (alert.indexOf(RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING) == -1))) { addAlert(state, RESOURCE_INVALID_OPERATION_ON_COLLECTION_STRING); } } else { // paste the resource ContentResource resource = ContentHostingService.getResource (itemId); ResourceProperties p = ContentHostingService.getProperties(itemId); String displayName = DUPLICATE_STRING + p.getProperty(ResourceProperties.PROP_DISPLAY_NAME); String newItemId = ContentHostingService.copyIntoFolder(itemId, collectionId); ContentResourceEdit copy = ContentHostingService.editResource(newItemId); 
				// doPasteitem (continued): give the pasted copy its "Duplicate of ..." display name and commit it
				ResourcePropertiesEdit pedit = copy.getPropertiesEdit();
				pedit.addProperty(ResourceProperties.PROP_DISPLAY_NAME, displayName);
				ContentHostingService.commitResource(copy, NotificationService.NOTI_NONE);
			}	// if-else
		}
		catch (PermissionException e)
		{
			addAlert(state, rb.getString("notpermis8") + " " + originalDisplayName + ". ");
		}
		catch (IdUnusedException e)
		{
			addAlert(state,RESOURCE_NOT_EXIST_STRING);
		}
		catch (IdUsedException e)
		{
			addAlert(state, rb.getString("notaddreso") + " " + originalDisplayName + " " + rb.getString("used2"));
		}
		catch(IdLengthException e)
		{
			addAlert(state, rb.getString("toolong") + " " + e.getMessage());
		}
		catch(IdUniquenessException e)
		{
			// NOTE(review): hard-coded English message; the other alerts here come from the resource bundle (rb)
			addAlert(state, "Could not add this item to this folder");
		}
		catch (InconsistentException ee)
		{
			addAlert(state, RESOURCE_INVALID_TITLE_STRING);
		}
		catch(InUseException e)
		{
			addAlert(state, rb.getString("someone") + " " + originalDisplayName + ". ");
		}
		catch(OverQuotaException e)
		{
			addAlert(state, rb.getString("overquota"));
		}
		catch(ServerOverloadException e)
		{
			// this represents temporary unavailability of server's filesystem
			// for server configured to save resource body in filesystem
			addAlert(state, rb.getString("failed"));
		}
		catch (TypeException e)
		{
			addAlert(state, rb.getString("pasteitem") + " " + originalDisplayName + " " + rb.getString("mismatch"));
		}	// try-catch

		if (state.getAttribute(STATE_MESSAGE) == null)
		{
			// paste successful: return to the list (or attachment-select, when run as a helper)
			String mode = (String) state.getAttribute(STATE_MODE);
			if(MODE_HELPER.equals(mode))
			{
				state.setAttribute(STATE_RESOURCES_HELPER_MODE, MODE_ATTACHMENT_SELECT);
			}
			else
			{
				state.setAttribute (STATE_MODE, MODE_LIST);
			}

			// try to expand the collection
			HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS);
			if(! expandedCollections.containsKey(collectionId))
			{
				org.sakaiproject.content.api.ContentHostingService contentService = (org.sakaiproject.content.api.ContentHostingService) state.getAttribute (STATE_CONTENT_SERVICE);
				try
				{
					ContentCollection coll = contentService.getCollection(collectionId);
					expandedCollections.put(collectionId, coll);
				}
				// best-effort: failure to pre-expand the target folder is cosmetic only, deliberately ignored
				catch(Exception ignore){}
			}

			// reset the copy flag
			if (((String)state.getAttribute (STATE_COPY_FLAG)).equals (Boolean.TRUE.toString()))
			{
				state.setAttribute (STATE_COPY_FLAG, Boolean.FALSE.toString());
			}
		}

	}	// doPasteitem

	/**
	* Fire up the permissions editor for the current folder's permissions.
	* @param data The RunData carrying the request parameters (expects "collectionId").
	* @param context The velocity context (unused here).
	*/
	public void doFolder_permissions(RunData data, Context context)
	{
		SessionState state = ((JetspeedRunData)data).getPortletSessionState(((JetspeedRunData)data).getJs_peid());
		ParameterParser params = data.getParameters();

		// cancel copy if there is one in progress
		if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
		{
			initCopyContext(state);
		}

		// cancel move if there is one in progress
		if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
		{
			initMoveContext(state);
		}

		// get the current collection id and the related site
		String collectionId = params.getString("collectionId"); //(String) state.getAttribute (STATE_COLLECTION_ID);
		String title = "";
		try
		{
			title = ContentHostingService.getProperties(collectionId).getProperty(ResourceProperties.PROP_DISPLAY_NAME);
		}
		catch (PermissionException e)
		{
			addAlert(state, rb.getString("notread"));
		}
		catch (IdUnusedException e)
		{
			addAlert(state, rb.getString("notfindfol"));
		}

		// the folder to edit
		Reference ref = EntityManager.newReference(ContentHostingService.getReference(collectionId));
		state.setAttribute(PermissionsHelper.TARGET_REF, ref.getReference());

		// use the folder's context (as a site) for roles
		String siteRef = SiteService.siteReference(ref.getContext());
		state.setAttribute(PermissionsHelper.ROLES_REF, siteRef);

		// ... with this description
		state.setAttribute(PermissionsHelper.DESCRIPTION, rb.getString("setpermis") + " " + title);

		// ... showing only locks that are prefixed with this
		state.setAttribute(PermissionsHelper.PREFIX, "content.");

		// get into helper mode with this helper tool
		startHelper(data.getRequest(), "sakai.permissions.helper");

	}	// doFolder_permissions

	/**
	* Fire up the permissions editor for the tool's permissions
	* (scoped to the site of the home collection).
	* @param data The RunData for the current request.
	* @param context The velocity context (unused here).
	*/
	public void doPermissions(RunData data, Context context)
	{
		SessionState state = ((JetspeedRunData)data).getPortletSessionState(((JetspeedRunData)data).getJs_peid());

		// cancel copy if there is one in progress
		if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_COPY_FLAG)))
		{
			initCopyContext(state);
		}

		// cancel move if there is one in progress
		if(! Boolean.FALSE.toString().equals(state.getAttribute (STATE_MOVE_FLAG)))
		{
			initMoveContext(state);
		}

		// should we save here?
		state.setAttribute(STATE_LIST_SELECTIONS, new TreeSet());

		// get the current home collection id and the related site
		String collectionId = (String) state.getAttribute (STATE_HOME_COLLECTION_ID);
		Reference ref = EntityManager.newReference(ContentHostingService.getReference(collectionId));
		String siteRef = SiteService.siteReference(ref.getContext());

		// setup for editing the permissions of the site for this tool, using the roles of this site, too
		state.setAttribute(PermissionsHelper.TARGET_REF, siteRef);

		// ... with this description
		state.setAttribute(PermissionsHelper.DESCRIPTION, rb.getString("setpermis1") + SiteService.getSiteDisplay(ref.getContext()));

		// ... showing only locks that are prefixed with this
		state.setAttribute(PermissionsHelper.PREFIX, "content.");

		// get into helper mode with this helper tool
		startHelper(data.getRequest(), "sakai.permissions.helper");

	}	// doPermissions

	/**
	* is notification enabled? Always true for this tool.
	* @param state The session state (unused).
	* @return true, unconditionally.
	*/
	protected boolean notificationEnabled(SessionState state)
	{
		return true;

	}	// notificationEnabled

	/**
	* Processes the HTML document that is coming back from the browser
	* (from the formatted text editing widget).
	* @param state Used to pass in any user-visible alerts or errors when processing the text
	* @param strFromBrowser The string from the browser
	* @return The formatted text
	*/
	private String processHtmlDocumentFromBrowser(SessionState state, String strFromBrowser)
	{
		StringBuffer alertMsg = new StringBuffer();
		String text = FormattedText.processHtmlDocument(strFromBrowser, alertMsg);
		// any cleanup problems are surfaced to the user as alerts
		if (alertMsg.length() > 0) addAlert(state, alertMsg.toString());
		return text;
	}

	/**
	*
	* Whether a resource item can be replaced.
	* Collections, URL resources and shortcut items are not replaceable.
	* @param p The ResourceProperties object for the resource item
	* @return true If it can be replaced; false otherwise
	*/
	private static boolean replaceable(ResourceProperties p)
	{
		boolean rv = true;
		if (p.getPropertyFormatted (ResourceProperties.PROP_IS_COLLECTION).equals (Boolean.TRUE.toString()))
		{
			rv = false;
		}
		else if (p.getProperty (ResourceProperties.PROP_CONTENT_TYPE).equals (ResourceProperties.TYPE_URL))
		{
			rv = false;
		}
		String displayName = p.getPropertyFormatted (ResourceProperties.PROP_DISPLAY_NAME);
		if (displayName.indexOf(SHORTCUT_STRING) != -1)
		{
			rv = false;
		}
		return rv;

	}	// replaceable

	/**
	*
	* put copyright info into context.
	* Either the Creative Commons choice lists (when STATE_USING_CREATIVE_COMMONS is set)
	* or the standard copyright-type list is exposed to the template.
	* @param state The session state holding the copyright configuration.
	* @param context The velocity context to populate.
	*/
	private static void copyrightChoicesIntoContext(SessionState state, Context context)
	{
		boolean usingCreativeCommons = state.getAttribute(STATE_USING_CREATIVE_COMMONS) != null && state.getAttribute(STATE_USING_CREATIVE_COMMONS).equals(Boolean.TRUE.toString());

		if(usingCreativeCommons)
		{
			// NOTE(review): the CC labels/options below are hard-coded English strings,
			// not drawn from the resource bundle — presumably pending i18n; verify.
			String ccOwnershipLabel = "Who created this resource?";
			List ccOwnershipList = new Vector();
			ccOwnershipList.add("-- Select --");
			ccOwnershipList.add("I created this resource");
			ccOwnershipList.add("Someone else created this resource");

			String ccMyGrantLabel = "Terms of use";
			List ccMyGrantOptions = new Vector();
ccMyGrantOptions.add("-- Select --"); ccMyGrantOptions.add("Use my copyright"); ccMyGrantOptions.add("Use Creative Commons License"); ccMyGrantOptions.add("Use Public Domain Dedication"); String ccCommercialLabel = "Allow commercial use?"; List ccCommercialList = new Vector(); ccCommercialList.add("Yes"); ccCommercialList.add("No"); String ccModificationLabel = "Allow Modifications?"; List ccModificationList = new Vector(); ccModificationList.add("Yes"); ccModificationList.add("Yes, share alike"); ccModificationList.add("No"); String ccOtherGrantLabel = "Terms of use"; List ccOtherGrantList = new Vector(); ccOtherGrantList.add("Subject to fair-use exception"); ccOtherGrantList.add("Public domain (created before copyright law applied)"); ccOtherGrantList.add("Public domain (copyright has expired)"); ccOtherGrantList.add("Public domain (government document not subject to copyright)"); String ccRightsYear = "Year"; String ccRightsOwner = "Copyright owner"; String ccAcknowledgeLabel = "Require users to acknowledge author's rights before access?"; List ccAcknowledgeList = new Vector(); ccAcknowledgeList.add("Yes"); ccAcknowledgeList.add("No"); String ccInfoUrl = ""; int year = TimeService.newTime().breakdownLocal().getYear(); String username = UserDirectoryService.getCurrentUser().getDisplayName(); context.put("usingCreativeCommons", Boolean.TRUE); context.put("ccOwnershipLabel", ccOwnershipLabel); context.put("ccOwnershipList", ccOwnershipList); context.put("ccMyGrantLabel", ccMyGrantLabel); context.put("ccMyGrantOptions", ccMyGrantOptions); context.put("ccCommercialLabel", ccCommercialLabel); context.put("ccCommercialList", ccCommercialList); context.put("ccModificationLabel", ccModificationLabel); context.put("ccModificationList", ccModificationList); context.put("ccOtherGrantLabel", ccOtherGrantLabel); context.put("ccOtherGrantList", ccOtherGrantList); context.put("ccRightsYear", ccRightsYear); context.put("ccRightsOwner", ccRightsOwner); 
			context.put("ccAcknowledgeLabel", ccAcknowledgeLabel);
			context.put("ccAcknowledgeList", ccAcknowledgeList);
			context.put("ccInfoUrl", ccInfoUrl);
			context.put("ccThisYear", Integer.toString(year));
			context.put("ccThisUser", username);
		}
		else
		{
			//copyright: the standard (non Creative Commons) choices
			if (state.getAttribute(COPYRIGHT_FAIRUSE_URL) != null)
			{
				context.put("fairuseurl", state.getAttribute(COPYRIGHT_FAIRUSE_URL));
			}
			if (state.getAttribute(NEW_COPYRIGHT_INPUT) != null)
			{
				context.put("newcopyrightinput", state.getAttribute(NEW_COPYRIGHT_INPUT));
			}
			if (state.getAttribute(COPYRIGHT_TYPES) != null)
			{
				// the last entry of the type list doubles as the "use this copyright" marker
				List copyrightTypes = (List) state.getAttribute(COPYRIGHT_TYPES);
				context.put("copyrightTypes", copyrightTypes);
				context.put("copyrightTypesSize", new Integer(copyrightTypes.size() - 1));
				context.put("USE_THIS_COPYRIGHT", copyrightTypes.get(copyrightTypes.size() - 1));
			}
		}
		// default the public-display flag to false the first time through
		Boolean preventPublicDisplay = (Boolean) state.getAttribute(STATE_PREVENT_PUBLIC_DISPLAY);
		if(preventPublicDisplay == null)
		{
			preventPublicDisplay = Boolean.FALSE;
			state.setAttribute(STATE_PREVENT_PUBLIC_DISPLAY, preventPublicDisplay);
		}
		context.put("preventPublicDisplay", preventPublicDisplay);

	}	// copyrightChoicesIntoContext

	/**
	* Add variables and constants to the velocity context to render an editor
	* for inputting and modifying optional metadata properties about a resource.
	* @param state The session state holding the configured metadata groups.
	* @param context The velocity context to populate with widget-type constants.
	*/
	private static void metadataGroupsIntoContext(SessionState state, Context context)
	{
		// widget-type constants used by the template to pick an input control
		context.put("STRING", ResourcesMetadata.WIDGET_STRING);
		context.put("TEXTAREA", ResourcesMetadata.WIDGET_TEXTAREA);
		context.put("BOOLEAN", ResourcesMetadata.WIDGET_BOOLEAN);
		context.put("INTEGER", ResourcesMetadata.WIDGET_INTEGER);
		context.put("DOUBLE", ResourcesMetadata.WIDGET_DOUBLE);
		context.put("DATE", ResourcesMetadata.WIDGET_DATE);
		context.put("TIME", ResourcesMetadata.WIDGET_TIME);
		context.put("DATETIME", ResourcesMetadata.WIDGET_DATETIME);
		context.put("ANYURI", ResourcesMetadata.WIDGET_ANYURI);
		context.put("WYSIWYG", ResourcesMetadata.WIDGET_WYSIWYG);
		context.put("today", TimeService.newTime());
		List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
		if(metadataGroups != null && !metadataGroups.isEmpty())
		{
			context.put("metadataGroups", metadataGroups);
		}

	}	// metadataGroupsIntoContext

	/**
	* initialize the metadata context: make sure the session state holds the
	* list of metadata groups, seeding it with the Dublin Core group if absent.
	* @param state The session state to initialize.
	*/
	private static void initMetadataContext(SessionState state)
	{
		// define MetadataSets map
		List metadataGroups = (List) state.getAttribute(STATE_METADATA_GROUPS);
		if(metadataGroups == null)
		{
			metadataGroups = new Vector();
			state.setAttribute(STATE_METADATA_GROUPS, metadataGroups);
		}
		// define DublinCore
		if( !metadataGroups.contains(new MetadataGroup(rb.getString("opt_props"))) )
		{
			MetadataGroup dc = new MetadataGroup( rb.getString("opt_props") );
			// the commented-out properties below are deliberately excluded from the editor
			// dc.add(ResourcesMetadata.PROPERTY_DC_TITLE);
			// dc.add(ResourcesMetadata.PROPERTY_DC_DESCRIPTION);
			dc.add(ResourcesMetadata.PROPERTY_DC_ALTERNATIVE);
			dc.add(ResourcesMetadata.PROPERTY_DC_CREATOR);
			dc.add(ResourcesMetadata.PROPERTY_DC_PUBLISHER);
			dc.add(ResourcesMetadata.PROPERTY_DC_SUBJECT);
			dc.add(ResourcesMetadata.PROPERTY_DC_CREATED);
			dc.add(ResourcesMetadata.PROPERTY_DC_ISSUED);
			// dc.add(ResourcesMetadata.PROPERTY_DC_MODIFIED);
			// dc.add(ResourcesMetadata.PROPERTY_DC_TABLEOFCONTENTS);
			dc.add(ResourcesMetadata.PROPERTY_DC_ABSTRACT);
			dc.add(ResourcesMetadata.PROPERTY_DC_CONTRIBUTOR);
			// dc.add(ResourcesMetadata.PROPERTY_DC_TYPE);
			// dc.add(ResourcesMetadata.PROPERTY_DC_FORMAT);
			// dc.add(ResourcesMetadata.PROPERTY_DC_IDENTIFIER);
			// dc.add(ResourcesMetadata.PROPERTY_DC_SOURCE);
			// dc.add(ResourcesMetadata.PROPERTY_DC_LANGUAGE);
			// dc.add(ResourcesMetadata.PROPERTY_DC_COVERAGE);
			// dc.add(ResourcesMetadata.PROPERTY_DC_RIGHTS);
			dc.add(ResourcesMetadata.PROPERTY_DC_AUDIENCE);
			dc.add(ResourcesMetadata.PROPERTY_DC_EDULEVEL);
			metadataGroups.add(dc);
			state.setAttribute(STATE_METADATA_GROUPS, metadataGroups);
		}
		/*
		// define DublinCore
		if(!metadataGroups.contains(new MetadataGroup("Test of Datatypes")))
		{
			MetadataGroup dc = new MetadataGroup("Test of Datatypes");
			dc.add(ResourcesMetadata.PROPERTY_DC_TITLE);
			dc.add(ResourcesMetadata.PROPERTY_DC_DESCRIPTION);
			dc.add(ResourcesMetadata.PROPERTY_DC_ANYURI);
			dc.add(ResourcesMetadata.PROPERTY_DC_DOUBLE);
			dc.add(ResourcesMetadata.PROPERTY_DC_DATETIME);
			dc.add(ResourcesMetadata.PROPERTY_DC_TIME);
			dc.add(ResourcesMetadata.PROPERTY_DC_DATE);
			dc.add(ResourcesMetadata.PROPERTY_DC_BOOLEAN);
			dc.add(ResourcesMetadata.PROPERTY_DC_INTEGER);
			metadataGroups.add(dc);
			state.setAttribute(STATE_METADATA_GROUPS, metadataGroups);
		}
		*/
	}

	/**
	* Internal class that encapsulates all information about a resource that is needed in the browse mode
	*/
	public static class BrowseItem
	{
		// class-wide sequence counter used to build unique item numbers
		protected static Integer seqnum = new Integer(0);
		private String m_itemnum;

		// attributes of all resources
		protected String m_name;
		protected String m_id;
		protected String m_type;
		protected SortedSet m_allSiteGroups;
		protected SortedSet m_inheritedGroupRefs;
		protected SortedSet m_entityGroupRefs;
		protected SortedSet m_allowedRemoveGroupRefs;
		protected SortedSet m_allowedAddGroupRefs;
		protected Map m_allSiteGroupsMap;
		protected boolean m_canRead;
		protected boolean m_canRevise;
		protected boolean m_canDelete;
		protected boolean m_canCopy;
		protected boolean m_isCopied;
		protected boolean m_canAddItem;
		protected boolean m_canAddFolder;
		protected boolean m_canSelect;
		protected boolean m_inDropbox;
		protected List m_members;
		protected boolean m_isEmpty;
		protected boolean m_isHighlighted;
		protected boolean m_inheritsHighlight;
		protected String m_createdBy;
		protected String m_createdTime;
		protected String m_modifiedBy;
		protected String m_modifiedTime;
		protected String m_size;
		protected String m_target;
		protected String m_container;
		protected String m_root;
		protected int m_depth;
		protected boolean m_hasDeletableChildren;
		protected boolean m_hasCopyableChildren;
		protected boolean m_copyrightAlert;
		protected String m_url;
		protected boolean m_isLocal;
		protected boolean m_isAttached;
		private boolean m_isMoved;
		private boolean m_canUpdate;
		private boolean m_toobig;
		protected String m_access;
		protected String m_inheritedAccess;
		protected Collection m_groups;
		protected Collection m_oldInheritedGroups;
		protected Collection m_oldPossibleGroups;
		protected BasicRightsAssignment m_rights;
		protected boolean m_pubview;
		protected boolean m_pubview_inherited;
		protected boolean m_pubview_possible;

		/**
		* Construct a browse item with default flags (nothing permitted, empty, site access).
		* @param id The resource id.
		* @param name The display name.
		* @param type The resource type (e.g. TYPE_FOLDER).
		*/
		public BrowseItem(String id, String name, String type)
		{
			m_name = name;
			m_id = id;
			m_type = type;

			// NOTE(review): synchronizing on 'seqnum' while reassigning it inside the
			// block means concurrent constructors may lock different Integer instances;
			// a dedicated final lock object (or AtomicInteger) would be safer — confirm intent.
			Integer snum;
			synchronized(seqnum)
			{
				snum = seqnum;
				seqnum = new Integer((seqnum.intValue() + 1) % 10000);
			}
			// zero-pad the sequence number into a fixed-width item number, e.g. "Item000042"
			m_itemnum = "Item00000000".substring(0,10 - snum.toString().length()) + snum.toString();

			m_allowedRemoveGroupRefs = new TreeSet();
			m_allowedAddGroupRefs = new TreeSet();
			// order site groups by title, breaking ties by id
			m_allSiteGroups = new TreeSet(new Comparator()
			{
				protected final String DELIM = "::";
				public int compare(Object arg0, Object arg1)
				{
					Group group0 = (Group) arg0;
					Group group1 = (Group) arg1;
					String string0 = group0.getTitle() + DELIM + group0.getId();
					String string1 = group1.getTitle() + DELIM + group1.getId();
					return string0.compareTo(string1);
				}
			});
			m_entityGroupRefs = new TreeSet();
			m_inheritedGroupRefs = new TreeSet();
			// maps BOTH group reference and group id to the Group object
			m_allSiteGroupsMap = new Hashtable();

			// set defaults
			m_rights = new BasicRightsAssignment(m_itemnum, false);
			m_members = new LinkedList();
			m_canRead = false;
			m_canRevise = false;
			m_canDelete = false;
			m_canCopy = false;
			m_isEmpty = true;
			m_toobig = false;
			m_isCopied = false;
			m_isMoved = false;
			m_isAttached = false;
			m_canSelect = true; // default is true.
			m_hasDeletableChildren = false;
			m_hasCopyableChildren = false;
			m_createdBy = "";
			m_modifiedBy = "";
			// m_createdTime = TimeService.newTime().toStringLocalDate();
			// m_modifiedTime = TimeService.newTime().toStringLocalDate();
			m_size = "";
			m_depth = 0;
			m_copyrightAlert = false;
			m_url = "";
			m_target = "";
			m_root = "";
			m_pubview = false;
			m_pubview_inherited = false;
			m_pubview_possible = true;
			m_isHighlighted = false;
			m_inheritsHighlight = false;
			m_canAddItem = false;
			m_canAddFolder = false;
			m_canUpdate = false;
			m_access = AccessMode.INHERITED.toString();
			m_groups = new Vector();
		}

		public String getItemNum()
		{
			return m_itemnum;
		}

		public boolean isInherited(Group group)
		{
			return this.m_inheritedGroupRefs.contains(group.getReference());
		}

		public boolean isLocal(Group group)
		{
			return this.m_entityGroupRefs.contains(group.getReference());
		}

		/**
		* Could this group be given access to the item? Under grouped inherited
		* access only inherited groups qualify; otherwise any site group does.
		*/
		public boolean isPossible(Group group)
		{
			boolean rv = false;
			if(AccessMode.GROUPED.toString().equals(this.m_inheritedAccess))
			{
				rv = this.m_inheritedGroupRefs.contains(group.getReference());
			}
			else
			{
				rv = this.m_allSiteGroupsMap.containsKey(group.getReference());
			}
			return rv;
		}

		public boolean allowedRemove(Group group)
		{
			return this.m_allowedRemoveGroupRefs.contains(group.getReference());
		}

		public SortedSet getAllowedRemoveGroupRefs()
		{
			return m_allowedRemoveGroupRefs;
		}

		public void setAllowedRemoveGroupRefs(Collection allowedRemoveGroupRefs)
		{
			importGroupRefs(allowedRemoveGroupRefs, this.m_allowedRemoveGroupRefs);
		}

		public void addAllowedRemoveGroupRef(String allowedRemoveGroupRef)
		{
			addGroupRefToCollection(allowedRemoveGroupRef, m_allowedRemoveGroupRefs);
		}

		public boolean allowedAdd(Group group)
		{
			return this.m_allowedAddGroupRefs.contains(group.getReference());
		}

		public SortedSet getAllowedAddGroupRefs()
		{
			return m_allowedAddGroupRefs;
		}

		public void setAllowedAddGroupRefs(Collection allowedAddGroupRefs)
		{
			importGroupRefs(allowedAddGroupRefs, this.m_allowedAddGroupRefs);
		}

		public void addAllowedAddGroupRef(String allowedAddGroupRef)
		{
			addGroupRefToCollection(allowedAddGroupRef, m_allowedAddGroupRefs);
		}

		public List getAllSiteGroups()
		{
			return new Vector(m_allSiteGroups);
		}

		public void setAllSiteGroups(Collection allSiteGroups)
		{
			this.m_allSiteGroups.clear();
			this.m_allSiteGroupsMap.clear();
			addAllSiteGroups(allSiteGroups);
		}

		public void addAllSiteGroups(Collection allSiteGroups)
		{
			Iterator it = allSiteGroups.iterator();
			while(it.hasNext())
			{
				Group group = (Group) it.next();
				if(! m_allSiteGroupsMap.containsKey(group.getReference()))
				{
					this.m_allSiteGroups.add(group);
					// index by reference and by id for lookup from either key
					m_allSiteGroupsMap.put(group.getReference(), group);
					m_allSiteGroupsMap.put(group.getId(), group);
				}
			}
		}

		public SortedSet getEntityGroupRefs()
		{
			return m_entityGroupRefs;
		}

		public void setEntityGroupRefs(Collection entityGroupRefs)
		{
			importGroupRefs(entityGroupRefs, this.m_entityGroupRefs);
		}

		public void addEntityGroupRef(String entityGroupRef)
		{
			addGroupRefToCollection(entityGroupRef, m_entityGroupRefs);
		}

		public SortedSet getInheritedGroupRefs()
		{
			return m_inheritedGroupRefs;
		}

		public void setInheritedGroupRefs(Collection inheritedGroupRefs)
		{
			importGroupRefs(inheritedGroupRefs, this.m_inheritedGroupRefs);
		}

		public void addInheritedGroupRef(String inheritedGroupRef)
		{
			addGroupRefToCollection(inheritedGroupRef, m_inheritedGroupRefs);
		}

		/**
		* Replace the contents of 'collection' with references drawn from
		* 'groupRefs', which may hold Group objects or reference strings.
		*/
		protected void importGroupRefs(Collection groupRefs, Collection collection)
		{
			collection.clear();
			Iterator it = groupRefs.iterator();
			while(it.hasNext())
			{
				Object obj = it.next();
				if(obj instanceof Group)
				{
					addGroupRefToCollection(((Group) obj).getReference(), collection);
				}
				else if(obj instanceof String)
				{
					addGroupRefToCollection((String) obj, collection);
				}
			}
		}

		/**
		* Add the group reference to the collection, but only if it names a
		* known site group and is not already present.
		*/
		protected void addGroupRefToCollection(String groupRef, Collection collection)
		{
			Group group = (Group) m_allSiteGroupsMap.get(groupRef);
			if(group != null)
			{
				if(! collection.contains(group.getReference()))
				{
					collection.add(group.getReference());
				}
			}
		}

		public void setIsTooBig(boolean toobig)
		{
			m_toobig = toobig;
		}

		public boolean isTooBig()
		{
			return m_toobig;
		}

		/**
		* @param name The display name.
		*/
		public void setName(String name)
		{
			m_name = name;
		}

		/**
		* @param root The id of the root collection for this item.
		*/
		public void setRoot(String root)
		{
			m_root = root;
		}

		/**
		* @return The id of the root collection for this item.
		*/
		public String getRoot()
		{
			return m_root;
		}

		/**
		* @return A copy of the list of member items (empty if none).
		*/
		public List getMembers()
		{
			List rv = new LinkedList();
			if(m_members != null)
			{
				rv.addAll(m_members);
			}
			return rv;
		}

		/**
		* @param members Items to append to the member list.
		*/
		public void addMembers(Collection members)
		{
			if(m_members == null)
			{
				m_members = new LinkedList();
			}
			m_members.addAll(members);
		}

		/**
		* @return Whether the current user may add an item here.
		*/
		public boolean canAddItem()
		{
			return m_canAddItem;
		}

		/**
		* @return Whether the current user may delete this item.
		*/
		public boolean canDelete()
		{
			return m_canDelete;
		}

		/**
		* @return Whether the current user may read this item.
		*/
		public boolean canRead()
		{
			return m_canRead;
		}

		public boolean canSelect()
		{
			return m_canSelect;
		}

		/**
		* @return Whether the current user may revise this item.
		*/
		public boolean canRevise()
		{
			return m_canRevise;
		}

		/**
		* @return The resource id.
		*/
		public String getId()
		{
			return m_id;
		}

		/**
		* @return The display name.
		*/
		public String getName()
		{
			return m_name;
		}

		/**
		* @return The nesting depth within the browse hierarchy.
		*/
		public int getDepth()
		{
			return m_depth;
		}

		/**
		* @param depth The nesting depth within the browse hierarchy.
		*/
		public void setDepth(int depth)
		{
			m_depth = depth;
		}

		/**
		* @param canAddItem Whether the current user may add an item here.
		*/
		public void setCanAddItem(boolean canAddItem)
		{
			m_canAddItem = canAddItem;
		}

		/**
		* @param canDelete Whether the current user may delete this item.
		*/
		public void setCanDelete(boolean canDelete)
		{
			m_canDelete = canDelete;
		}

		/**
		* @param canRead Whether the current user may read this item.
		*/
		public void setCanRead(boolean canRead)
		{
			m_canRead = canRead;
		}

		public void setCanSelect(boolean canSelect)
		{
			m_canSelect = canSelect;
		}

		/**
		* @param canRevise Whether the current user may revise this item.
		*/
		public void setCanRevise(boolean canRevise)
		{
			m_canRevise = canRevise;
		}

		/**
		* @return true if this item is a collection (folder).
		*/
		public boolean isFolder()
		{
			return TYPE_FOLDER.equals(m_type);
		}

		/**
		* @return The resource type.
		*/
		public String getType()
		{
			return m_type;
		}

		/**
		* @return Whether the current user may add a folder here.
		*/
		public boolean canAddFolder()
		{
			return m_canAddFolder;
		}

		/**
		* @param canAddFolder Whether the current user may add a folder here.
		*/
		public void setCanAddFolder(boolean canAddFolder)
		{
			m_canAddFolder = canAddFolder;
		}

		/**
		* @return Whether the current user may copy this item.
		*/
		public boolean canCopy()
		{
			return m_canCopy;
		}

		/**
		* @param canCopy Whether the current user may copy this item.
		*/
		public void setCanCopy(boolean canCopy)
		{
			m_canCopy = canCopy;
		}

		/**
		* @return Whether the item carries a copyright alert.
		*/
		public boolean hasCopyrightAlert()
		{
			return m_copyrightAlert;
		}

		/**
		* @param copyrightAlert Whether the item carries a copyright alert.
		*/
		public void setCopyrightAlert(boolean copyrightAlert)
		{
			m_copyrightAlert = copyrightAlert;
		}

		/**
		* @return The access URL for the item.
		*/
		public String getUrl()
		{
			return m_url;
		}

		/**
		* @param url The access URL for the item.
		*/
		public void setUrl(String url)
		{
			m_url = url;
		}

		/**
		* @return Whether the item is the source of a pending copy.
		*/
		public boolean isCopied()
		{
			return m_isCopied;
		}

		/**
		* @param isCopied Whether the item is the source of a pending copy.
		*/
		public void setCopied(boolean isCopied)
		{
			m_isCopied = isCopied;
		}

		/**
		* @return Whether the item is the source of a pending move.
		*/
		public boolean isMoved()
		{
			return m_isMoved;
		}

		/**
		* @param isMoved Whether the item is the source of a pending move.
		*/
		public void setMoved(boolean isMoved)
		{
			m_isMoved = isMoved;
		}

		/**
		* @return The display name of the creator.
		*/
		public String getCreatedBy()
		{
			return m_createdBy;
		}

		/**
		* @return The creation time, as display text.
		*/
		public String getCreatedTime()
		{
			return m_createdTime;
		}

		/**
		* @return The display name of the last modifier.
		*/
		public String getModifiedBy()
		{
			return m_modifiedBy;
		}

		/**
		* @return The last-modified time, as display text.
		*/
		public String getModifiedTime()
		{
			return m_modifiedTime;
		}

		/**
		* @return The size display text (never null).
		*/
		public String getSize()
		{
			if(m_size == null)
			{
				m_size = "";
			}
			return m_size;
		}

		/**
		* @param creator The display name of the creator.
		*/
		public void setCreatedBy(String creator)
		{
			m_createdBy = creator;
		}

		/**
		* @param time The creation time, as display text.
		*/
		public void setCreatedTime(String time)
		{
			m_createdTime = time;
		}

		/**
		* @param modifier The display name of the last modifier.
		*/
		public void setModifiedBy(String modifier)
		{
			m_modifiedBy = modifier;
		}

		/**
		* @param time The last-modified time, as display text.
		*/
		public void setModifiedTime(String time)
		{
			m_modifiedTime = time;
		}

		/**
		* @param size The size display text.
		*/
		public void setSize(String size)
		{
			m_size = size;
		}

		/**
		* @return The link target.
		*/
		public String getTarget()
		{
			return m_target;
		}

		/**
		* @param target The link target.
		*/
		public void setTarget(String target)
		{
			m_target = target;
		}

		/**
		* @return Whether this collection has no members.
		*/
		public boolean isEmpty()
		{
			return m_isEmpty;
		}

		/**
		* @param isEmpty Whether this collection has no members.
		*/
		public void setIsEmpty(boolean isEmpty)
		{
			m_isEmpty = isEmpty;
		}

		/**
		* @return The id of the containing collection.
		*/
		public String getContainer()
		{
			return m_container;
		}

		/**
		* @param container The id of the containing collection.
		*/
		public void setContainer(String container)
		{
			m_container = container;
		}

		public void setIsLocal(boolean isLocal)
		{
			m_isLocal = isLocal;
		}

		public boolean isLocal()
		{
			return m_isLocal;
		}

		/**
		* @return Returns the isAttached.
		*/
		public boolean isAttached()
		{
			return m_isAttached;
		}

		/**
		* @param isAttached The isAttached to set.
		*/
		public void setAttached(boolean isAttached)
		{
			this.m_isAttached = isAttached;
		}

		/**
		* @return Returns the hasCopyableChildren.
		*/
		public boolean hasCopyableChildren()
		{
			return m_hasCopyableChildren;
		}

		/**
		* @param hasCopyableChildren The hasCopyableChildren to set.
		*/
		public void setCopyableChildren(boolean hasCopyableChildren)
		{
			this.m_hasCopyableChildren = hasCopyableChildren;
		}

		/**
		* @return Returns the hasDeletableChildren.
		*/
		public boolean hasDeletableChildren()
		{
			return m_hasDeletableChildren;
		}

		/**
		* NOTE(review): method name has a typo ("seDeletableChildren", missing 't');
		* kept as-is because renaming a public method would break existing callers.
		* @param hasDeletableChildren The hasDeletableChildren to set.
		*/
		public void seDeletableChildren(boolean hasDeletableChildren)
		{
			this.m_hasDeletableChildren = hasDeletableChildren;
		}

		/**
		* @return Returns the canUpdate.
		*/
		public boolean canUpdate()
		{
			return m_canUpdate;
		}

		/**
		* @param canUpdate The canUpdate to set.
		*/
		public void setCanUpdate(boolean canUpdate)
		{
			m_canUpdate = canUpdate;
		}

		public void setHighlighted(boolean isHighlighted)
		{
			m_isHighlighted = isHighlighted;
		}

		public boolean isHighlighted()
		{
			return m_isHighlighted;
		}

		public void setInheritsHighlight(boolean inheritsHighlight)
		{
			m_inheritsHighlight = inheritsHighlight;
		}

		public boolean inheritsHighlighted()
		{
			return m_inheritsHighlight;
		}

		/**
		* Access the access mode for this item.
		* @return The access mode.
		*/
		public String getAccess()
		{
			return m_access;
		}

		/**
		* Access the inherited access mode for this item.
		* @return The access mode.
		*/
		public String getInheritedAccess()
		{
			return m_inheritedAccess;
		}

		/**
		* Reports GROUPED only when this item's group list actually differs
		* from the inherited group list; otherwise INHERITED.
		*/
		public String getEntityAccess()
		{
			String rv = AccessMode.INHERITED.toString();
			boolean sameGroups = true;
			if(AccessMode.GROUPED.toString().equals(m_access))
			{
				// compare in both directions: every local group inherited, every inherited group local
				Iterator it = getGroups().iterator();
				while(sameGroups && it.hasNext())
				{
					Group g = (Group) it.next();
					sameGroups = inheritsGroup(g.getReference());
				}
				it = getInheritedGroups().iterator();
				while(sameGroups && it.hasNext())
				{
					Group g = (Group) it.next();
					sameGroups = hasGroup(g.getReference());
				}
				if(!sameGroups)
				{
					rv = AccessMode.GROUPED.toString();
				}
			}
			return rv;
		}

		/**
		* Resolve the effective access mode: local, else inherited, else SITE.
		*/
		public String getEffectiveAccess()
		{
			String rv = this.m_access;
			if(AccessMode.INHERITED.toString().equals(rv))
			{
				rv = this.m_inheritedAccess;
			}
			if(AccessMode.INHERITED.toString().equals(rv))
			{
				rv = AccessMode.SITE.toString();
			}
			return rv;
		}

		/**
		* @return A localized, user-facing description of who can access the item.
		*/
		public String getEffectiveGroups()
		{
			String rv = rb.getString("access.site1");
			if(this.isPubviewInherited())
			{
				rv = rb.getString("access.public1");
			}
			else if(this.isPubview())
			{
				rv = rb.getString("access.public1");
			}
			else if(this.isInDropbox())
			{
				rv = rb.getString("access.dropbox1");
			}
			else if(AccessMode.GROUPED.toString().equals(getEffectiveAccess()))
			{
				rv = (String) rb.getFormattedMessage("access.group1", new Object[]{getGroupNames()});
			}
			return rv;
		}

		public Collection getPossibleGroups()
		{
			return m_oldPossibleGroups;
		}

		public void setPossibleGroups(Collection groups)
		{
			m_oldPossibleGroups = groups;
		}

		/**
		* @return A comma-separated list of the titles of the groups with access
		* (entity groups first; inherited groups if no entity groups are set).
		*/
		public String getGroupNames()
		{
			String rv = "";

			Collection groupRefs = this.m_entityGroupRefs;
			if(groupRefs == null || groupRefs.isEmpty())
			{
				groupRefs = this.m_inheritedGroupRefs;
			}
			Iterator it = groupRefs.iterator();
			while(it.hasNext())
			{
				String groupRef = (String) it.next();
				Group group = (Group) this.m_allSiteGroupsMap.get(groupRef);
				if(group != null)
				{
					if(rv.length() == 0)
					{
						rv += group.getTitle();
					}
					else
					{
						rv += ", " + group.getTitle();
					}
				}
			}

			// TODO: After updating getBrowserItems, get rid of this part
			if(rv.length() == 0)
			{
				Collection groups = getGroups();
				if(groups == null || groups.isEmpty())
				{
					groups = getInheritedGroups();
				}
				Iterator grit = groups.iterator();
				while(grit.hasNext())
				{
					Group g = (Group) grit.next();
					rv += g.getTitle();
					if(grit.hasNext())
					{
						rv += ", ";
					}
				}
			}
			return rv;
		}

		/**
		* Set the access mode for this item.
		* @param access
		*/
		public void setAccess(String access)
		{
			m_access = access;
		}

		/**
		* Set the inherited access mode for this item.
		* @param access
		*/
		public void setInheritedAccess(String access)
		{
			m_inheritedAccess = access;
		}

		/**
		* Access a list of Group objects that can access this item.
		* @return Returns a copy of the groups list.
		*/
		public List getGroups()
		{
			if(m_groups == null)
			{
				m_groups = new Vector();
			}
			return new Vector(m_groups);
		}

		/**
		* Access a list of Group objects that can access this item by inheritance.
		* @return Returns a copy of the inherited groups list.
		*/
		public List getInheritedGroups()
		{
			if(m_oldInheritedGroups == null)
			{
				m_oldInheritedGroups = new Vector();
			}
			return new Vector(m_oldInheritedGroups);
		}

		/**
		* Determine whether a group has access to this item.
		* @param groupRef The internal reference string that uniquely identifies the group.
		* @return true if the group has access, false otherwise.
		*/
		public boolean hasGroup(String groupRef)
		{
			if(m_groups == null)
			{
				m_groups = new Vector();
			}
			boolean found = false;
			Iterator it = m_groups.iterator();
			while(it.hasNext() && !found)
			{
				Group gr = (Group) it.next();
				found = gr.getReference().equals(groupRef);
			}
			return found;
		}

		/**
		* Determine whether a group has inherited access to this item.
		* @param groupRef The internal reference string that uniquely identifies the group.
		* @return true if the group has access, false otherwise.
		*/
		public boolean inheritsGroup(String groupRef)
		{
			if(m_oldInheritedGroups == null)
			{
				m_oldInheritedGroups = new Vector();
			}
			boolean found = false;
			Iterator it = m_oldInheritedGroups.iterator();
			while(it.hasNext() && !found)
			{
				Group gr = (Group) it.next();
				found = gr.getReference().equals(groupRef);
			}
			return found;
		}

		/**
		* Replace the current list of groups with this list of Group objects representing the groups that have access to this item.
		* Entries may be Group objects or group-reference/id strings.
		* @param groups The groups to set.
		*/
		public void setGroups(Collection groups)
		{
			if(groups == null)
			{
				return;
			}
			if(m_groups == null)
			{
				m_groups = new Vector();
			}
			m_groups.clear();
			Iterator it = groups.iterator();
			while(it.hasNext())
			{
				Object obj = it.next();
				if(obj instanceof Group && ! hasGroup(((Group) obj).getReference()))
				{
					m_groups.add(obj);
				}
				else if(obj instanceof String && ! hasGroup((String) obj))
				{
					addGroup((String) obj);
				}
			}
		}

		/**
		* Replace the current list of inherited groups with this list of Group objects representing the groups that have access to this item.
		* Entries may be Group objects or group-reference/id strings.
		* @param groups The groups to set.
		*/
		public void setInheritedGroups(Collection groups)
		{
			if(groups == null)
			{
				return;
			}
			if(m_oldInheritedGroups == null)
			{
				m_oldInheritedGroups = new Vector();
			}
			m_oldInheritedGroups.clear();
			Iterator it = groups.iterator();
			while(it.hasNext())
			{
				Object obj = it.next();
				if(obj instanceof Group && ! inheritsGroup(((Group) obj).getReference()))
				{
					m_oldInheritedGroups.add(obj);
				}
				else if(obj instanceof String && ! hasGroup((String) obj))
				{
					addInheritedGroup((String) obj);
				}
			}
		}

		/**
		* Add a string reference identifying a Group to the list of groups that have access to this item.
		* Only groups with read access on the containing collection qualify.
		* @param groupId The group id to add.
		*/
		public void addGroup(String groupId)
		{
			if(m_groups == null)
			{
				m_groups = new Vector();
			}
			// lazily resolve the containing collection, falling back to the site collection
			if(m_container == null)
			{
				if(m_id == null)
				{
					m_container = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
				}
				else
				{
					m_container = ContentHostingService.getContainingCollectionId(m_id);
				}
				// NOTE(review): 'm_container.trim() == ""' is a reference comparison and is
				// almost never true for a computed String; this was presumably meant to be
				// "".equals(m_container.trim()) — confirm before relying on the fallback.
				if(m_container == null || m_container.trim() == "")
				{
					m_container = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
				}
			}
			boolean found = false;
			Collection groups = ContentHostingService.getGroupsWithReadAccess(m_container);
			Iterator it = groups.iterator();
			while( it.hasNext() && !found )
			{
				Group group = (Group) it.next();
				if(group.getId().equals(groupId))
				{
					if(! hasGroup(group.getReference()))
					{
						m_groups.add(group);
					}
					found = true;
				}
			}
		}

		/**
		* Add a Group to the list of groups that have access to this item.
		* @param group The Group object to be added
		*/
		public void addGroup(Group group)
		{
			if(m_groups == null)
			{
				m_groups = new Vector();
			}
			if(! hasGroup(group.getReference()))
			{
				m_groups.add(group);
			}
		}

		/**
		* Add a string reference identifying a Group to the list of groups that have inherited access to this item.
		* Matches on either the group id or the group reference.
		* @param groupId The group id or reference to add.
		*/
		public void addInheritedGroup(String groupId)
		{
			if(m_oldInheritedGroups == null)
			{
				m_oldInheritedGroups = new Vector();
			}
			// lazily resolve the containing collection, falling back to the site collection
			if(m_container == null)
			{
				if(m_id == null)
				{
					m_container = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
				}
				else
				{
					m_container = ContentHostingService.getContainingCollectionId(m_id);
				}
				// NOTE(review): same reference-equality issue as in addGroup(String) —
				// 'm_container.trim() == ""' should presumably be an equals() check.
				if(m_container == null || m_container.trim() == "")
				{
					m_container = ContentHostingService.getSiteCollection(ToolManager.getCurrentPlacement().getContext());
				}
			}
			boolean found = false;
			Collection groups = ContentHostingService.getGroupsWithReadAccess(m_container);
			Iterator it = groups.iterator();
			while( it.hasNext() && !found )
			{
				Group group = (Group) it.next();
				String gid = group.getId();
				String gref = group.getReference();
				if(gid.equals(groupId) || gref.equals(groupId))
				{
					if(! inheritsGroup(group.getReference()))
					{
						m_oldInheritedGroups.add(group);
					}
					found = true;
				}
			}
		}

		/**
		* Remove all groups from the item.
		*/
		public void clearGroups()
		{
			if(this.m_groups == null)
			{
				m_groups = new Vector();
			}
			m_groups.clear();
		}

		/**
		* Remove all inherited groups from the item.
		*/
		public void clearInheritedGroups()
		{
			if(m_oldInheritedGroups == null)
			{
				m_oldInheritedGroups = new Vector();
			}
			m_oldInheritedGroups.clear();
		}

		/**
		* @return Returns the pubview.
		*/
		public boolean isPubview()
		{
			return m_pubview;
		}

		/**
		* @param pubview The pubview to set.
		*/
		public void setPubview(boolean pubview)
		{
			m_pubview = pubview;
		}

		/**
		* @param possible Whether public view may be enabled for this item.
		*/
		public void setPubviewPossible(boolean possible)
		{
			m_pubview_possible = possible;
		}

		/**
		* @return Returns the pubviewset.
		*/
		public boolean isPubviewInherited()
		{
			return m_pubview_inherited;
		}

		/**
		* @return Whether public view may be enabled for this item.
		*/
		public boolean isPubviewPossible()
		{
			return m_pubview_possible;
		}

		/**
		* @param pubviewset The pubviewset to set.
		*/
		public void setPubviewInherited(boolean pubviewset)
		{
			m_pubview_inherited = pubviewset;
		}

		/**
		* @return Returns the rights.
*/ public BasicRightsAssignment getRights() { return m_rights; } /** * @param rights The rights to set. */ public void setRights(BasicRightsAssignment rights) { this.m_rights = rights; } /** * @return Returns true if the item is in a dropbox (assuming it's been initialized correctly). */ public boolean isInDropbox() { return m_inDropbox; } /** * @param inDropbox The value for inDropbox to set. */ public void setInDropbox(boolean inDropbox) { this.m_inDropbox = inDropbox; } } // inner class BrowseItem /** * Inner class encapsulates information about resources (folders and items) for editing */ public static class EditItem extends BrowseItem { protected String m_copyrightStatus; protected String m_copyrightInfo; // protected boolean m_copyrightAlert; protected String m_filename; protected byte[] m_content; protected String m_encoding; protected String m_mimetype; protected String m_description; protected Map m_metadata; protected boolean m_hasQuota; protected boolean m_canSetQuota; protected String m_quota; protected boolean m_isUrl; protected boolean m_contentHasChanged; protected boolean m_contentTypeHasChanged; protected int m_notification = NotificationService.NOTI_NONE; protected String m_formtype; protected String m_rootname; protected Map m_structuredArtifact; protected List m_properties; protected Set m_metadataGroupsShowing; protected Set m_missingInformation; protected boolean m_hasBeenAdded; protected ResourcesMetadata m_form; protected boolean m_isBlank; protected String m_instruction; protected String m_ccRightsownership; protected String m_ccLicense; protected String m_ccCommercial; protected String m_ccModification; protected String m_ccRightsOwner; protected String m_ccRightsYear; /** * @param id * @param name * @param type */ public EditItem(String id, String name, String type) { super(id, name, type); m_filename = ""; m_contentHasChanged = false; m_contentTypeHasChanged = false; m_metadata = new Hashtable(); m_structuredArtifact = new Hashtable(); 
m_metadataGroupsShowing = new HashSet(); m_mimetype = type; m_content = null; m_encoding = "UTF-8"; m_notification = NotificationService.NOTI_NONE; m_hasQuota = false; m_canSetQuota = false; m_formtype = ""; m_rootname = ""; m_missingInformation = new HashSet(); m_hasBeenAdded = false; m_properties = new Vector(); m_isBlank = true; m_instruction = ""; m_ccRightsownership = ""; m_ccLicense = ""; // m_copyrightStatus = ServerConfigurationService.getString("default.copyright"); } public SortedSet convertToRefs(Collection groupIds) { SortedSet groupRefs = new TreeSet(); Iterator it = groupIds.iterator(); while(it.hasNext()) { String groupId = (String) it.next(); Group group = (Group) this.m_allSiteGroupsMap.get(groupId); if(group != null) { groupRefs.add(group.getReference()); } } return groupRefs; } public void setRightsowner(String ccRightsOwner) { m_ccRightsOwner = ccRightsOwner; } public String getRightsowner() { return m_ccRightsOwner; } public void setRightstyear(String ccRightsYear) { m_ccRightsYear = ccRightsYear; } public String getRightsyear() { return m_ccRightsYear; } public void setAllowModifications(String ccModification) { m_ccModification = ccModification; } public String getAllowModifications() { return m_ccModification; } public void setAllowCommercial(String ccCommercial) { m_ccCommercial = ccCommercial; } public String getAllowCommercial() { return m_ccCommercial; } /** * * @param license */ public void setLicense(String license) { m_ccLicense = license; } /** * * @return */ public String getLicense() { return m_ccLicense; } /** * Record a value for instructions to be displayed to the user in the editor (for Form Items). * @param instruction The value of the instructions. */ public void setInstruction(String instruction) { if(instruction == null) { instruction = ""; } m_instruction = instruction.trim(); } /** * Access instructions to be displayed to the user in the editor (for Form Items). * @return The instructions. 
*/ public String getInstruction() { return m_instruction; } /** * Set the character encoding type that will be used when converting content body between strings and byte arrays. * Default is "UTF-8". * @param encoding A valid name for a character set encoding scheme (@see java.lang.Charset) */ public void setEncoding(String encoding) { m_encoding = encoding; } /** * Get the character encoding type that is used when converting content body between strings and byte arrays. * Default is "UTF-8". * @return The name of the character set encoding scheme (@see java.lang.Charset) */ public String getEncoding() { return m_encoding; } /** * Set marker indicating whether current item is a blank entry * @param isBlank */ public void markAsBlank(boolean isBlank) { m_isBlank = isBlank; } /** * Access marker indicating whether current item is a blank entry * @return true if current entry is blank, false otherwise */ public boolean isBlank() { return m_isBlank; } /** * Change the root ResourcesMetadata object that defines the form for a Structured Artifact. * @param form */ public void setForm(ResourcesMetadata form) { m_form = form; } /** * Access the root ResourcesMetadata object that defines the form for a Structured Artifact. * @return the form. */ public ResourcesMetadata getForm() { return m_form; } /** * @param properties */ public void setProperties(List properties) { m_properties = properties; } public List getProperties() { return m_properties; } /** * Replace current values of Structured Artifact with new values. * @param map The new values. */ public void setValues(Map map) { m_structuredArtifact = map; } /** * Access the entire set of values stored in the Structured Artifact * @return The set of values. 
*/ public Map getValues() { return m_structuredArtifact; } /** * @param id * @param name * @param type */ public EditItem(String type) { this(null, "", type); } /** * @param id */ public void setId(String id) { m_id = id; } /** * Show the indicated metadata group for the item * @param group */ public void showMetadataGroup(String group) { m_metadataGroupsShowing.add(group); } /** * Hide the indicated metadata group for the item * @param group */ public void hideMetadataGroup(String group) { m_metadataGroupsShowing.remove(group); m_metadataGroupsShowing.remove(Validator.escapeUrl(group)); } /** * Query whether the indicated metadata group is showing for the item * @param group * @return true if the metadata group is showing, false otherwise */ public boolean isGroupShowing(String group) { return m_metadataGroupsShowing.contains(group) || m_metadataGroupsShowing.contains(Validator.escapeUrl(group)); } /** * @return */ public boolean isFileUpload() { return !isFolder() && !isUrl() && !isHtml() && !isPlaintext() && !isStructuredArtifact(); } /** * @param type */ public void setType(String type) { m_type = type; } /** * @param mimetype */ public void setMimeType(String mimetype) { m_mimetype = mimetype; } public String getRightsownership() { return m_ccRightsownership; } public void setRightsownership(String owner) { m_ccRightsownership = owner; } /** * @return */ public String getMimeType() { return m_mimetype; } public String getMimeCategory() { if(this.m_mimetype == null || this.m_mimetype.equals("")) { return ""; } int index = this.m_mimetype.indexOf("/"); if(index < 0) { return this.m_mimetype; } return this.m_mimetype.substring(0, index); } public String getMimeSubtype() { if(this.m_mimetype == null || this.m_mimetype.equals("")) { return ""; } int index = this.m_mimetype.indexOf("/"); if(index < 0 || index + 1 == this.m_mimetype.length()) { return ""; } return this.m_mimetype.substring(index + 1); } /** * @param formtype */ public void setFormtype(String 
formtype) { m_formtype = formtype; } /** * @return */ public String getFormtype() { return m_formtype; } /** * @return Returns the copyrightInfo. */ public String getCopyrightInfo() { return m_copyrightInfo; } /** * @param copyrightInfo The copyrightInfo to set. */ public void setCopyrightInfo(String copyrightInfo) { m_copyrightInfo = copyrightInfo; } /** * @return Returns the copyrightStatus. */ public String getCopyrightStatus() { return m_copyrightStatus; } /** * @param copyrightStatus The copyrightStatus to set. */ public void setCopyrightStatus(String copyrightStatus) { m_copyrightStatus = copyrightStatus; } /** * @return Returns the description. */ public String getDescription() { return m_description; } /** * @param description The description to set. */ public void setDescription(String description) { m_description = description; } /** * @return Returns the filename. */ public String getFilename() { return m_filename; } /** * @param filename The filename to set. */ public void setFilename(String filename) { m_filename = filename; } /** * @return Returns the metadata. */ public Map getMetadata() { return m_metadata; } /** * @param metadata The metadata to set. */ public void setMetadata(Map metadata) { m_metadata = metadata; } /** * @param name * @param value */ public void setMetadataItem(String name, Object value) { m_metadata.put(name, value); } public boolean isSitePossible() { return !m_pubview_inherited && !isGroupInherited() && !isSingleGroupInherited(); } public boolean isGroupPossible() { // Collection groups = getPossibleGroups(); // return ! groups.isEmpty(); return this.m_allowedAddGroupRefs != null && ! this.m_allowedAddGroupRefs.isEmpty(); } public boolean isGroupInherited() { return AccessMode.INHERITED.toString().equals(this.m_access) && AccessMode.GROUPED.toString().equals(m_inheritedAccess); } /** * Does this entity inherit grouped access mode with a single group that has access? 
* @return true if this entity inherits grouped access mode with a single group that has access, and false otherwise. */ public boolean isSingleGroupInherited() { //Collection groups = getInheritedGroups(); return // AccessMode.INHERITED.toString().equals(this.m_access) && AccessMode.GROUPED.toString().equals(this.m_inheritedAccess) && this.m_inheritedGroupRefs != null && this.m_inheritedGroupRefs.size() == 1; // && this.m_oldInheritedGroups != null // && this.m_oldInheritedGroups.size() == 1; } public String getSingleGroupTitle() { return (String) rb.getFormattedMessage("access.title4", new Object[]{getGroupNames()}); } /** * Is this entity's access restricted to the site (not pubview) and are there no groups defined for the site? * @return */ public boolean isSiteOnly() { boolean isSiteOnly = false; isSiteOnly = !isGroupPossible() && !isPubviewPossible(); return isSiteOnly; } /** * @return Returns the content. */ public byte[] getContent() { return m_content; } /** * @return Returns the content as a String. */ public String getContentstring() { String rv = ""; if(m_content != null && m_content.length > 0) { try { rv = new String( m_content, m_encoding ); } catch(UnsupportedEncodingException e) { rv = new String( m_content ); } } return rv; } /** * @param content The content to set. */ public void setContent(byte[] content) { m_content = content; } /** * @param content The content to set. */ public void setContent(String content) { try { m_content = content.getBytes(m_encoding); } catch(UnsupportedEncodingException e) { m_content = content.getBytes(); } } /** * @return Returns the canSetQuota. */ public boolean canSetQuota() { return m_canSetQuota; } /** * @param canSetQuota The canSetQuota to set. */ public void setCanSetQuota(boolean canSetQuota) { m_canSetQuota = canSetQuota; } /** * @return Returns the hasQuota. */ public boolean hasQuota() { return m_hasQuota; } /** * @param hasQuota The hasQuota to set. 
*/ public void setHasQuota(boolean hasQuota) { m_hasQuota = hasQuota; } /** * @return Returns the quota. */ public String getQuota() { return m_quota; } /** * @param quota The quota to set. */ public void setQuota(String quota) { m_quota = quota; } /** * @return true if content-type of item indicates it represents a URL, false otherwise */ public boolean isUrl() { return TYPE_URL.equals(m_type) || ResourceProperties.TYPE_URL.equals(m_mimetype); } /** * @return true if content-type of item indicates it represents a URL, false otherwise */ public boolean isStructuredArtifact() { return TYPE_FORM.equals(m_type); } /** * @return true if content-type of item is "text/text" (plain text), false otherwise */ public boolean isPlaintext() { return MIME_TYPE_DOCUMENT_PLAINTEXT.equals(m_mimetype) || MIME_TYPE_DOCUMENT_PLAINTEXT.equals(m_type); } /** * @return true if content-type of item is "text/html" (an html document), false otherwise */ public boolean isHtml() { return MIME_TYPE_DOCUMENT_HTML.equals(m_mimetype) || MIME_TYPE_DOCUMENT_HTML.equals(m_type); } public boolean contentHasChanged() { return m_contentHasChanged; } public void setContentHasChanged(boolean changed) { m_contentHasChanged = changed; } public boolean contentTypeHasChanged() { return m_contentTypeHasChanged; } public void setContentTypeHasChanged(boolean changed) { m_contentTypeHasChanged = changed; } public void setNotification(int notification) { m_notification = notification; } public int getNotification() { return m_notification; } /** * @return Returns the artifact. */ public Map getStructuredArtifact() { return m_structuredArtifact; } /** * @param artifact The artifact to set. 
*/ public void setStructuredArtifact(Map artifact) { this.m_structuredArtifact = artifact; } /** * @param name * @param value */ public void setValue(String name, Object value) { setValue(name, 0, value); } /** * @param name * @param index * @param value */ public void setValue(String name, int index, Object value) { List list = getList(name); try { list.set(index, value); } catch(ArrayIndexOutOfBoundsException e) { list.add(value); } m_structuredArtifact.put(name, list); } /** * Access a value of a structured artifact field of type String. * @param name The name of the field to access. * @return the value, or null if the named field is null or not a String. */ public String getString(String name) { if(m_structuredArtifact == null) { m_structuredArtifact = new Hashtable(); } Object value = m_structuredArtifact.get(name); String rv = ""; if(value == null) { // do nothing } else if(value instanceof String) { rv = (String) value; } else { rv = value.toString(); } return rv; } public Object getValue(String name, int index) { List list = getList(name); Object rv = null; try { rv = list.get(index); } catch(ArrayIndexOutOfBoundsException e) { // return null } return rv; } public Object getPropertyValue(String name) { return getPropertyValue(name, 0); } /** * Access a particular value in a Structured Artifact, as identified by the parameter "name". This * implementation of the method assumes that the name is a series of String identifiers delimited * by the ResourcesAction.ResourcesMetadata.DOT String. * @param name The delimited identifier for the item. * @return The value identified by the name, or null if the name does not identify a valid item. 
*/ public Object getPropertyValue(String name, int index) { String[] names = name.split(ResourcesMetadata.DOT); Object rv = null; if(m_properties == null) { m_properties = new Vector(); } Iterator it = m_properties.iterator(); while(rv == null && it.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) it.next(); if(name.equals(prop.getDottedname())) { rv = prop.getValue(index); } } return rv; } public void setPropertyValue(String name, Object value) { setPropertyValue(name, 0, value); } /** * Access a particular value in a Structured Artifact, as identified by the parameter "name". This * implementation of the method assumes that the name is a series of String identifiers delimited * by the ResourcesAction.ResourcesMetadata.DOT String. * @param name The delimited identifier for the item. * @return The value identified by the name, or null if the name does not identify a valid item. */ public void setPropertyValue(String name, int index, Object value) { if(m_properties == null) { m_properties = new Vector(); } boolean found = false; Iterator it = m_properties.iterator(); while(!found && it.hasNext()) { ResourcesMetadata prop = (ResourcesMetadata) it.next(); if(name.equals(prop.getDottedname())) { found = true; prop.setValue(index, value); } } } /** * Access a particular value in a Structured Artifact, as identified by the parameter "name". This * implementation of the method assumes that the name is a series of String identifiers delimited * by the ResourcesAction.ResourcesMetadata.DOT String. * @param name The delimited identifier for the item. * @return The value identified by the name, or null if the name does not identify a valid item. 
*/ public Object getValue(String name) { String[] names = name.split(ResourcesMetadata.DOT); Object rv = m_structuredArtifact; if(rv != null && (rv instanceof Map) && ((Map) rv).isEmpty()) { rv = null; } for(int i = 1; rv != null && i < names.length; i++) { if(rv instanceof Map) { rv = ((Map) rv).get(names[i]); } else { rv = null; } } return rv; } /** * Access a list of values associated with a named property of a structured artifact. * @param name The name of the property. * @return The list of values associated with that name, or an empty list if the property is not defined. */ public List getList(String name) { if(m_structuredArtifact == null) { m_structuredArtifact = new Hashtable(); } Object value = m_structuredArtifact.get(name); List rv = new Vector(); if(value == null) { m_structuredArtifact.put(name, rv); } else if(value instanceof Collection) { rv.addAll((Collection)value); } else { rv.add(value); } return rv; } /** * @return */ /* public Element exportStructuredArtifact(List properties) { return null; } */ /** * @return Returns the name of the root of a structured artifact definition. */ public String getRootname() { return m_rootname; } /** * @param rootname The name to be assigned for the root of a structured artifact. */ public void setRootname(String rootname) { m_rootname = rootname; } /** * Add a property name to the list of properties missing from the input. * @param propname The name of the property. */ public void setMissing(String propname) { m_missingInformation.add(propname); } /** * Query whether a particular property is missing * @param propname The name of the property * @return The value "true" if the property is missing, "false" otherwise. */ public boolean isMissing(String propname) { return m_missingInformation.contains(propname) || m_missingInformation.contains(Validator.escapeUrl(propname)); } /** * Empty the list of missing properties. 
*/ public void clearMissing() { m_missingInformation.clear(); } public void setAdded(boolean added) { m_hasBeenAdded = added; } public boolean hasBeenAdded() { return m_hasBeenAdded; } } // inner class EditItem /** * Inner class encapsulates information about folders (and final item?) in a collection path (a.k.a. breadcrumb) */ public static class PathItem { protected String m_url; protected String m_name; protected String m_id; protected boolean m_canRead; protected boolean m_isFolder; protected boolean m_isLast; protected String m_root; protected boolean m_isLocal; public PathItem(String id, String name) { m_id = id; m_name = name; m_canRead = false; m_isFolder = false; m_isLast = false; m_url = ""; m_isLocal = true; } /** * @return */ public boolean canRead() { return m_canRead; } /** * @return */ public String getId() { return m_id; } /** * @return */ public boolean isFolder() { return m_isFolder; } /** * @return */ public boolean isLast() { return m_isLast; } /** * @return */ public String getName() { return m_name; } /** * @param canRead */ public void setCanRead(boolean canRead) { m_canRead = canRead; } /** * @param id */ public void setId(String id) { m_id = id; } /** * @param isFolder */ public void setIsFolder(boolean isFolder) { m_isFolder = isFolder; } /** * @param isLast */ public void setLast(boolean isLast) { m_isLast = isLast; } /** * @param name */ public void setName(String name) { m_name = name; } /** * @return */ public String getUrl() { return m_url; } /** * @param url */ public void setUrl(String url) { m_url = url; } /** * @param root */ public void setRoot(String root) { m_root = root; } /** * @return */ public String getRoot() { return m_root; } public void setIsLocal(boolean isLocal) { m_isLocal = isLocal; } public boolean isLocal() { return m_isLocal; } } // inner class PathItem /** * * inner class encapsulates information about groups of metadata tags (such as DC, LOM, etc.) 
*
*/
public static class MetadataGroup extends Vector
{
	/** Serialization id (the class extends Vector, which is Serializable). */
	private static final long serialVersionUID = -821054142728929236L;

	protected String m_name;

	protected boolean m_isShowing;

	/**
	 * Construct a metadata group with this name, initially hidden.
	 * @param name The name of the metadata group.
	 */
	public MetadataGroup(String name)
	{
		super();
		m_name = name;
		m_isShowing = false;
	}

	/**
	 * @return true if the group is currently showing, false otherwise.
	 */
	public boolean isShowing()
	{
		return m_isShowing;
	}

	/**
	 * @param isShowing Whether the group should be showing.
	 */
	public void setShowing(boolean isShowing)
	{
		m_isShowing = isShowing;
	}

	/**
	 * @return The name of the metadata group.
	 */
	public String getName()
	{
		return m_name;
	}

	/**
	 * @param name The name of the metadata group.
	 */
	public void setName(String name)
	{
		m_name = name;
	}

	/* (non-Javadoc)
	 * @see java.lang.Object#equals(java.lang.Object)
	 * needed to determine List.contains()
	 *
	 * BUGFIX: the previous implementation compared the String m_name against
	 * the MetadataGroup object itself (always false) and cast obj before any
	 * type check (ClassCastException for non-MetadataGroup arguments, as can
	 * happen via List.contains()). Compare group names, null-safely.
	 */
	public boolean equals(Object obj)
	{
		if(!(obj instanceof MetadataGroup))
		{
			return false;
		}
		MetadataGroup mg = (MetadataGroup) obj;
		if(m_name == null)
		{
			return mg.getName() == null;
		}
		return m_name.equals(mg.getName());
	}

	/* (non-Javadoc)
	 * @see java.lang.Object#hashCode()
	 * kept consistent with the name-based equals() above
	 */
	public int hashCode()
	{
		return (m_name == null) ? 0 : m_name.hashCode();
	}

}

public static class AttachItem
{
	protected String m_id;
	protected String m_displayName;
	protected String m_accessUrl;
	protected String m_collectionId;
	protected String m_contentType;

	/**
	 * Construct an attachment item.
	 * @param id The entity id.
	 * @param displayName The display name.
	 * @param collectionId The id of the containing collection.
	 * @param accessUrl The access url.
	 */
	public AttachItem(String id, String displayName, String collectionId, String accessUrl)
	{
		m_id = id;
		m_displayName = displayName;
		m_collectionId = collectionId;
		m_accessUrl = accessUrl;
	}

	/**
	 * @return Returns the accessUrl.
	 */
	public String getAccessUrl()
	{
		return m_accessUrl;
	}

	/**
	 * @param accessUrl The accessUrl to set.
	 */
	public void setAccessUrl(String accessUrl)
	{
		m_accessUrl = accessUrl;
	}

	/**
	 * @return Returns the collectionId.
	 */
	public String getCollectionId()
	{
		return m_collectionId;
	}

	/**
	 * @param collectionId The collectionId to set.
	 */
	public void setCollectionId(String collectionId)
	{
		m_collectionId = collectionId;
	}

	/**
	 * @return Returns the id.
	 */
	public String getId()
	{
		return m_id;
	}

	/**
	 * @param id The id to set.
	 */
	public void setId(String id)
	{
		m_id = id;
	}

	/**
	 * @return Returns the name.
*/ public String getDisplayName() { String displayName = m_displayName; if(displayName == null || displayName.trim().equals("")) { displayName = isolateName(m_id); } return displayName; } /** * @param name The name to set. */ public void setDisplayName(String name) { m_displayName = name; } /** * @return Returns the contentType. */ public String getContentType() { return m_contentType; } /** * @param contentType */ public void setContentType(String contentType) { this.m_contentType = contentType; } } // Inner class AttachItem public static class ElementCarrier { protected Element element; protected String parent; public ElementCarrier(Element element, String parent) { this.element = element; this.parent = parent; } public Element getElement() { return element; } public void setElement(Element element) { this.element = element; } public String getParent() { return parent; } public void setParent(String parent) { this.parent = parent; } } public static class SaveArtifactAttempt { protected EditItem item; protected List errors; protected SchemaNode schema; public SaveArtifactAttempt(EditItem item, SchemaNode schema) { this.item = item; this.schema = schema; } /** * @return Returns the errors. */ public List getErrors() { return errors; } /** * @param errors The errors to set. */ public void setErrors(List errors) { this.errors = errors; } /** * @return Returns the item. */ public EditItem getItem() { return item; } /** * @param item The item to set. */ public void setItem(EditItem item) { this.item = item; } /** * @return Returns the schema. */ public SchemaNode getSchema() { return schema; } /** * @param schema The schema to set. */ public void setSchema(SchemaNode schema) { this.schema = schema; } } /** * Develop a list of all the site collections that there are to page. * Sort them as appropriate, and apply search criteria. 
*/ protected static List readAllResources(SessionState state) { List other_sites = new Vector(); String collectionId = (String) state.getAttribute (STATE_ATTACH_COLLECTION_ID); if(collectionId == null) { collectionId = (String) state.getAttribute (STATE_COLLECTION_ID); } HashMap expandedCollections = (HashMap) state.getAttribute(STATE_EXPANDED_COLLECTIONS); // set the sort values String sortedBy = (String) state.getAttribute (STATE_SORT_BY); String sortedAsc = (String) state.getAttribute (STATE_SORT_ASC); Boolean showRemove = (Boolean) state.getAttribute(STATE_SHOW_REMOVE_ACTION); boolean showRemoveAction = showRemove != null && showRemove.booleanValue(); Boolean showMove = (Boolean) state.getAttribute(STATE_SHOW_MOVE_ACTION); boolean showMoveAction = showMove != null && showMove.booleanValue(); Boolean showCopy = (Boolean) state.getAttribute(STATE_SHOW_COPY_ACTION); boolean showCopyAction = showCopy != null && showCopy.booleanValue(); Set highlightedItems = (Set) state.getAttribute(STATE_HIGHLIGHTED_ITEMS); // add user's personal workspace User user = UserDirectoryService.getCurrentUser(); String userId = user.getId(); String userName = user.getDisplayName(); String wsId = SiteService.getUserSiteId(userId); String wsCollectionId = ContentHostingService.getSiteCollection(wsId); if(! collectionId.equals(wsCollectionId)) { List members = getBrowseItems(wsCollectionId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, false, state); if(members != null && members.size() > 0) { BrowseItem root = (BrowseItem) members.remove(0); showRemoveAction = showRemoveAction || root.hasDeletableChildren(); showMoveAction = showMoveAction || root.hasDeletableChildren(); showCopyAction = showCopyAction || root.hasCopyableChildren(); root.addMembers(members); root.setName(userName + " " + rb.getString("gen.wsreso")); other_sites.add(root); } } // add all other sites user has access to /* * NOTE: This does not (and should not) get all sites for admin. 
* Getting all sites for admin is too big a request and * would result in too big a display to render in html. */ Map othersites = ContentHostingService.getCollectionMap(); Iterator siteIt = othersites.keySet().iterator(); SortedSet sort = new TreeSet(); while(siteIt.hasNext()) { String collId = (String) siteIt.next(); String displayName = (String) othersites.get(collId); sort.add(displayName + DELIM + collId); } Iterator sortIt = sort.iterator(); while(sortIt.hasNext()) { String item = (String) sortIt.next(); String displayName = item.substring(0, item.lastIndexOf(DELIM)); String collId = item.substring(item.lastIndexOf(DELIM) + 1); if(! collectionId.equals(collId) && ! wsCollectionId.equals(collId)) { List members = getBrowseItems(collId, expandedCollections, highlightedItems, sortedBy, sortedAsc, (BrowseItem) null, false, state); if(members != null && members.size() > 0) { BrowseItem root = (BrowseItem) members.remove(0); root.addMembers(members); root.setName(displayName); other_sites.add(root); } } } return other_sites; } /** * Prepare the current page of site collections to display. * @return List of BrowseItem objects to display on this page. */ protected static List prepPage(SessionState state) { List rv = new Vector(); // access the page size int pageSize = ((Integer) state.getAttribute(STATE_PAGESIZE)).intValue(); // cleanup prior prep state.removeAttribute(STATE_NUM_MESSAGES); // are we going next or prev, first or last page? boolean goNextPage = state.getAttribute(STATE_GO_NEXT_PAGE) != null; boolean goPrevPage = state.getAttribute(STATE_GO_PREV_PAGE) != null; boolean goFirstPage = state.getAttribute(STATE_GO_FIRST_PAGE) != null; boolean goLastPage = state.getAttribute(STATE_GO_LAST_PAGE) != null; state.removeAttribute(STATE_GO_NEXT_PAGE); state.removeAttribute(STATE_GO_PREV_PAGE); state.removeAttribute(STATE_GO_FIRST_PAGE); state.removeAttribute(STATE_GO_LAST_PAGE); // are we going next or prev message? 
boolean goNext = state.getAttribute(STATE_GO_NEXT) != null; boolean goPrev = state.getAttribute(STATE_GO_PREV) != null; state.removeAttribute(STATE_GO_NEXT); state.removeAttribute(STATE_GO_PREV); // read all channel messages List allMessages = readAllResources(state); if (allMessages == null) { return rv; } String messageIdAtTheTopOfThePage = null; Object topMsgId = state.getAttribute(STATE_TOP_PAGE_MESSAGE); if(topMsgId == null) { // do nothing } else if(topMsgId instanceof Integer) { messageIdAtTheTopOfThePage = ((Integer) topMsgId).toString(); } else if(topMsgId instanceof String) { messageIdAtTheTopOfThePage = (String) topMsgId; } // if we have no prev page and do have a top message, then we will stay "pinned" to the top boolean pinToTop = ( (messageIdAtTheTopOfThePage != null) && (state.getAttribute(STATE_PREV_PAGE_EXISTS) == null) && !goNextPage && !goPrevPage && !goNext && !goPrev && !goFirstPage && !goLastPage); // if we have no next page and do have a top message, then we will stay "pinned" to the bottom boolean pinToBottom = ( (messageIdAtTheTopOfThePage != null) && (state.getAttribute(STATE_NEXT_PAGE_EXISTS) == null) && !goNextPage && !goPrevPage && !goNext && !goPrev && !goFirstPage && !goLastPage); // how many messages, total int numMessages = allMessages.size(); if (numMessages == 0) { return rv; } // save the number of messges state.setAttribute(STATE_NUM_MESSAGES, new Integer(numMessages)); // find the position of the message that is the top first on the page int posStart = 0; if (messageIdAtTheTopOfThePage != null) { // find the next page posStart = findResourceInList(allMessages, messageIdAtTheTopOfThePage); // if missing, start at the top if (posStart == -1) { posStart = 0; } } // if going to the next page, adjust if (goNextPage) { posStart += pageSize; } // if going to the prev page, adjust else if (goPrevPage) { posStart -= pageSize; if (posStart < 0) posStart = 0; } // if going to the first page, adjust else if (goFirstPage) { posStart = 0; } 
// if going to the last page, adjust else if (goLastPage) { posStart = numMessages - pageSize; if (posStart < 0) posStart = 0; } // pinning if (pinToTop) { posStart = 0; } else if (pinToBottom) { posStart = numMessages - pageSize; if (posStart < 0) posStart = 0; } // get the last page fully displayed if (posStart + pageSize > numMessages) { posStart = numMessages - pageSize; if (posStart < 0) posStart = 0; } // compute the end to a page size, adjusted for the number of messages available int posEnd = posStart + (pageSize-1); if (posEnd >= numMessages) posEnd = numMessages-1; int numMessagesOnThisPage = (posEnd - posStart) + 1; // select the messages on this page for (int i = posStart; i <= posEnd; i++) { rv.add(allMessages.get(i)); } // save which message is at the top of the page BrowseItem itemAtTheTopOfThePage = (BrowseItem) allMessages.get(posStart); state.setAttribute(STATE_TOP_PAGE_MESSAGE, itemAtTheTopOfThePage.getId()); state.setAttribute(STATE_TOP_MESSAGE_INDEX, new Integer(posStart)); // which message starts the next page (if any) int next = posStart + pageSize; if (next < numMessages) { state.setAttribute(STATE_NEXT_PAGE_EXISTS, ""); } else { state.removeAttribute(STATE_NEXT_PAGE_EXISTS); } // which message ends the prior page (if any) int prev = posStart - 1; if (prev >= 0) { state.setAttribute(STATE_PREV_PAGE_EXISTS, ""); } else { state.removeAttribute(STATE_PREV_PAGE_EXISTS); } if (state.getAttribute(STATE_VIEW_ID) != null) { int viewPos = findResourceInList(allMessages, (String) state.getAttribute(STATE_VIEW_ID)); // are we moving to the next message if (goNext) { // advance viewPos++; if (viewPos >= numMessages) viewPos = numMessages-1; } // are we moving to the prev message if (goPrev) { // retreat viewPos--; if (viewPos < 0) viewPos = 0; } // update the view message state.setAttribute(STATE_VIEW_ID, ((BrowseItem) allMessages.get(viewPos)).getId()); // if the view message is no longer on the current page, adjust the page // Note: next time through 
this will get processed if (viewPos < posStart) { state.setAttribute(STATE_GO_PREV_PAGE, ""); } else if (viewPos > posEnd) { state.setAttribute(STATE_GO_NEXT_PAGE, ""); } if (viewPos > 0) { state.setAttribute(STATE_PREV_EXISTS,""); } else { state.removeAttribute(STATE_PREV_EXISTS); } if (viewPos < numMessages-1) { state.setAttribute(STATE_NEXT_EXISTS,""); } else { state.removeAttribute(STATE_NEXT_EXISTS); } } return rv; } // prepPage /** * Find the resource with this id in the list. * @param messages The list of messages. * @param id The message id. * @return The index position in the list of the message with this id, or -1 if not found. */ protected static int findResourceInList(List resources, String id) { for (int i = 0; i < resources.size(); i++) { // if this is the one, return this index if (((BrowseItem) (resources.get(i))).getId().equals(id)) return i; } // not found return -1; } // findResourceInList protected static User getUserProperty(ResourceProperties props, String name) { String id = props.getProperty(name); if (id != null) { try { return UserDirectoryService.getUser(id); } catch (UserNotDefinedException e) { } } return null; } /** * Find the resource name of a given resource id or filepath. * * @param id * The resource id. * @return the resource name. */ protected static String isolateName(String id) { if (id == null) return null; if (id.length() == 0) return null; // take after the last resource path separator, not counting one at the very end if there boolean lastIsSeparator = id.charAt(id.length() - 1) == '/'; return id.substring(id.lastIndexOf('/', id.length() - 2) + 1, (lastIsSeparator ? id.length() - 1 : id.length())); } // isolateName } // ResourcesAction
SAK-5459 - fix dropbox top level folder revise NPE git-svn-id: 5b1b1b0f989bbd918ddd0a77f47569c690fd24bc@11659 66ffb92e-73f9-0310-93c1-f5514f145a0a
content/content-tool/tool/src/java/org/sakaiproject/content/tool/ResourcesAction.java
SAK-5459 - fix dropbox top level folder revise NPE
<ide><path>ontent/content-tool/tool/src/java/org/sakaiproject/content/tool/ResourcesAction.java <ide> <ide> String previousCollectionId = ""; <ide> Vector pathitems = new Vector(); <del> while(currentCollectionId != null && ! currentCollectionId.equals(navRoot) && ! currentCollectionId.equals(previousCollectionId)) <add> while ((currentCollectionId != null) && (!currentCollectionId.equals(navRoot)) && (!currentCollectionId.equals(previousCollectionId)) && (!contentService.isRootCollection(previousCollectionId))) <ide> { <ide> pathitems.add(currentCollectionId); <ide> previousCollectionId = currentCollectionId;
Java
apache-2.0
bbab04ef3a1893f9692d135102caaa65b57b340a
0
apache/incubator-provisionr,axemblr/axemblr-provisionr,axemblr/axemblr-provisionr,apache/incubator-provisionr,apache/incubator-provisionr
/* * Copyright (c) 2012 S.C. Axemblr Software Solutions S.R.L * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.axemblr.provisionr.core.activiti; import com.google.common.base.Stopwatch; import java.util.concurrent.TimeUnit; import org.activiti.engine.ProcessEngine; import org.activiti.engine.impl.cfg.StandaloneInMemProcessEngineConfiguration; import org.activiti.engine.impl.jobexecutor.DefaultJobExecutor; import static org.fest.assertions.api.Assertions.assertThat; import org.junit.Test; public class ConfigurableFailedJobCommandFactoryTest { @Test public void testConfigurableNumberOfRetries() throws InterruptedException { DefaultJobExecutor jobExecutor = new DefaultJobExecutor(); jobExecutor.setCorePoolSize(2); jobExecutor.setQueueSize(2); jobExecutor.setMaxJobsPerAcquisition(5); jobExecutor.setWaitTimeInMillis(50); jobExecutor.setLockTimeInMillis(180000); ProcessEngine processEngine = new StandaloneInMemProcessEngineConfiguration() .setJobExecutorActivate(true) .setJobExecutor(jobExecutor) .setFailedJobCommandFactory(new ConfigurableFailedJobCommandFactory(2, 1)) .buildProcessEngine(); processEngine.getRepositoryService().createDeployment() .addClasspathResource("diagrams/alwaysFail.bpmn20.xml").deploy(); Stopwatch stopwatch = new Stopwatch().start(); processEngine.getRuntimeService().startProcessInstanceByKey("alwaysFail"); while (AlwaysFailTask.COUNTER.get() != 3 /* = 1 normal execution + 2 retries */) { TimeUnit.MILLISECONDS.sleep(100); } stopwatch.stop(); 
assertThat(stopwatch.elapsedTime(TimeUnit.SECONDS)).isGreaterThanOrEqualTo(2); processEngine.close(); } }
core/src/test/java/com/axemblr/provisionr/core/activiti/ConfigurableFailedJobCommandFactoryTest.java
/* * Copyright (c) 2012 S.C. Axemblr Software Solutions S.R.L * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.axemblr.provisionr.core.activiti; import com.google.common.base.Stopwatch; import java.util.concurrent.TimeUnit; import org.activiti.engine.ProcessEngine; import org.activiti.engine.impl.cfg.StandaloneInMemProcessEngineConfiguration; import org.activiti.engine.impl.jobexecutor.DefaultJobExecutor; import static org.fest.assertions.api.Assertions.assertThat; import org.junit.Test; public class ConfigurableFailedJobCommandFactoryTest { @Test public void testConfigurableNumberOfRetries() throws InterruptedException { DefaultJobExecutor jobExecutor = new DefaultJobExecutor(); jobExecutor.setCorePoolSize(2); jobExecutor.setQueueSize(2); jobExecutor.setMaxJobsPerAcquisition(5); jobExecutor.setWaitTimeInMillis(50); jobExecutor.setLockTimeInMillis(180000); ProcessEngine processEngine = new StandaloneInMemProcessEngineConfiguration() .setJobExecutorActivate(true) .setJobExecutor(jobExecutor) .setFailedJobCommandFactory(new ConfigurableFailedJobCommandFactory(2, 1)) .buildProcessEngine(); processEngine.getRepositoryService().createDeployment() .addClasspathResource("diagrams/alwaysFail.bpmn20.xml").deploy(); Stopwatch stopwatch = new Stopwatch().start(); processEngine.getRuntimeService().startProcessInstanceByKey("alwaysFail"); while (AlwaysFailTask.COUNTER.get() != 3 /* = 1 normal execution + 2 retries */) { TimeUnit.SECONDS.sleep(1); } stopwatch.stop(); 
assertThat(stopwatch.elapsedTime(TimeUnit.SECONDS)).isGreaterThanOrEqualTo(2); processEngine.close(); } }
Updated counter check timeout to make test more relevant
core/src/test/java/com/axemblr/provisionr/core/activiti/ConfigurableFailedJobCommandFactoryTest.java
Updated counter check timeout to make test more relevant
<ide><path>ore/src/test/java/com/axemblr/provisionr/core/activiti/ConfigurableFailedJobCommandFactoryTest.java <ide> processEngine.getRuntimeService().startProcessInstanceByKey("alwaysFail"); <ide> <ide> while (AlwaysFailTask.COUNTER.get() != 3 /* = 1 normal execution + 2 retries */) { <del> TimeUnit.SECONDS.sleep(1); <add> TimeUnit.MILLISECONDS.sleep(100); <ide> } <ide> <ide> stopwatch.stop();
Java
mit
error: pathspec 'module/app/com/blopker/wamplay/models/messages/MessageType.java' did not match any file(s) known to git
848c9be6c82162d6c50d54b818dd3ae6ceb27db9
1
blopker/WAMPlay,blopker/WAMPlay
package com.blopker.wamplay.models.messages; import com.blopker.wamplay.controllers.messageHandlers.*; public enum MessageType { WELCOME(0, null), // PREFIX(1), CALL(2, new RPCHandler()), CALLRESULT(3, null), CALLERROR(4, null), SUBSCRIBE(5, new SubscribeHandler()), UNSUBSCRIBE(6, new UnsubscribeHandler()), PUBLISH(7, new PublishHandler()), EVENT(8, null); private final int typeCode; private final MessageHandler handler; private MessageType(int typeCode, MessageHandler handler) { this.typeCode = typeCode; this.handler = handler; } public int getTypeCode() { return typeCode; } public MessageHandler getHandler(){ return this.handler; } public static MessageType getType(int type) throws EnumConstantNotPresentException{ for (MessageType messageType : MessageType.values()) { if (messageType.getTypeCode() == type) { return messageType; } } throw new EnumConstantNotPresentException(MessageType.class, Integer.toString(type)); } }
module/app/com/blopker/wamplay/models/messages/MessageType.java
fixed enum name
module/app/com/blopker/wamplay/models/messages/MessageType.java
fixed enum name
<ide><path>odule/app/com/blopker/wamplay/models/messages/MessageType.java <add>package com.blopker.wamplay.models.messages; <add> <add>import com.blopker.wamplay.controllers.messageHandlers.*; <add> <add>public enum MessageType { <add> WELCOME(0, null), <add>// PREFIX(1), <add> CALL(2, new RPCHandler()), <add> CALLRESULT(3, null), <add> CALLERROR(4, null), <add> SUBSCRIBE(5, new SubscribeHandler()), <add> UNSUBSCRIBE(6, new UnsubscribeHandler()), <add> PUBLISH(7, new PublishHandler()), <add> EVENT(8, null); <add> <add> private final int typeCode; <add> private final MessageHandler handler; <add> <add> private MessageType(int typeCode, MessageHandler handler) { <add> this.typeCode = typeCode; <add> this.handler = handler; <add> } <add> <add> public int getTypeCode() { <add> return typeCode; <add> } <add> <add> public MessageHandler getHandler(){ <add> return this.handler; <add> } <add> <add> public static MessageType getType(int type) throws EnumConstantNotPresentException{ <add> for (MessageType messageType : MessageType.values()) { <add> if (messageType.getTypeCode() == type) { <add> return messageType; <add> } <add> } <add> throw new EnumConstantNotPresentException(MessageType.class, Integer.toString(type)); <add> } <add>}
JavaScript
apache-2.0
ee232d114224190c2aa62ba73711dc2b0efa6ee6
0
mwasiluk/angular-surveys,mwasiluk/angular-surveys
angular.module('mwFormViewer').directive('mwFormViewer', function ($rootScope) { return { replace: true, restrict: 'AE', scope: { formData: '=', responseData: '=', templateData: '=?', readOnly: '=?', options: '=?', formStatus: '=?', //wrapper for internal angular form object onSubmit: '&', api: '=?' }, templateUrl: 'mw-form-viewer.html', controllerAs: 'ctrl', bindToController: true, controller: function($timeout, $interpolate){ var ctrl = this; // Put initialization logic inside `$onInit()` // to make sure bindings have been initialized. ctrl.$onInit = function() { ctrl.defaultOptions = { nestedForm: false, autoStart: false, disableSubmit: false }; ctrl.options = angular.extend({}, ctrl.defaultOptions, ctrl.options); ctrl.submitStatus='NOT_SUBMITTED'; ctrl.formSubmitted=false; sortPagesByNumber(); ctrl.pageIdToPage={}; ctrl.formData.pages.forEach(function(page){ ctrl.pageIdToPage[page.id]=page; }); ctrl.buttons={ prevPage: { visible: false, disabled: false }, nextPage: { visible: false, disabled: false }, submitForm: { visible: false, disabled: false } }; ctrl.resetPages(); if(ctrl.api){ ctrl.api.reset = function(){ for (var prop in ctrl.responseData) { if (ctrl.responseData.hasOwnProperty(prop)) { delete ctrl.responseData[prop]; } } ctrl.buttons.submitForm.visible=false; ctrl.buttons.prevPage.visible=false; ctrl.buttons.nextPage.visible=false; ctrl.currentPage=null; $timeout(ctrl.resetPages, 0); } } }; ctrl.submitForm = function(){ ctrl.formSubmitted=true; ctrl.submitStatus='IN_PROGRESS'; ctrl.setCurrentPage(null); var resultPromise = ctrl.onSubmit(); resultPromise.then(function(){ ctrl.submitStatus='SUCCESS'; }).catch(function(){ ctrl.submitStatus='ERROR'; }); }; ctrl.setCurrentPage = function (page) { ctrl.currentPage = page; if(!page){ ctrl.buttons.submitForm.visible=false; ctrl.buttons.prevPage.visible=false; ctrl.buttons.nextPage.visible=false; return; } ctrl.setDefaultNextPage(); ctrl.initResponsesForCurrentPage(); }; ctrl.setDefaultNextPage = function(){ 
var index = ctrl.formData.pages.indexOf(ctrl.currentPage); ctrl.currentPage.isFirst = index==0; ctrl.currentPage.isLast = index==ctrl.formData.pages.length-1; ctrl.buttons.submitForm.visible=ctrl.currentPage.isLast; ctrl.buttons.prevPage.visible=!ctrl.currentPage.isFirst; ctrl.buttons.nextPage.visible=!ctrl.currentPage.isLast; if(ctrl.currentPage.isLast){ ctrl.nextPage=null; }else{ ctrl.nextPage=ctrl.formData.pages[index+1]; } if(ctrl.currentPage.pageFlow){ var formSubmit = false; if(ctrl.currentPage.pageFlow.formSubmit){ ctrl.nextPage=null; formSubmit = true; }else if(ctrl.currentPage.pageFlow.page){ ctrl.nextPage=ctrl.pageIdToPage[ctrl.currentPage.pageFlow.page.id]; ctrl.buttons.nextPage.visible=true; }else if(ctrl.currentPage.isLast){ ctrl.nextPage=null; formSubmit = true; } ctrl.buttons.submitForm.visible=formSubmit; ctrl.buttons.nextPage.visible=!formSubmit; } }; ctrl.initResponsesForCurrentPage = function(){ ctrl.currentPage.elements.forEach(function(element){ var question = element.question; if(question && !ctrl.responseData[question.id]){ ctrl.responseData[question.id]={}; } }); }; ctrl.beginResponse=function(){ if(ctrl.formData.pages.length>0){ ctrl.setCurrentPage(ctrl.formData.pages[0]); $rootScope.$broadcast("mwForm.pageEvents.pageCurrentChanged",{currentPage:ctrl.currentPage}); } }; ctrl.resetPages = function(){ ctrl.prevPages=[]; ctrl.currentPage=null; ctrl.nextPage = null; ctrl.formSubmitted=false; if(ctrl.options.autoStart){ ctrl.beginResponse(); } }; ctrl.goToPrevPage= function(){ var prevPage = ctrl.prevPages.pop(); ctrl.setCurrentPage(prevPage); ctrl.updateNextPageBasedOnAllAnswers(); $rootScope.$broadcast("mwForm.pageEvents.pageCurrentChanged",{currentPage:ctrl.currentPage}); }; ctrl.goToNextPage= function(){ ctrl.prevPages.push(ctrl.currentPage); ctrl.updateNextPageBasedOnAllAnswers(); ctrl.setCurrentPage(ctrl.nextPage); $rootScope.$broadcast("mwForm.pageEvents.pageCurrentChanged",{currentPage:ctrl.currentPage}); }; 
ctrl.updateNextPageBasedOnAllAnswers = function(){ ctrl.currentPage.elements.forEach(function(element){ ctrl.updateNextPageBasedOnPageElementAnswers(element); }); ctrl.buttons.submitForm.visible=!ctrl.nextPage; ctrl.buttons.nextPage.visible=!!ctrl.nextPage; }; ctrl.updateNextPageBasedOnPageElementAnswers = function (element) { var question = element.question; if (question && question.pageFlowModifier) { question.offeredAnswers.forEach(function (answer) { if (answer.pageFlow) { if(ctrl.responseData[question.id].selectedAnswer == answer.id){ if (answer.pageFlow.formSubmit) { ctrl.nextPage = null; } else if (answer.pageFlow.page) { ctrl.nextPage = ctrl.pageIdToPage[answer.pageFlow.page.id]; } } } }); } }; ctrl.onResponseChanged = function(pageElement){ ctrl.setDefaultNextPage(); ctrl.updateNextPageBasedOnAllAnswers(); }; function sortPagesByNumber() { ctrl.formData.pages.sort(function(a,b){ return a.number - b.number; }); } ctrl.print=function(input){ if (input&&ctrl.templateData){ return $interpolate(input)(ctrl.templateData); } return input; }; // Prior to v1.5, we need to call `$onInit()` manually. // (Bindings will always be pre-assigned in these versions.) if (angular.version.major === 1 && angular.version.minor < 5) { ctrl.$onInit(); } }, link: function (scope, ele, attrs){ var ctrl = scope.ctrl; if(ctrl.formStatus){ ctrl.formStatus.form = ctrl.form; } scope.$on('mwForm.pageEvents.changePage', function(event,data){ if(typeof data.page !== "undefined" && data.page < ctrl.formData.pages.length){ ctrl.resetPages(); for(var i =0; i < data.page;i++){ ctrl.prevPages.push(ctrl.formData.pages[i]); } var currenPge=ctrl.formData.pages[data.page]; ctrl.setCurrentPage(currenPge); $rootScope.$broadcast("mwForm.pageEvents.pageCurrentChanged",{currentPage:currenPge}); ctrl.updateNextPageBasedOnAllAnswers(); } }); } }; });
src/viewer/form-viewer.directive.js
angular.module('mwFormViewer').directive('mwFormViewer', function ($rootScope) { return { replace: true, restrict: 'AE', scope: { formData: '=', responseData: '=', templateData: '=?', readOnly: '=?', options: '=?', formStatus: '=?', //wrapper for internal angular form object onSubmit: '&', api: '=?' }, templateUrl: 'mw-form-viewer.html', controllerAs: 'ctrl', bindToController: true, controller: function($timeout, $interpolate){ var ctrl = this; // Put initialization logic inside `$onInit()` // to make sure bindings have been initialized. ctrl.$onInit = function() { ctrl.defaultOptions = { nestedForm: false, autoStart: false, disableSubmit: false }; ctrl.options = angular.extend({}, ctrl.defaultOptions, ctrl.options); ctrl.submitStatus='NOT_SUBMITTED'; ctrl.formSubmitted=false; sortPagesByNumber(); ctrl.pageIdToPage={}; ctrl.formData.pages.forEach(function(page){ ctrl.pageIdToPage[page.id]=page; }); ctrl.buttons={ prevPage: { visible: false, disabled: false }, nextPage: { visible: false, disabled: false }, submitForm: { visible: false, disabled: false } }; ctrl.resetPages(); if(ctrl.api){ ctrl.api.reset = function(){ for (var prop in ctrl.responseData) { if (ctrl.responseData.hasOwnProperty(prop)) { delete ctrl.responseData[prop]; } } ctrl.buttons.submitForm.visible=false; ctrl.buttons.prevPage.visible=false; ctrl.buttons.nextPage.visible=false; ctrl.currentPage=null; $timeout(ctrl.resetPages, 0); } } }; ctrl.submitForm = function(){ ctrl.formSubmitted=true; ctrl.submitStatus='IN_PROGRESS'; ctrl.setCurrentPage(null); var resultPromise = ctrl.onSubmit(); resultPromise.then(function(){ ctrl.submitStatus='SUCCESS'; }).catch(function(){ ctrl.submitStatus='ERROR'; }); }; ctrl.setCurrentPage = function (page) { ctrl.currentPage = page; if(!page){ ctrl.buttons.submitForm.visible=false; ctrl.buttons.prevPage.visible=false; ctrl.buttons.nextPage.visible=false; return; } ctrl.setDefaultNextPage(); ctrl.initResponsesForCurrentPage(); }; ctrl.setDefaultNextPage = function(){ 
var index = ctrl.formData.pages.indexOf(ctrl.currentPage); ctrl.currentPage.isFirst = index==0; ctrl.currentPage.isLast = index==ctrl.formData.pages.length-1; ctrl.buttons.submitForm.visible=ctrl.currentPage.isLast; ctrl.buttons.prevPage.visible=!ctrl.currentPage.isFirst; ctrl.buttons.nextPage.visible=!ctrl.currentPage.isLast; if(ctrl.currentPage.isLast){ ctrl.nextPage=null; }else{ ctrl.nextPage=ctrl.formData.pages[index+1]; } if(ctrl.currentPage.pageFlow){ var formSubmit = false; if(ctrl.currentPage.pageFlow.formSubmit){ ctrl.nextPage=null; formSubmit = true; }else if(ctrl.currentPage.pageFlow.page){ ctrl.nextPage=ctrl.pageIdToPage[ctrl.currentPage.pageFlow.page.id]; ctrl.buttons.nextPage.visible=true; }else if(ctrl.currentPage.isLast){ ctrl.nextPage=null; formSubmit = true; } ctrl.buttons.submitForm.visible=formSubmit; ctrl.buttons.nextPage.visible=!formSubmit; } }; ctrl.initResponsesForCurrentPage = function(){ ctrl.currentPage.elements.forEach(function(element){ var question = element.question; if(question && !ctrl.responseData[question.id]){ ctrl.responseData[question.id]={}; } }); }; ctrl.beginResponse=function(){ if(ctrl.formData.pages.length>0){ ctrl.setCurrentPage(ctrl.formData.pages[0]); $rootScope.$broadcast("mwForm.pageEvents.pageCurrentChanged",{currentPage:ctrl.currentPage}); } }; ctrl.resetPages = function(){ ctrl.prevPages=[]; ctrl.currentPage=null; ctrl.nextPage = null; ctrl.formSubmitted=false; if(ctrl.options.autoStart){ ctrl.beginResponse(); } }; ctrl.goToPrevPage= function(){ var prevPage = ctrl.prevPages.pop(); ctrl.setCurrentPage(prevPage); ctrl.updateNextPageBasedOnAllAnswers(); $rootScope.$broadcast("mwForm.pageEvents.pageCurrentChanged",{currentPage:ctrl.currentPage}); }; ctrl.goToNextPage= function(){ ctrl.prevPages.push(ctrl.currentPage); ctrl.updateNextPageBasedOnAllAnswers(); ctrl.setCurrentPage(ctrl.nextPage); $rootScope.$broadcast("mwForm.pageEvents.pageCurrentChanged",{currentPage:ctrl.currentPage}); }; 
ctrl.updateNextPageBasedOnAllAnswers = function(){ ctrl.currentPage.elements.forEach(function(element){ ctrl.updateNextPageBasedOnPageElementAnswers(element); }); ctrl.buttons.submitForm.visible=!ctrl.nextPage; ctrl.buttons.nextPage.visible=!!ctrl.nextPage; }; ctrl.updateNextPageBasedOnPageElementAnswers = function (element) { var question = element.question; if (question && question.pageFlowModifier) { question.offeredAnswers.forEach(function (answer) { if (answer.pageFlow) { if(ctrl.responseData[question.id].selectedAnswer == answer.id){ if (answer.pageFlow.formSubmit) { ctrl.nextPage = null; } else if (answer.pageFlow.page) { ctrl.nextPage = ctrl.pageIdToPage[answer.pageFlow.page.id]; } } } }); } }; ctrl.onResponseChanged = function(pageElement){ ctrl.setDefaultNextPage(); ctrl.updateNextPageBasedOnAllAnswers(); }; function sortPagesByNumber() { ctrl.formData.pages.sort(function(a,b){ return a.number - b.number; }); } ctrl.print=function(input){ if (input&&ctrl.templateData){ return $interpolate(input)(ctrl.templateData); } return input; }; // Prior to v1.5, we need to call `$onInit()` manually. // (Bindings will always be pre-assigned in these versions.) if (angular.version.major === 1 && angular.version.minor < 5) { ctrl.$onInit(); } }, link: function (scope, ele, attrs){ var ctrl = scope.ctrl; if(ctrl.formStatus){ ctrl.formStatus.form = ctrl.form; } } }; });
Adding event to change page manually.
src/viewer/form-viewer.directive.js
Adding event to change page manually.
<ide><path>rc/viewer/form-viewer.directive.js <ide> $rootScope.$broadcast("mwForm.pageEvents.pageCurrentChanged",{currentPage:ctrl.currentPage}); <ide> } <ide> }; <del> <add> <ide> ctrl.resetPages = function(){ <ide> ctrl.prevPages=[]; <ide> <ide> if(ctrl.formStatus){ <ide> ctrl.formStatus.form = ctrl.form; <ide> } <add> <add> scope.$on('mwForm.pageEvents.changePage', function(event,data){ <add> if(typeof data.page !== "undefined" && data.page < ctrl.formData.pages.length){ <add> ctrl.resetPages(); <add> for(var i =0; i < data.page;i++){ <add> ctrl.prevPages.push(ctrl.formData.pages[i]); <add> } <add> var currenPge=ctrl.formData.pages[data.page]; <add> ctrl.setCurrentPage(currenPge); <add> $rootScope.$broadcast("mwForm.pageEvents.pageCurrentChanged",{currentPage:currenPge}); <add> ctrl.updateNextPageBasedOnAllAnswers(); <add> } <add> }); <ide> <ide> <ide> }
Java
bsd-2-clause
f37295c63f3c6d3c89800c18f1c570709ed121e3
0
10000TB/galileo,10000TB/galileo
/* Copyright (c) 2013, Colorado State University All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. This software is provided by the copyright holders and contributors "as is" and any express or implied warranties, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose are disclaimed. In no event shall the copyright holder or contributors be liable for any direct, indirect, incidental, special, exemplary, or consequential damages (including, but not limited to, procurement of substitute goods or services; loss of use, data, or profits; or business interruption) however caused and on any theory of liability, whether in contract, strict liability, or tort (including negligence or otherwise) arising in any way out of the use of this software, even if advised of the possibility of such damage. */ package galileo.graph; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.UUID; import galileo.dataset.BlockMetadata; import galileo.dataset.FileBlock; import galileo.fs.FileSystem; import galileo.serialization.SerializationException; import galileo.serialization.Serializer; import galileo.util.GeoHash; /** * Handles functionality concerning the physical (on-disk) graph. The graph is * built on disk via a hierarchical tree of directories. 
*/ public class PhysicalGraph { private File storageDirectory; public PhysicalGraph(File storageDirectory) { this.storageDirectory = storageDirectory; } /** * Scans the entire filesystem and returns paths to all blocks in the * system. * * @return ArrayList of block paths. */ public ArrayList<String> getBlockPaths() { return scanDirectory(storageDirectory); } /** * Scans a directory (and its subdirectories) for blocks. * * @param directory * Directory to scan for blocks. * * @return ArrayList of String paths to blocks on disk. */ public ArrayList<String> scanDirectory(File directory) { ArrayList<String> blockPaths = new ArrayList<String>(); scanSubDirectory(directory, blockPaths); return blockPaths; } /** * Scans a directory (and its subdirectories) for blocks. * * @param directory * Directory file descriptor to scan * * @param fileList * ArrayList of Strings to populate with FileBlock paths. */ private void scanSubDirectory(File directory, ArrayList<String> fileList) { for (File file : directory.listFiles()) { if (file.isDirectory()) { scanSubDirectory(file, fileList); continue; } String fileName = file.getAbsolutePath(); if (fileName.endsWith(FileSystem.METADATA_EXTENSION)) { fileList.add(fileName); } } } /** * Stores a <code>FileBlock</code> on disk. * * @param block * Block to store on disk * * @return * String path to the block's location on disk. */ public String storeBlock(FileBlock block, byte[] blockBytes) throws IOException { UUID blockUuid = UUID.nameUUIDFromBytes(blockBytes); String blockDirPath = storageDirectory + "/" + getStorageDirectory(block); String blockPath = blockDirPath + "/" + blockUuid.toString(); /* Ensure the storage directory is there. 
*/ File blockDirectory = new File(blockDirPath); if (!blockDirectory.exists()) { if (!blockDirectory.mkdirs()) { throw new IOException("Failed to create directory (" + blockDirPath + ") for block."); } } /* Write the block content first */ FileOutputStream blockOutStream = new FileOutputStream(blockPath + FileSystem.BLOCK_EXTENSION); byte[] blockData = block.getData(); blockOutStream.write(blockData); blockOutStream.close(); /* Write the metadata separately. */ FileOutputStream metaOutStream = new FileOutputStream(blockPath + FileSystem.METADATA_EXTENSION); byte[] metadata = Serializer.serialize(block.getMetadata()); metaOutStream.write(metadata); metaOutStream.close(); return blockPath; } /** * Construct a <code>FileBlock</code> from a (block, metadata) file pair * located on disk. * * @param blockPath * Path to the block and metadata files; simply use the block UUID * without the block or metadata extension; they are added automatically * to the path. */ public FileBlock loadBlock(String blockPath) throws FileNotFoundException, IOException, SerializationException { File metaFile = new File(blockPath + FileSystem.METADATA_EXTENSION); File dataFile = new File(blockPath + FileSystem.BLOCK_EXTENSION); byte[] metaBytes = new byte[(int) metaFile.length()]; byte[] dataBytes = new byte[(int) dataFile.length()]; FileInputStream metaInStream = new FileInputStream(metaFile); metaInStream.read(metaBytes); metaInStream.close(); FileInputStream dataInStream = new FileInputStream(dataFile); dataInStream.read(dataBytes); dataInStream.close(); return new FileBlock(dataBytes, metaBytes); } public BlockMetadata loadMetadata(String metaPath) throws FileNotFoundException, IOException, SerializationException { File metaFile = new File(metaPath); byte[] metaBytes = new byte[(int) metaFile.length()]; FileInputStream metaInStream = new FileInputStream(metaFile); metaInStream.read(metaBytes); metaInStream.close(); return Serializer.deserialize(BlockMetadata.class, metaBytes); } /** * 
Determine where a <code>FileBlock</code> should be stored on the host * filesystem. * * Current format: * year/month/day/geohash/feature * * @param block * Block to find the storage directory for. * * @return * Storage directory for block */ private String getStorageDirectory(FileBlock block) { String directory = ""; BlockMetadata metadata = block.getMetadata(); Date blockDate = metadata.getTemporalProperties().getLowerBound(); /* Date */ SimpleDateFormat formatter = new SimpleDateFormat(); formatter.applyPattern("yyyy/M/d/"); directory = formatter.format(blockDate); /* GeoHash */ //directory += GeoHash.encode(metadata.getSpatialRange(), 2); directory += GeoHash.encode( metadata.getSpatialProperties().getSpatialRange(), 2); return directory; } }
src/galileo/graph/PhysicalGraph.java
/* Copyright (c) 2013, Colorado State University All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. This software is provided by the copyright holders and contributors "as is" and any express or implied warranties, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose are disclaimed. In no event shall the copyright holder or contributors be liable for any direct, indirect, incidental, special, exemplary, or consequential damages (including, but not limited to, procurement of substitute goods or services; loss of use, data, or profits; or business interruption) however caused and on any theory of liability, whether in contract, strict liability, or tort (including negligence or otherwise) arising in any way out of the use of this software, even if advised of the possibility of such damage. */ package galileo.graph; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.UUID; import galileo.dataset.BlockMetadata; import galileo.dataset.FileBlock; import galileo.serialization.SerializationException; import galileo.serialization.Serializer; import galileo.util.GeoHash; /** * Handles functionality concerning the physical (on-disk) graph. The graph is * built on disk via a hierarchical tree of directories. 
*/ public class PhysicalGraph { public static final String METADATA_EXTENSION = ".gmeta"; public static final String BLOCK_EXTENSION = ".gblock"; private File storageDirectory; private boolean readOnly = false; public PhysicalGraph(File storageDirectory) { this.storageDirectory = storageDirectory; } public PhysicalGraph(File storageDirectory, boolean readOnly) { this.storageDirectory = storageDirectory; this.readOnly = readOnly; } /** * Scans the entire filesystem and returns paths to all blocks in the * system. * * @return ArrayList of block paths. */ public ArrayList<String> getBlockPaths() { return scanDirectory(storageDirectory); } /** * Scans a directory (and its subdirectories) for blocks. * * @param directory * Directory to scan for blocks. * * @return ArrayList of String paths to blocks on disk. */ public ArrayList<String> scanDirectory(File directory) { ArrayList<String> blockPaths = new ArrayList<String>(); scanSubDirectory(directory, blockPaths); return blockPaths; } /** * Scans a directory (and its subdirectories) for blocks. * * @param directory * Directory file descriptor to scan * * @param fileList * ArrayList of Strings to populate with FileBlock paths. */ private void scanSubDirectory(File directory, ArrayList<String> fileList) { for (File file : directory.listFiles()) { if (file.isDirectory()) { scanSubDirectory(file, fileList); continue; } String fileName = file.getAbsolutePath(); if (fileName.endsWith(METADATA_EXTENSION)) { fileList.add(fileName); } } } /** * Stores a <code>FileBlock</code> on disk. * * @param block * Block to store on disk * * @return * String path to the block's location on disk. 
* * @throws StorageNodeException * If there are problems storing the bytes */ public String storeBlock(FileBlock block, byte[] blockBytes) throws IOException { UUID blockUuid = UUID.nameUUIDFromBytes(blockBytes); String blockDirPath = storageDirectory + "/" + getStorageDirectory(block); String blockPath = blockDirPath + "/" + blockUuid.toString(); /* Ensure the storage directory is there. */ File blockDirectory = new File(blockDirPath); if (!blockDirectory.exists()) { if (!blockDirectory.mkdirs()) { throw new IOException("Failed to create directory (" + blockDirPath + ") for block."); } } /* Write the block content first */ FileOutputStream blockOutStream = new FileOutputStream(blockPath + BLOCK_EXTENSION); byte[] blockData = block.getData(); blockOutStream.write(blockData); blockOutStream.close(); /* Write the metadata separately. */ FileOutputStream metaOutStream = new FileOutputStream(blockPath + METADATA_EXTENSION); byte[] metadata = Serializer.serialize(block.getMetadata()); metaOutStream.write(metadata); metaOutStream.close(); return blockPath; } /** * Construct a <code>FileBlock</code> from a (block, metadata) file pair * located on disk. * * @param blockPath * Path to the block and metadata files; simply use the block UUID * without the block or metadata extension; they are added automatically * to the path. 
*/ public FileBlock loadBlock(String blockPath) throws FileNotFoundException, IOException, SerializationException { File metaFile = new File(blockPath + METADATA_EXTENSION); File dataFile = new File(blockPath + BLOCK_EXTENSION); byte[] metaBytes = new byte[(int) metaFile.length()]; byte[] dataBytes = new byte[(int) dataFile.length()]; FileInputStream metaInStream = new FileInputStream(metaFile); metaInStream.read(metaBytes); metaInStream.close(); FileInputStream dataInStream = new FileInputStream(dataFile); dataInStream.read(dataBytes); dataInStream.close(); return new FileBlock(dataBytes, metaBytes); } public BlockMetadata loadMetadata(String metaPath) throws FileNotFoundException, IOException, SerializationException { File metaFile = new File(metaPath); byte[] metaBytes = new byte[(int) metaFile.length()]; FileInputStream metaInStream = new FileInputStream(metaFile); metaInStream.read(metaBytes); metaInStream.close(); return Serializer.deserialize(BlockMetadata.class, metaBytes); } /** * Determine where a <code>FileBlock</code> should be stored on the host * filesystem. * * Current format: * year/month/day/geohash/feature * * @param block * Block to find the storage directory for. * * @return * Storage directory for block */ private String getStorageDirectory(FileBlock block) { String directory = ""; BlockMetadata metadata = block.getMetadata(); Date blockDate = metadata.getTemporalProperties().getLowerBound(); /* Date */ SimpleDateFormat formatter = new SimpleDateFormat(); formatter.applyPattern("yyyy/M/d/"); directory = formatter.format(blockDate); /* GeoHash */ //directory += GeoHash.encode(metadata.getSpatialRange(), 2); directory += GeoHash.encode( metadata.getSpatialProperties().getSpatialRange(), 2); return directory; } /** * Determines whether the Galileo filesystem is read-only. * * @return <code>true</code> if the filesystem is read-only. */ public boolean isReadOnly() { return readOnly; } }
PhysicalGraph refactor
src/galileo/graph/PhysicalGraph.java
PhysicalGraph refactor
<ide><path>rc/galileo/graph/PhysicalGraph.java <ide> <ide> import galileo.dataset.BlockMetadata; <ide> import galileo.dataset.FileBlock; <del> <add>import galileo.fs.FileSystem; <ide> import galileo.serialization.SerializationException; <ide> import galileo.serialization.Serializer; <ide> import galileo.util.GeoHash; <ide> * built on disk via a hierarchical tree of directories. <ide> */ <ide> public class PhysicalGraph { <del> public static final String METADATA_EXTENSION = ".gmeta"; <del> public static final String BLOCK_EXTENSION = ".gblock"; <del> <ide> private File storageDirectory; <del> private boolean readOnly = false; <ide> <ide> public PhysicalGraph(File storageDirectory) { <ide> this.storageDirectory = storageDirectory; <del> } <del> <del> public PhysicalGraph(File storageDirectory, boolean readOnly) { <del> this.storageDirectory = storageDirectory; <del> this.readOnly = readOnly; <ide> } <ide> <ide> /** <ide> } <ide> <ide> String fileName = file.getAbsolutePath(); <del> if (fileName.endsWith(METADATA_EXTENSION)) { <add> if (fileName.endsWith(FileSystem.METADATA_EXTENSION)) { <ide> fileList.add(fileName); <ide> } <ide> } <ide> * <ide> * @return <ide> * String path to the block's location on disk. <del> * <del> * @throws StorageNodeException <del> * If there are problems storing the bytes <ide> */ <ide> public String storeBlock(FileBlock block, byte[] blockBytes) <ide> throws IOException { <ide> <ide> /* Write the block content first */ <ide> FileOutputStream blockOutStream <del> = new FileOutputStream(blockPath + BLOCK_EXTENSION); <add> = new FileOutputStream(blockPath + FileSystem.BLOCK_EXTENSION); <ide> byte[] blockData = block.getData(); <ide> blockOutStream.write(blockData); <ide> blockOutStream.close(); <ide> <ide> /* Write the metadata separately. 
*/ <ide> FileOutputStream metaOutStream <del> = new FileOutputStream(blockPath + METADATA_EXTENSION); <add> = new FileOutputStream(blockPath + FileSystem.METADATA_EXTENSION); <ide> byte[] metadata = Serializer.serialize(block.getMetadata()); <ide> metaOutStream.write(metadata); <ide> metaOutStream.close(); <ide> */ <ide> public FileBlock loadBlock(String blockPath) <ide> throws FileNotFoundException, IOException, SerializationException { <del> File metaFile = new File(blockPath + METADATA_EXTENSION); <del> File dataFile = new File(blockPath + BLOCK_EXTENSION); <add> File metaFile = new File(blockPath + FileSystem.METADATA_EXTENSION); <add> File dataFile = new File(blockPath + FileSystem.BLOCK_EXTENSION); <ide> <ide> byte[] metaBytes = new byte[(int) metaFile.length()]; <ide> byte[] dataBytes = new byte[(int) dataFile.length()]; <ide> <ide> return directory; <ide> } <del> <del> /** <del> * Determines whether the Galileo filesystem is read-only. <del> * <del> * @return <code>true</code> if the filesystem is read-only. <del> */ <del> public boolean isReadOnly() { <del> return readOnly; <del> } <ide> }
Java
apache-2.0
1096eac726df37ded0847ab587a39cf809c1f584
0
EvilMcJerkface/atlasdb,EvilMcJerkface/atlasdb,palantir/atlasdb,EvilMcJerkface/atlasdb,palantir/atlasdb,palantir/atlasdb
/** * Copyright 2015 Palantir Technologies * * Licensed under the BSD-3 License (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/BSD-3-Clause * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.palantir.atlasdb.keyvalue.impl; import static java.util.Collections.emptyMap; import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.apache.commons.lang.ArrayUtils; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import com.google.common.primitives.UnsignedBytes; import com.palantir.atlasdb.AtlasDbConstants; import com.palantir.atlasdb.encoding.PtBytes; import 
com.palantir.atlasdb.keyvalue.api.BatchColumnRangeSelection; import com.palantir.atlasdb.keyvalue.api.Cell; import com.palantir.atlasdb.keyvalue.api.CheckAndSetException; import com.palantir.atlasdb.keyvalue.api.CheckAndSetRequest; import com.palantir.atlasdb.keyvalue.api.ColumnRangeSelection; import com.palantir.atlasdb.keyvalue.api.ColumnSelection; import com.palantir.atlasdb.keyvalue.api.KeyAlreadyExistsException; import com.palantir.atlasdb.keyvalue.api.KeyValueService; import com.palantir.atlasdb.keyvalue.api.RangeRequest; import com.palantir.atlasdb.keyvalue.api.RangeRequests; import com.palantir.atlasdb.keyvalue.api.RowColumnRangeIterator; import com.palantir.atlasdb.keyvalue.api.RowResult; import com.palantir.atlasdb.keyvalue.api.TableReference; import com.palantir.atlasdb.keyvalue.api.Value; import com.palantir.common.base.ClosableIterator; public abstract class AbstractKeyValueServiceTest { protected static final TableReference TEST_TABLE = TableReference.createFromFullyQualifiedName("ns.pt_kvs_test"); protected static final TableReference TEST_NONEXISTING_TABLE = TableReference.createFromFullyQualifiedName("ns2.some_nonexisting_table"); protected static final byte[] row0 = "row0".getBytes(); protected static final byte[] row1 = "row1".getBytes(); protected static final byte[] row2 = "row2".getBytes(); protected static final byte[] column0 = "column0".getBytes(); protected static final byte[] column1 = "column1".getBytes(); protected static final byte[] column2 = "column2".getBytes(); protected static final byte[] value00 = "value00".getBytes(); protected static final byte[] value01 = "value01".getBytes(); protected static final byte[] value10 = "value10".getBytes(); protected static final byte[] value12 = "value12".getBytes(); protected static final byte[] value21 = "value21".getBytes(); protected static final byte[] value22 = "value22".getBytes(); protected static final byte[] value0_t0 = "value0_t0".getBytes(); protected static final byte[] value0_t1 = 
"value1_t1".getBytes(); protected static final byte[] value0_t5 = "value5_t5".getBytes(); protected static final long TEST_TIMESTAMP = 1000000l; private static final long MAX_TIMESTAMP = Long.MAX_VALUE; protected static KeyValueService keyValueService = null; protected boolean reverseRangesSupported() { return true; } @Before public void setUp() throws Exception { if (keyValueService == null) { keyValueService = getKeyValueService(); } keyValueService.createTable(TEST_TABLE, AtlasDbConstants.GENERIC_TABLE_METADATA); } @After public void tearDown() throws Exception { keyValueService.truncateTables(ImmutableSet.of(TEST_TABLE)); } @AfterClass public static void tearDownKvs() { if (keyValueService != null) { keyValueService.close(); } } @Test public void testGetRowColumnSelection() { Cell cell1 = Cell.create(PtBytes.toBytes("row"), PtBytes.toBytes("col1")); Cell cell2 = Cell.create(PtBytes.toBytes("row"), PtBytes.toBytes("col2")); Cell cell3 = Cell.create(PtBytes.toBytes("row"), PtBytes.toBytes("col3")); byte[] val = PtBytes.toBytes("val"); keyValueService.put(TEST_TABLE, ImmutableMap.of(cell1, val, cell2, val, cell3, val), 0); Map<Cell, Value> rows1 = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(cell1.getRowName()), ColumnSelection.all(), 1); Assert.assertEquals(ImmutableSet.of(cell1, cell2, cell3), rows1.keySet()); Map<Cell, Value> rows2 = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(cell1.getRowName()), ColumnSelection.create(ImmutableList.of(cell1.getColumnName())), 1); assertEquals(ImmutableSet.of(cell1), rows2.keySet()); Map<Cell, Value> rows3 = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(cell1.getRowName()), ColumnSelection.create(ImmutableList.of(cell1.getColumnName(), cell3.getColumnName())), 1); assertEquals(ImmutableSet.of(cell1, cell3), rows3.keySet()); Map<Cell, Value> rows4 = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(cell1.getRowName()), ColumnSelection.create(ImmutableList.<byte[]>of()), 1); // This has changed 
recently - now empty column set means // that all columns are selected. assertEquals(ImmutableSet.of(cell1, cell2, cell3), rows4.keySet()); } @Test public void testGetRowsAllColumns() { putTestDataForSingleTimestamp(); Map<Cell, Value> values = keyValueService.getRows(TEST_TABLE, Arrays.asList(row1, row2), ColumnSelection.all(), TEST_TIMESTAMP + 1); assertEquals(4, values.size()); assertEquals(null, values.get(Cell.create(row1, column1))); assertArrayEquals(value10, values.get(Cell.create(row1, column0)).getContents()); assertArrayEquals(value12, values.get(Cell.create(row1, column2)).getContents()); assertArrayEquals(value21, values.get(Cell.create(row2, column1)).getContents()); assertArrayEquals(value22, values.get(Cell.create(row2, column2)).getContents()); } private Map<Cell, Value> getValuesForRow(Map<byte[], RowColumnRangeIterator> values, byte[] row, int number) { Map<Cell, Value> results = Maps.newHashMap(); Iterator<Entry<Cell, Value>> it = Collections.emptyIterator(); if (values.containsKey(row)) { it = Iterators.limit(values.get(row), number); } while (it.hasNext()) { Entry<Cell, Value> result = it.next(); results.put(result.getKey(), result.getValue()); } return results; } @Test public void testGetRowColumnRange() { putTestDataForSingleTimestamp(); Map<byte[], RowColumnRangeIterator> values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); Map<Cell, Value> batchValues = getValuesForRow(values, row1, 1); assertEquals(1, batchValues.size()); assertArrayEquals(batchValues.get(Cell.create(row1, column0)).getContents(), value10); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(RangeRequests.nextLexicographicName(column0), PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); batchValues = 
getValuesForRow(values, row1, 1); assertEquals(1, batchValues.size()); assertArrayEquals(batchValues.get(Cell.create(row1, column2)).getContents(), value12); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(RangeRequests.nextLexicographicName(column0), column2, 1), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); assertEquals(0, getValuesForRow(values, row1, 1).size()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(RangeRequests.nextLexicographicName(column2), PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); assertEquals(0, getValuesForRow(values, row1, 1).size()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY, Integer.MAX_VALUE), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); batchValues = getValuesForRow(values, row1, 2); assertEquals(2, batchValues.size()); assertArrayEquals(batchValues.get(Cell.create(row1, column0)).getContents(), value10); assertArrayEquals(batchValues.get(Cell.create(row1, column2)).getContents(), value12); } @Test public void testGetRowColumnRangeHistorical() { putTestDataForMultipleTimestamps(); Map<byte[], RowColumnRangeIterator> values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row0), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 2); assertEquals(1, values.size()); Map<Cell, Value> batchValues = getValuesForRow(values, row0, 1); assertEquals(1, batchValues.size()); assertArrayEquals(value0_t1, batchValues.get(Cell.create(row0, column0)).getContents()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row0), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 1); assertEquals(1, 
values.size()); batchValues = getValuesForRow(values, row0, 1); assertEquals(1, batchValues.size()); assertArrayEquals(value0_t0, batchValues.get(Cell.create(row0, column0)).getContents()); } @Test public void testGetRowColumnRangeMultipleHistorical() { keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column0), value0_t0), TEST_TIMESTAMP); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column0), value0_t1), TEST_TIMESTAMP + 1); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column1), value0_t0), TEST_TIMESTAMP); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column1), value0_t1), TEST_TIMESTAMP + 1); // The initial multiget will get results for column0 only, then the next page for column1 will not include // the TEST_TIMESTAMP result so we have to get another page for column1. Map<byte[], RowColumnRangeIterator> values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, RangeRequests.nextLexicographicName(column1), 2), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); Map<Cell, Value> batchValues = getValuesForRow(values, row1, 2); assertEquals(2, batchValues.size()); assertArrayEquals(value0_t0, batchValues.get(Cell.create(row1, column0)).getContents()); assertArrayEquals(value0_t0, batchValues.get(Cell.create(row1, column1)).getContents()); } @Test public void testGetRowColumnRangeMultipleRows() { putTestDataForSingleTimestamp(); Map<byte[], RowColumnRangeIterator> values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1, row0, row2), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 1); assertEquals(ImmutableSet.of(row0, row1, row2), values.keySet()); Map<Cell, Value> row0Values = getValuesForRow(values, row0, 2); assertArrayEquals(value00, row0Values.get(Cell.create(row0, column0)).getContents()); assertArrayEquals(value01, 
row0Values.get(Cell.create(row0, column1)).getContents()); Map<Cell, Value> row1Values = getValuesForRow(values, row1, 2); assertArrayEquals(value10, row1Values.get(Cell.create(row1, column0)).getContents()); assertArrayEquals(value12, row1Values.get(Cell.create(row1, column2)).getContents()); Map<Cell, Value> row2Values = getValuesForRow(values, row2, 2); assertArrayEquals(value21, row2Values.get(Cell.create(row2, column1)).getContents()); assertArrayEquals(value22, row2Values.get(Cell.create(row2, column2)).getContents()); } @Test public void testGetRowColumnRangeCellBatchSingleRow() { putTestDataForSingleTimestamp(); RowColumnRangeIterator values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY), 1, TEST_TIMESTAMP + 1); assertNextElementMatches(values, Cell.create(row1, column0), value10); assertNextElementMatches(values, Cell.create(row1, column2), value12); assertFalse(values.hasNext()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(RangeRequests.nextLexicographicName(column0), PtBytes.EMPTY_BYTE_ARRAY), 1, TEST_TIMESTAMP + 1); assertNextElementMatches(values, Cell.create(row1, column2), value12); assertFalse(values.hasNext()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(RangeRequests.nextLexicographicName(column0), column2), 1, TEST_TIMESTAMP + 1); assertFalse(values.hasNext()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(RangeRequests.nextLexicographicName(column2), PtBytes.EMPTY_BYTE_ARRAY), 1, TEST_TIMESTAMP + 1); assertFalse(values.hasNext()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY), Integer.MAX_VALUE, TEST_TIMESTAMP + 1); assertNextElementMatches(values, 
Cell.create(row1, column0), value10); assertNextElementMatches(values, Cell.create(row1, column2), value12); assertFalse(values.hasNext()); } @Test public void testGetRowColumnRangeCellBatchMultipleRows() { putTestDataForSingleTimestamp(); RowColumnRangeIterator values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1, row0, row2), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY), 3, TEST_TIMESTAMP + 1); assertNextElementMatches(values, Cell.create(row1, column0), value10); assertNextElementMatches(values, Cell.create(row1, column2), value12); assertNextElementMatches(values, Cell.create(row0, column0), value00); assertNextElementMatches(values, Cell.create(row0, column1), value01); assertNextElementMatches(values, Cell.create(row2, column1), value21); assertNextElementMatches(values, Cell.create(row2, column2), value22); assertFalse(values.hasNext()); } @Test public void testGetRowColumnRangeCellBatchHistorical() { putTestDataForMultipleTimestamps(); RowColumnRangeIterator values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row0), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY), 1, TEST_TIMESTAMP + 2); assertNextElementMatches(values, Cell.create(row0, column0), value0_t1); assertFalse(values.hasNext()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row0), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY), 1, TEST_TIMESTAMP + 1); assertNextElementMatches(values, Cell.create(row0, column0), value0_t0); assertFalse(values.hasNext()); } @Test public void testGetRowColumnRangeCellBatchMultipleHistorical() { keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column0), value0_t0), TEST_TIMESTAMP); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column0), value0_t1), TEST_TIMESTAMP + 1); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column1), value0_t0), 
TEST_TIMESTAMP); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column1), value0_t1), TEST_TIMESTAMP + 1); RowColumnRangeIterator values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, RangeRequests.nextLexicographicName(column1)), 2, TEST_TIMESTAMP + 1); assertNextElementMatches(values, Cell.create(row1, column0), value0_t0); assertNextElementMatches(values, Cell.create(row1, column1), value0_t0); assertFalse(values.hasNext()); } private static void assertNextElementMatches(RowColumnRangeIterator iterator, Cell expectedCell, byte[] expectedContents) { assertTrue(iterator.hasNext()); Map.Entry<Cell, Value> entry = iterator.next(); assertEquals(expectedCell, entry.getKey()); assertArrayEquals(expectedContents, entry.getValue().getContents()); } @Test public void testGetRowsWhenMultipleVersions() { putTestDataForMultipleTimestamps(); Map<Cell, Value> result = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(row0), ColumnSelection.all(), TEST_TIMESTAMP + 1); assertEquals(1, result.size()); assertTrue(result.containsKey(Cell.create(row0, column0))); assertTrue(result.containsValue(Value.create(value0_t0, TEST_TIMESTAMP))); result = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(row0), ColumnSelection.all(), TEST_TIMESTAMP + 2); assertEquals(1, result.size()); assertTrue(result.containsKey(Cell.create(row0, column0))); assertTrue(result.containsValue(Value.create(value0_t1, TEST_TIMESTAMP + 1))); } @Test public void testGetRowsWhenMultipleVersionsAndColumnsSelected() { putTestDataForMultipleTimestamps(); Map<Cell, Value> result = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(row0), ColumnSelection.create(ImmutableSet.of(column0)), TEST_TIMESTAMP + 1); assertEquals(1, result.size()); assertTrue(result.containsKey(Cell.create(row0, column0))); assertTrue(result.containsValue(Value.create(value0_t0, TEST_TIMESTAMP))); result = keyValueService.getRows( TEST_TABLE, 
ImmutableSet.of(row0), ColumnSelection.create(ImmutableSet.of(column0)), TEST_TIMESTAMP + 2); assertEquals(1, result.size()); assertTrue(result.containsKey(Cell.create(row0, column0))); assertTrue(result.containsValue(Value.create(value0_t1, TEST_TIMESTAMP + 1))); } @Test public void testGetWhenMultipleVersions() { putTestDataForMultipleTimestamps(); Cell cell = Cell.create(row0, column0); Value val0 = Value.create(value0_t0, TEST_TIMESTAMP); Value val1 = Value.create(value0_t1, TEST_TIMESTAMP + 1); assertTrue(keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP)).isEmpty()); Map<Cell, Value> result = keyValueService.get( TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 1)); assertTrue(result.containsKey(cell)); assertEquals(1, result.size()); assertTrue(result.containsValue(val0)); result = keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 2)); assertEquals(1, result.size()); assertTrue(result.containsKey(cell)); assertTrue(result.containsValue(val1)); result = keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 3)); assertEquals(1, result.size()); assertTrue(result.containsKey(cell)); assertTrue(result.containsValue(val1)); } @Test public void testGetRowsWithSelectedColumns() { putTestDataForSingleTimestamp(); ColumnSelection columns1and2 = ColumnSelection.create(Arrays.asList(column1, column2)); Map<Cell, Value> values = keyValueService.getRows(TEST_TABLE, Arrays.asList(row1, row2), columns1and2, TEST_TIMESTAMP + 1); assertEquals(3, values.size()); assertEquals(null, values.get(Cell.create(row1, column0))); assertArrayEquals(value12, values.get(Cell.create(row1, column2)).getContents()); assertArrayEquals(value21, values.get(Cell.create(row2, column1)).getContents()); assertArrayEquals(value22, values.get(Cell.create(row2, column2)).getContents()); } @Test public void testGetLatestTimestamps() { putTestDataForMultipleTimestamps(); Map<Cell, Long> timestamps = 
keyValueService.getLatestTimestamps(TEST_TABLE, ImmutableMap.of(Cell.create(row0, column0), TEST_TIMESTAMP + 2)); assertTrue("Incorrect number of values returned.", timestamps.size() == 1); assertEquals("Incorrect value returned.", new Long(TEST_TIMESTAMP + 1), timestamps.get(Cell.create(row0, column0))); } @Test public void testGetWithMultipleVersions() { putTestDataForMultipleTimestamps(); Map<Cell, Value> values = keyValueService.get(TEST_TABLE, ImmutableMap.of(Cell.create(row0, column0), TEST_TIMESTAMP + 2)); assertTrue("Incorrect number of values returned.", values.size() == 1); assertEquals("Incorrect value returned.", Value.create(value0_t1, TEST_TIMESTAMP + 1), values.get(Cell.create(row0, column0))); } @Test public void testGetAllTableNames() { final TableReference anotherTable = TableReference.createWithEmptyNamespace("AnotherTable"); assertEquals(1, keyValueService.getAllTableNames().size()); assertEquals(TEST_TABLE, keyValueService.getAllTableNames().iterator().next()); keyValueService.createTable(anotherTable, AtlasDbConstants.GENERIC_TABLE_METADATA); assertEquals(2, keyValueService.getAllTableNames().size()); assertTrue(keyValueService.getAllTableNames().contains(anotherTable)); assertTrue(keyValueService.getAllTableNames().contains(TEST_TABLE)); keyValueService.dropTable(anotherTable); assertEquals(1, keyValueService.getAllTableNames().size()); assertEquals(TEST_TABLE, keyValueService.getAllTableNames().iterator().next()); } @Test public void testTableMetadata() { assertEquals(AtlasDbConstants.GENERIC_TABLE_METADATA.length, keyValueService.getMetadataForTable(TEST_TABLE).length); keyValueService.putMetadataForTable(TEST_TABLE, ArrayUtils.EMPTY_BYTE_ARRAY); assertEquals(0, keyValueService.getMetadataForTable(TEST_TABLE).length); keyValueService.putMetadataForTable(TEST_TABLE, AtlasDbConstants.GENERIC_TABLE_METADATA); assertTrue(Arrays.equals(AtlasDbConstants.GENERIC_TABLE_METADATA, keyValueService.getMetadataForTable(TEST_TABLE))); } private static 
<V, T extends Iterator<RowResult<V>>> void assertRangeSizeAndOrdering(T it, int expectedSize, RangeRequest rangeRequest) {
    // Verifies that the iterator yields exactly expectedSize rows, in the lexicographic
    // order implied by the range request (ascending, or descending when reversed), and
    // that every row lies within the request's [startInclusive, endExclusive) bounds.
    if (!it.hasNext()) {
        // An empty result is only acceptable when nothing was expected.
        assertEquals(0, expectedSize);
        return;
    }
    byte[] row = it.next().getRowName();
    int size = 1;
    final boolean reverse = rangeRequest.isReverse();
    final byte[] startRow = rangeRequest.getStartInclusive();
    final byte[] endRow = rangeRequest.getEndExclusive();
    // NOTE(review): the original used Java 'assert' statements here, which are no-ops
    // unless the JVM runs with -ea, so the ordering checks never fired in normal test
    // runs. JUnit assertions always fire.
    if (startRow.length > 0) {
        int cmp = UnsignedBytes.lexicographicalComparator().compare(startRow, row);
        assertTrue("first row must not precede the inclusive start", reverse ? cmp >= 0 : cmp <= 0);
    }
    while (it.hasNext()) {
        byte[] nextRow = it.next().getRowName();
        int cmp = UnsignedBytes.lexicographicalComparator().compare(row, nextRow);
        assertTrue("rows must be returned in range order", reverse ? cmp >= 0 : cmp <= 0);
        row = nextRow;
        size++;
    }
    if (endRow.length > 0) {
        int cmp = UnsignedBytes.lexicographicalComparator().compare(row, endRow);
        assertTrue("last row must lie strictly before the exclusive end", reverse ? cmp > 0 : cmp < 0);
    }
    assertEquals(expectedSize, size);
}

@Test
public void testGetRange() {
    testGetRange(reverseRangesSupported());
}

/** Exercises getRange with unbounded, upper-, lower- and doubly-bounded requests. */
public void testGetRange(boolean reverseSupported) {
    putTestDataForSingleTimestamp();

    // Unbounded
    final RangeRequest all = RangeRequest.all();
    assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, all, TEST_TIMESTAMP + 1), 3, all);
    if (reverseSupported) {
        final RangeRequest allReverse = RangeRequest.reverseBuilder().build();
        assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, allReverse, TEST_TIMESTAMP + 1), 3, allReverse);
    }

    // Upbounded
    final RangeRequest upbounded = RangeRequest.builder().endRowExclusive(row2).build();
    assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, upbounded, TEST_TIMESTAMP + 1), 2, upbounded);
    if (reverseSupported) {
        final RangeRequest upboundedReverse = RangeRequest.reverseBuilder().endRowExclusive(row0).build();
        assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, upboundedReverse, TEST_TIMESTAMP + 1), 2, upboundedReverse);
    }

    // Downbounded
    final RangeRequest downbounded = RangeRequest.builder().startRowInclusive(row1).build();
    assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, downbounded, TEST_TIMESTAMP + 1), 2, downbounded);
    if (reverseSupported) {
        final RangeRequest downboundedReverse = RangeRequest.reverseBuilder().startRowInclusive(row1).build();
        assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, downboundedReverse, TEST_TIMESTAMP + 1), 2, downboundedReverse);
    }

    // Both-bounded
    final RangeRequest bothbounded = RangeRequest.builder().startRowInclusive(row1).endRowExclusive(row2).build();
    assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, bothbounded, TEST_TIMESTAMP + 1), 1, bothbounded);
    if (reverseSupported) {
        final RangeRequest bothboundedReverse = RangeRequest.reverseBuilder().startRowInclusive(row2).endRowExclusive(row1).build();
        assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, bothboundedReverse, TEST_TIMESTAMP + 1), 1, bothboundedReverse);
    }

    // Precise test for lower-bounded
    RangeRequest rangeRequest = downbounded;
    ClosableIterator<RowResult<Value>> rangeResult = keyValueService.getRange(
            TEST_TABLE,
            rangeRequest,
            TEST_TIMESTAMP + 1);
    // Data was written at TEST_TIMESTAMP, so a read strictly below it sees nothing.
    assertFalse(keyValueService.getRange(TEST_TABLE, rangeRequest, TEST_TIMESTAMP).hasNext());
    assertTrue(rangeResult.hasNext());
    assertEquals(
            RowResult.create(
                    row1,
                    ImmutableSortedMap.orderedBy(UnsignedBytes.lexicographicalComparator()).put(
                            column0,
                            Value.create(value10, TEST_TIMESTAMP)).put(
                            column2,
                            Value.create(value12, TEST_TIMESTAMP)).build()),
            rangeResult.next());
    assertTrue(rangeResult.hasNext());
    assertEquals(
            RowResult.create(
                    row2,
                    ImmutableSortedMap.orderedBy(UnsignedBytes.lexicographicalComparator()).put(
                            column1,
                            Value.create(value21, TEST_TIMESTAMP)).put(
                            column2,
                            Value.create(value22, TEST_TIMESTAMP)).build()),
            rangeResult.next());
    rangeResult.close();
}

@Test
public void testGetAllTimestamps() {
    putTestDataForMultipleTimestamps();
    final Cell cell = Cell.create(row0, column0);
    final Set<Cell> cellSet = ImmutableSet.of(cell);
    Multimap<Cell, Long> timestamps = keyValueService.getAllTimestamps(
            TEST_TABLE,
            cellSet,
            TEST_TIMESTAMP);
    assertEquals(0, timestamps.size());

    timestamps = keyValueService.getAllTimestamps(TEST_TABLE, cellSet, TEST_TIMESTAMP + 1);
    assertEquals(1, timestamps.size());
    assertTrue(timestamps.containsEntry(cell, TEST_TIMESTAMP));

    timestamps = keyValueService.getAllTimestamps(TEST_TABLE, cellSet, TEST_TIMESTAMP + 2);
    assertEquals(2, timestamps.size());
    assertTrue(timestamps.containsEntry(cell, TEST_TIMESTAMP));
    assertTrue(timestamps.containsEntry(cell, TEST_TIMESTAMP + 1));

    assertEquals(
            timestamps,
            keyValueService.getAllTimestamps(TEST_TABLE, cellSet, TEST_TIMESTAMP + 3));
}

@Test
public void testDelete() {
    putTestDataForSingleTimestamp();
    assertEquals(3, Iterators.size(keyValueService.getRange(
            TEST_TABLE,
            RangeRequest.all(),
            TEST_TIMESTAMP + 1)));
    // Deleting a (cell, timestamp) pair that was never written is a no-op.
    keyValueService.delete(
            TEST_TABLE,
            ImmutableMultimap.of(Cell.create(row0, column0), TEST_TIMESTAMP));
    assertEquals(3, Iterators.size(keyValueService.getRange(
            TEST_TABLE,
            RangeRequest.all(),
            TEST_TIMESTAMP + 1)));
    keyValueService.delete(
            TEST_TABLE,
            ImmutableMultimap.of(Cell.create(row0, column1), TEST_TIMESTAMP));
    assertEquals(2, Iterators.size(keyValueService.getRange(
            TEST_TABLE,
            RangeRequest.all(),
            TEST_TIMESTAMP + 1)));
    keyValueService.delete(
            TEST_TABLE,
            ImmutableMultimap.of(Cell.create(row1, column0), TEST_TIMESTAMP));
    assertEquals(2, Iterators.size(keyValueService.getRange(
            TEST_TABLE,
            RangeRequest.all(),
            TEST_TIMESTAMP + 1)));
    keyValueService.delete(
            TEST_TABLE,
            ImmutableMultimap.of(Cell.create(row1, column2), TEST_TIMESTAMP));
    assertEquals(1, Iterators.size(keyValueService.getRange(
            TEST_TABLE,
            RangeRequest.all(),
            TEST_TIMESTAMP + 1)));
}

@Test
public void testDeleteMultipleVersions() {
    putTestDataForMultipleTimestamps();
    Cell cell = Cell.create(row0, column0);
    ClosableIterator<RowResult<Value>> result = keyValueService.getRange(
            TEST_TABLE,
            RangeRequest.all(),
            TEST_TIMESTAMP + 1);
    assertTrue(result.hasNext());

    // Deleting the older version must not disturb the newer one.
    keyValueService.delete(TEST_TABLE, ImmutableMultimap.of(cell, TEST_TIMESTAMP));

    result = keyValueService.getRange(TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 1);
    assertFalse(result.hasNext());

    result = keyValueService.getRange(TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 2);
    assertTrue(result.hasNext());
}

@Test
public void testPutWithTimestamps() {
    putTestDataForMultipleTimestamps();
    final Cell cell = Cell.create(row0, column0);
    final Value val1 = Value.create(value0_t1, TEST_TIMESTAMP + 1);
    final Value val5 = Value.create(value0_t5, TEST_TIMESTAMP + 5);
    keyValueService.putWithTimestamps(TEST_TABLE, ImmutableMultimap.of(cell, val5));
    assertEquals(
            val5,
            keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 6)).get(cell));
    assertEquals(
            val1,
            keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 5)).get(cell));
    keyValueService.delete(TEST_TABLE, ImmutableMultimap.of(cell, TEST_TIMESTAMP + 5));
}

@Test
public void testGetRangeWithTimestamps() {
    testGetRangeWithTimestamps(false);
    if (reverseRangesSupported()) {
        testGetRangeWithTimestamps(true);
    }
}

private void testGetRangeWithTimestamps(boolean reverse) {
    putTestDataForMultipleTimestamps();
    final RangeRequest range;
    if (!reverse) {
        range = RangeRequest.builder().startRowInclusive(row0).endRowExclusive(row1).build();
    } else {
        range = RangeRequest.reverseBuilder().startRowInclusive(row0).build();
    }
    ClosableIterator<RowResult<Set<Long>>> rangeWithHistory = keyValueService.getRangeOfTimestamps(
            TEST_TABLE, range, TEST_TIMESTAMP + 2);
    // Renamed from 'row0' (original shadowed the class field of the same name).
    RowResult<Set<Long>> rowResult = rangeWithHistory.next();
    assertFalse(rangeWithHistory.hasNext());
    rangeWithHistory.close();

    assertEquals(1, Iterables.size(rowResult.getCells()));
    Entry<Cell, Set<Long>> cell0 = rowResult.getCells().iterator().next();
    assertEquals(2, cell0.getValue().size());
    assertTrue(cell0.getValue().contains(TEST_TIMESTAMP));
    assertTrue(cell0.getValue().contains(TEST_TIMESTAMP + 1));
}

@Test
public void testKeyAlreadyExists() {
    // Test that it does not throw some random exceptions
    putTestDataForSingleTimestamp();
    try {
        putTestDataForSingleTimestamp();
        // Legal
    } catch (KeyAlreadyExistsException e) {
        Assert.fail("Must not throw when overwriting with same value!");
    }

    keyValueService.putWithTimestamps(
            TEST_TABLE,
            ImmutableMultimap.of(
                    Cell.create(row0, column0),
                    Value.create(value00, TEST_TIMESTAMP + 1)));
    try {
        keyValueService.putWithTimestamps(
                TEST_TABLE,
                ImmutableMultimap.of(
                        Cell.create(row0, column0),
                        Value.create(value00, TEST_TIMESTAMP + 1)));
        // Legal
    } catch (KeyAlreadyExistsException e) {
        Assert.fail("Must not throw when overwriting with same value!");
    }

    try {
        keyValueService.putWithTimestamps(TEST_TABLE,
                ImmutableMultimap.of(Cell.create(row0, column0),
                        Value.create(value01, TEST_TIMESTAMP + 1)));
        // Legal
    } catch (KeyAlreadyExistsException e) {
        // Legal
    }

    // The first try might not throw as putUnlessExists must only be exclusive with other putUnlessExists.
    try {
        keyValueService.putUnlessExists(TEST_TABLE,
                ImmutableMap.of(Cell.create(row0, column0), value00));
        // Legal
    } catch (KeyAlreadyExistsException e) {
        // Legal
    }

    try {
        keyValueService.putUnlessExists(TEST_TABLE,
                ImmutableMap.of(Cell.create(row0, column0), value00));
        Assert.fail("putUnlessExists must throw when overwriting the same cell!");
    } catch (KeyAlreadyExistsException e) {
        // Legal
    }
}

@Test
public void testCheckAndSetFromEmpty() {
    Cell cell = Cell.create(row0, column0);
    CheckAndSetRequest request = CheckAndSetRequest.newCell(TEST_TABLE, cell, value00);
    keyValueService.checkAndSet(request);

    verifyCheckAndSet(cell, value00);
}

@Test
public void testCheckAndSetFromOtherValue() {
    Cell cell = Cell.create(row0, column0);
    CheckAndSetRequest request = CheckAndSetRequest.newCell(TEST_TABLE, cell, value00);
    keyValueService.checkAndSet(request);

    CheckAndSetRequest secondRequest = CheckAndSetRequest.singleCell(TEST_TABLE, cell, value00, value01);
    keyValueService.checkAndSet(secondRequest);

    verifyCheckAndSet(cell, value01);
}

/** Asserts the cell holds exactly one value, at the transaction timestamp, equal to expectedValue. */
private void verifyCheckAndSet(Cell key, byte[] expectedValue) {
    Multimap<Cell, Long> timestamps = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(key), 1L);
    assertEquals(1, timestamps.size());
    assertTrue(timestamps.containsEntry(key, AtlasDbConstants.TRANSACTION_TS));

    ClosableIterator<RowResult<Value>> result = keyValueService.getRange(TEST_TABLE, RangeRequest.all(),
            AtlasDbConstants.TRANSACTION_TS + 1);

    // Check first result is right
    byte[] actual = result.next().getOnlyColumnValue().getContents();
    assertArrayEquals(String.format("Value \"%s\" different from expected \"%s\"",
            new String(actual, StandardCharsets.UTF_8),
            new String(expectedValue, StandardCharsets.UTF_8)),
            expectedValue,
            actual);

    // Check no more results
    assertFalse(result.hasNext());
}

@Test(expected = CheckAndSetException.class)
public void testCheckAndSetFromWrongValue() {
    Cell cell = Cell.create(row0, column0);
    CheckAndSetRequest request = CheckAndSetRequest.newCell(TEST_TABLE, cell, value00);
    keyValueService.checkAndSet(request);

    CheckAndSetRequest secondRequest = CheckAndSetRequest.singleCell(TEST_TABLE, cell, value01, value00);
    keyValueService.checkAndSet(secondRequest);
}

@Test(expected = CheckAndSetException.class)
public void testCheckAndSetFromValueWhenNoValue() {
    Cell cell = Cell.create(row0, column0);
    CheckAndSetRequest request = CheckAndSetRequest.singleCell(TEST_TABLE, cell, value00, value01);
    keyValueService.checkAndSet(request);
}

@Test(expected = CheckAndSetException.class)
public void testCheckAndSetFromNoValueWhenValueIsPresent() {
    Cell cell = Cell.create(row0, column0);
    CheckAndSetRequest request = CheckAndSetRequest.newCell(TEST_TABLE, cell, value00);
    keyValueService.checkAndSet(request);
    keyValueService.checkAndSet(request);
}

@Test
public void testAddGCSentinelValues() {
    putTestDataForMultipleTimestamps();
    Cell cell = Cell.create(row0, column0);

    Multimap<Cell, Long> timestampsBefore = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(cell), MAX_TIMESTAMP);
    assertEquals(2, timestampsBefore.size());
    assertFalse(timestampsBefore.containsEntry(cell, Value.INVALID_VALUE_TIMESTAMP));

    keyValueService.addGarbageCollectionSentinelValues(TEST_TABLE, ImmutableSet.of(cell));
    Multimap<Cell, Long> timestampsAfter1 = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(cell), MAX_TIMESTAMP);
    assertEquals(3, timestampsAfter1.size());
    assertTrue(timestampsAfter1.containsEntry(cell, Value.INVALID_VALUE_TIMESTAMP));

    // Adding the sentinel twice must be idempotent.
    keyValueService.addGarbageCollectionSentinelValues(TEST_TABLE, ImmutableSet.of(cell));
    Multimap<Cell, Long> timestampsAfter2 = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(cell), MAX_TIMESTAMP);
    assertEquals(3, timestampsAfter2.size());
    assertTrue(timestampsAfter2.containsEntry(cell, Value.INVALID_VALUE_TIMESTAMP));
}

@Test
public void testGetRangeThrowsOnError() {
    try {
        keyValueService.getRange(TEST_NONEXISTING_TABLE, RangeRequest.all(), MAX_TIMESTAMP).hasNext();
        Assert.fail("getRange must throw on failure");
    } catch (RuntimeException e) {
        // Expected
    }
}

@Test
public void testGetRangeOfTimestampsThrowsOnError() {
    try {
        keyValueService.getRangeOfTimestamps(TEST_NONEXISTING_TABLE, RangeRequest.all(), MAX_TIMESTAMP).hasNext();
        Assert.fail("getRangeOfTimestamps must throw on failure");
    } catch (RuntimeException e) {
        // Expected
    }
}

// The following four tests verify that the KVS hands back defensive copies: mutating
// a byte[] after writing it (or after reading it back) must not corrupt stored data.

@Test
public void testCannotModifyValuesAfterWrite() {
    Cell cell = Cell.create(row0, column0);
    byte[] data = new byte[1];
    byte[] originalData = copyOf(data);
    writeToCell(cell, data);

    modifyValue(data);

    assertThat(getForCell(cell), is(originalData));
}

@Test
public void testCannotModifyValuesAfterGetRows() {
    Cell cell = Cell.create(row0, column0);
    byte[] originalData = new byte[1];
    writeToCell(cell, originalData);

    modifyValue(getRowsForCell(cell));

    assertThat(getRowsForCell(cell), is(originalData));
}

@Test
public void testCannotModifyValuesAfterGet() {
    Cell cell = Cell.create(row0, column0);
    byte[] originalData = new byte[1];
    writeToCell(cell, originalData);

    modifyValue(getForCell(cell));

    assertThat(getForCell(cell), is(originalData));
}

@Test
public void testCannotModifyValuesAfterGetRange() {
    Cell cell = Cell.create(row0, column0);
    byte[] originalData = new byte[1];
    writeToCell(cell, originalData);

    modifyValue(getOnlyItemInTableRange());

    assertThat(getOnlyItemInTableRange(), is(originalData));
}

private void modifyValue(byte[] retrievedValue) {
    retrievedValue[0] = (byte) 50;
}

private byte[] copyOf(byte[] contents) {
    return Arrays.copyOf(contents, contents.length);
}

private void writeToCell(Cell cell, byte[] data) {
    Value val = Value.create(data, TEST_TIMESTAMP + 1);
    keyValueService.putWithTimestamps(TEST_TABLE, ImmutableMultimap.of(cell, val));
}

private byte[] getRowsForCell(Cell cell) {
    return keyValueService.getRows(TEST_TABLE, ImmutableSet.of(cell.getRowName()), ColumnSelection.all(), TEST_TIMESTAMP + 3)
            .get(cell).getContents();
}

private byte[] getForCell(Cell cell) {
    return keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 3)).get(cell).getContents();
}

private byte[] getOnlyItemInTableRange() {
    try (ClosableIterator<RowResult<Value>> rangeIterator =
            keyValueService.getRange(TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 3)) {
        byte[] contents = rangeIterator.next().getOnlyColumnValue().getContents();
        assertFalse("There should only be one row in the table", rangeIterator.hasNext());
        return contents;
    }
}

@Test
public void shouldAllowNotHavingAnyDynamicColumns() {
    keyValueService.createTable(DynamicColumnTable.reference(), DynamicColumnTable.metadata());

    byte[] row = PtBytes.toBytes(123L);
    Cell cell = Cell.create(row, dynamicColumn(1));

    Map<Cell, Long> valueToGet = ImmutableMap.of(cell, MAX_TIMESTAMP);

    assertThat(keyValueService.get(DynamicColumnTable.reference(), valueToGet), is(emptyMap()));
}

@Test
public void shouldAllowRemovingAllCellsInDynamicColumns() {
    keyValueService.createTable(DynamicColumnTable.reference(), DynamicColumnTable.metadata());

    byte[] row = PtBytes.toBytes(123L);
    byte[] value = PtBytes.toBytes(123L);
    long timestamp = 456L;

    Cell cell1 = Cell.create(row, dynamicColumn(1));
    Cell cell2 = Cell.create(row, dynamicColumn(2));

    Map<Cell, Long> valuesToDelete = ImmutableMap.of(cell1, timestamp, cell2, timestamp);
    Map<Cell, byte[]> valuesToPut = ImmutableMap.of(cell1, value, cell2, value);

    keyValueService.put(DynamicColumnTable.reference(), valuesToPut, timestamp);
    keyValueService.delete(DynamicColumnTable.reference(), Multimaps.forMap(valuesToDelete));

    Map<Cell, Value> values = keyValueService.getRows(
            DynamicColumnTable.reference(),
            ImmutableList.of(row),
            ColumnSelection.all(),
            MAX_TIMESTAMP);

    assertThat(values, is(emptyMap()));
}

@Test
public void shouldAllowSameTablenameDifferentNamespace() {
    TableReference fooBar = TableReference.createUnsafe("foo.bar");
    TableReference bazBar = TableReference.createUnsafe("baz.bar");

    // try create table in same call
    keyValueService.createTables(
            ImmutableMap.of(
                    fooBar, AtlasDbConstants.GENERIC_TABLE_METADATA,
                    bazBar, AtlasDbConstants.GENERIC_TABLE_METADATA));

    // try create table spanned over different calls
    keyValueService.createTable(fooBar, AtlasDbConstants.GENERIC_TABLE_METADATA);
    keyValueService.createTable(bazBar, AtlasDbConstants.GENERIC_TABLE_METADATA);

    // test tables actually created
    assertThat(keyValueService.getAllTableNames(), hasItems(fooBar, bazBar));

    // clean up
    keyValueService.dropTables(ImmutableSet.of(fooBar, bazBar));
}

@Test
public void truncateShouldBeIdempotent() {
    TableReference fooBar = TableReference.createUnsafe("foo.bar");
    keyValueService.createTable(fooBar, AtlasDbConstants.GENERIC_TABLE_METADATA);
    keyValueService.truncateTable(fooBar);
    keyValueService.truncateTable(fooBar);

    keyValueService.dropTable(fooBar);
}

@Test
public void truncateOfNonExistantTableShouldThrow() {
    // NOTE(review): "Existant" is a typo for "Existent", but renaming a public test
    // method is an interface change, so the name is left as-is.
    try {
        keyValueService.truncateTable(TEST_NONEXISTING_TABLE);
        Assert.fail("truncate must throw on failure");
    } catch (RuntimeException e) {
        // expected
    }
}

@Test
public void dropTableShouldBeIdempotent() {
    keyValueService.dropTable(TEST_NONEXISTING_TABLE);
    keyValueService.dropTable(TEST_NONEXISTING_TABLE);
}

@Test
public void createTableShouldBeIdempotent() {
    keyValueService.createTable(TEST_TABLE, AtlasDbConstants.GENERIC_TABLE_METADATA);
    keyValueService.createTable(TEST_TABLE, AtlasDbConstants.GENERIC_TABLE_METADATA);
}

@Test
public void compactingShouldNotFail() {
    keyValueService.compactInternally(TEST_TABLE);
}

private byte[] dynamicColumn(long columnId) {
    return PtBytes.toBytes(columnId);
}

/** Writes two versions of (row0, column0) at TEST_TIMESTAMP and TEST_TIMESTAMP + 1. */
protected void putTestDataForMultipleTimestamps() {
    keyValueService.put(TEST_TABLE,
            ImmutableMap.of(Cell.create(row0, column0), value0_t0), TEST_TIMESTAMP);
    keyValueService.put(TEST_TABLE,
            ImmutableMap.of(Cell.create(row0, column0), value0_t1), TEST_TIMESTAMP + 1);
}

protected void putTestDataForSingleTimestamp() {
    /*      | column0     column1     column2
     * -----+---------------------------------
     * row0 | "value00"   "value01"   -
     * row1 | "value10"   -           "value12"
     * row2 | -           "value21"   "value22"
     */
    Map<Cell, byte[]> values = new HashMap<>();
    values.put(Cell.create(row0, column0), value00);
    values.put(Cell.create(row0, column1), value01);
    values.put(Cell.create(row1, column0), value10);
    values.put(Cell.create(row1, column2), value12);
    values.put(Cell.create(row2, column1), value21);
    values.put(Cell.create(row2, column2), value22);
    keyValueService.put(TEST_TABLE, values, TEST_TIMESTAMP);
}

/** Supplies the concrete KVS implementation under test; created lazily in setUp. */
protected abstract KeyValueService getKeyValueService();
}
atlasdb-tests-shared/src/main/java/com/palantir/atlasdb/keyvalue/impl/AbstractKeyValueServiceTest.java
/** * Copyright 2015 Palantir Technologies * * Licensed under the BSD-3 License (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/BSD-3-Clause * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.palantir.atlasdb.keyvalue.impl; import static java.util.Collections.emptyMap; import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.apache.commons.lang.ArrayUtils; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import com.google.common.primitives.UnsignedBytes; import com.palantir.atlasdb.AtlasDbConstants; import com.palantir.atlasdb.encoding.PtBytes; import 
com.palantir.atlasdb.keyvalue.api.BatchColumnRangeSelection; import com.palantir.atlasdb.keyvalue.api.Cell; import com.palantir.atlasdb.keyvalue.api.CheckAndSetException; import com.palantir.atlasdb.keyvalue.api.CheckAndSetRequest; import com.palantir.atlasdb.keyvalue.api.ColumnRangeSelection; import com.palantir.atlasdb.keyvalue.api.ColumnSelection; import com.palantir.atlasdb.keyvalue.api.KeyAlreadyExistsException; import com.palantir.atlasdb.keyvalue.api.KeyValueService; import com.palantir.atlasdb.keyvalue.api.RangeRequest; import com.palantir.atlasdb.keyvalue.api.RangeRequests; import com.palantir.atlasdb.keyvalue.api.RowColumnRangeIterator; import com.palantir.atlasdb.keyvalue.api.RowResult; import com.palantir.atlasdb.keyvalue.api.TableReference; import com.palantir.atlasdb.keyvalue.api.Value; import com.palantir.common.base.ClosableIterator; public abstract class AbstractKeyValueServiceTest { protected static final TableReference TEST_TABLE = TableReference.createFromFullyQualifiedName("ns.pt_kvs_test"); protected static final TableReference TEST_NONEXISTING_TABLE = TableReference.createFromFullyQualifiedName("ns2.some_nonexisting_table"); protected static final byte[] row0 = "row0".getBytes(); protected static final byte[] row1 = "row1".getBytes(); protected static final byte[] row2 = "row2".getBytes(); protected static final byte[] column0 = "column0".getBytes(); protected static final byte[] column1 = "column1".getBytes(); protected static final byte[] column2 = "column2".getBytes(); protected static final byte[] value00 = "value00".getBytes(); protected static final byte[] value01 = "value01".getBytes(); protected static final byte[] value10 = "value10".getBytes(); protected static final byte[] value12 = "value12".getBytes(); protected static final byte[] value21 = "value21".getBytes(); protected static final byte[] value22 = "value22".getBytes(); protected static final byte[] value0_t0 = "value0_t0".getBytes(); protected static final byte[] value0_t1 = 
"value1_t1".getBytes(); protected static final byte[] value0_t5 = "value5_t5".getBytes(); protected static final long TEST_TIMESTAMP = 1000000l; private static final long MAX_TIMESTAMP = Long.MAX_VALUE; protected static KeyValueService keyValueService = null; protected boolean reverseRangesSupported() { return true; } @Before public void setUp() throws Exception { if (keyValueService == null) { keyValueService = getKeyValueService(); } keyValueService.createTable(TEST_TABLE, AtlasDbConstants.GENERIC_TABLE_METADATA); } @After public void tearDown() throws Exception { keyValueService.truncateTables(ImmutableSet.of(TEST_TABLE)); } @AfterClass public static void tearDownKvs() { if (keyValueService != null) { keyValueService.close(); } } @Test public void testGetRowColumnSelection() { Cell cell1 = Cell.create(PtBytes.toBytes("row"), PtBytes.toBytes("col1")); Cell cell2 = Cell.create(PtBytes.toBytes("row"), PtBytes.toBytes("col2")); Cell cell3 = Cell.create(PtBytes.toBytes("row"), PtBytes.toBytes("col3")); byte[] val = PtBytes.toBytes("val"); keyValueService.put(TEST_TABLE, ImmutableMap.of(cell1, val, cell2, val, cell3, val), 0); Map<Cell, Value> rows1 = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(cell1.getRowName()), ColumnSelection.all(), 1); Assert.assertEquals(ImmutableSet.of(cell1, cell2, cell3), rows1.keySet()); Map<Cell, Value> rows2 = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(cell1.getRowName()), ColumnSelection.create(ImmutableList.of(cell1.getColumnName())), 1); assertEquals(ImmutableSet.of(cell1), rows2.keySet()); Map<Cell, Value> rows3 = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(cell1.getRowName()), ColumnSelection.create(ImmutableList.of(cell1.getColumnName(), cell3.getColumnName())), 1); assertEquals(ImmutableSet.of(cell1, cell3), rows3.keySet()); Map<Cell, Value> rows4 = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(cell1.getRowName()), ColumnSelection.create(ImmutableList.<byte[]>of()), 1); // This has changed 
recently - now empty column set means // that all columns are selected. assertEquals(ImmutableSet.of(cell1, cell2, cell3), rows4.keySet()); } @Test public void testGetRowsAllColumns() { putTestDataForSingleTimestamp(); Map<Cell, Value> values = keyValueService.getRows(TEST_TABLE, Arrays.asList(row1, row2), ColumnSelection.all(), TEST_TIMESTAMP + 1); assertEquals(4, values.size()); assertEquals(null, values.get(Cell.create(row1, column1))); assertArrayEquals(value10, values.get(Cell.create(row1, column0)).getContents()); assertArrayEquals(value12, values.get(Cell.create(row1, column2)).getContents()); assertArrayEquals(value21, values.get(Cell.create(row2, column1)).getContents()); assertArrayEquals(value22, values.get(Cell.create(row2, column2)).getContents()); } private Map<Cell, Value> getValuesForRow(Map<byte[], RowColumnRangeIterator> values, byte[] row, int number) { Map<Cell, Value> results = Maps.newHashMap(); Iterator<Entry<Cell, Value>> it = Collections.emptyIterator(); if (values.containsKey(row)) { it = Iterators.limit(values.get(row), number); } while (it.hasNext()) { Entry<Cell, Value> result = it.next(); results.put(result.getKey(), result.getValue()); } return results; } @Test public void testGetRowColumnRange() { putTestDataForSingleTimestamp(); Map<byte[], RowColumnRangeIterator> values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); Map<Cell, Value> batchValues = getValuesForRow(values, row1, 1); assertEquals(1, batchValues.size()); assertArrayEquals(batchValues.get(Cell.create(row1, column0)).getContents(), value10); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(RangeRequests.nextLexicographicName(column0), PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); batchValues = 
getValuesForRow(values, row1, 1); assertEquals(1, batchValues.size()); assertArrayEquals(batchValues.get(Cell.create(row1, column2)).getContents(), value12); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(RangeRequests.nextLexicographicName(column0), column2, 1), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); assertEquals(0, getValuesForRow(values, row1, 1).size()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(RangeRequests.nextLexicographicName(column2), PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); assertEquals(0, getValuesForRow(values, row1, 1).size()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY, Integer.MAX_VALUE), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); batchValues = getValuesForRow(values, row1, 2); assertEquals(2, batchValues.size()); assertArrayEquals(batchValues.get(Cell.create(row1, column0)).getContents(), value10); assertArrayEquals(batchValues.get(Cell.create(row1, column2)).getContents(), value12); } @Test public void testGetRowColumnRangeHistorical() { putTestDataForMultipleTimestamps(); Map<byte[], RowColumnRangeIterator> values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row0), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 2); assertEquals(1, values.size()); Map<Cell, Value> batchValues = getValuesForRow(values, row0, 1); assertEquals(1, batchValues.size()); assertArrayEquals(value0_t1, batchValues.get(Cell.create(row0, column0)).getContents()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row0), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 1); assertEquals(1, 
values.size()); batchValues = getValuesForRow(values, row0, 1); assertEquals(1, batchValues.size()); assertArrayEquals(value0_t0, batchValues.get(Cell.create(row0, column0)).getContents()); } @Test public void testGetRowColumnRangeMultipleHistorical() { keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column0), value0_t0), TEST_TIMESTAMP); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column0), value0_t1), TEST_TIMESTAMP + 1); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column1), value0_t0), TEST_TIMESTAMP); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column1), value0_t1), TEST_TIMESTAMP + 1); // The initial multiget will get results for column0 only, then the next page for column1 will not include // the TEST_TIMESTAMP result so we have to get another page for column1. Map<byte[], RowColumnRangeIterator> values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, RangeRequests.nextLexicographicName(column1), 2), TEST_TIMESTAMP + 1); assertEquals(1, values.size()); Map<Cell, Value> batchValues = getValuesForRow(values, row1, 2); assertEquals(2, batchValues.size()); assertArrayEquals(value0_t0, batchValues.get(Cell.create(row1, column0)).getContents()); assertArrayEquals(value0_t0, batchValues.get(Cell.create(row1, column1)).getContents()); } @Test public void testGetRowColumnRangeMultipleRows() { putTestDataForSingleTimestamp(); Map<byte[], RowColumnRangeIterator> values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1, row0, row2), BatchColumnRangeSelection.create(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY, 1), TEST_TIMESTAMP + 1); assertEquals(ImmutableSet.of(row0, row1, row2), values.keySet()); Map<Cell, Value> row0Values = getValuesForRow(values, row0, 2); assertArrayEquals(value00, row0Values.get(Cell.create(row0, column0)).getContents()); assertArrayEquals(value01, 
row0Values.get(Cell.create(row0, column1)).getContents()); Map<Cell, Value> row1Values = getValuesForRow(values, row1, 2); assertArrayEquals(value10, row1Values.get(Cell.create(row1, column0)).getContents()); assertArrayEquals(value12, row1Values.get(Cell.create(row1, column2)).getContents()); Map<Cell, Value> row2Values = getValuesForRow(values, row2, 2); assertArrayEquals(value21, row2Values.get(Cell.create(row2, column1)).getContents()); assertArrayEquals(value22, row2Values.get(Cell.create(row2, column2)).getContents()); } @Test public void testGetRowColumnRangeCellBatchSingleRow() { putTestDataForSingleTimestamp(); RowColumnRangeIterator values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY), 1, TEST_TIMESTAMP + 1); assertNextElementMatches(values, Cell.create(row1, column0), value10); assertNextElementMatches(values, Cell.create(row1, column2), value12); assertFalse(values.hasNext()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(RangeRequests.nextLexicographicName(column0), PtBytes.EMPTY_BYTE_ARRAY), 1, TEST_TIMESTAMP + 1); assertNextElementMatches(values, Cell.create(row1, column2), value12); assertFalse(values.hasNext()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(RangeRequests.nextLexicographicName(column0), column2), 1, TEST_TIMESTAMP + 1); assertFalse(values.hasNext()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(RangeRequests.nextLexicographicName(column2), PtBytes.EMPTY_BYTE_ARRAY), 1, TEST_TIMESTAMP + 1); assertFalse(values.hasNext()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY), Integer.MAX_VALUE, TEST_TIMESTAMP + 1); assertNextElementMatches(values, 
Cell.create(row1, column0), value10); assertNextElementMatches(values, Cell.create(row1, column2), value12); assertFalse(values.hasNext()); } @Test public void testGetRowColumnRangeCellBatchMultipleRows() { putTestDataForSingleTimestamp(); RowColumnRangeIterator values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1, row0, row2), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY), 3, TEST_TIMESTAMP + 1); assertNextElementMatches(values, Cell.create(row1, column0), value10); assertNextElementMatches(values, Cell.create(row1, column2), value12); assertNextElementMatches(values, Cell.create(row0, column0), value00); assertNextElementMatches(values, Cell.create(row0, column1), value01); assertNextElementMatches(values, Cell.create(row2, column1), value21); assertNextElementMatches(values, Cell.create(row2, column2), value22); assertFalse(values.hasNext()); } @Test public void testGetRowColumnRangeCellBatchHistorical() { putTestDataForMultipleTimestamps(); RowColumnRangeIterator values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row0), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY), 1, TEST_TIMESTAMP + 2); assertNextElementMatches(values, Cell.create(row0, column0), value0_t1); assertFalse(values.hasNext()); values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row0), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, PtBytes.EMPTY_BYTE_ARRAY), 1, TEST_TIMESTAMP + 1); assertNextElementMatches(values, Cell.create(row0, column0), value0_t0); assertFalse(values.hasNext()); } @Test public void testGetRowColumnRangeCellBatchMultipleHistorical() { keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column0), value0_t0), TEST_TIMESTAMP); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column0), value0_t1), TEST_TIMESTAMP + 1); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column1), value0_t0), 
TEST_TIMESTAMP); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row1, column1), value0_t1), TEST_TIMESTAMP + 1); RowColumnRangeIterator values = keyValueService.getRowsColumnRange(TEST_TABLE, ImmutableList.of(row1), new ColumnRangeSelection(PtBytes.EMPTY_BYTE_ARRAY, RangeRequests.nextLexicographicName(column1)), 2, TEST_TIMESTAMP + 1); assertNextElementMatches(values, Cell.create(row1, column0), value0_t0); assertNextElementMatches(values, Cell.create(row1, column1), value0_t0); assertFalse(values.hasNext()); } private static void assertNextElementMatches(RowColumnRangeIterator iterator, Cell expectedCell, byte[] expectedContents) { assertTrue(iterator.hasNext()); Map.Entry<Cell, Value> entry = iterator.next(); assertEquals(expectedCell, entry.getKey()); assertArrayEquals(expectedContents, entry.getValue().getContents()); } @Test public void testGetRowsWhenMultipleVersions() { putTestDataForMultipleTimestamps(); Map<Cell, Value> result = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(row0), ColumnSelection.all(), TEST_TIMESTAMP + 1); assertEquals(1, result.size()); assertTrue(result.containsKey(Cell.create(row0, column0))); assertTrue(result.containsValue(Value.create(value0_t0, TEST_TIMESTAMP))); result = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(row0), ColumnSelection.all(), TEST_TIMESTAMP + 2); assertEquals(1, result.size()); assertTrue(result.containsKey(Cell.create(row0, column0))); assertTrue(result.containsValue(Value.create(value0_t1, TEST_TIMESTAMP + 1))); } @Test public void testGetRowsWhenMultipleVersionsAndColumnsSelected() { putTestDataForMultipleTimestamps(); Map<Cell, Value> result = keyValueService.getRows( TEST_TABLE, ImmutableSet.of(row0), ColumnSelection.create(ImmutableSet.of(column0)), TEST_TIMESTAMP + 1); assertEquals(1, result.size()); assertTrue(result.containsKey(Cell.create(row0, column0))); assertTrue(result.containsValue(Value.create(value0_t0, TEST_TIMESTAMP))); result = keyValueService.getRows( TEST_TABLE, 
ImmutableSet.of(row0), ColumnSelection.create(ImmutableSet.of(column0)), TEST_TIMESTAMP + 2); assertEquals(1, result.size()); assertTrue(result.containsKey(Cell.create(row0, column0))); assertTrue(result.containsValue(Value.create(value0_t1, TEST_TIMESTAMP + 1))); } @Test public void testGetWhenMultipleVersions() { putTestDataForMultipleTimestamps(); Cell cell = Cell.create(row0, column0); Value val0 = Value.create(value0_t0, TEST_TIMESTAMP); Value val1 = Value.create(value0_t1, TEST_TIMESTAMP + 1); assertTrue(keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP)).isEmpty()); Map<Cell, Value> result = keyValueService.get( TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 1)); assertTrue(result.containsKey(cell)); assertEquals(1, result.size()); assertTrue(result.containsValue(val0)); result = keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 2)); assertEquals(1, result.size()); assertTrue(result.containsKey(cell)); assertTrue(result.containsValue(val1)); result = keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 3)); assertEquals(1, result.size()); assertTrue(result.containsKey(cell)); assertTrue(result.containsValue(val1)); } @Test public void testGetRowsWithSelectedColumns() { putTestDataForSingleTimestamp(); ColumnSelection columns1and2 = ColumnSelection.create(Arrays.asList(column1, column2)); Map<Cell, Value> values = keyValueService.getRows(TEST_TABLE, Arrays.asList(row1, row2), columns1and2, TEST_TIMESTAMP + 1); assertEquals(3, values.size()); assertEquals(null, values.get(Cell.create(row1, column0))); assertArrayEquals(value12, values.get(Cell.create(row1, column2)).getContents()); assertArrayEquals(value21, values.get(Cell.create(row2, column1)).getContents()); assertArrayEquals(value22, values.get(Cell.create(row2, column2)).getContents()); } @Test public void testGetLatestTimestamps() { putTestDataForMultipleTimestamps(); Map<Cell, Long> timestamps = 
keyValueService.getLatestTimestamps(TEST_TABLE, ImmutableMap.of(Cell.create(row0, column0), TEST_TIMESTAMP + 2)); assertTrue("Incorrect number of values returned.", timestamps.size() == 1); assertEquals("Incorrect value returned.", new Long(TEST_TIMESTAMP + 1), timestamps.get(Cell.create(row0, column0))); } @Test public void testGetWithMultipleVersions() { putTestDataForMultipleTimestamps(); Map<Cell, Value> values = keyValueService.get(TEST_TABLE, ImmutableMap.of(Cell.create(row0, column0), TEST_TIMESTAMP + 2)); assertTrue("Incorrect number of values returned.", values.size() == 1); assertEquals("Incorrect value returned.", Value.create(value0_t1, TEST_TIMESTAMP + 1), values.get(Cell.create(row0, column0))); } @Test public void testGetAllTableNames() { final TableReference anotherTable = TableReference.createWithEmptyNamespace("AnotherTable"); assertEquals(1, keyValueService.getAllTableNames().size()); assertEquals(TEST_TABLE, keyValueService.getAllTableNames().iterator().next()); keyValueService.createTable(anotherTable, AtlasDbConstants.GENERIC_TABLE_METADATA); assertEquals(2, keyValueService.getAllTableNames().size()); assertTrue(keyValueService.getAllTableNames().contains(anotherTable)); assertTrue(keyValueService.getAllTableNames().contains(TEST_TABLE)); keyValueService.dropTable(anotherTable); assertEquals(1, keyValueService.getAllTableNames().size()); assertEquals(TEST_TABLE, keyValueService.getAllTableNames().iterator().next()); } @Test public void testTableMetadata() { assertEquals(AtlasDbConstants.GENERIC_TABLE_METADATA.length, keyValueService.getMetadataForTable(TEST_TABLE).length); keyValueService.putMetadataForTable(TEST_TABLE, ArrayUtils.EMPTY_BYTE_ARRAY); assertEquals(0, keyValueService.getMetadataForTable(TEST_TABLE).length); keyValueService.putMetadataForTable(TEST_TABLE, AtlasDbConstants.GENERIC_TABLE_METADATA); assertTrue(Arrays.equals(AtlasDbConstants.GENERIC_TABLE_METADATA, keyValueService.getMetadataForTable(TEST_TABLE))); } private static 
<V, T extends Iterator<RowResult<V>>> void assertRangeSizeAndOrdering(T it, int expectedSize, RangeRequest rangeRequest) { if (!it.hasNext()) { assertEquals(expectedSize, 0); return; } byte[] row = it.next().getRowName(); int size = 1; final boolean reverse = rangeRequest.isReverse(); final byte[] startRow = rangeRequest.getStartInclusive(); final byte[] endRow = rangeRequest.getEndExclusive(); if (startRow.length > 0) { if (!reverse) { assert UnsignedBytes.lexicographicalComparator().compare(startRow, row) <= 0; } else { assert UnsignedBytes.lexicographicalComparator().compare(startRow, row) >= 0; } } while (it.hasNext()) { byte[] nextRow = it.next().getRowName(); if (!reverse) { assert UnsignedBytes.lexicographicalComparator().compare(row, nextRow) <= 0; } else { assert UnsignedBytes.lexicographicalComparator().compare(row, nextRow) >= 0; } row = nextRow; size++; } if (endRow.length > 0) { if (!reverse) { assert UnsignedBytes.lexicographicalComparator().compare(row, endRow) < 0; } else { assert UnsignedBytes.lexicographicalComparator().compare(row, endRow) > 0; } } assertEquals(expectedSize, size); } @Test public void testGetRange() { testGetRange(reverseRangesSupported()); } public void testGetRange(boolean reverseSupported) { putTestDataForSingleTimestamp(); // Unbounded final RangeRequest all = RangeRequest.all(); assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, all, TEST_TIMESTAMP + 1), 3, all); if (reverseSupported) { final RangeRequest allReverse = RangeRequest.reverseBuilder().build(); assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, allReverse, TEST_TIMESTAMP + 1), 3, allReverse); } // Upbounded final RangeRequest upbounded = RangeRequest.builder().endRowExclusive(row2).build(); assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, upbounded, TEST_TIMESTAMP + 1), 2, upbounded); if (reverseSupported) { final RangeRequest upboundedReverse = RangeRequest.reverseBuilder().endRowExclusive(row0).build(); 
assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, upboundedReverse, TEST_TIMESTAMP + 1), 2, upboundedReverse); } // Downbounded final RangeRequest downbounded = RangeRequest.builder().startRowInclusive(row1).build(); assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, downbounded, TEST_TIMESTAMP + 1), 2, downbounded); if (reverseSupported) { final RangeRequest downboundedReverse = RangeRequest.reverseBuilder().startRowInclusive(row1).build(); assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, downboundedReverse, TEST_TIMESTAMP + 1), 2, downboundedReverse); } // Both-bounded final RangeRequest bothbounded = RangeRequest.builder().startRowInclusive(row1).endRowExclusive(row2).build(); assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, bothbounded, TEST_TIMESTAMP + 1), 1, bothbounded); if (reverseSupported) { final RangeRequest bothboundedReverse = RangeRequest.reverseBuilder().startRowInclusive(row2).endRowExclusive(row1).build(); assertRangeSizeAndOrdering(keyValueService.getRange(TEST_TABLE, bothboundedReverse, TEST_TIMESTAMP + 1), 1, bothboundedReverse); } // Precise test for lower-bounded RangeRequest rangeRequest = downbounded; ClosableIterator<RowResult<Value>> rangeResult = keyValueService.getRange( TEST_TABLE, rangeRequest, TEST_TIMESTAMP + 1); assertTrue(keyValueService.getRange(TEST_TABLE, rangeRequest, TEST_TIMESTAMP).hasNext() == false); assertTrue(rangeResult.hasNext()); assertEquals( RowResult.create( row1, ImmutableSortedMap.orderedBy(UnsignedBytes.lexicographicalComparator()).put( column0, Value.create(value10, TEST_TIMESTAMP)).put( column2, Value.create(value12, TEST_TIMESTAMP)).build()), rangeResult.next()); assertTrue(rangeResult.hasNext()); assertEquals( RowResult.create( row2, ImmutableSortedMap.orderedBy(UnsignedBytes.lexicographicalComparator()).put( column1, Value.create(value21, TEST_TIMESTAMP)).put( column2, Value.create(value22, TEST_TIMESTAMP)).build()), rangeResult.next()); 
rangeResult.close(); } @Test public void testGetAllTimestamps() { putTestDataForMultipleTimestamps(); final Cell cell = Cell.create(row0, column0); final Set<Cell> cellSet = ImmutableSet.of(cell); Multimap<Cell, Long> timestamps = keyValueService.getAllTimestamps( TEST_TABLE, cellSet, TEST_TIMESTAMP); assertEquals(0, timestamps.size()); timestamps = keyValueService.getAllTimestamps(TEST_TABLE, cellSet, TEST_TIMESTAMP + 1); assertEquals(1, timestamps.size()); assertTrue(timestamps.containsEntry(cell, TEST_TIMESTAMP)); timestamps = keyValueService.getAllTimestamps(TEST_TABLE, cellSet, TEST_TIMESTAMP + 2); assertEquals(2, timestamps.size()); assertTrue(timestamps.containsEntry(cell, TEST_TIMESTAMP)); assertTrue(timestamps.containsEntry(cell, TEST_TIMESTAMP + 1)); assertEquals( timestamps, keyValueService.getAllTimestamps(TEST_TABLE, cellSet, TEST_TIMESTAMP + 3)); } @Test public void testDelete() { putTestDataForSingleTimestamp(); assertEquals(3, Iterators.size(keyValueService.getRange( TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 1))); keyValueService.delete( TEST_TABLE, ImmutableMultimap.of(Cell.create(row0, column0), TEST_TIMESTAMP)); assertEquals(3, Iterators.size(keyValueService.getRange( TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 1))); keyValueService.delete( TEST_TABLE, ImmutableMultimap.of(Cell.create(row0, column1), TEST_TIMESTAMP)); assertEquals(2, Iterators.size(keyValueService.getRange( TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 1))); keyValueService.delete( TEST_TABLE, ImmutableMultimap.of(Cell.create(row1, column0), TEST_TIMESTAMP)); assertEquals(2, Iterators.size(keyValueService.getRange( TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 1))); keyValueService.delete( TEST_TABLE, ImmutableMultimap.of(Cell.create(row1, column2), TEST_TIMESTAMP)); assertEquals(1, Iterators.size(keyValueService.getRange( TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 1))); } @Test public void testDeleteMultipleVersions() { putTestDataForMultipleTimestamps(); 
Cell cell = Cell.create(row0, column0); ClosableIterator<RowResult<Value>> result = keyValueService.getRange( TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 1); assertTrue(result.hasNext()); keyValueService.delete(TEST_TABLE, ImmutableMultimap.of(cell, TEST_TIMESTAMP)); result = keyValueService.getRange(TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 1); assertTrue(!result.hasNext()); result = keyValueService.getRange(TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 2); assertTrue(result.hasNext()); } @Test public void testPutWithTimestamps() { putTestDataForMultipleTimestamps(); final Cell cell = Cell.create(row0, column0); final Value val1 = Value.create(value0_t1, TEST_TIMESTAMP + 1); final Value val5 = Value.create(value0_t5, TEST_TIMESTAMP + 5); keyValueService.putWithTimestamps(TEST_TABLE, ImmutableMultimap.of(cell, val5)); assertEquals( val5, keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 6)).get(cell)); assertEquals( val1, keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 5)).get(cell)); keyValueService.delete(TEST_TABLE, ImmutableMultimap.of(cell, TEST_TIMESTAMP + 5)); } @Test public void testGetRangeWithTimestamps() { testGetRangeWithTimestamps(false); if (reverseRangesSupported()) { testGetRangeWithTimestamps(true); } } private void testGetRangeWithTimestamps(boolean reverse) { putTestDataForMultipleTimestamps(); final RangeRequest range; if (!reverse) { range = RangeRequest.builder().startRowInclusive(row0).endRowExclusive(row1).build(); } else { range = RangeRequest.reverseBuilder().startRowInclusive(row0).build(); } ClosableIterator<RowResult<Set<Long>>> rangeWithHistory = keyValueService.getRangeOfTimestamps( TEST_TABLE, range, TEST_TIMESTAMP + 2); RowResult<Set<Long>> row0 = rangeWithHistory.next(); assertTrue(!rangeWithHistory.hasNext()); rangeWithHistory.close(); assertEquals(1, Iterables.size(row0.getCells())); Entry<Cell, Set<Long>> cell0 = row0.getCells().iterator().next(); assertEquals(2, 
cell0.getValue().size()); assertTrue(cell0.getValue().contains(TEST_TIMESTAMP)); assertTrue(cell0.getValue().contains(TEST_TIMESTAMP + 1)); } @Test public void testKeyAlreadyExists() { // Test that it does not throw some random exceptions putTestDataForSingleTimestamp(); try { putTestDataForSingleTimestamp(); // Legal } catch (KeyAlreadyExistsException e) { Assert.fail("Must not throw when overwriting with same value!"); } keyValueService.putWithTimestamps( TEST_TABLE, ImmutableMultimap.of( Cell.create(row0, column0), Value.create(value00, TEST_TIMESTAMP + 1))); try { keyValueService.putWithTimestamps( TEST_TABLE, ImmutableMultimap.of( Cell.create(row0, column0), Value.create(value00, TEST_TIMESTAMP + 1))); // Legal } catch (KeyAlreadyExistsException e) { Assert.fail("Must not throw when overwriting with same value!"); } try { keyValueService.putWithTimestamps(TEST_TABLE, ImmutableMultimap.of(Cell.create(row0, column0), Value.create(value01, TEST_TIMESTAMP + 1))); // Legal } catch (KeyAlreadyExistsException e) { // Legal } // The first try might not throw as putUnlessExists must only be exclusive with other putUnlessExists. 
try { keyValueService.putUnlessExists(TEST_TABLE, ImmutableMap.of(Cell.create(row0, column0), value00)); // Legal } catch (KeyAlreadyExistsException e) { // Legal } try { keyValueService.putUnlessExists(TEST_TABLE, ImmutableMap.of(Cell.create(row0, column0), value00)); Assert.fail("putUnlessExists must throw when overwriting the same cell!"); } catch (KeyAlreadyExistsException e) { // Legal } } @Test public void testCheckAndSetFromEmpty() { Cell cell = Cell.create(row0, column0); CheckAndSetRequest request = CheckAndSetRequest.newCell(TEST_TABLE, cell, value00); keyValueService.checkAndSet(request); Multimap<Cell, Long> timestamps = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(cell), 1L); assertEquals(1, timestamps.size()); assertTrue(timestamps.containsEntry(cell, AtlasDbConstants.TRANSACTION_TS)); } @Test public void testCheckAndSetFromOtherValue() { Cell cell = Cell.create(row0, column0); CheckAndSetRequest request = CheckAndSetRequest.newCell(TEST_TABLE, cell, value00); keyValueService.checkAndSet(request); CheckAndSetRequest secondRequest = CheckAndSetRequest.singleCell(TEST_TABLE, cell, value00, value01); keyValueService.checkAndSet(secondRequest); Multimap<Cell, Long> timestamps = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(cell), 1L); assertEquals(1, timestamps.size()); assertTrue(timestamps.containsEntry(cell, AtlasDbConstants.TRANSACTION_TS)); } @Test(expected = CheckAndSetException.class) public void testCheckAndSetFromWrongValue() { Cell cell = Cell.create(row0, column0); CheckAndSetRequest request = CheckAndSetRequest.newCell(TEST_TABLE, cell, value00); keyValueService.checkAndSet(request); CheckAndSetRequest secondRequest = CheckAndSetRequest.singleCell(TEST_TABLE, cell, value01, value00); keyValueService.checkAndSet(secondRequest); } @Test(expected = CheckAndSetException.class) public void testCheckAndSetFromValueWhenNoValue() { Cell cell = Cell.create(row0, column0); CheckAndSetRequest request = 
CheckAndSetRequest.singleCell(TEST_TABLE, cell, value00, value01); keyValueService.checkAndSet(request); } @Test(expected = CheckAndSetException.class) public void testCheckAndSetFromNoValueWhenValueIsPresent() { Cell cell = Cell.create(row0, column0); CheckAndSetRequest request = CheckAndSetRequest.newCell(TEST_TABLE, cell, value00); keyValueService.checkAndSet(request); keyValueService.checkAndSet(request); } @Test public void testAddGCSentinelValues() { putTestDataForMultipleTimestamps(); Cell cell = Cell.create(row0, column0); Multimap<Cell, Long> timestampsBefore = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(cell), MAX_TIMESTAMP); assertEquals(2, timestampsBefore.size()); assertTrue(!timestampsBefore.containsEntry(cell, Value.INVALID_VALUE_TIMESTAMP)); keyValueService.addGarbageCollectionSentinelValues(TEST_TABLE, ImmutableSet.of(cell)); Multimap<Cell, Long> timestampsAfter1 = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(cell), MAX_TIMESTAMP); assertEquals(3, timestampsAfter1.size()); assertTrue(timestampsAfter1.containsEntry(cell, Value.INVALID_VALUE_TIMESTAMP)); keyValueService.addGarbageCollectionSentinelValues(TEST_TABLE, ImmutableSet.of(cell)); Multimap<Cell, Long> timestampsAfter2 = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(cell), MAX_TIMESTAMP); assertEquals(3, timestampsAfter2.size()); assertTrue(timestampsAfter2.containsEntry(cell, Value.INVALID_VALUE_TIMESTAMP)); } @Test public void testGetRangeThrowsOnError() { try { keyValueService.getRange(TEST_NONEXISTING_TABLE, RangeRequest.all(), MAX_TIMESTAMP).hasNext(); Assert.fail("getRange must throw on failure"); } catch (RuntimeException e) { // Expected } } @Test public void testGetRangeOfTimestampsThrowsOnError() { try { keyValueService.getRangeOfTimestamps(TEST_NONEXISTING_TABLE, RangeRequest.all(), MAX_TIMESTAMP).hasNext(); Assert.fail("getRangeOfTimestamps must throw on failure"); } catch (RuntimeException e) { // Expected } } @Test public void 
testCannotModifyValuesAfterWrite() { Cell cell = Cell.create(row0, column0); byte[] data = new byte[1]; byte[] originalData = copyOf(data); writeToCell(cell, data); modifyValue(data); assertThat(getForCell(cell), is(originalData)); } @Test public void testCannotModifyValuesAfterGetRows() { Cell cell = Cell.create(row0, column0); byte[] originalData = new byte[1]; writeToCell(cell, originalData); modifyValue(getRowsForCell(cell)); assertThat(getRowsForCell(cell), is(originalData)); } @Test public void testCannotModifyValuesAfterGet() { Cell cell = Cell.create(row0, column0); byte[] originalData = new byte[1]; writeToCell(cell, originalData); modifyValue(getForCell(cell)); assertThat(getForCell(cell), is(originalData)); } @Test public void testCannotModifyValuesAfterGetRange() { Cell cell = Cell.create(row0, column0); byte[] originalData = new byte[1]; writeToCell(cell, originalData); modifyValue(getOnlyItemInTableRange()); assertThat(getOnlyItemInTableRange(), is(originalData)); } private void modifyValue(byte[] retrievedValue) { retrievedValue[0] = (byte) 50; } private byte[] copyOf(byte[] contents) { return Arrays.copyOf(contents, contents.length); } private void writeToCell(Cell cell, byte[] data) { Value val = Value.create(data, TEST_TIMESTAMP + 1); keyValueService.putWithTimestamps(TEST_TABLE, ImmutableMultimap.of(cell, val)); } private byte[] getRowsForCell(Cell cell) { return keyValueService.getRows(TEST_TABLE, ImmutableSet.of(cell.getRowName()), ColumnSelection.all(), TEST_TIMESTAMP + 3) .get(cell).getContents(); } private byte[] getForCell(Cell cell) { return keyValueService.get(TEST_TABLE, ImmutableMap.of(cell, TEST_TIMESTAMP + 3)).get(cell).getContents(); } private byte[] getOnlyItemInTableRange() { try (ClosableIterator<RowResult<Value>> rangeIterator = keyValueService.getRange(TEST_TABLE, RangeRequest.all(), TEST_TIMESTAMP + 3) ){ byte[] contents = rangeIterator.next().getOnlyColumnValue().getContents(); assertFalse("There should only be one row in the 
table", rangeIterator.hasNext()); return contents; } } @Test public void shouldAllowNotHavingAnyDynamicColumns() { keyValueService.createTable(DynamicColumnTable.reference(), DynamicColumnTable.metadata()); byte[] row = PtBytes.toBytes(123L); Cell cell = Cell.create(row, dynamicColumn(1)); Map<Cell, Long> valueToGet = ImmutableMap.of(cell, MAX_TIMESTAMP); assertThat(keyValueService.get(DynamicColumnTable.reference(), valueToGet), is(emptyMap())); } @Test public void shouldAllowRemovingAllCellsInDynamicColumns() { keyValueService.createTable(DynamicColumnTable.reference(), DynamicColumnTable.metadata()); byte[] row = PtBytes.toBytes(123L); byte[] value = PtBytes.toBytes(123L); long timestamp = 456L; Cell cell1 = Cell.create(row, dynamicColumn(1)); Cell cell2 = Cell.create(row, dynamicColumn(2)); Map<Cell, Long> valuesToDelete = ImmutableMap.of(cell1, timestamp, cell2, timestamp); Map<Cell, byte[]> valuesToPut = ImmutableMap.of(cell1, value, cell2, value); keyValueService.put(DynamicColumnTable.reference(), valuesToPut, timestamp); keyValueService.delete(DynamicColumnTable.reference(), Multimaps.forMap(valuesToDelete)); Map<Cell, Value> values = keyValueService.getRows( DynamicColumnTable.reference(), ImmutableList.of(row), ColumnSelection.all(), MAX_TIMESTAMP); assertThat(values, is(emptyMap())); } @Test public void shouldAllowSameTablenameDifferentNamespace() { TableReference fooBar = TableReference.createUnsafe("foo.bar"); TableReference bazBar = TableReference.createUnsafe("baz.bar"); // try create table in same call keyValueService.createTables( ImmutableMap.of( fooBar, AtlasDbConstants.GENERIC_TABLE_METADATA, bazBar, AtlasDbConstants.GENERIC_TABLE_METADATA)); // try create table spanned over different calls keyValueService.createTable(fooBar, AtlasDbConstants.GENERIC_TABLE_METADATA); keyValueService.createTable(bazBar, AtlasDbConstants.GENERIC_TABLE_METADATA); // test tables actually created assertThat(keyValueService.getAllTableNames(), hasItems(fooBar, 
bazBar)); // clean up keyValueService.dropTables(ImmutableSet.of(fooBar, bazBar)); } @Test public void truncateShouldBeIdempotent() { TableReference fooBar = TableReference.createUnsafe("foo.bar"); keyValueService.createTable(fooBar, AtlasDbConstants.GENERIC_TABLE_METADATA); keyValueService.truncateTable(fooBar); keyValueService.truncateTable(fooBar); keyValueService.dropTable(fooBar); } @Test public void truncateOfNonExistantTableShouldThrow() { try { keyValueService.truncateTable(TEST_NONEXISTING_TABLE); Assert.fail("truncate must throw on failure"); } catch (RuntimeException e) { // expected } } @Test public void dropTableShouldBeIdempotent() { keyValueService.dropTable(TEST_NONEXISTING_TABLE); keyValueService.dropTable(TEST_NONEXISTING_TABLE); } @Test public void createTableShouldBeIdempotent() { keyValueService.createTable(TEST_TABLE, AtlasDbConstants.GENERIC_TABLE_METADATA); keyValueService.createTable(TEST_TABLE, AtlasDbConstants.GENERIC_TABLE_METADATA); } @Test public void compactingShouldNotFail() { keyValueService.compactInternally(TEST_TABLE); } private byte[] dynamicColumn(long columnId) { return PtBytes.toBytes(columnId); } protected void putTestDataForMultipleTimestamps() { keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row0, column0), value0_t0), TEST_TIMESTAMP); keyValueService.put(TEST_TABLE, ImmutableMap.of(Cell.create(row0, column0), value0_t1), TEST_TIMESTAMP + 1); } protected void putTestDataForSingleTimestamp() { /* | column0 column1 column2 * -----+--------------------------------- * row0 | "value00" "value01" - * row1 | "value10" - "value12" * row2 | - "value21" "value22" */ Map<Cell, byte[]> values = new HashMap<Cell, byte[]>(); values.put(Cell.create(row0, column0), value00); values.put(Cell.create(row0, column1), value01); values.put(Cell.create(row1, column0), value10); values.put(Cell.create(row1, column2), value12); values.put(Cell.create(row2, column1), value21); values.put(Cell.create(row2, column2), value22); 
keyValueService.put(TEST_TABLE, values, TEST_TIMESTAMP); } protected abstract KeyValueService getKeyValueService(); }
Verify CAS'd values are correct
atlasdb-tests-shared/src/main/java/com/palantir/atlasdb/keyvalue/impl/AbstractKeyValueServiceTest.java
Verify CAS'd values are correct
<ide><path>tlasdb-tests-shared/src/main/java/com/palantir/atlasdb/keyvalue/impl/AbstractKeyValueServiceTest.java <ide> import static org.junit.Assert.assertEquals; <ide> import static org.junit.Assert.assertFalse; <ide> import static org.junit.Assert.assertTrue; <del>import static org.junit.Assert.fail; <del> <add> <add>import java.nio.charset.StandardCharsets; <ide> import java.util.Arrays; <ide> import java.util.Collections; <ide> import java.util.HashMap; <ide> CheckAndSetRequest request = CheckAndSetRequest.newCell(TEST_TABLE, cell, value00); <ide> keyValueService.checkAndSet(request); <ide> <del> Multimap<Cell, Long> timestamps = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(cell), 1L); <del> <del> assertEquals(1, timestamps.size()); <del> assertTrue(timestamps.containsEntry(cell, AtlasDbConstants.TRANSACTION_TS)); <add> verifyCheckAndSet(cell, value00); <ide> } <ide> <ide> @Test <ide> CheckAndSetRequest secondRequest = CheckAndSetRequest.singleCell(TEST_TABLE, cell, value00, value01); <ide> keyValueService.checkAndSet(secondRequest); <ide> <del> Multimap<Cell, Long> timestamps = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(cell), 1L); <add> verifyCheckAndSet(cell, value01); <add> } <add> <add> private void verifyCheckAndSet(Cell key, byte[] expectedValue) { <add> Multimap<Cell, Long> timestamps = keyValueService.getAllTimestamps(TEST_TABLE, ImmutableSet.of(key), 1L); <ide> <ide> assertEquals(1, timestamps.size()); <del> assertTrue(timestamps.containsEntry(cell, AtlasDbConstants.TRANSACTION_TS)); <add> assertTrue(timestamps.containsEntry(key, AtlasDbConstants.TRANSACTION_TS)); <add> <add> ClosableIterator<RowResult<Value>> result = keyValueService.getRange(TEST_TABLE, RangeRequest.all(), <add> AtlasDbConstants.TRANSACTION_TS + 1); <add> <add> // Check first result is right <add> byte[] actual = result.next().getOnlyColumnValue().getContents(); <add> assertArrayEquals(String.format("Value \"%s\" different from expected \"%s\"", 
<add> new String(actual, StandardCharsets.UTF_8), <add> new String(expectedValue, StandardCharsets.UTF_8)), <add> expectedValue, <add> actual); <add> <add> // Check no more results <add> assertFalse(result.hasNext()); <ide> } <ide> <ide> @Test(expected = CheckAndSetException.class)
Java
apache-2.0
1a0a0cd3292c25561595d0f63394939958a3cdfb
0
ontopia/ontopia,ontopia/ontopia,ontopia/ontopia,ontopia/ontopia,ontopia/ontopia
/* * #! * Ontopia Rest * #- * Copyright (C) 2001 - 2016 The Ontopia Project * #- * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * !# */ package net.ontopia.topicmaps.rest.v1.name; import net.ontopia.topicmaps.rest.v1.scoped.ScopedRouter; import net.ontopia.topicmaps.rest.v1.variant.VariantsResource; import org.restlet.Context; public class NamesRouter extends ScopedRouter { public NamesRouter(Context context) { super(context); setName("Names router"); setDescription("Binds the resources related to name operations"); //add attach("", TopicNameResource.class); // list // ClassInstanceIndexIF.getTopicNames attach("/typed/{type}", TopicNamesResource.class); // ClassInstanceIndexIF.getTopicNameTypes attach("/types", TopicNameTypesResource.class); // NameIndexIF.getTopicNames attach("/index", IndexResource.class); // single attach("/{id}", TopicNameResource.class); // variants attach("/{id}/variants", VariantsResource.class); } }
ontopia-rest/src/main/java/net/ontopia/topicmaps/rest/v1/name/NamesRouter.java
/* * #! * Ontopia Rest * #- * Copyright (C) 2001 - 2016 The Ontopia Project * #- * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * !# */ package net.ontopia.topicmaps.rest.v1.name; import net.ontopia.topicmaps.rest.v1.scoped.ScopedRouter; import net.ontopia.topicmaps.rest.v1.variant.VariantsResource; import org.restlet.Context; public class NamesRouter extends ScopedRouter { public NamesRouter(Context context) { super(context); setName("Names router"); setDescription("Binds the resources related to name operations"); //add attach("", TopicNameResource.class); // list // ClassInstanceIndexIF.getTopicNames attach("/typed/{type}", TopicNamesResource.class); // ClassInstanceIndexIF.getTopicNameTypes attach("/types/{type}", TopicNameTypesResource.class); // NameIndexIF.getTopicNames attach("/index", IndexResource.class); // single attach("/{id}", TopicNameResource.class); // variants attach("/{id}/variants", VariantsResource.class); } }
Removed unused parameter
ontopia-rest/src/main/java/net/ontopia/topicmaps/rest/v1/name/NamesRouter.java
Removed unused parameter
<ide><path>ntopia-rest/src/main/java/net/ontopia/topicmaps/rest/v1/name/NamesRouter.java <ide> attach("/typed/{type}", TopicNamesResource.class); <ide> <ide> // ClassInstanceIndexIF.getTopicNameTypes <del> attach("/types/{type}", TopicNameTypesResource.class); <add> attach("/types", TopicNameTypesResource.class); <ide> <ide> // NameIndexIF.getTopicNames <ide> attach("/index", IndexResource.class);
Java
mit
83a5cc32567f5b817768d53fa64613cdb1673220
0
simple-elf/selenide,codeborne/selenide,simple-elf/selenide,simple-elf/selenide,codeborne/selenide,simple-elf/selenide,codeborne/selenide
package integration; import com.codeborne.selenide.ElementsCollection; import org.junit.jupiter.api.Assumptions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static com.codeborne.selenide.CollectionCondition.size; import static com.codeborne.selenide.CollectionCondition.sizeGreaterThan; import static com.codeborne.selenide.Condition.text; import static com.codeborne.selenide.Selenide.$$; import static com.codeborne.selenide.WebDriverRunner.isPhantomjs; class CollectionReloadingTest extends IntegrationTest { @BeforeEach void openTestPage() { openFile("collection_with_delays.html"); } @Test void reloadsCollectionOnEveryCall() { Assumptions.assumeFalse(isPhantomjs()); ElementsCollection collection = $$("#collection li"); collection.get(0).shouldHave(text("Element #0")); collection.get(10).shouldHave(text("Element #10")); } @Test void canTakeSnapshotOfCollection() { ElementsCollection collection = $$("#collection li"); ElementsCollection snapshot = collection.snapshot(); int currentSize = snapshot.size(); collection.shouldHave(sizeGreaterThan(currentSize)); snapshot.shouldHave(size(currentSize)); } }
src/test/java/integration/CollectionReloadingTest.java
package integration; import com.codeborne.selenide.ElementsCollection; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static com.codeborne.selenide.CollectionCondition.size; import static com.codeborne.selenide.CollectionCondition.sizeGreaterThan; import static com.codeborne.selenide.Condition.text; import static com.codeborne.selenide.Selenide.$$; class CollectionReloadingTest extends IntegrationTest { @BeforeEach void openTestPage() { openFile("collection_with_delays.html"); } @Test void reloadsCollectionOnEveryCall() { ElementsCollection collection = $$("#collection li"); collection.get(0).shouldHave(text("Element #0")); collection.get(10).shouldHave(text("Element #10")); } @Test void canTakeSnapshotOfCollection() { ElementsCollection collection = $$("#collection li"); ElementsCollection snapshot = collection.snapshot(); int currentSize = snapshot.size(); collection.shouldHave(sizeGreaterThan(currentSize)); snapshot.shouldHave(size(currentSize)); } }
ignore unstable test in PhantomJS
src/test/java/integration/CollectionReloadingTest.java
ignore unstable test in PhantomJS
<ide><path>rc/test/java/integration/CollectionReloadingTest.java <ide> <ide> import com.codeborne.selenide.ElementsCollection; <ide> <add>import org.junit.jupiter.api.Assumptions; <ide> import org.junit.jupiter.api.BeforeEach; <ide> import org.junit.jupiter.api.Test; <ide> <ide> import static com.codeborne.selenide.CollectionCondition.sizeGreaterThan; <ide> import static com.codeborne.selenide.Condition.text; <ide> import static com.codeborne.selenide.Selenide.$$; <add>import static com.codeborne.selenide.WebDriverRunner.isPhantomjs; <ide> <ide> class CollectionReloadingTest extends IntegrationTest { <ide> @BeforeEach <ide> <ide> @Test <ide> void reloadsCollectionOnEveryCall() { <add> Assumptions.assumeFalse(isPhantomjs()); <add> <ide> ElementsCollection collection = $$("#collection li"); <ide> collection.get(0).shouldHave(text("Element #0")); <ide> collection.get(10).shouldHave(text("Element #10"));
Java
apache-2.0
d5eee063333b68c394bcc5a8e8f4f8f98fdf7337
0
pku-meizi/meizi,pku-meizi/meizi,pku-meizi/meizi,pku-meizi/meizi
package com.meiziaccess.controller; import com.meiziaccess.model.ItemMedia; import com.meiziaccess.model.ItemMediaRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.core.io.FileSystemResource; import org.springframework.core.io.InputStreamResource; import org.springframework.http.HttpHeaders; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Created by user-u1 on 2016/4/12. */ @Controller public class OrderController { @Value("${configure.IPAdderss}") private String IPAddress; //当前服务器ip @Autowired private ItemMediaRepository itemMediaRepository; //订单PostAPI @RequestMapping(value = "/order", method = RequestMethod.POST, produces = "application/json;charset-UTF-8") @ResponseBody public Map<String, Object> order(@RequestBody ItemMedia ord){ Map<String, Object> order_return = new HashMap<String, Object>(); System.out.println(ord.getUuid() + ", " + ord.getEntire()+ ", " + ord.getStarttime() + ", " + ord.getEndtime() + ", " + ord.getHighdef_video_path()); String url = "http://" + IPAddress + "/media?uuid=" + ord.getUuid(); //处理视频,修改链接和地址 ord.setStatus(0); ord.setUrl(url); List<ItemMedia> list = itemMediaRepository.findMediaByUuid(ord.getUuid()); ItemMedia itemMedia; if(list.isEmpty()){ itemMedia = itemMediaRepository.save(ord); }else{ itemMedia = list.get(0); } //返回字段 order_return.put("uuid", itemMedia.getUuid()); order_return.put("status", itemMedia.getStatus()); order_return.put("url", itemMedia.getUrl()); return order_return; } //视频下载链接 @RequestMapping(value = "/media", method = RequestMethod.GET) public ResponseEntity<InputStreamResource> downloadFile( Long uuid) throws IOException { //生成相应的文件下载链接 // String filePath = 
"E:/" + 1 + ".rmvb"; // 通过uuid查找高码视频路径 System.out.println("uuid = " + uuid); List<ItemMedia> list = itemMediaRepository.findMediaByUuid(uuid); if(list.isEmpty()) return null; ItemMedia itemMedia = list.get(0); System.out.println(itemMedia.getHighdef_video_path()); // String filePath = "/home/derc/video/" + id + ".rmvb"; String filePath = itemMedia.getHighdef_video_path(); FileSystemResource file = new FileSystemResource(filePath); HttpHeaders headers = new HttpHeaders(); headers.add("Cache-Control", "no-cache, no-store, must-revalidate"); headers.add("Content-Disposition", String.format("attachment; filename=\"%s\"", file.getFilename())); headers.add("Pragma", "no-cache"); headers.add("Expires", "0"); return ResponseEntity .ok() .headers(headers) .contentLength(file.contentLength()) .contentType(MediaType.parseMediaType("application/octet-stream")) .body(new InputStreamResource(file.getInputStream())); } }
src/main/java/com/meiziaccess/controller/OrderController.java
package com.meiziaccess.controller; import com.meiziaccess.model.ItemMedia; import com.meiziaccess.model.ItemMediaRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.core.io.FileSystemResource; import org.springframework.core.io.InputStreamResource; import org.springframework.http.HttpHeaders; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Created by user-u1 on 2016/4/12. */ @Controller public class OrderController { @Value("${configure.IPAdderss}") private String IPAddress; //当前服务器ip @Autowired private ItemMediaRepository itemMediaRepository; //订单PostAPI @RequestMapping(value = "/order", method = RequestMethod.POST, produces = "application/json;charset-UTF-8") @ResponseBody public Map<String, Object> order(@RequestBody ItemMedia ord){ Map<String, Object> order_return = new HashMap<String, Object>(); System.out.println(ord.getUuid() + ", " + ord.getEntire()+ ", " + ord.getStarttime() + ", " + ord.getEndtime() + ", " + ord.getHighdef_video_path()); String url = "http://" + IPAddress + "/media?uuid=" + ord.getUuid(); //处理视频,修改链接和地址 ord.setStatus(1); ord.setUrl(url); List<ItemMedia> list = itemMediaRepository.findMediaByUuid(ord.getUuid()); ItemMedia itemMedia; if(list.isEmpty()){ itemMedia = itemMediaRepository.save(ord); }else{ itemMedia = list.get(0); } //返回字段 order_return.put("uuid", itemMedia.getUuid()); order_return.put("status", itemMedia.getStatus()); order_return.put("url", itemMedia.getUrl()); return order_return; } //视频下载链接 @RequestMapping(value = "/media", method = RequestMethod.GET) public ResponseEntity<InputStreamResource> downloadFile( Long uuid) throws IOException { //生成相应的文件下载链接 // String filePath = 
"E:/" + 1 + ".rmvb"; // 通过uuid查找高码视频路径 System.out.println("uuid = " + uuid); List<ItemMedia> list = itemMediaRepository.findMediaByUuid(uuid); if(list.isEmpty()) return null; ItemMedia itemMedia = list.get(0); System.out.println(itemMedia.getHighdef_video_path()); // String filePath = "/home/derc/video/" + id + ".rmvb"; String filePath = itemMedia.getHighdef_video_path(); FileSystemResource file = new FileSystemResource(filePath); HttpHeaders headers = new HttpHeaders(); headers.add("Cache-Control", "no-cache, no-store, must-revalidate"); headers.add("Content-Disposition", String.format("attachment; filename=\"%s\"", file.getFilename())); headers.add("Pragma", "no-cache"); headers.add("Expires", "0"); return ResponseEntity .ok() .headers(headers) .contentLength(file.contentLength()) .contentType(MediaType.parseMediaType("application/octet-stream")) .body(new InputStreamResource(file.getInputStream())); } }
updata
src/main/java/com/meiziaccess/controller/OrderController.java
updata
<ide><path>rc/main/java/com/meiziaccess/controller/OrderController.java <ide> String url = "http://" + IPAddress + "/media?uuid=" + ord.getUuid(); <ide> <ide> //处理视频,修改链接和地址 <del> ord.setStatus(1); <add> ord.setStatus(0); <ide> ord.setUrl(url); <ide> List<ItemMedia> list = itemMediaRepository.findMediaByUuid(ord.getUuid()); <ide> ItemMedia itemMedia;
Java
mit
60590712fd31b8295811b6cc7899db523d7492f0
0
GlowstonePlusPlus/GlowstonePlusPlus,GlowstoneMC/GlowstonePlusPlus,GlowstonePlusPlus/GlowstonePlusPlus,GlowstonePlusPlus/GlowstonePlusPlus,GlowstoneMC/GlowstonePlusPlus,GlowstoneMC/GlowstonePlusPlus,GlowstonePlusPlus/GlowstonePlusPlus,GlowstoneMC/GlowstonePlusPlus
package net.glowstone.net; import io.netty.channel.ChannelFuture; import java.net.InetSocketAddress; import java.util.concurrent.CountDownLatch; import net.glowstone.GlowServer; /** * Represents a network server. * * <p>Modified implementation of {@link com.flowpowered.network.NetworkServer}. */ public abstract class GlowNetworkServer { private final GlowServer server; protected CountDownLatch latch; /** * Creates an instance for the specified server. * * @param server the associated GlowServer * @param latch The countdown latch used during server startup to wait for network server * binding. */ public GlowNetworkServer(GlowServer server, CountDownLatch latch) { this.server = server; this.latch = latch; } public abstract ChannelFuture bind(InetSocketAddress address); public GlowServer getServer() { return server; } public void onBindSuccess(InetSocketAddress address) { latch.countDown(); } public abstract void onBindFailure(InetSocketAddress address, Throwable t); public abstract void shutdown(); }
src/main/java/net/glowstone/net/GlowNetworkServer.java
package net.glowstone.net; import io.netty.channel.ChannelFuture; import java.net.InetSocketAddress; import java.util.concurrent.CountDownLatch; import net.glowstone.GlowServer; /** * Represents a network server. * * Modified implementation of {@link com.flowpowered.network.NetworkServer}. */ public abstract class GlowNetworkServer { private final GlowServer server; protected CountDownLatch latch; public GlowNetworkServer(GlowServer server, CountDownLatch latch) { this.server = server; this.latch = latch; } public abstract ChannelFuture bind(InetSocketAddress address); public GlowServer getServer() { return server; } public void onBindSuccess(InetSocketAddress address) { latch.countDown(); } public abstract void onBindFailure(InetSocketAddress address, Throwable t); public abstract void shutdown(); }
Fix CheckStyle issues in net/GlowNetworkServer.java (#662)
src/main/java/net/glowstone/net/GlowNetworkServer.java
Fix CheckStyle issues in net/GlowNetworkServer.java (#662)
<ide><path>rc/main/java/net/glowstone/net/GlowNetworkServer.java <ide> /** <ide> * Represents a network server. <ide> * <del> * Modified implementation of {@link com.flowpowered.network.NetworkServer}. <add> * <p>Modified implementation of {@link com.flowpowered.network.NetworkServer}. <ide> */ <ide> public abstract class GlowNetworkServer { <ide> <ide> private final GlowServer server; <ide> protected CountDownLatch latch; <ide> <add> /** <add> * Creates an instance for the specified server. <add> * <add> * @param server the associated GlowServer <add> * @param latch The countdown latch used during server startup to wait for network server <add> * binding. <add> */ <ide> public GlowNetworkServer(GlowServer server, CountDownLatch latch) { <ide> this.server = server; <ide> this.latch = latch;
JavaScript
mit
a9540a9107e5c461f60ffa8a6b89ea006e13fc77
0
jmettraux/h.js,jmettraux/h.js,jmettraux/h.js
// // Copyright (c) 2015-2018, John Mettraux, [email protected] // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // // Made in Japan // var H = (function() { "use strict"; var self = this; this.VERSION = '1.1.2'; this.toArray = function(a) { return Array.prototype.slice.call(a); }; var dashData = function(s) { return s.replace(/\[-([-_a-zA-Z0-9]+)(=|\])/g, '[data-$1$2'); }; var qs = function(start, sel, all) { sel = dashData(sel); return all ? start.querySelectorAll(sel) : start.querySelector(sel); }; var toEltRefine = function(start, sel) { if ( ! sel) { sel = start; start = document; } if ( ! start) { start = document; } if ((typeof start) === 'string') start = qs(document, start); return [ start, sel ]; }; var toElt = function(start, sel) { var se = toEltRefine(start, sel); var sta = se[0], sel = se[1]; if ((typeof sel) !== 'string') return sel; var m = sel.match(/^\^([^ ]+)(.*)$/); if (m) { sta = self.closest(sta, m[1]); sel = m[2].trim(); } if ( ! 
sta) return null; if (sel) return qs(sta, sel); return sta; }; var toElts = function(start, sel) { var se = toEltRefine(start, sel); var sta = se[0], sel = se[1]; var es = null; if ((typeof sel) === 'string') { var m = sel.match(/^\^([^ ]+)(.*)$/); if (m) { sta = self.closest(sta, m[1]); sel = m[2].trim(); } es = sel.length > 0 ? qs(sta, sel, true) : [ sta ]; } else { es = [ sel ]; } var r = []; for (var i = 0, l = es.length; i < l; i++) { r.push(es[i]); }; return r; }; this.elt = function(start, sel) { return toElt(start, sel); }; this.elts = function(start, sel) { return toElts(start, sel); }; this.forEach = function(start, sel, fun) { if ((typeof sel) === 'function') { fun = sel; sel = null; } var r = toElts(start, sel); r.forEach(fun); return r; }; this.map = function(start, sel, fun) { if ((typeof sel) === 'function') { fun = sel; sel = null; } return toElts(start, sel).map(fun); }; this.tdim = function(start, sel) { var elt = toElt(start, sel); if ( ! elt) return null; var left = 0; var top = 0; var e = elt; while (e) { left += e.offsetLeft; top += e.offsetTop; e = e.offsetParent; } return { top: top, bottom: top + elt.offsetHeight, left: left, right: left + elt.offsetWidth, height: elt.offsetHeight, width: elt.offsetWidth } } this.dim = function(start, sel) { var e = toElt(start, sel); if ( ! e) return null; return { top: e.offsetTop, bottom: e.offsetTop + e.offsetHeight, left: e.offsetLeft, right: e.offsetLeft + e.offsetWidth, height: e.offsetHeight, width: e.offsetWidth } }; this.path = function(start, sel) { var e = toElt(start, sel); if ( ! e) return null; if (e.id) return '#' + e.id; var pp = self.path(e.parentElement); // > var nn = e.nodeName.toLowerCase(); var cs = self.classArray(e); cs = cs.length > 0 ? '.' + cs.join('.') : ''; var an = e.getAttribute('name'); an = an ? 
'[name="' + an + '"]' : ''; if (cs !== '' || an !== '') return pp + ' > ' + nn + cs + an; //var sb = e; var n = 0; //while (sb.nodeType === Node.ELEMENT_NODE && (sb = sb.previousSibling)) n++; var sb = e; var n = 0; while (sb) { if (sb.nodeType === 1) n++; sb = sb.previousSibling; } // return pp + ' > ' + ':nth-child(' + n + ')'; }; var onOrOff = function(dir, start, sel, eventName, eventHandler) { if ( ! eventHandler) { eventHandler = eventName; eventName = sel; sel = start; start = document; } var es = toElts(start, sel); for (var i = 0; ; i++) { var e = es[i]; if ( ! e) break; if (dir === 'on') e.addEventListener(eventName, eventHandler); else /* off */ e.removeEventListener(eventName, eventHandler); } }; this.on = function(start, sel, eventName, eventHandler) { onOrOff('on', start, sel, eventName, eventHandler); }; this.off = function(start, sel, eventName, eventHandler) { onOrOff('off', start, sel, eventName, eventHandler); }; var indexNext = function(sel) { var d = sel.indexOf('.'); var s = sel.indexOf('#'); if (d < 0) return s; if (s < 0) return d; return d < s ? d : s; }; this.toNode = function(html, sel) { if ((typeof html) !== 'string') return sel ? self.elt(html, sel) : html; var e = document.createElement('div'); e.innerHTML = html; // :-( e = e.children[0]; return sel ? self.elt(e, sel) : e; }; var defaultOn = function(type, method, uri) { return function(res) { if (type === 'load') console.log([ method + ' ' + uri, res ]); else console.log([ method + ' ' + uri + ' connection problem', res ]); } }; var isHeaders = function(o) { if ((typeof o) !== 'object') return false; for (var k in o) { if ((typeof o[k]) !== 'string') return false; if ( ! 
k.match(/^[A-Z][A-Za-z0-9-]+$/)) return false; } return true; }; this.request = function(method, uri, headers, data, callbacks) { // shuffle args var as = { met: method, uri: uri }; if (arguments.length >= 5) { as.hds = headers; as.dat = data; as.cbs = callbacks; } else if (arguments.length === 4) { // met uri dat cbs || met uri hds cbs if (isHeaders(headers)) as.hds = headers; else as.dat = headers; as.cbs = data; } else if (arguments.length === 3) { as.cbs = headers; } else { throw "not enough arguments for H.request"; } if ((typeof as.cbs) === 'function') as.cbs = { onok: as.cbs }; if ( ! as.hds) as.hds = {}; // prepare request var r = new XMLHttpRequest(); r.open(as.met, as.uri, true); for (var k in as.hds) r.setRequestHeader(k, as.hds[k]); if (as.dat) { var con = as.dat.constructor.toString(); var typ = typeof as.dat; var cot = as.hds['Content-Type'] || '/json'; if (con.match(/FormData/)) { //r.setRequestHeader('Content-Type', 'application/form-data'); } else if (cot.match(/\/json\b/) || typ !== 'string') { r.setRequestHeader('Content-Type', 'application/json; charset=UTF-8'); as.dat = typ === 'string' ? as.dat : JSON.stringify(as.dat); } else { as.dat = as.dat.toString(); } } // prepare callbacks r.onload = function() { var o = { status: r.status, request: r }; o.data = null; try { o.data = JSON.parse(r.responseText); } catch (ex) {}; if (as.cbs.onok && r.status === 200) as.cbs.onok(o); else (as.cbs.onload || defaultOn('load', as.met, as.uri))(o); }; r.onerror = as.cbs.onerror || defaultOn('error', as.met, as.uri); // request r.send(as.dat); }; this.upload = function(uri, inputFileElt_s, data, callbacks) { if ( ! callbacks) { callbacks = data; data = {}; } var fd = new FormData(); for (var k in data) fd.append(k, data[k]); var isMulti = Array.isArray(inputFileElt_s); var elts = isMulti ? 
inputFileElt_s : [ inputFileElt_s ]; var fcount = 0; elts.forEach(function(elt) { var files = elt.files; for (var i = 0, l = files.length; i < l; i++) { fcount = fcount + 1; var f = files[i]; var l = null; for (var j = 0, al = elt.attributes.length; j < al; j++) { var a = elt.attributes.item(j); if (a.name.match(/^data-(.*-)?lang$/)) { l = a.value; break; } } var k = 'file-'; if (l || isMulti) k = k + elt.name + '-'; if (l) k = k + l + '-'; k = k + i; fd.append(k, f, f.name); } }); if (fcount < 1) return 0; var onok = callbacks.onok; callbacks.onok = function(res) { if (callbacks.clear !== false) { elts.forEach(function(elt) { elt.value = ''; }); } onok(res); }; self.request('POST', uri, fd, callbacks); return fcount; }; this.matches = function(start, sel, pat) { if ( ! pat) { pat = sel; sel = start; start = null; } var elt = toElt(start, sel); if (elt.matches) return elt.matches(pat); if (elt.matchesSelector) return elt.matchesSelector(pat); if (elt.msMatchesSelector) return elt.msMatchesSelector(pat); throw "H.js got fed something that doesn't respond to .matches() or .matchesSelector()"; }; this.closest = function(start, sel, sel1) { if ( ! sel1) { sel1 = sel; sel = start; start = null; } var elt = toElt(start, sel); sel1 = dashData(sel1); if (self.matches(elt, sel1)) return elt; return elt.parentElement ? 
self.closest(elt.parentElement, sel1) : null; }; // adapted from http://upshots.org/javascript/jquery-copy-style-copycss // this.style = function(start, sel) { var elt = toElt(start, sel); var r = {}; var style = null; if (window.getComputedStyle) { style = window.getComputedStyle(elt, null); for (var i = 0, l = style.length; i < l; i++) { var p = style[i]; var n = p.replace( /-([a-za])/g, function(a, b) { return b.toUpperCase(); }) r[n] = style.getPropertyValue(p); } return r; } if (style = elt.currentStyle) { for (var p in style) r[p] = style[p]; return r; } if (style = elt.style) { for (var p in style) { var s = style[p]; if ((typeof s) !== 'function') r[p] = s; } //return r; } return r; }; this.hasClass = function(start, sel, cla) { if ( ! cla) { cla = sel; sel = start; start = null; } var elt = toElt(start, sel); if (cla[0] === '.') cla = cla.substring(1); try { if (elt.classList) return elt.classList.contains(cla); return (new RegExp('\\b' + cla + '\\b')).test(elt.className); } catch (ex) { return false; } }; this.isHidden = function(start, sel) { var a = self.toArray(arguments); a.push('.hidden'); return self.hasClass.apply(null, a); }; var visit = function(start, sel, bof, onTrue, onFalse) { self.forEach(start, sel, function(e) { var b = (typeof bof === 'function') ? bof(e) : bof; var fun = b ? onTrue : onFalse; if (fun) fun(e); }); }; var reClass = function(elt, cla, dir) { if (cla[0] === '.') cla = cla.substring(1); elt.classList[dir === 'r' ? 'remove' : 'add'](cla); }; var rearg_sta_sel_nam_las = function(args, las) { var a = args[0], b = args[1], c = args[2], d = args[3]; if (args.length < 2) throw "at least 2 arguments required"; if (args.length === 2) return { sta: a, sel: null, nam: b, las: las }; if (args.length > 3) return { sta: a, sel: b, nam: c, las: d }; // sta/sel/nam or sta/nam/las ? 
if ((typeof c) === 'string' && c.match(/^\.?[^ ]+$/)) return { sta: a, sel: b, nam: c, las: las }; return { sta: a, sel: null, nam: b, las: c }; }; var toggle = function(start, sel, cla, bof, mod) { var add = function(e) { reClass(e, cla, 'a'); }; var rem = function(e) { reClass(e, cla, 'r'); }; var pos = add, neg = rem; if (mod === 'ra') { pos = rem; neg = add; } else if (mod === 'a') { neg = null; } else if (mod === 'r') { pos = rem; neg = null; } visit(start, sel, bof, pos, neg); }; this.addClass = function(start, sel, cla, bof) { var as = rearg_sta_sel_nam_las(arguments, true); toggle(as.sta, as.sel, as.nam, as.las, 'a'); } this.removeClass = function(start, sel, cla, bof) { var as = rearg_sta_sel_nam_las(arguments, true); toggle(as.sta, as.sel, as.nam, as.las, 'r'); }; this.toggleClass = function(start, sel, cla) { if ( ! cla) { cla = sel; sel = start; start = null; } var bof = function(e) { return ! self.hasClass(e, cla); }; toggle(start, sel, cla, bof, 'ar'); }; this.toggle = this.toggleClass; this.setClass = function(start, sel, cla, bof) { var as = rearg_sta_sel_nam_las(arguments, true); toggle(as.sta, as.sel, as.nam, as.las, 'ar'); }; this.renameClass = function(start, sel, cla0, cla1) { if ( ! 
cla1) { cla1 = cla0; cla0 = sel; sel = start; start = null; } var bof = function(e) { return self.hasClass(e, cla0); }; var fun = function(e) { self.removeClass(e, cla0); self.addClass(e, cla1); }; visit(start, sel, bof, fun, null); }; this.classArray = function(start, sel) { var e = self.elt(start, sel); var l = e.classList || e.className.split(' '); var a = []; for (var i = 0, l = e.classList.length; i < l; i++) a.push(e.classList[i]); return a; }; var rearg_sta_sel_las = function(args, las) { var a = args[0], b = args[1], c = args[2]; if (args.length === 1) return { sta: a, sel: null, las: las }; if (args.length > 2) return { sta: a, sel: b, las: c }; if (args.length === 2) { if ((typeof b) === 'string') return { sta: a, sel: b, las: las }; return { sta: a, sel: null, las: b }; } throw "called without arguments"; }; this.show = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.shown', as.las, 'ar'); }; this.unshow = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.shown', as.las, 'ra'); }; this.hide = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.hidden', as.las, 'ar'); }; this.unhide = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.hidden', as.las, 'ra'); }; var able = function(start, sel, bof, dir) { var en = function(e) { e.removeAttribute('disabled') }; var dis = function(e) { e.setAttribute('disabled', 'disabled'); }; visit(start, sel, bof, dir === 'e' ? en : dis, dir === 'e' ? 
dis : en); }; this.enable = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); able(as.sta, as.sel, as.las, 'e'); }; this.disable = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); able(as.sta, as.sel, as.las, 'd'); }; this.cenable = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.disabled', as.las, 'ra'); }; this.cdisable = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.disabled', as.las, 'ar'); }; this.isDisabled = function(start, sel) { var elt = toElt(start, sel); return ( (typeof elt.getAttribute('disabled')) === 'string' || self.hasClass(elt, '.disabled') ); }; this.getAtt = function(start, sel, aname/*, default*/) { var as = rearg_sta_sel_nam_las(arguments, undefined); var r = /(.*\[([-_a-zA-Z0-9]+)\].*)+/; var ms = as.sel && as.sel.match(r); var mn = as.nam && as.nam.match(r); if ( ! as.sel && as.nam && mn) { as.sel = as.nam; as.nam = mn[mn.length - 1]; } else if (ms) { as.las = as.nam; as.nam = ms[ms.length - 1]; } var e = self.elt(as.sta, as.sel); //if ( ! e) throw "elt not found, cannot read attributes"; if ( ! e) return as.las; if (as.nam && as.nam.substr(0, 1) === '-') as.nam = 'data' + as.nam; var av = e.getAttribute(as.nam) return av === null ? as.las : av; }; var FALSIES = [ false, null, undefined, NaN, '' ]; var isFalsy = function(v) { return FALSIES.indexOf(v) > -1; } this.getAtti = function(start, sel, aname/*, default*/) { var v = self.getAtt.apply(null, arguments); v = parseInt('' + v, 10); return isFalsy(v) ? null : v; }; this.getAttf = function(start, sel, aname/*, default*/) { var v = self.getAtt.apply(null, arguments); v = parseFloat('' + v); return isFalsy(v) ? null : v; }; this.text = function(start, sel/*, default*/) { var as = rearg_sta_sel_las(arguments); var e = self.elt(as.sta, as.sel); if ( ! 
e) throw "elt not found, cannot read text"; var t = e.textContent.trim(); return (t === '' && as.las) ? as.las : t; }; this.get = function(start, sel/*, false */) { var a = self.toArray(arguments); var l = true; if (typeof a[a.length - 1] === 'boolean') l = a.pop(); var e = self.elt.apply(null, a); var v = e ? e.value : null; v = v ? v.trim() : ''; return l === false && v.length === 0 ? null : v; }; this.getb = function(start, sel/*, default */) { var a = self.toArray(arguments); var d = null; if (typeof a[a.length - 1] === 'boolean') d = a.pop(); var v = self.get.apply(null, a).toLowerCase(); if (d !== null && v === '') return d; return v === 'true' || v === 'yes'; }; this.getf = function(start, sel/*, default */) { var a = self.toArray(arguments); var l = a[a.length - 1]; var d = null; if (typeof l === 'number') d = a.pop(); if (d !== null) a.push(false); var v = self.get.apply(null, a); if (v === null) { if (l === false) return v; if (d) return d; v = '0.0' } return parseFloat(v); }; this.geti = function(start, sel/*, default */) { var a = self.toArray(arguments); var l = a[a.length - 1]; var d = null; if (typeof l === 'number') d = a.pop(); if (d !== null) a.push(false); var v = self.get.apply(null, a); if (v === null) { if (l === false) return v; v = d ? '' + d : '0' } return parseInt(v, 10); }; this.set = function(start, sel, value) { var a = self.toArray(arguments); var v = a.pop(); v = (v === null || v === undefined) ? '' : '' + v; var e = self.elt.apply(null, a); if (e) e.value = v; return v; }; this.capitalize = function(s) { return s.charAt(0).toUpperCase() + s.slice(1); }; this.decapitalize = function(s) { return s.charAt(0).toLowerCase() + s.slice(1); }; this.toCamelCase = function(s, cap) { var s = s.replace( /([_-][a-z])/g, function(x) { return x.substring(1).toUpperCase(); }); return cap ? self.capitalize(s) : s; }; this.prepend = function(start, sel, elt) { if ( ! 
elt) { elt = sel; sel = start; start = null; } var e = toElt(start, sel); e.parentNode.insertBefore(elt, e); }; this.postpend = function(start, sel, elt) { if ( ! elt) { elt = sel; sel = start; start = null; } var e = toElt(start, sel); e.parentNode.insertBefore(elt, e.nextSibling); }; this.remove = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toElts(as.sta, as.sel).forEach(function(e) { if ((typeof as.las === 'function') ? as.las(e) : as.las) { e.parentElement.removeChild(e); } }); }; this.clean = function(start, sel, cla) { var elt = toElt(start, sel); if (cla && cla[0] !== '.') cla = '.' + cla; if (cla) self.forEach(elt, cla, function(e) { e.parentElement.removeChild(e); }); else while (elt.firstChild) elt.removeChild(elt.firstChild); return elt; }; this.onDocumentReady = function(fev) { if (document.readyState != 'loading') fev(); else document.addEventListener('DOMContentLoaded', fev); }; this.makeGrower = function(name) { var scan = function(s) { var m, r = []; s.replace(/([#.][^#.]+)/g, function(x) { r.push({ k: x[0], n: x.substring(1, x.length) }); }); return r; }; return function() { var e = document.createElement(name); for (var i = 0, l = arguments.length; i < l; i++) { var a = arguments[i]; if (a === false) return null; // skip this subtree if (a === null) continue; // ignore null (skipped) children var s = (typeof a === 'string'); if (s && (a[0] === '.' || a[0] === '#') && ! 
a.match(/\s/)) scan(a).forEach(function(x) { if (x.k === '#') e.id = x.n; else e.classList.add(x.n); }); else if (s) e.appendChild(document.createTextNode(a)); else if (a.nodeType && a.innerHTML) e.appendChild(a); else if (typeof a === 'object') for (var k in a) { e.setAttribute(k, a[k]); }; } return e; }; }; this.create = function(tagname/*, rest */) { var as = Array.prototype.slice.call(arguments, 1); var m = tagname.match(/^([a-zA-Z0-9]+)?([.#].+)$/) if (m) { tagname = m[1] || 'div'; as.unshift(m[2]); } return self.makeGrower(tagname).apply(null, as); }; this.grow = function(fun) { var growers = 'var ' + 'a abbr address area article aside audio b base bdi bdo blockquote br button canvas caption cite code col colgroup datalist dd del details dfn dialog div dl dt em embed fieldset figcaption figure footer form h1 h2 h3 h4 h5 h6 header hr i iframe img input ins kbd keygen label legend li main map mark menu menuitem meta meter nav noscript object ol optgroup option output p param picture pre progress q rp rt ruby s samp script section select small source span strong style sub summary sup table tbody td textarea tfoot th thead time title tr track u ul video wbr' .split(' ') .map(function(t) { return t + '=H.makeGrower("' + t + '")' }) .join(','); // NB: "var" not included var f = fun.toString().trim(); f = f.substring(f.indexOf('{') + 1, f.lastIndexOf('}')); return eval(growers + ';' + f); }; this.delay = function(ms, fun) { var t = null; return function() { var as = arguments; window.clearTimeout(t); t = window.setTimeout(function() { fun.apply(this, as) }, ms); }; }; this.makeWorker = function(workerFunction/*, wrap=true*/) { var s = workerFunction.toString(); var w = arguments[1]; w = (w === undefined) || ( !! 
w); if (w) s = "self.addEventListener('message', " + s + ", false);"; else s = s.substring(s.indexOf('{') + 1, s.lastIndexOf('}')); var r = document && document.location && document.location.href; if (r) { var j = r.lastIndexOf('/'); if (j < 0) j = r.length - 1; r = r.substring(0, j) + '/'; s = "var rootUrl = \"" + r + "\";" + s; } var b = new Blob([ s ]); var w = new Worker(window.URL.createObjectURL(b)); w.on = function(t, cb) { w.addEventListener(t, cb, false); }; return w; }; // // done. return this; }).apply({}); // end H
src/h.js
// // Copyright (c) 2015-2018, John Mettraux, [email protected] // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // // Made in Japan // var H = (function() { "use strict"; var self = this; this.VERSION = '1.1.2'; this.toArray = function(a) { return Array.prototype.slice.call(a); }; var dashData = function(s) { return s.replace(/\[-([-_a-zA-Z0-9]+)(=|\])/g, '[data-$1$2'); }; var qs = function(start, sel, all) { sel = dashData(sel); return all ? start.querySelectorAll(sel) : start.querySelector(sel); }; var toEltRefine = function(start, sel) { if ( ! sel) { sel = start; start = document; } if ( ! start) { start = document; } if ((typeof start) === 'string') start = qs(document, start); return [ start, sel ]; }; var toElt = function(start, sel) { var se = toEltRefine(start, sel); var sta = se[0], sel = se[1]; if ((typeof sel) !== 'string') return sel; var m = sel.match(/^\^([^ ]+)(.*)$/); if (m) { sta = self.closest(sta, m[1]); sel = m[2].trim(); } if ( ! 
sta) return null; if (sel) return qs(sta, sel); return sta; }; var toElts = function(start, sel) { var se = toEltRefine(start, sel); var sta = se[0], sel = se[1]; var es = null; if ((typeof sel) === 'string') { var m = sel.match(/^\^([^ ]+)(.*)$/); if (m) { sta = self.closest(sta, m[1]); sel = m[2].trim(); } es = sel.length > 0 ? qs(sta, sel, true) : [ sta ]; } else { es = [ sel ]; } var r = []; for (var i = 0, l = es.length; i < l; i++) { r.push(es[i]); }; return r; }; this.elt = function(start, sel) { return toElt(start, sel); }; this.elts = function(start, sel) { return toElts(start, sel); }; this.forEach = function(start, sel, fun) { if ((typeof sel) === 'function') { fun = sel; sel = null; } var r = toElts(start, sel); r.forEach(fun); return r; }; this.map = function(start, sel, fun) { if ((typeof sel) === 'function') { fun = sel; sel = null; } return toElts(start, sel).map(fun); }; this.tdim = function(start, sel) { var elt = toElt(start, sel); if ( ! elt) return null; var left = 0; var top = 0; var e = elt; while (e) { left += e.offsetLeft; top += e.offsetTop; e = e.offsetParent; } return { top: top, bottom: top + elt.offsetHeight, left: left, right: left + elt.offsetWidth, height: elt.offsetHeight, width: elt.offsetWidth } } this.dim = function(start, sel) { var e = toElt(start, sel); if ( ! e) return null; return { top: e.offsetTop, bottom: e.offsetTop + e.offsetHeight, left: e.offsetLeft, right: e.offsetLeft + e.offsetWidth, height: e.offsetHeight, width: e.offsetWidth } }; this.path = function(start, sel) { var e = toElt(start, sel); if ( ! e) return null; if (e.id) return '#' + e.id; var pp = self.path(e.parentElement); // > var nn = e.nodeName.toLowerCase(); var cs = self.classArray(e); cs = cs.length > 0 ? '.' + cs.join('.') : ''; var an = e.getAttribute('name'); an = an ? 
'[name="' + an + '"]' : ''; if (cs !== '' || an !== '') return pp + ' > ' + nn + cs + an; //var sb = e; var n = 0; //while (sb.nodeType === Node.ELEMENT_NODE && (sb = sb.previousSibling)) n++; var sb = e; var n = 0; while (sb) { if (sb.nodeType === 1) n++; sb = sb.previousSibling; } // return pp + ' > ' + ':nth-child(' + n + ')'; }; var onOrOff = function(dir, start, sel, eventName, eventHandler) { if ( ! eventHandler) { eventHandler = eventName; eventName = sel; sel = start; start = document; } var es = toElts(start, sel); for (var i = 0; ; i++) { var e = es[i]; if ( ! e) break; if (dir === 'on') e.addEventListener(eventName, eventHandler); else /* off */ e.removeEventListener(eventName, eventHandler); } }; this.on = function(start, sel, eventName, eventHandler) { onOrOff('on', start, sel, eventName, eventHandler); }; this.off = function(start, sel, eventName, eventHandler) { onOrOff('off', start, sel, eventName, eventHandler); }; var indexNext = function(sel) { var d = sel.indexOf('.'); var s = sel.indexOf('#'); if (d < 0) return s; if (s < 0) return d; return d < s ? d : s; }; this.toNode = function(html, sel) { if ((typeof html) !== 'string') return sel ? self.elt(html, sel) : html; var e = document.createElement('div'); e.innerHTML = html; // :-( e = e.children[0]; return sel ? self.elt(e, sel) : e; }; var defaultOn = function(type, method, uri) { return function(res) { if (type === 'load') console.log([ method + ' ' + uri, res ]); else console.log([ method + ' ' + uri + ' connection problem', res ]); } }; var isHeaders = function(o) { if ((typeof o) !== 'object') return false; for (var k in o) { if ((typeof o[k]) !== 'string') return false; if ( ! 
k.match(/^[A-Z][A-Za-z0-9-]+$/)) return false; } return true; }; this.request = function(method, uri, headers, data, callbacks) { // shuffle args var as = { met: method, uri: uri }; if (arguments.length >= 5) { as.hds = headers; as.dat = data; as.cbs = callbacks; } else if (arguments.length === 4) { // met uri dat cbs || met uri hds cbs if (isHeaders(headers)) as.hds = headers; else as.dat = headers; as.cbs = data; } else if (arguments.length === 3) { as.cbs = headers; } else { throw "not enough arguments for H.request"; } if ((typeof as.cbs) === 'function') as.cbs = { onok: as.cbs }; if ( ! as.hds) as.hds = {}; // prepare request var r = new XMLHttpRequest(); r.open(as.met, as.uri, true); for (var k in as.hds) r.setRequestHeader(k, as.hds[k]); if (as.dat) { var con = as.dat.constructor.toString(); var typ = typeof as.dat; var cot = as.hds['Content-Type'] || '/json'; if (con.match(/FormData/)) { //r.setRequestHeader('Content-Type', 'application/form-data'); } else if (cot.match(/\/json\b/) || typ !== 'string') { r.setRequestHeader('Content-Type', 'application/json; charset=UTF-8'); as.dat = typ === 'string' ? as.dat : JSON.stringify(as.dat); } else { as.dat = as.dat.toString(); } } // prepare callbacks r.onload = function() { var o = { status: r.status, request: r }; o.data = null; try { o.data = JSON.parse(r.responseText); } catch (ex) {}; if (as.cbs.onok && r.status === 200) as.cbs.onok(o); else (as.cbs.onload || defaultOn('load', as.met, as.uri))(o); }; r.onerror = as.cbs.onerror || defaultOn('error', as.met, as.uri); // request r.send(as.dat); }; this.upload = function(uri, inputFileElt_s, data, callbacks) { if ( ! callbacks) { callbacks = data; data = {}; } var fd = new FormData(); for (var k in data) fd.append(k, data[k]); var isMulti = Array.isArray(inputFileElt_s); var elts = isMulti ? 
inputFileElt_s : [ inputFileElt_s ]; var fcount = 0; elts.forEach(function(elt) { var files = elt.files; for (var i = 0, l = files.length; i < l; i++) { fcount = fcount + 1; var f = files[i]; var l = null; for (var j = 0, al = elt.attributes.length; j < al; j++) { var a = elt.attributes.item(j); if (a.name.match(/^data-(.*-)?lang$/)) { l = a.value; break; } } var k = 'file-'; if (l || isMulti) k = k + elt.name + '-'; if (l) k = k + l + '-'; k = k + i; fd.append(k, f, f.name); } }); if (fcount < 1) return 0; var onok = callbacks.onok; callbacks.onok = function(res) { if (callbacks.clear !== false) { elts.forEach(function(elt) { elt.value = ''; }); } onok(res); }; self.request('POST', uri, fd, callbacks); return fcount; }; this.matches = function(start, sel, pat) { if ( ! pat) { pat = sel; sel = start; start = null; } var elt = toElt(start, sel); if (elt.matches) return elt.matches(pat); if (elt.matchesSelector) return elt.matchesSelector(pat); if (elt.msMatchesSelector) return elt.msMatchesSelector(pat); throw "H.js got fed something that doesn't respond to .matches() or .matchesSelector()"; }; this.closest = function(start, sel, sel1) { if ( ! sel1) { sel1 = sel; sel = start; start = null; } var elt = toElt(start, sel); sel1 = dashData(sel1); if (self.matches(elt, sel1)) return elt; return elt.parentElement ? 
self.closest(elt.parentElement, sel1) : null; }; // adapted from http://upshots.org/javascript/jquery-copy-style-copycss // this.style = function(start, sel) { var elt = toElt(start, sel); var r = {}; var style = null; if (window.getComputedStyle) { style = window.getComputedStyle(elt, null); for (var i = 0, l = style.length; i < l; i++) { var p = style[i]; var n = p.replace( /-([a-za])/g, function(a, b) { return b.toUpperCase(); }) r[n] = style.getPropertyValue(p); } return r; } if (style = elt.currentStyle) { for (var p in style) r[p] = style[p]; return r; } if (style = elt.style) { for (var p in style) { var s = style[p]; if ((typeof s) !== 'function') r[p] = s; } //return r; } return r; }; this.hasClass = function(start, sel, cla) { if ( ! cla) { cla = sel; sel = start; start = null; } var elt = toElt(start, sel); if (cla[0] === '.') cla = cla.substring(1); try { if (elt.classList) return elt.classList.contains(cla); return (new RegExp('\\b' + cla + '\\b')).test(elt.className); } catch (ex) { return false; } }; this.isHidden = function(start, sel) { var a = self.toArray(arguments); a.push('.hidden'); return self.hasClass.apply(null, a); }; var visit = function(start, sel, bof, onTrue, onFalse) { self.forEach(start, sel, function(e) { var b = (typeof bof === 'function') ? bof(e) : bof; var fun = b ? onTrue : onFalse; if (fun) fun(e); }); }; var reClass = function(elt, cla, dir) { if (cla[0] === '.') cla = cla.substring(1); elt.classList[dir === 'r' ? 'remove' : 'add'](cla); }; var rearg_sta_sel_nam_las = function(args, las) { var a = args[0], b = args[1], c = args[2], d = args[3]; if (args.length < 2) throw "at least 2 arguments required"; if (args.length === 2) return { sta: a, sel: null, nam: b, las: las }; if (args.length > 3) return { sta: a, sel: b, nam: c, las: d }; // sta/sel/nam or sta/nam/las ? 
if ((typeof c) === 'string' && c.match(/^\.?[^ ]+$/)) return { sta: a, sel: b, nam: c, las: las }; return { sta: a, sel: null, nam: b, las: c }; }; var toggle = function(start, sel, cla, bof, mod) { var add = function(e) { reClass(e, cla, 'a'); }; var rem = function(e) { reClass(e, cla, 'r'); }; var pos = add, neg = rem; if (mod === 'ra') { pos = rem; neg = add; } else if (mod === 'a') { neg = null; } else if (mod === 'r') { pos = rem; neg = null; } visit(start, sel, bof, pos, neg); }; this.addClass = function(start, sel, cla, bof) { var as = rearg_sta_sel_nam_las(arguments, true); toggle(as.sta, as.sel, as.nam, as.las, 'a'); } this.removeClass = function(start, sel, cla, bof) { var as = rearg_sta_sel_nam_las(arguments, true); toggle(as.sta, as.sel, as.nam, as.las, 'r'); }; this.toggleClass = function(start, sel, cla) { if ( ! cla) { cla = sel; sel = start; start = null; } var bof = function(e) { return ! self.hasClass(e, cla); }; toggle(start, sel, cla, bof, 'ar'); }; this.toggle = this.toggleClass; this.setClass = function(start, sel, cla, bof) { var as = rearg_sta_sel_nam_las(arguments, true); toggle(as.sta, as.sel, as.nam, as.las, 'ar'); }; this.renameClass = function(start, sel, cla0, cla1) { if ( ! 
cla1) { cla1 = cla0; cla0 = sel; sel = start; start = null; } var bof = function(e) { return self.hasClass(e, cla0); }; var fun = function(e) { self.removeClass(e, cla0); self.addClass(e, cla1); }; visit(start, sel, bof, fun, null); }; this.classArray = function(start, sel) { var e = self.elt(start, sel); var l = e.classList || e.className.split(' '); var a = []; for (var i = 0, l = e.classList.length; i < l; i++) a.push(e.classList[i]); return a; }; var rearg_sta_sel_las = function(args, las) { var a = args[0], b = args[1], c = args[2]; if (args.length === 1) return { sta: a, sel: null, las: las }; if (args.length > 2) return { sta: a, sel: b, las: c }; if (args.length === 2) { if ((typeof b) === 'string') return { sta: a, sel: b, las: las }; return { sta: a, sel: null, las: b }; } throw "called without arguments"; }; this.show = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.shown', as.las, 'ar'); }; this.unshow = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.shown', as.las, 'ra'); }; this.hide = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.hidden', as.las, 'ar'); }; this.unhide = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.hidden', as.las, 'ra'); }; var able = function(start, sel, bof, dir) { var en = function(e) { e.removeAttribute('disabled') }; var dis = function(e) { e.setAttribute('disabled', 'disabled'); }; visit(start, sel, bof, dir === 'e' ? en : dis, dir === 'e' ? 
dis : en); }; this.enable = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); able(as.sta, as.sel, as.las, 'e'); }; this.disable = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); able(as.sta, as.sel, as.las, 'd'); }; this.cenable = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.disabled', as.las, 'ra'); }; this.cdisable = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toggle(as.sta, as.sel, '.disabled', as.las, 'ar'); }; this.isDisabled = function(start, sel) { var elt = toElt(start, sel); return ( (typeof elt.getAttribute('disabled')) === 'string' || self.hasClass(elt, '.disabled') ); }; this.getAtt = function(start, sel, aname/*, default*/) { var as = rearg_sta_sel_nam_las(arguments, undefined); var r = /(.*\[([-_a-zA-Z0-9]+)\].*)+/; var ms = as.sel && as.sel.match(r); var mn = as.nam && as.nam.match(r); if ( ! as.sel && as.nam && mn) { as.sel = as.nam; as.nam = mn[mn.length - 1]; } else if (ms) { as.las = as.nam; as.nam = ms[ms.length - 1]; } var e = self.elt(as.sta, as.sel); //if ( ! e) throw "elt not found, cannot read attributes"; if ( ! e) return as.las; if (as.nam && as.nam.substr(0, 1) === '-') as.nam = 'data' + as.nam; var av = e.getAttribute(as.nam) return av === null ? as.las : av; }; var FALSIES = [ false, null, undefined, NaN, '' ]; var isFalsy = function(v) { return FALSIES.indexOf(v) > -1; } this.getAtti = function(start, sel, aname/*, default*/) { var v = self.getAtt.apply(null, arguments); v = parseInt('' + v, 10); return isFalsy(v) ? null : v; }; this.getAttf = function(start, sel, aname/*, default*/) { var v = self.getAtt.apply(null, arguments); v = parseFloat('' + v); return isFalsy(v) ? null : v; }; this.text = function(start, sel/*, default*/) { var as = rearg_sta_sel_las(arguments); var e = self.elt(as.sta, as.sel); if ( ! 
e) throw "elt not found, cannot read text"; var t = e.textContent.trim(); return (t === '' && as.las) ? as.las : t; }; this.get = function(start, sel/*, false */) { var a = self.toArray(arguments); var l = true; if (typeof a[a.length - 1] === 'boolean') l = a.pop(); var e = self.elt.apply(null, a); var v = e ? e.value : null; v = v ? v.trim() : ''; return l === false && v.length === 0 ? null : v; }; this.getb = function(start, sel/*, default */) { var a = self.toArray(arguments); var d = null; if (typeof a[a.length - 1] === 'boolean') d = a.pop(); var v = self.get.apply(null, a).toLowerCase(); if (d !== null && v === '') return d; return v === 'true' || v === 'yes'; }; this.getf = function(start, sel/*, default */) { var a = self.toArray(arguments); var l = a[a.length - 1]; var d = null; if (typeof l === 'number') d = a.pop(); if (d !== null) a.push(false); var v = self.get.apply(null, a); if (v === null) { if (l === false) return v; if (d) return d; v = '0.0' } return parseFloat(v); }; this.geti = function(start, sel/*, default */) { var a = self.toArray(arguments); var l = a[a.length - 1]; var d = null; if (typeof l === 'number') d = a.pop(); if (d !== null) a.push(false); var v = self.get.apply(null, a); if (v === null) { if (l === false) return v; v = d ? '' + d : '0' } return parseInt(v, 10); }; this.set = function(start, sel, value) { var a = self.toArray(arguments); var v = a.pop(); v = (v === null || v === undefined) ? '' : '' + v; var e = self.elt.apply(null, a); if (e) e.value = v; return v; }; this.capitalize = function(s) { return s.charAt(0).toUpperCase() + s.slice(1); }; this.decapitalize = function(s) { return s.charAt(0).toLowerCase() + s.slice(1); }; this.toCamelCase = function(s, cap) { var s = s.replace( /([_-][a-z])/g, function(x) { return x.substring(1).toUpperCase(); }); return cap ? self.capitalize(s) : s; }; this.prepend = function(start, sel, elt) { if ( ! 
elt) { elt = sel; sel = start; start = null; } var e = toElt(start, sel); e.parentNode.insertBefore(elt, e); }; this.postpend = function(start, sel, elt) { if ( ! elt) { elt = sel; sel = start; start = null; } var e = toElt(start, sel); e.parentNode.insertBefore(elt, e.nextSibling); }; this.remove = function(start, sel, bof) { var as = rearg_sta_sel_las(arguments, true); toElts(as.sta, as.sel).forEach(function(e) { if ((typeof as.las === 'function') ? as.las(e) : as.las) { e.parentElement.removeChild(e); } }); }; this.clean = function(start, sel, cla) { var elt = toElt(start, sel); if (cla && cla[0] !== '.') cla = '.' + cla; if (cla) self.forEach(elt, cla, function(e) { e.parentElement.removeChild(e); }); else while (elt.firstChild) elt.removeChild(elt.firstChild); return elt; }; this.onDocumentReady = function(fev) { if (document.readyState != 'loading') fev(); else document.addEventListener('DOMContentLoaded', fev); }; this.makeGrower = function(name) { var scan = function(s) { var m, r = []; s.replace(/([#.][^#.]+)/g, function(x) { r.push({ k: x[0], n: x.substring(1, x.length) }); }); return r; }; return function() { var e = document.createElement(name); for (var i = 0, l = arguments.length; i < l; i++) { var a = arguments[i]; if (a === false) return null; // skip this subtree if (a === null) continue; // ignore null (skipped) children var s = (typeof a === 'string'); if (s && (a[0] === '.' || a[0] === '#') && ! 
a.match(/\s/)) scan(a).forEach(function(x) { if (x.k === '#') e.id = x.n; else e.classList.add(x.n); }); else if (s) e.appendChild(document.createTextNode(a)); else if (a.nodeType && a.innerHTML) e.appendChild(a); else if (typeof a === 'object') for (var k in a) { e.setAttribute(k, a[k]); }; } return e; }; }; this.create = function(tagname/*, rest */) { var as = Array.prototype.slice.call(arguments, 1); var m = tagname.match(/^([a-zA-Z0-9]+)?([.#].+)$/) if (m) { tagname = m[1] || 'div'; as.unshift(m[2]); } return self.makeGrower(tagname).apply(null, as); }; this.grow = function(fun) { var growers = 'var ' + 'a abbr address area article aside audio b base bdi bdo blockquote br button canvas caption cite code col colgroup datalist dd del details dfn dialog div dl dt em embed fieldset figcaption figure footer form h1 h2 h3 h4 h5 h6 header hr i iframe img input ins kbd keygen label legend li main map mark menu menuitem meta meter nav noscript object ol optgroup option output p param picture pre progress q rp rt ruby s samp script section select small source span strong style sub summary sup table tbody td textarea tfoot th thead time title tr track u ul video wbr' .split(' ') .map(function(t) { return t + '=H.makeGrower("' + t + '")' }) .join(','); // NB: "var" not included var f = fun.toString().trim(); f = f.substring(f.indexOf('{') + 1, f.lastIndexOf('}')); return eval(growers + ';' + f.toString()); }; this.delay = function(ms, fun) { var t = null; return function() { var as = arguments; window.clearTimeout(t); t = window.setTimeout(function() { fun.apply(this, as) }, ms); }; }; this.makeWorker = function(workerFunction/*, wrap=true*/) { var s = workerFunction.toString(); var w = arguments[1]; w = (w === undefined) || ( !! 
w); if (w) s = "self.addEventListener('message', " + s + ", false);"; else s = s.substring(s.indexOf('{') + 1, s.lastIndexOf('}')); var r = document && document.location && document.location.href; if (r) { var j = r.lastIndexOf('/'); if (j < 0) j = r.length - 1; r = r.substring(0, j) + '/'; s = "var rootUrl = \"" + r + "\";" + s; } var b = new Blob([ s ]); var w = new Worker(window.URL.createObjectURL(b)); w.on = function(t, cb) { w.addEventListener(t, cb, false); }; return w; }; // // done. return this; }).apply({}); // end H
Simplify H.grow()
src/h.js
Simplify H.grow()
<ide><path>rc/h.js <ide> var f = fun.toString().trim(); <ide> f = f.substring(f.indexOf('{') + 1, f.lastIndexOf('}')); <ide> <del> return eval(growers + ';' + f.toString()); <add> return eval(growers + ';' + f); <ide> }; <ide> <ide> this.delay = function(ms, fun) {
Java
mit
error: pathspec 'src/one/numjava.java' did not match any file(s) known to git
3f087d985207021a363ac355ef4c90ea08bf962f
1
sidd4698/deep-learning
import java.util.*; class numjava { private int M; private int N; private float finalmatrix [][]; public numjava(int M,int N) { this.M=M; this.N=N; finalmatrix=new float[M][N]; } // To create random 2-d array public static numjava createrandom (int M,int N) { numjava mat=new numjava(M,N); for (int i=0;i<M;i++) { for (int j=0;j<N;j++) { mat.finalmatrix[i][j]=(float)Math.random(); } } return mat; } // To print a matrix public static void print(numjava mat) { System.out.println(mat.M+" "+mat.N); for (int i=0;i<mat.M;i++) { for (int j=0;j<mat.N;j++) { System.out.printf("%9.4f ", mat.finalmatrix[i][j]); } System.out.println(); } } //Dot product of 2 matrices public static numjava dot(numjava mat1,numjava mat2) { int M1=mat1.M; int N1=mat1.N; int M2=mat2.M; int N2=mat2.N; if (N1!=M2) throw new RuntimeException("Illegal matrix dimensions"); numjava mat=new numjava(M1,N2); for (int i=0;i<M1;i++) { for (int j=0;j<N1;j++) { mat.finalmatrix[i][j]=0; for (int k=0;k<M2;k++) { mat.finalmatrix[i][j]+=mat1.finalmatrix[i][k]*mat2.finalmatrix[k][j]; } } } return mat; } //Elementwise multiplication public static numjava elementmul(numjava mat1,numjava mat2) { int M1=mat1.M; int N1=mat1.N; int M2=mat2.M; int N2=mat2.N; try { if (N1!=M2) throw new Exception(); } catch(Exception e) { System.out.println("Illegal matrix dimension"); } numjava mat=null; if (M1!=M2) { mat=new numjava(M2,N1); for (int i=0;i<M2;i++) { for(int j=0;j<N1;j++) { mat.finalmatrix[i][j]=mat1.finalmatrix[i][0]*mat2.finalmatrix[0][j]; } } } else { mat=new numjava(M1,N1); for (int i=0;i<M1;i++) { for (int j=0;j<N1;j++) { mat.finalmatrix[i][j]=mat1.finalmatrix[i][j]*mat2.finalmatrix[i][j]; } } } return mat; } /* public static void shape(float mat1[][]) { return mat1[0].length,mat1[1].length; } */ //Reshaping the matrices public static numjava reshape(numjava mat,int M,int N) { int old_M=mat.M; int old_N=mat.N; int counteri=0; int counterj=0; numjava C=new numjava(M,N); //try //{ if((old_M*old_N)!=(M*N)) { throw new 
RuntimeException("Cannot reshape"); } else { for (int i=0;i<M;i++) { for (int j=0;j<N;j++) { if (counterj>=old_N) { counteri++; counterj=0; } C.finalmatrix[i][j]=mat.finalmatrix[counteri][counterj]; counterj++; } } } //} /* catch(Exception e) { } */ return C; // System.out.println("Cannot rehape matrix"); } // Creating a matrix public static numjava creatematrix(float mat[][]) { int M=mat.length; int N=mat[0].length; System.out.println(M+" "+N); numjava C=new numjava(M,N); for (int i=0;i<M;i++) { for(int j=0;j<N;j++) { C.finalmatrix[i][j]=mat[i][j]; System.out.println(C.finalmatrix[i][j]); } } return C; } //Transposing a matrix public static numjava transpose(numjava mat) { numjava C=new numjava(mat.N,mat.M); // int counteri=0; // int counterj=0; for (int i=0;i<mat.M;i++) { for (int j=0;j<mat.N;j++) { C.finalmatrix[j][i]= mat.finalmatrix[i][j]; } } return C; } /* static int get_Row_Size(double [][] matrix){ return matrix.length; } static int get_Column_Size(double [][] matrix) { return matrix[0].length; } */ static float get_Max_Element_Matrix_by_Value(numjava matrix) throws Exception { if(matrix.finalmatrix == null) { throw new Exception("Matrix is null"); } float max = matrix.finalmatrix[0][0]; for (int row = 0; row < matrix.M; row++) { for (int column = 0; column < matrix.N; column++) { if(matrix.finalmatrix[row][column] > max) { max = matrix.finalmatrix[row][column]; } } } return max; } // Returns an array 1st index is row and 2nd is column static int[] get_Arg_Max(numjava matrix) throws Exception { if(matrix.finalmatrix == null) { throw new Exception("Matrix is null"); } float max = matrix.finalmatrix[0][0]; int[] index_Max = new int[2]; for (int row = 0; row < matrix.M; row++) { for (int column = 0; column < matrix.N; column++) { if(matrix.finalmatrix[row][column] > max) { max = matrix.finalmatrix[row][column]; index_Max[0] = row; index_Max[1] = column; } } } return index_Max; } static float get_Max_Element_Matrix_by_Row(numjava matrix, int row_Number) { 
float max = matrix.finalmatrix[row_Number][0]; for (int row = 0; row < matrix.M; row++) { if(matrix.finalmatrix[row_Number][row] > max) { max = matrix.finalmatrix[row_Number][row]; } } return max; } static float get_Max_Element_Matrix_by_Column(numjava matrix, int column_Number) { float max = matrix.finalmatrix[0][column_Number]; for (int column = 0; column < matrix.N; column++){ if(matrix.finalmatrix[column][column_Number] > max) { max = matrix.finalmatrix[column][column_Number]; } } return max; } static float get_Min_Element_Matrix_by_Value(numjava matrix) throws Exception { if(matrix.finalmatrix == null) { throw new Exception("Matrix is null"); } float min = matrix.finalmatrix[0][0]; for (int row = 0; row < matrix.M; row++) { for (int column = 0; column < matrix.N; column++) { if(matrix.finalmatrix[row][column] < min) { min = matrix.finalmatrix[row][column]; } } } return min; } // Returns an array 1st index is row and 2nd is column static int[] get_Min_Element_Matrix_by_Index(numjava matrix) throws Exception { if(matrix.finalmatrix == null) { throw new Exception("Matrix is null"); } float min = matrix.finalmatrix[0][0]; int[] index_Min = new int[2]; for (int row = 0; row < matrix.M; row++) { for (int column = 0; column < matrix.N; column++) { if(matrix.finalmatrix[row][column] < min) { min = matrix.finalmatrix[row][column]; index_Min[0] = row; index_Min[1] = column; } } } return index_Min; } static float get_Min_Element_Matrix_by_Row(numjava matrix, int row_Number) { float min = matrix.finalmatrix[row_Number][0]; for (int row = 0; row < matrix.M; row++) { if(matrix.finalmatrix[row_Number][row] > min) { min = matrix.finalmatrix[row_Number][row]; } } return min; } static float get_Min_Element_Matrix_by_Column(numjava matrix, int column_Number) { float min = matrix.finalmatrix[0][column_Number]; for (int column = 0; column < matrix.N; column++){ if(matrix.finalmatrix[column][column_Number] > min) { min = matrix.finalmatrix[column][column_Number]; } } return min; } 
static float calculate_Exponential(float elem) { return (float)Math.exp(elem); } // transformed with the matrix reference static numjava transform_Matrix_with_Exponential(numjava matrix) { for (int row = 0; row < matrix.M; row++) { for (int column = 0; column < matrix.N; column++) { matrix.finalmatrix[row][column] = calculate_Exponential(matrix.finalmatrix[row][column]); } } return matrix; } /* static double [][] copy_Matrix(double [][] matrix) { double [][] cp_Matrix = new double[matrix.length][]; for(int i = 0; i < matrix.length; i++) { double[] aMatrix = matrix[i]; int aLength = aMatrix.length; cp_Matrix[i] = new double[aLength]; System.arraycopy(aMatrix, 0, cp_Matrix[i], 0, aLength); } return cp_Matrix; } */ static numjava make_Unit_Matrix(numjava matrix) { for (int row = 0; row < matrix.M; row++) { for (int column = 0; column < matrix.N; column++) { matrix.finalmatrix[row][column] = 1; } } return matrix; } public static void main(String args[]) { try { numjava x1=numjava.creatematrix(new float[][]{{1,2,3},{4,5,6},{9,8,7}}); numjava x2=numjava.creatematrix(new float[][]{{1,2,3},{4,5,6},{9,8,7}}); numjava.print(x1); System.out.println("Max element by Value :"+numjava.get_Max_Element_Matrix_by_Value(x1)); System.out.println("Max_Argument :"+numjava.get_Arg_Max(x1)[0]+" "+numjava.get_Arg_Max(x1)[1]); System.out.println("Max_element by row :"+numjava.get_Max_Element_Matrix_by_Row(x1, 1)); System.out.println("Max element by column :"+numjava.get_Max_Element_Matrix_by_Column(x1,1)); System.out.println("Min elemet by Value :"+numjava.get_Min_Element_Matrix_by_Value(x1)); System.out.println("Min elemet by Index :"+numjava.get_Min_Element_Matrix_by_Index(x1)[0]+" "+numjava.get_Min_Element_Matrix_by_Index(x1)[1]); System.out.println("Min elemet by row :"+numjava.get_Min_Element_Matrix_by_Row(x1, 1)); System.out.println("Min elemet by column :"+numjava.get_Min_Element_Matrix_by_Column(x1, 1)); numjava.print(numjava.transform_Matrix_with_Exponential(x1)); 
numjava.print(numjava.make_Unit_Matrix(x1)); } catch(Exception e) { e.printStackTrace(); //System.exit(0); } } }
src/one/numjava.java
Create numjava.java
src/one/numjava.java
Create numjava.java
<ide><path>rc/one/numjava.java <add>import java.util.*; <add>class numjava <add>{ <add> private int M; <add> private int N; <add> private float finalmatrix [][]; <add> public numjava(int M,int N) <add> { <add> this.M=M; <add> this.N=N; <add> finalmatrix=new float[M][N]; <add> } <add> <add> // To create random 2-d array <add> public static numjava createrandom (int M,int N) <add> { <add> numjava mat=new numjava(M,N); <add> for (int i=0;i<M;i++) <add> { <add> for (int j=0;j<N;j++) <add> { <add> mat.finalmatrix[i][j]=(float)Math.random(); <add> } <add> } <add> <add> return mat; <add> } <add> // To print a matrix <add> public static void print(numjava mat) <add> { <add> System.out.println(mat.M+" "+mat.N); <add> for (int i=0;i<mat.M;i++) <add> { <add> for (int j=0;j<mat.N;j++) <add> { <add> System.out.printf("%9.4f ", mat.finalmatrix[i][j]); <add> } <add> System.out.println(); <add> } <add> <add> } <add> <add> //Dot product of 2 matrices <add> public static numjava dot(numjava mat1,numjava mat2) <add> { <add> <add> int M1=mat1.M; <add> int N1=mat1.N; <add> int M2=mat2.M; <add> int N2=mat2.N; <add> <add> if (N1!=M2) throw new RuntimeException("Illegal matrix dimensions"); <add> <add> numjava mat=new numjava(M1,N2); <add> <add> for (int i=0;i<M1;i++) <add> { <add> for (int j=0;j<N1;j++) <add> { <add> mat.finalmatrix[i][j]=0; <add> for (int k=0;k<M2;k++) <add> { <add> mat.finalmatrix[i][j]+=mat1.finalmatrix[i][k]*mat2.finalmatrix[k][j]; <add> } <add> } <add> } <add> return mat; <add> } <add> <add> //Elementwise multiplication <add> public static numjava elementmul(numjava mat1,numjava mat2) <add> { <add> int M1=mat1.M; <add> int N1=mat1.N; <add> int M2=mat2.M; <add> int N2=mat2.N; <add> try <add> { <add> if (N1!=M2) <add> throw new Exception(); <add> } <add> <add> catch(Exception e) <add> { <add> System.out.println("Illegal matrix dimension"); <add> } <add> <add> numjava mat=null; <add> if (M1!=M2) <add> { <add> mat=new numjava(M2,N1); <add> for (int i=0;i<M2;i++) <add> { 
<add> <add> for(int j=0;j<N1;j++) <add> { <add> mat.finalmatrix[i][j]=mat1.finalmatrix[i][0]*mat2.finalmatrix[0][j]; <add> } <add> } <add> } <add> <add> else <add> { <add> mat=new numjava(M1,N1); <add> for (int i=0;i<M1;i++) <add> { <add> for (int j=0;j<N1;j++) <add> { <add> mat.finalmatrix[i][j]=mat1.finalmatrix[i][j]*mat2.finalmatrix[i][j]; <add> } <add> } <add> <add> } <add> return mat; <add> } <add> /* <add> public static void shape(float mat1[][]) <add> { <add> return mat1[0].length,mat1[1].length; <add> } <add> */ <add> //Reshaping the matrices <add> public static numjava reshape(numjava mat,int M,int N) <add> { <add> <add> int old_M=mat.M; <add> int old_N=mat.N; <add> int counteri=0; <add> int counterj=0; <add> numjava C=new numjava(M,N); <add> //try <add> //{ <add> if((old_M*old_N)!=(M*N)) <add> { <add> <add> throw new RuntimeException("Cannot reshape"); <add> <add> } <add> else <add> { <add> <add> for (int i=0;i<M;i++) <add> { <add> for (int j=0;j<N;j++) <add> { <add> if (counterj>=old_N) <add> { <add> counteri++; <add> counterj=0; <add> } <add> <add> C.finalmatrix[i][j]=mat.finalmatrix[counteri][counterj]; <add> counterj++; <add> } <add> <add> } <add> <add> } <add> <add> <add> //} <add> /* <add> catch(Exception e) <add> { <add> } <add> */ <add> return C; <add> // System.out.println("Cannot rehape matrix"); <add> <add> } <add> <add> // Creating a matrix <add> public static numjava creatematrix(float mat[][]) <add> { <add> int M=mat.length; <add> int N=mat[0].length; <add> System.out.println(M+" "+N); <add> numjava C=new numjava(M,N); <add> for (int i=0;i<M;i++) <add> { <add> for(int j=0;j<N;j++) <add> { <add> C.finalmatrix[i][j]=mat[i][j]; <add> System.out.println(C.finalmatrix[i][j]); <add> } <add> } <add> return C; <add> } <add> <add> //Transposing a matrix <add> public static numjava transpose(numjava mat) <add> { <add> <add> numjava C=new numjava(mat.N,mat.M); <add>// int counteri=0; <add>// int counterj=0; <add> for (int i=0;i<mat.M;i++) <add> { <add> 
for (int j=0;j<mat.N;j++) <add> { <add> <add> C.finalmatrix[j][i]= mat.finalmatrix[i][j]; <add> <add> } <add> <add> } <add> return C; <add> } <add> <add> <add> <add> <add> <add>/* <add>static int get_Row_Size(double [][] matrix){ <add> return matrix.length; <add> } <add> <add> static int get_Column_Size(double [][] matrix) { <add> return matrix[0].length; <add> } <add>*/ <add> static float get_Max_Element_Matrix_by_Value(numjava matrix) throws Exception { <add> <add> if(matrix.finalmatrix == null) { <add> throw new Exception("Matrix is null"); <add> } <add> <add> float max = matrix.finalmatrix[0][0]; <add> <add> for (int row = 0; row < matrix.M; row++) { <add> for (int column = 0; column < matrix.N; column++) { <add> if(matrix.finalmatrix[row][column] > max) { <add> max = matrix.finalmatrix[row][column]; <add> } <add> } <add> } <add> return max; <add> } <add> <add> // Returns an array 1st index is row and 2nd is column <add> static int[] get_Arg_Max(numjava matrix) throws Exception { <add> <add> if(matrix.finalmatrix == null) { <add> throw new Exception("Matrix is null"); <add> } <add> <add> float max = matrix.finalmatrix[0][0]; <add> <add> int[] index_Max = new int[2]; <add> <add> for (int row = 0; row < matrix.M; row++) { <add> for (int column = 0; column < matrix.N; column++) { <add> if(matrix.finalmatrix[row][column] > max) { <add> max = matrix.finalmatrix[row][column]; <add> index_Max[0] = row; <add> index_Max[1] = column; <add> } <add> } <add> } <add> return index_Max; <add> } <add> <add> static float get_Max_Element_Matrix_by_Row(numjava matrix, int row_Number) { <add> <add> float max = matrix.finalmatrix[row_Number][0]; <add> <add> for (int row = 0; row < matrix.M; row++) { <add> if(matrix.finalmatrix[row_Number][row] > max) { <add> max = matrix.finalmatrix[row_Number][row]; <add> } <add> } <add> return max; <add> } <add> <add> static float get_Max_Element_Matrix_by_Column(numjava matrix, int column_Number) { <add> <add> float max = 
matrix.finalmatrix[0][column_Number]; <add> <add> for (int column = 0; column < matrix.N; column++){ <add> if(matrix.finalmatrix[column][column_Number] > max) { <add> max = matrix.finalmatrix[column][column_Number]; <add> } <add> } <add> return max; <add> } <add> <add> static float get_Min_Element_Matrix_by_Value(numjava matrix) throws Exception { <add> <add> if(matrix.finalmatrix == null) { <add> throw new Exception("Matrix is null"); <add> } <add> <add> float min = matrix.finalmatrix[0][0]; <add> <add> for (int row = 0; row < matrix.M; row++) { <add> for (int column = 0; column < matrix.N; column++) { <add> if(matrix.finalmatrix[row][column] < min) { <add> min = matrix.finalmatrix[row][column]; <add> } <add> } <add> } <add> return min; <add> } <add> <add> // Returns an array 1st index is row and 2nd is column <add> static int[] get_Min_Element_Matrix_by_Index(numjava matrix) throws Exception { <add> <add> if(matrix.finalmatrix == null) { <add> throw new Exception("Matrix is null"); <add> } <add> <add> float min = matrix.finalmatrix[0][0]; <add> <add> int[] index_Min = new int[2]; <add> <add> for (int row = 0; row < matrix.M; row++) { <add> for (int column = 0; column < matrix.N; column++) { <add> if(matrix.finalmatrix[row][column] < min) { <add> min = matrix.finalmatrix[row][column]; <add> index_Min[0] = row; <add> index_Min[1] = column; <add> } <add> } <add> } <add> return index_Min; <add> } <add> <add> static float get_Min_Element_Matrix_by_Row(numjava matrix, int row_Number) { <add> <add> float min = matrix.finalmatrix[row_Number][0]; <add> <add> for (int row = 0; row < matrix.M; row++) { <add> if(matrix.finalmatrix[row_Number][row] > min) { <add> min = matrix.finalmatrix[row_Number][row]; <add> } <add> } <add> return min; <add> } <add> <add> static float get_Min_Element_Matrix_by_Column(numjava matrix, int column_Number) { <add> <add> float min = matrix.finalmatrix[0][column_Number]; <add> <add> for (int column = 0; column < matrix.N; column++){ <add> 
if(matrix.finalmatrix[column][column_Number] > min) { <add> min = matrix.finalmatrix[column][column_Number]; <add> } <add> } <add> return min; <add> } <add> <add> static float calculate_Exponential(float elem) { <add> return (float)Math.exp(elem); <add> } <add> <add> // transformed with the matrix reference <add> static numjava transform_Matrix_with_Exponential(numjava matrix) { <add> <add> for (int row = 0; row < matrix.M; row++) { <add> for (int column = 0; column < matrix.N; column++) { <add> matrix.finalmatrix[row][column] = calculate_Exponential(matrix.finalmatrix[row][column]); <add> } <add> } <add> return matrix; <add> } <add> <add> /* <add> static double [][] copy_Matrix(double [][] matrix) { <add> double [][] cp_Matrix = new double[matrix.length][]; <add> <add> for(int i = 0; i < matrix.length; i++) { <add> double[] aMatrix = matrix[i]; <add> int aLength = aMatrix.length; <add> cp_Matrix[i] = new double[aLength]; <add> System.arraycopy(aMatrix, 0, cp_Matrix[i], 0, aLength); <add> } <add> return cp_Matrix; <add> } <add> */ <add> <add> static numjava make_Unit_Matrix(numjava matrix) { <add> <add> for (int row = 0; row < matrix.M; row++) { <add> for (int column = 0; column < matrix.N; column++) { <add> matrix.finalmatrix[row][column] = 1; <add> } <add> } <add> <add> return matrix; <add> } <add> <add> <add> <add> <add> public static void main(String args[]) <add> { <add> try <add> { <add> <add> <add> numjava x1=numjava.creatematrix(new float[][]{{1,2,3},{4,5,6},{9,8,7}}); <add> numjava x2=numjava.creatematrix(new float[][]{{1,2,3},{4,5,6},{9,8,7}}); <add> numjava.print(x1); <add> <add> System.out.println("Max element by Value :"+numjava.get_Max_Element_Matrix_by_Value(x1)); <add> System.out.println("Max_Argument :"+numjava.get_Arg_Max(x1)[0]+" "+numjava.get_Arg_Max(x1)[1]); <add> System.out.println("Max_element by row :"+numjava.get_Max_Element_Matrix_by_Row(x1, 1)); <add> System.out.println("Max element by column 
:"+numjava.get_Max_Element_Matrix_by_Column(x1,1)); <add> System.out.println("Min elemet by Value :"+numjava.get_Min_Element_Matrix_by_Value(x1)); <add> System.out.println("Min elemet by Index :"+numjava.get_Min_Element_Matrix_by_Index(x1)[0]+" "+numjava.get_Min_Element_Matrix_by_Index(x1)[1]); <add> System.out.println("Min elemet by row :"+numjava.get_Min_Element_Matrix_by_Row(x1, 1)); <add> System.out.println("Min elemet by column :"+numjava.get_Min_Element_Matrix_by_Column(x1, 1)); <add> numjava.print(numjava.transform_Matrix_with_Exponential(x1)); <add> numjava.print(numjava.make_Unit_Matrix(x1)); <add> <add> } <add> <add> catch(Exception e) <add> { <add> e.printStackTrace(); <add> //System.exit(0); <add> } <add> } <add>}
Java
apache-2.0
1eb0008e317e5e8ef952805f9cdef549a1f75cd0
0
metaborg/spoofax,metaborg/spoofax,metaborg/spoofax,metaborg/spoofax
package org.metaborg.core.transform; import java.util.Collection; import java.util.stream.Collectors; import org.metaborg.core.action.IActionService; import org.metaborg.core.action.ITransformGoal; import org.metaborg.core.action.TransformActionContrib; import org.metaborg.core.analysis.IAnalysisService; import org.metaborg.core.analysis.IAnalyzeUnit; import org.metaborg.core.context.IContext; import org.metaborg.core.language.ILanguageComponent; import org.metaborg.core.language.ILanguageImpl; import org.metaborg.core.language.LanguageIdentifier; import org.metaborg.core.syntax.IParseUnit; import org.metaborg.util.log.ILogger; import org.metaborg.util.log.LoggerUtils; import com.google.common.collect.Lists; import com.google.inject.Inject; public class TransformService<P extends IParseUnit, A extends IAnalyzeUnit, TP extends ITransformUnit<P>, TA extends ITransformUnit<A>> implements ITransformService<P, A, TP, TA> { private static final ILogger logger = LoggerUtils.logger(TransformService.class); private final IActionService actionService; private final IAnalysisService<P, A, ?> analysisService; private final ITransformer<P, A, TP, TA> transformer; @Inject public TransformService(IActionService actionService, IAnalysisService<P, A, ?> analysisService, ITransformer<P, A, TP, TA> transformer) { this.actionService = actionService; this.analysisService = analysisService; this.transformer = transformer; } @Override public boolean available(ILanguageImpl language, ITransformGoal goal) { return actionService.available(language, goal); } @Override public boolean requiresAnalysis(ILanguageImpl language, ITransformGoal goal) { return actionService.requiresAnalysis(language, goal); } @Override public Collection<TP> transform(P input, IContext context, ITransformGoal goal, ITransformConfig config) throws TransformException { if(!input.valid()) { throw new TransformException("Cannot transform parse unit " + input + ", it is not valid"); } final 
Iterable<TransformActionContrib> actions = actionService.actionContributions(context.language(), goal); final Collection<TP> results = Lists.newArrayList(); for(TransformActionContrib action : actions) { if(analysisService.available(context.language())) checkAnalyzed(action); final TP result = transformer.transform(input, context, action, config); results.add(result); } return results; } @Override public TP transform(P input, IContext context, TransformActionContrib action, ITransformConfig config) throws TransformException { if(!input.valid()) { throw new TransformException("Cannot transform parse unit " + input + ", it is not valid"); } if(analysisService.available(context.language())) checkAnalyzed(action); final TP result = transformer.transform(input, context, action, config); return result; } @Override public Collection<TA> transform(A input, IContext context, ITransformGoal goal, ITransformConfig config) throws TransformException { if(!input.valid()) { throw new TransformException("Cannot transform analyze unit " + input + ", it is not valid"); } final Iterable<TransformActionContrib> actions = actionService.actionContributions(context.language(), goal); final Collection<TA> results = Lists.newArrayList(); for(TransformActionContrib action : actions) { if (!isActionEnabled(action, context)) { logger.info("Skipped action '" + action.action.name() + "' because it is not enabled by the project or a compile dependency."); continue; } final TA result = transformer.transform(input, context, action, config); results.add(result); } return results; } @Override public TA transform(A input, IContext context, TransformActionContrib action, ITransformConfig config) throws TransformException { if(!input.valid()) { throw new TransformException("Cannot transform parse unit " + input + ", it is not valid"); } final TA result = transformer.transform(input, context, action, config); return result; } @Override public Collection<TP> transformAllParsed(Iterable<P> inputs, 
IContext context, ITransformGoal goal, ITransformConfig config) throws TransformException { final Iterable<TransformActionContrib> actions = actionService.actionContributions(context.language(), goal); final Collection<TP> results = Lists.newArrayList(); for(TransformActionContrib action : actions) { if(analysisService.available(context.language())) checkAnalyzed(action); final Collection<TP> result = transformer.transformAllParsed(inputs, context, action, config); results.addAll(result); } return results; } @Override public Collection<TP> transformAllParsed(Iterable<P> inputs, IContext context, TransformActionContrib action, ITransformConfig config) throws TransformException { if(analysisService.available(context.language())) checkAnalyzed(action); final Collection<TP> result = transformer.transformAllParsed(inputs, context, action, config); return result; } @Override public Collection<TA> transformAllAnalyzed(Iterable<A> inputs, IContext context, ITransformGoal goal, ITransformConfig config) throws TransformException { final Iterable<TransformActionContrib> actions = actionService.actionContributions(context.language(), goal); final Collection<TA> results = Lists.newArrayList(); for(TransformActionContrib action : actions) { final Collection<TA> result = transformer.transformAllAnalyzed(inputs, context, action, config); results.addAll(result); } return results; } @Override public Collection<TA> transformAllAnalyzed(Iterable<A> inputs, IContext context, TransformActionContrib action, ITransformConfig config) throws TransformException { final Collection<TA> result = transformer.transformAllAnalyzed(inputs, context, action, config); return result; } private static void checkAnalyzed(TransformActionContrib action) throws TransformException { if(!action.action.flags().parsed) { final String message = logger.format("Transformation {} requires an analyzed result, but a parsed result is given", action); throw new TransformException(message); } } /** * Determines whether 
a transformation action is enabled. * * A transformation action is enabled when it is contributed by a language component that * is a compile dependency of this project, or is a component of the project itself. * * @param action the action to check * @param context the context in which to check * @return {@code true} when the action is enabled; otherwise, {@code false} */ private static boolean isActionEnabled(TransformActionContrib action, IContext context) { // @formatter:off final ILanguageComponent actionContributor = action.contributor; logger.info("Looking for '" + action.contributor.id() + "' in [" + context.project().config().compileDeps().stream().map(d -> d.toString()).collect(Collectors.joining(", ")) + "] or in [" + context.language().components().stream().map(d -> d.id().toString()).collect(Collectors.joining(", ")) + "]."); return context.project().config().compileDeps().contains(actionContributor.id()) || context.language().components().contains(actionContributor); // @formatter:on } }
org.metaborg.core/src/main/java/org/metaborg/core/transform/TransformService.java
package org.metaborg.core.transform; import java.util.Collection; import org.metaborg.core.action.IActionService; import org.metaborg.core.action.ITransformGoal; import org.metaborg.core.action.TransformActionContrib; import org.metaborg.core.analysis.IAnalysisService; import org.metaborg.core.analysis.IAnalyzeUnit; import org.metaborg.core.context.IContext; import org.metaborg.core.language.ILanguageComponent; import org.metaborg.core.language.ILanguageImpl; import org.metaborg.core.language.LanguageIdentifier; import org.metaborg.core.syntax.IParseUnit; import org.metaborg.util.log.ILogger; import org.metaborg.util.log.LoggerUtils; import com.google.common.collect.Lists; import com.google.inject.Inject; public class TransformService<P extends IParseUnit, A extends IAnalyzeUnit, TP extends ITransformUnit<P>, TA extends ITransformUnit<A>> implements ITransformService<P, A, TP, TA> { private static final ILogger logger = LoggerUtils.logger(TransformService.class); private final IActionService actionService; private final IAnalysisService<P, A, ?> analysisService; private final ITransformer<P, A, TP, TA> transformer; @Inject public TransformService(IActionService actionService, IAnalysisService<P, A, ?> analysisService, ITransformer<P, A, TP, TA> transformer) { this.actionService = actionService; this.analysisService = analysisService; this.transformer = transformer; } @Override public boolean available(ILanguageImpl language, ITransformGoal goal) { return actionService.available(language, goal); } @Override public boolean requiresAnalysis(ILanguageImpl language, ITransformGoal goal) { return actionService.requiresAnalysis(language, goal); } @Override public Collection<TP> transform(P input, IContext context, ITransformGoal goal, ITransformConfig config) throws TransformException { if(!input.valid()) { throw new TransformException("Cannot transform parse unit " + input + ", it is not valid"); } final Iterable<TransformActionContrib> actions = 
actionService.actionContributions(context.language(), goal); final Collection<TP> results = Lists.newArrayList(); for(TransformActionContrib action : actions) { if(analysisService.available(context.language())) checkAnalyzed(action); final TP result = transformer.transform(input, context, action, config); results.add(result); } return results; } @Override public TP transform(P input, IContext context, TransformActionContrib action, ITransformConfig config) throws TransformException { if(!input.valid()) { throw new TransformException("Cannot transform parse unit " + input + ", it is not valid"); } if(analysisService.available(context.language())) checkAnalyzed(action); final TP result = transformer.transform(input, context, action, config); return result; } @Override public Collection<TA> transform(A input, IContext context, ITransformGoal goal, ITransformConfig config) throws TransformException { if(!input.valid()) { throw new TransformException("Cannot transform analyze unit " + input + ", it is not valid"); } final Iterable<TransformActionContrib> actions = actionService.actionContributions(context.language(), goal); final Collection<TA> results = Lists.newArrayList(); for(TransformActionContrib action : actions) { if (!isActionEnabled(action, context)) { logger.info("Skipped action '" + action.action.name() + "' because it is not enabled by the project or a compile dependency."); continue; } final TA result = transformer.transform(input, context, action, config); results.add(result); } return results; } @Override public TA transform(A input, IContext context, TransformActionContrib action, ITransformConfig config) throws TransformException { if(!input.valid()) { throw new TransformException("Cannot transform parse unit " + input + ", it is not valid"); } final TA result = transformer.transform(input, context, action, config); return result; } @Override public Collection<TP> transformAllParsed(Iterable<P> inputs, IContext context, ITransformGoal goal, 
ITransformConfig config) throws TransformException { final Iterable<TransformActionContrib> actions = actionService.actionContributions(context.language(), goal); final Collection<TP> results = Lists.newArrayList(); for(TransformActionContrib action : actions) { if(analysisService.available(context.language())) checkAnalyzed(action); final Collection<TP> result = transformer.transformAllParsed(inputs, context, action, config); results.addAll(result); } return results; } @Override public Collection<TP> transformAllParsed(Iterable<P> inputs, IContext context, TransformActionContrib action, ITransformConfig config) throws TransformException { if(analysisService.available(context.language())) checkAnalyzed(action); final Collection<TP> result = transformer.transformAllParsed(inputs, context, action, config); return result; } @Override public Collection<TA> transformAllAnalyzed(Iterable<A> inputs, IContext context, ITransformGoal goal, ITransformConfig config) throws TransformException { final Iterable<TransformActionContrib> actions = actionService.actionContributions(context.language(), goal); final Collection<TA> results = Lists.newArrayList(); for(TransformActionContrib action : actions) { final Collection<TA> result = transformer.transformAllAnalyzed(inputs, context, action, config); results.addAll(result); } return results; } @Override public Collection<TA> transformAllAnalyzed(Iterable<A> inputs, IContext context, TransformActionContrib action, ITransformConfig config) throws TransformException { final Collection<TA> result = transformer.transformAllAnalyzed(inputs, context, action, config); return result; } private static void checkAnalyzed(TransformActionContrib action) throws TransformException { if(!action.action.flags().parsed) { final String message = logger.format("Transformation {} requires an analyzed result, but a parsed result is given", action); throw new TransformException(message); } } /** * Determines whether a transformation action is enabled. 
* * A transformation action is enabled when it is contributed by a language component that * is a compile dependency of this project, or is a component of the project itself. * * @param action the action to check * @param context the context in which to check * @return {@code true} when the action is enabled; otherwise, {@code false} */ private static boolean isActionEnabled(TransformActionContrib action, IContext context) { // @formatter:off final ILanguageComponent actionContributor = action.contributor; return context.project().config().compileDeps().contains(actionContributor.id()) || context.language().components().contains(actionContributor); // @formatter:on } }
Debugging transformation contributions
org.metaborg.core/src/main/java/org/metaborg/core/transform/TransformService.java
Debugging transformation contributions
<ide><path>rg.metaborg.core/src/main/java/org/metaborg/core/transform/TransformService.java <ide> package org.metaborg.core.transform; <ide> <ide> import java.util.Collection; <add>import java.util.stream.Collectors; <ide> <ide> import org.metaborg.core.action.IActionService; <ide> import org.metaborg.core.action.ITransformGoal; <ide> private static boolean isActionEnabled(TransformActionContrib action, IContext context) { <ide> // @formatter:off <ide> final ILanguageComponent actionContributor = action.contributor; <add> logger.info("Looking for '" + action.contributor.id() + "' in [" + context.project().config().compileDeps().stream().map(d -> d.toString()).collect(Collectors.joining(", ")) + "] or in [" + context.language().components().stream().map(d -> d.id().toString()).collect(Collectors.joining(", ")) + "]."); <ide> return context.project().config().compileDeps().contains(actionContributor.id()) <ide> || context.language().components().contains(actionContributor); <ide> // @formatter:on
Java
apache-2.0
4beb1889ef00ef2dd18416affbee7572927b5176
0
didclab/onedatashare,didclab/onedatashare,didclab/onedatashare,didclab/onedatashare
package org.onedatashare.server.module.vfs; import org.apache.commons.vfs2.FileContent; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.onedatashare.server.model.core.*; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; public class VfsResource extends Resource<VfsSession, VfsResource> { private FileObject fileObject; protected VfsResource(VfsSession session, String path, FileObject fileObject) { super(session, path); this.fileObject = fileObject; } public Mono<VfsResource> mkdir() { return initialize().doOnSuccess(vfsResource -> { try { fileObject.createFolder(); } catch (FileSystemException e) { e.printStackTrace(); } }); } public Mono<VfsResource> delete() { return initialize().doOnSuccess(vfsResource -> { try { fileObject.delete(); } catch (FileSystemException e) { e.printStackTrace(); } }); } @Override public Mono<VfsResource> select(String path) { return session.select(path); } public Mono<Stat> stat() { return initialize().map(VfsResource::onStat); } private Stat onStat() { Stat stat = new Stat(); try { if(fileObject.isFolder()){ stat.dir = true; stat.file = false; } else { stat = fileContentToStat(fileObject); } stat.name = fileObject.getName().getBaseName(); if(stat.dir) { FileObject[] children = fileObject.getChildren(); ArrayList<Stat> files = new ArrayList<>(); for(FileObject file : children) { files.add(fileContentToStat(file)); } stat.setFiles(files); } } catch (FileSystemException e) { e.printStackTrace(); } return stat; } public Stat fileContentToStat(FileObject file) { Stat stat = new Stat(); FileContent fileContent = null; try { fileContent = file.getContent(); if(file.isFolder()) { stat.dir = true; stat.file = false; } else { stat.file = true; stat.dir = false; stat.size = fileContent.getSize(); } stat.name = file.getName().getBaseName(); stat.time = 
fileContent.getLastModifiedTime() / 1000; } catch (FileSystemException e) { e.printStackTrace(); } return stat; } public VfsTap tap() { return new VfsTap(); } public VfsDrain sink() { return new VfsDrain().start(); } class VfsTap implements Tap { FileContent fileContent; { try { fileContent = fileObject.getContent(); } catch (FileSystemException e) { e.printStackTrace(); } } final long size = stat().block().size; public Flux<Slice> tap(long sliceSize) { InputStream instr = null; try { instr = fileContent.getInputStream(); } catch (FileSystemException e) { e.printStackTrace(); } InputStream finalInstr = instr; return Flux.generate( () -> 0L, (state, sink) -> { if (state + sliceSize < size) { byte[] b = new byte[Math.toIntExact(sliceSize)]; sink.next(new Slice(b)); } else { int remaining = (int) (size - state); byte[] b = new byte[remaining]; try { fileContent.getInputStream().read(b, state.byteValue(), remaining); } catch (IOException e) { e.printStackTrace(); } sink.next(new Slice(b)); b = new byte[Math.toIntExact(sliceSize) + 1024]; sink.complete(); } return state + sliceSize; }); } } class VfsDrain implements Drain { OutputStream outputStream; long uploaded = 0L; @Override public VfsDrain start() { try { fileObject.createFile(); outputStream = fileObject.getContent().getOutputStream(); } catch (FileSystemException e) { e.printStackTrace(); } return this; } @Override public void drain(Slice slice) { try { outputStream.flush(); outputStream.write(slice.asBytes()); } catch (IOException e) { e.printStackTrace(); } uploaded += slice.length(); } @Override public void finish() { try { outputStream.close(); } catch (IOException e) { e.printStackTrace(); } } } }
src/main/java/org/onedatashare/server/module/vfs/VfsResource.java
package org.onedatashare.server.module.vfs; import org.apache.commons.vfs2.FileContent; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSystemException; import org.onedatashare.server.model.core.*; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; public class VfsResource extends Resource<VfsSession, VfsResource> { private FileObject fileObject; protected VfsResource(VfsSession session, String path, FileObject fileObject) { super(session, path); this.fileObject = fileObject; } public Mono<VfsResource> mkdir() { return initialize().doOnSuccess(vfsResource -> { try { fileObject.createFolder(); } catch (FileSystemException e) { e.printStackTrace(); } }); } public Mono<VfsResource> delete() { return initialize().doOnSuccess(vfsResource -> { try { fileObject.delete(); } catch (FileSystemException e) { e.printStackTrace(); } }); } @Override public Mono<VfsResource> select(String path) { return session.select(path); } public Mono<Stat> stat() { return initialize().map(VfsResource::onStat); } private Stat onStat() { Stat stat = new Stat(); try { if(fileObject.isFolder()){ stat.dir = true; stat.file = false; } else { stat = fileContentToStat(fileObject); } stat.name = fileObject.getName().getBaseName(); if(stat.dir) { FileObject[] children = fileObject.getChildren(); ArrayList<Stat> files = new ArrayList<>(); for(FileObject file : children) { files.add(fileContentToStat(file)); } stat.setFiles(files); } } catch (FileSystemException e) { e.printStackTrace(); } return stat; } public Stat fileContentToStat(FileObject file) { Stat stat = new Stat(); FileContent fileContent = null; try { fileContent = file.getContent(); if(file.isFolder()) { stat.dir = true; stat.file = false; } else { stat.file = true; stat.dir = false; stat.size = fileContent.getSize(); } stat.name = file.getName().getBaseName(); stat.time = 
fileContent.getLastModifiedTime() / 1000; } catch (FileSystemException e) { e.printStackTrace(); } return stat; } public VfsTap tap() { return new VfsTap(); } public VfsDrain sink() { return new VfsDrain().start(); } class VfsTap implements Tap { FileContent fileContent; { try { fileContent = fileObject.getContent(); } catch (FileSystemException e) { e.printStackTrace(); } } final long size = stat().block().size; public Flux<Slice> tap(long sliceSize) { return Flux.generate( () -> 0L, (state, sink) -> { if (state + sliceSize < size) { byte[] b = new byte[Math.toIntExact(sliceSize)]; try { fileContent.getInputStream() .read(b, state.intValue(), Math.toIntExact(sliceSize)); } catch (IOException e) { e.printStackTrace(); } sink.next(new Slice(b)); } else { int remaining = (int) (size - state); byte[] b = new byte[remaining]; try { fileContent.getInputStream().read(b, state.intValue(), remaining); } catch (IOException e) { e.printStackTrace(); } sink.next(new Slice(b)); sink.complete(); } return state + sliceSize; }); } } class VfsDrain implements Drain { OutputStream outputStream; long uploaded = 0L; @Override public VfsDrain start() { try { fileObject.createFile(); outputStream = fileObject.getContent().getOutputStream(); } catch (FileSystemException e) { e.printStackTrace(); } return this; } @Override public void drain(Slice slice) { try { outputStream.write(slice.asBytes()); } catch (IOException e) { e.printStackTrace(); } uploaded += slice.length(); } @Override public void finish() { try { outputStream.close(); } catch (IOException e) { e.printStackTrace(); } } } }
Fixed FTP not transferring data properly
src/main/java/org/onedatashare/server/module/vfs/VfsResource.java
Fixed FTP not transferring data properly
<ide><path>rc/main/java/org/onedatashare/server/module/vfs/VfsResource.java <ide> final long size = stat().block().size; <ide> <ide> public Flux<Slice> tap(long sliceSize) { <add> InputStream instr = null; <add> try { <add> instr = fileContent.getInputStream(); <add> } catch (FileSystemException e) { <add> e.printStackTrace(); <add> } <add> InputStream finalInstr = instr; <ide> return Flux.generate( <ide> () -> 0L, <ide> (state, sink) -> { <ide> if (state + sliceSize < size) { <ide> byte[] b = new byte[Math.toIntExact(sliceSize)]; <del> try { <del> fileContent.getInputStream() <del> .read(b, state.intValue(), Math.toIntExact(sliceSize)); <del> } catch (IOException e) { <del> e.printStackTrace(); <del> } <ide> sink.next(new Slice(b)); <ide> } else { <ide> int remaining = (int) (size - state); <ide> byte[] b = new byte[remaining]; <ide> try { <del> fileContent.getInputStream().read(b, state.intValue(), remaining); <add> fileContent.getInputStream().read(b, state.byteValue(), remaining); <ide> } catch (IOException e) { <ide> e.printStackTrace(); <ide> } <ide> sink.next(new Slice(b)); <add> b = new byte[Math.toIntExact(sliceSize) + 1024]; <ide> sink.complete(); <ide> } <ide> return state + sliceSize; <ide> @Override <ide> public void drain(Slice slice) { <ide> try { <add> outputStream.flush(); <ide> outputStream.write(slice.asBytes()); <ide> } catch (IOException e) { <ide> e.printStackTrace();
Java
bsd-2-clause
c6ec6193faedc86fd2cc6ae9473bd81909e21402
0
insideo/randomcoder-website,insideo/randomcoder-website,insideo/randomcoder-website
package com.randomcoder.security.test; import static org.junit.Assert.*; import javax.servlet.http.*; import org.acegisecurity.*; import org.acegisecurity.ui.logout.LogoutHandler; import org.junit.*; import org.springframework.mock.web.*; import com.randomcoder.security.NullLogoutHandler; public class NullLogoutHandlerTest { private NullLogoutHandler handler = null; private LogoutHandlerMock mock = null; private MockHttpServletRequest request = null; private MockHttpServletResponse response = null; @Before public void setUp() throws Exception { mock = new LogoutHandlerMock(); handler = new NullLogoutHandler(); handler.setUsername("anonymousUser"); handler.setLogoutHandler(mock); request = new MockHttpServletRequest(); response = new MockHttpServletResponse(); } @After public void tearDown() throws Exception { handler = null; mock = null; request = null; response = null; } @Test public void testLogoutNormal() { handler.logout(request, response, new AuthenticationMock()); Authentication auth = mock.getAuthentication(); assertNotNull(auth); assertEquals(AuthenticationMock.class, auth.getClass()); assertEquals("temp", auth.getName()); assertEquals("temp", auth.getPrincipal()); assertFalse(auth.isAuthenticated()); // for code coverage auth.getAuthorities(); auth.getCredentials(); auth.getDetails(); auth.setAuthenticated(false); try { auth.setAuthenticated(true); fail("Didn't catch exception"); } catch (IllegalArgumentException e) {} } @Test public void testLogoutNull() { handler.logout(request, response, null); Authentication auth = mock.getAuthentication(); assertNotNull(auth); assertEquals("anonymousUser", auth.getName()); } @SuppressWarnings("unused") private static class LogoutHandlerMock implements LogoutHandler { private Authentication authentication = null; public LogoutHandlerMock() {} public void logout(HttpServletRequest request, HttpServletResponse response, Authentication auth) { authentication = auth; } public Authentication getAuthentication() { return 
authentication; } } @SuppressWarnings("unused") private static class AuthenticationMock implements Authentication { private static final long serialVersionUID = 3105620828874678824L; public AuthenticationMock() {} public GrantedAuthority[] getAuthorities() { return new GrantedAuthority[] {}; } public Object getCredentials() { return null; } public Object getDetails() { return null; } public Object getPrincipal() { return "temp"; } public boolean isAuthenticated() { return false; } public void setAuthenticated(boolean authenticated) throws IllegalArgumentException { } public String getName() { return "temp"; } } }
WEB-INF/src/com/randomcoder/security/test/NullLogoutHandlerTest.java
package com.randomcoder.security.test; import static org.junit.Assert.*; import javax.servlet.http.*; import org.acegisecurity.*; import org.acegisecurity.ui.logout.LogoutHandler; import org.junit.*; import org.springframework.mock.web.*; import com.randomcoder.security.NullLogoutHandler; public class NullLogoutHandlerTest { private NullLogoutHandler handler = null; private LogoutHandlerMock mock = null; private MockHttpServletRequest request = null; private MockHttpServletResponse response = null; @Before public void setUp() throws Exception { mock = new LogoutHandlerMock(); handler = new NullLogoutHandler(); handler.setUsername("anonymousUser"); handler.setLogoutHandler(mock); request = new MockHttpServletRequest(); response = new MockHttpServletResponse(); } @After public void tearDown() throws Exception { handler = null; mock = null; request = null; response = null; } @Test public void testLogoutNormal() { handler.logout(request, response, new AuthenticationMock()); Authentication auth = mock.getAuthentication(); assertNotNull(auth); assertEquals(AuthenticationMock.class, auth.getClass()); assertEquals("anonymousUser", auth.getName()); assertEquals("anonymousUser", auth.getPrincipal()); assertFalse(auth.isAuthenticated()); // for code coverage auth.getAuthorities(); auth.getCredentials(); auth.getDetails(); auth.setAuthenticated(false); try { auth.setAuthenticated(true); fail("Didn't catch exception"); } catch (IllegalArgumentException e) {} } @Test public void testLogoutNull() { handler.logout(request, response, null); Authentication auth = mock.getAuthentication(); assertNotNull(auth); } @SuppressWarnings("unused") private static class LogoutHandlerMock implements LogoutHandler { private Authentication authentication = null; public LogoutHandlerMock() {} public void logout(HttpServletRequest request, HttpServletResponse response, Authentication auth) { authentication = auth; } public Authentication getAuthentication() { return authentication; } } 
@SuppressWarnings("unused") private static class AuthenticationMock implements Authentication { private static final long serialVersionUID = 3105620828874678824L; public AuthenticationMock() {} public GrantedAuthority[] getAuthorities() { return new GrantedAuthority[] {}; } public Object getCredentials() { return null; } public Object getDetails() { return null; } public Object getPrincipal() { return "temp"; } public boolean isAuthenticated() { return false; } public void setAuthenticated(boolean authenticated) throws IllegalArgumentException { } public String getName() { return "temp"; } } }
Fixed NullLogoutHandlerTest git-svn-id: c54c250ef6781e452e8ba97060b13a35b2e33c47@349 5bee6cb3-3d18-0410-8c93-a642edd49b48
WEB-INF/src/com/randomcoder/security/test/NullLogoutHandlerTest.java
Fixed NullLogoutHandlerTest
<ide><path>EB-INF/src/com/randomcoder/security/test/NullLogoutHandlerTest.java <ide> assertNotNull(auth); <ide> assertEquals(AuthenticationMock.class, auth.getClass()); <ide> <del> assertEquals("anonymousUser", auth.getName()); <del> assertEquals("anonymousUser", auth.getPrincipal()); <add> assertEquals("temp", auth.getName()); <add> assertEquals("temp", auth.getPrincipal()); <ide> assertFalse(auth.isAuthenticated()); <ide> <ide> // for code coverage <ide> handler.logout(request, response, null); <ide> Authentication auth = mock.getAuthentication(); <ide> assertNotNull(auth); <add> assertEquals("anonymousUser", auth.getName()); <ide> } <ide> <ide> @SuppressWarnings("unused")
Java
bsd-3-clause
9bb426a71590e22ba6ea0b584723d623aad74535
0
NCIP/labviewer,NCIP/labviewer,NCIP/labviewer
package gov.nih.nci.caxchange.client; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Component; import java.awt.Font; import java.awt.GridLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.FocusAdapter; import java.awt.event.FocusEvent; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Properties; import java.util.concurrent.ScheduledExecutorService; import javax.swing.BorderFactory; import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.DefaultListModel; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JFileChooser; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JPanel; import javax.swing.JPasswordField; import javax.swing.JScrollPane; import javax.swing.JTabbedPane; import javax.swing.JTextField; import javax.swing.ListSelectionModel; import javax.swing.Timer; import javax.swing.WindowConstants; import javax.swing.border.Border; import org.apache.log4j.Logger; /** * @author asharma * */ public class TestCancerCenterClientUI extends JPanel implements ActionListener { private static final long serialVersionUID = 1L; private String csvDirectory; private String hl7v2Directory; private String mapDirectory; private String hl7v2mapDirectory; private String processedDirectory; private String inProcessDirectory; private String preProcessorFile; private File inProcessFolder; private File rawFilesBackupFolder; private File errorFolder; private static JFrame aWindow = new JFrame("Cancer Center Hub Client (CCHC)"); private JTextField jtxtHL7V2Dir = new JTextField(); private JTextField jtxtProcessedFilesDir = new JTextField(); private 
JTextField jtxtCSVDir = new JTextField(); private JTextField jtxtMAPDir = new JTextField(); private JTextField jtxtMAPHL7V2Dir = new JTextField(); private JTextField jtxtInProcessFilesDir = new JTextField(); private JTextField jtxtPollingInterval = new JTextField(); private JTextField jtxtInitialInterval = new JTextField(); private JTextField jtxtLocation = new JTextField(); //private JTextField jtxtIDPLocation = new JTextField(); private JTextField jtxtOrgName = new JTextField(); private JTextField jtxtUserName = new JTextField(); private JTextField jtxtPassword = new JPasswordField(); private JTextField jtxtHubURL = new JTextField(); private JTextField jtxtstudyLookupServiceURL = new JTextField(); private JTextField jtxtpreProcessorProFile = new JTextField(); private DefaultListModel msgDispBox = new DefaultListModel(); private String[] version = {"2.2","2.3","2.4","2.5"}; private JComboBox jcomboBoxVersion=new JComboBox(version); private JList dispList = new JList(); private static Logger logger = Logger .getLogger("gov.nih.nci.caxchange.client.TestCancerCenterClientUI"); private BufferedReader buffReader; private Border blackline = BorderFactory.createLineBorder(Color.black); private final ArrayList<ScheduledExecutorService>threadsList=new ArrayList<ScheduledExecutorService>(); /** * Default constructor. 
*/ public TestCancerCenterClientUI() { init(); aWindow.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE); aWindow.addWindowListener(new WindowAdapter() { public void windowClosed(WindowEvent e) { stopThreads(); System.exit(0); } }); msgDispBox.addElement("Progress Shown Here"); //Add the tabs to the frame JTabbedPane jtab = new JTabbedPane(); JPanel jpanelMain = createMainPanel(); jtab.addTab("Main", jpanelMain); JPanel jpanelStatus = createStatusPanel(); jtab.addTab("Status", jpanelStatus); aWindow.getContentPane().add(jtab, BorderLayout.CENTER); aWindow.setSize(950, 650); aWindow.setVisible(true); } /** * The init() method is used to read the default properties file and initialize the * input text fields with the user selected preferences. If the Tool is run for the * first time; the text fields are blank. */ public void init() { try { /*InputStream is = TestCancerCenterClientUI.class.getClassLoader(). getResourceAsStream("./properties/DefaultProperties.properties"); */ String path = System.getProperty("DefaultProperties.File"); FileInputStream is = new FileInputStream(new File(path)); if (is!=null) { Properties props = new Properties(); //Read in the stored properties props.load(is); jtxtHL7V2Dir.setText(props.getProperty("HL7V2Dir")); jtxtCSVDir.setText(props.getProperty("rawFilesFolder")); jtxtMAPDir.setText(props.getProperty("mapFileName")); jtxtPollingInterval.setText(props .getProperty("pollingDelayInSeconds")); jtxtProcessedFilesDir.setText(props .getProperty("processedFolder")); jtxtMAPHL7V2Dir.setText(props.getProperty("hl7v2mapFileName")); jtxtInProcessFilesDir.setText(props .getProperty("inProcessDirectory")); jtxtInitialInterval.setText(props .getProperty("initialDelayInSeconds")); jtxtLocation.setText(props.getProperty("Location")); //jtxtIDPLocation.setText(props.getProperty("IDPLocation")); jtxtOrgName.setText(props.getProperty("ORGANIZATIONNAME")); jtxtUserName.setText(props.getProperty("userName")); 
jtxtPassword.setText(props.getProperty("userPasswd")); jtxtHubURL.setText(props.getProperty("HubURL")); jtxtstudyLookupServiceURL.setText(props.getProperty("StudyLookUpServiceURL")); jtxtpreProcessorProFile.setText(props .getProperty("preProcessorPropertiesFile")); jcomboBoxVersion.setSelectedItem(props.getProperty("V2Version")); csvDirectory = jtxtCSVDir.getText(); inProcessDirectory = jtxtInProcessFilesDir.getText(); mapDirectory = jtxtMAPDir.getText(); hl7v2Directory = jtxtHL7V2Dir.getText(); hl7v2mapDirectory = jtxtMAPHL7V2Dir.getText(); processedDirectory = jtxtProcessedFilesDir.getText(); preProcessorFile = jtxtpreProcessorProFile.getText(); } } catch (Exception e) { logger.error("Exception processing Cancer Center Properties File"+e); } } /** * Creates the Main Panel * @return jplPanel */ protected JPanel createMainPanel() { //Heading display box String text = "Accepts and saves the user entered values to perform"; String text1 = "the csv to HL7V3 and HL7V2 to HL7V3 conversion"; String star = "* Please select/enter all the form fields"; JPanel jplPanel = new JPanel(); jplPanel.setLayout(new BoxLayout(jplPanel, BoxLayout.PAGE_AXIS)); JLabel jlbDisplay = new JLabel(text); JLabel jlbDisplay1 = new JLabel(text1); jlbDisplay.setForeground(Color.BLUE); jlbDisplay1.setForeground(Color.BLUE); jlbDisplay.setFont(new Font("Serif", Font.BOLD, 15)); jlbDisplay1.setFont(new Font("Serif", Font.BOLD, 15)); Box topBox = Box.createVerticalBox(); topBox.add(Box.createVerticalStrut(10)); topBox.add(jlbDisplay); topBox.add(jlbDisplay1); Box csvBox = Box.createVerticalBox(); csvBox.setBorder(BorderFactory.createTitledBorder(blackline, "CSV")); csvBox.add(Box.createVerticalStrut(10)); //add the csv, map & HL7V3 file directory selection Box csvBox1 = Box.createHorizontalBox(); csvBox1.add(Box.createHorizontalStrut(10)); JLabel jlbCSVLabel = new JLabel("Select the raw files directory"); csvBox1.add(jlbCSVLabel); csvBox1.add(Box.createHorizontalStrut(10)); csvBox1.add(jtxtCSVDir); 
JButton jfileCSV = new JButton("Browse..."); jfileCSV.addActionListener(this); jfileCSV.setActionCommand("BrowseCSVFile"); jtxtCSVDir.add(jfileCSV); csvBox1.add(jfileCSV); csvBox.add(csvBox1); Box csvBox2 = Box.createHorizontalBox(); csvBox2.add(Box.createHorizontalStrut(10)); JLabel jlbMAPLabel = new JLabel("Select the MAP file"); csvBox2.add(jlbMAPLabel); csvBox2.add(Box.createHorizontalStrut(67)); csvBox2.add(jtxtMAPDir); JButton jfileMAP = new JButton("Browse..."); jfileMAP.addActionListener(this); jfileMAP.setActionCommand("BrowseMAPFile"); jtxtMAPDir.add(jfileMAP); csvBox2.add(jfileMAP); csvBox.add(csvBox2); //HL7V2 box Box HL7V2Box = Box.createVerticalBox(); HL7V2Box .setBorder(BorderFactory.createTitledBorder(blackline, "HL7V2")); HL7V2Box.add(Box.createVerticalStrut(10)); Box csvBox3 = Box.createHorizontalBox(); csvBox3.add(Box.createHorizontalStrut(10)); JLabel jlbHL7V2Label = new JLabel("Select the HL7V2 directory"); csvBox3.add(jlbHL7V2Label); csvBox3.add(Box.createHorizontalStrut(15)); csvBox3.add(jtxtHL7V2Dir); JButton jfileHL7V2 = new JButton("Browse..."); jfileHL7V2.addActionListener(this); jfileHL7V2.setActionCommand("BrowseHL7V2File"); jtxtHL7V2Dir.add(jfileHL7V2); csvBox3.add(jfileHL7V2); HL7V2Box.add(csvBox3); Box csvBox6 = Box.createHorizontalBox(); csvBox6.add(Box.createHorizontalStrut(10)); JLabel jlbMAPHL7V2Label = new JLabel("Select the MAP file"); csvBox6.add(jlbMAPHL7V2Label); csvBox6.add(Box.createHorizontalStrut(60)); csvBox6.add(jtxtMAPHL7V2Dir); JButton jfileMAPHL7V2 = new JButton("Browse..."); jfileMAPHL7V2.addActionListener(this); jfileMAPHL7V2.setActionCommand("BrowseMAPHL7V2File"); jtxtMAPHL7V2Dir.add(jfileMAPHL7V2); csvBox6.add(jfileMAPHL7V2); HL7V2Box.add(csvBox6); Box csvBox13 = Box.createHorizontalBox(); csvBox13.add(Box.createHorizontalStrut(10)); JLabel jlbVersionLabel = new JLabel("Select the Version"); csvBox13.add(jlbVersionLabel); csvBox13.add(Box.createHorizontalStrut(60)); jcomboBoxVersion.addActionListener(this); 
csvBox13.add(jcomboBoxVersion); HL7V2Box.add(csvBox13); //1. Processed files & In process directory 2. polling delay & Initial delay Box csvBox4 = Box.createVerticalBox(); csvBox4.add(Box.createVerticalStrut(10)); csvBox4.setBorder(BorderFactory.createTitledBorder(blackline, "Common Settings")); JLabel jlbPreProcessedLabel = new JLabel( "Select the Pre Processed Property file"); Box secondBox = Box.createHorizontalBox(); secondBox.add(Box.createHorizontalStrut(10)); secondBox.add(jlbPreProcessedLabel); secondBox.add(Box.createHorizontalStrut(10)); secondBox.add(jtxtpreProcessorProFile); JButton jfilePreProcessed = new JButton("Browse..."); jfilePreProcessed.addActionListener(this); jfilePreProcessed.setActionCommand("BrowsePreProcessedFile"); jtxtpreProcessorProFile.add(jfilePreProcessed); secondBox.add(jfilePreProcessed); csvBox4.add(secondBox); JLabel jlbProcessedLabel = new JLabel( "Select the Processed file(s) directory"); Box thirdBox = Box.createHorizontalBox(); thirdBox.add(Box.createHorizontalStrut(10)); thirdBox.add(jlbProcessedLabel); thirdBox.add(Box.createHorizontalStrut(15)); thirdBox.add(jtxtProcessedFilesDir); JButton jfileProcessed = new JButton("Browse..."); jfileProcessed.addActionListener(this); jfileProcessed.setActionCommand("BrowseProcessedDir"); jtxtProcessedFilesDir.add(jfileProcessed); thirdBox.add(jfileProcessed); csvBox4.add(thirdBox); JLabel jlbInProcessLabel = new JLabel( "Select the InProcess file(s) directory"); Box fourthBox = Box.createHorizontalBox(); fourthBox.add(Box.createHorizontalStrut(10)); fourthBox.add(jlbInProcessLabel); fourthBox.add(Box.createHorizontalStrut(19)); fourthBox.add(jtxtInProcessFilesDir); JButton jfileInProcess = new JButton("Browse..."); jfileInProcess.addActionListener(this); jfileInProcess.setActionCommand("BrowseInProcessDir"); jtxtInProcessFilesDir.add(jfileInProcess); fourthBox.add(jfileInProcess); csvBox4.add(fourthBox); Box csvBox5 = Box.createHorizontalBox(); 
csvBox5.add(Box.createHorizontalStrut(10)); JLabel jlbPollingLabel = new JLabel( "Enter the polling delay in seconds"); csvBox5.add(jlbPollingLabel); csvBox5.add(Box.createHorizontalStrut(37)); jtxtPollingInterval.addFocusListener(new FocusAdapter() { public void focusLost(FocusEvent e) { JTextField textField = (JTextField) e.getSource(); String content = textField.getText(); if (content.length() != 0) { try { Integer.parseInt(content); } catch (NumberFormatException nfe) { getToolkit().beep(); textField.requestFocus(); } } } }); csvBox5.add(jtxtPollingInterval); csvBox4.add(csvBox5); csvBox5.add(Box.createHorizontalStrut(75)); JLabel jlbInitialLabel = new JLabel( "Enter the Initial delay in seconds"); csvBox5.add(jlbInitialLabel); csvBox5.add(Box.createHorizontalStrut(19)); jtxtInitialInterval.addFocusListener(new FocusAdapter() { public void focusLost(FocusEvent e) { JTextField textField = (JTextField) e.getSource(); String content = textField.getText(); if (content.length() != 0) { try { Integer.parseInt(content); } catch (NumberFormatException nfe) { getToolkit().beep(); textField.requestFocus(); } } } }); csvBox5.add(jtxtInitialInterval); csvBox4.add(csvBox5); //Location and IDPLocation Box csvBox8 = Box.createHorizontalBox(); csvBox8.add(Box.createHorizontalStrut(10)); JLabel jlbLocationLabel = new JLabel("Enter the Location name"); csvBox8.add(jlbLocationLabel); csvBox8.add(Box.createHorizontalStrut(88)); csvBox8.add(jtxtLocation); csvBox4.add(csvBox8); /* csvBox8.add(Box.createHorizontalStrut(125)); JLabel jlbIDPLocationLabel = new JLabel("Enter the IDP Location"); csvBox8.add(jlbIDPLocationLabel); csvBox8.add(Box.createHorizontalStrut(120)); csvBox8.add(jtxtIDPLocation);*/ csvBox4.add(csvBox8); //Organization name Box csvBox9 = Box.createHorizontalBox(); csvBox9.add(Box.createHorizontalStrut(10)); JLabel jlbOrgNameLabel = new JLabel("Enter the Organization name"); csvBox9.add(jlbOrgNameLabel); csvBox9.add(Box.createHorizontalStrut(65)); 
csvBox9.add(jtxtOrgName); csvBox4.add(csvBox9); //Hub URL Box csvBox10 = Box.createHorizontalBox(); csvBox10.add(Box.createHorizontalStrut(10)); JLabel jlbhubURLLabel = new JLabel("Enter the Hub URL"); csvBox10.add(jlbhubURLLabel); csvBox10.add(Box.createHorizontalStrut(124)); csvBox10.add(jtxtHubURL); csvBox4.add(csvBox10); Box csvBox12 = Box.createHorizontalBox(); csvBox12.add(Box.createHorizontalStrut(10)); JLabel jlbstudyLookupServiceURLLabel = new JLabel("Enter the StudyLookUpService URL"); csvBox12.add(jlbstudyLookupServiceURLLabel); csvBox12.add(Box.createHorizontalStrut(27)); csvBox12.add(jtxtstudyLookupServiceURL); csvBox4.add(csvBox12); //User Creds Box userBox = Box.createVerticalBox(); userBox.add(Box.createVerticalStrut(10)); userBox.setBorder(BorderFactory.createTitledBorder(blackline, "User Credentials")); Box csvBox11 = Box.createHorizontalBox(); csvBox11.add(Box.createHorizontalStrut(10)); JLabel jlbuserNameLabel = new JLabel("Enter the User Name"); csvBox11.add(jlbuserNameLabel); csvBox11.add(Box.createHorizontalStrut(105)); csvBox11.add(jtxtUserName); userBox.add(csvBox11); csvBox11.add(Box.createHorizontalStrut(10)); JLabel jlbpasswordLabel = new JLabel("Enter the Password"); csvBox11.add(jlbpasswordLabel); csvBox11.add(Box.createHorizontalStrut(10)); csvBox11.add(jtxtPassword); userBox.add(csvBox11); //Button box Box buttonBox = Box.createVerticalBox(); buttonBox.add(Box.createVerticalStrut(10)); Box csvBox7 = Box.createHorizontalBox(); csvBox7.add(Box.createHorizontalStrut(10)); JButton jbnClear = new JButton("Clear"); jbnClear.addActionListener(this); jbnClear.setActionCommand("Clear"); csvBox7.setAlignmentX(Component.CENTER_ALIGNMENT); csvBox7.add(jbnClear); JButton jbnAccept = new JButton("Accept"); jbnAccept.addActionListener(this); jbnAccept.setActionCommand("Accept"); csvBox7.add(jbnAccept); JButton jbnClose = new JButton("Close"); jbnClose.addActionListener(this); jbnClose.setActionCommand("Close"); 
csvBox7.setAlignmentX(Component.CENTER_ALIGNMENT); csvBox7.add(jbnClose); buttonBox.add(csvBox7); JLabel jlbStar = new JLabel(star); jlbStar.setForeground(Color.RED); jlbStar.setFont(new Font("Ariel", Font.ITALIC, 12)); jlbStar.setAlignmentX(Component.CENTER_ALIGNMENT); buttonBox.add(jlbStar); jplPanel.add(topBox); jplPanel.add(csvBox); jplPanel.add(HL7V2Box); jplPanel.add(csvBox4); jplPanel.add(userBox); jplPanel.add(buttonBox); return jplPanel; } /** * Creates the Status Panel * @return jplPanel */ protected JPanel createStatusPanel() { dispList = new JList(msgDispBox); dispList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); dispList.setSelectedIndex(0); dispList.setFont(new Font("Ariel", Font.PLAIN, 15)); dispList.setForeground(Color.BLUE); JScrollPane listScrollPane = new JScrollPane(dispList); JPanel jplPanel = new JPanel(); jplPanel.setLayout(new GridLayout(1, 1)); jplPanel.add(listScrollPane); return jplPanel; } /** * @param args */ public static void main(String[] args) { TestCancerCenterClientUI testUI = new TestCancerCenterClientUI(); } /** * actionPerformed is called whenever an action is executed on the swing * interface. 
* <P> * @param e The ActionEvent that was fired */ public void actionPerformed(ActionEvent e) { if ("BrowseCSVFile".equals(e.getActionCommand())) { msgDispBox.addElement("Selecting the CVS file directory"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { csvDirectory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtCSVDir.setText(csvDirectory); } } else if ("BrowseMAPFile".equals(e.getActionCommand())) { msgDispBox.addElement("Selecting the MAP file directory"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { mapDirectory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtMAPDir.setText(mapDirectory); } } else if ("BrowseMAPHL7V2File".equals(e.getActionCommand())) { msgDispBox.addElement("Selecting the HL7V2 MAP file directory"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { hl7v2mapDirectory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtMAPHL7V2Dir.setText(hl7v2mapDirectory); } } else if ("BrowseHL7V2File".equals(e.getActionCommand())) { msgDispBox.addElement("Selecting the HL7V2 file directory"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { hl7v2Directory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtHL7V2Dir.setText(hl7v2Directory); } } else if ("BrowseProcessedDir".equals(e.getActionCommand())) { msgDispBox .addElement("Selecting the directory for Processed files"); 
JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { processedDirectory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtProcessedFilesDir.setText(processedDirectory); } } else if ("BrowseInProcessDir".equals(e.getActionCommand())) { msgDispBox .addElement("Selecting the directory for In Processed files"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { inProcessDirectory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtInProcessFilesDir.setText(inProcessDirectory); } } else if ("BrowsePreProcessedFile".equals(e.getActionCommand())) { msgDispBox.addElement("Selecting the Pre Processed property file"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { preProcessorFile = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtpreProcessorProFile.setText(preProcessorFile); } } else if ("Clear".equals(e.getActionCommand())) { jtxtHL7V2Dir.setText(""); jtxtCSVDir.setText(""); jtxtMAPDir.setText(""); jtxtPollingInterval.setText(""); jtxtProcessedFilesDir.setText(""); jtxtMAPHL7V2Dir.setText(""); jtxtInProcessFilesDir.setText(""); jtxtInitialInterval.setText(""); jtxtLocation.setText(""); //jtxtIDPLocation.setText(""); jtxtOrgName.setText(""); jtxtUserName.setText(""); jtxtPassword.setText(""); jtxtHubURL.setText(""); jtxtpreProcessorProFile.setText(""); jtxtstudyLookupServiceURL.setText(""); }else if ("Close".equals(e.getActionCommand())) { stopThreads(); System.exit(0); }else if ("Accept".equals(e.getActionCommand())) { msgDispBox.addElement("Saving the selection"); 
File file = saveDefaults(); CancerCenterClient.getInstance().test(file,threadsList); //updating the status panel with the log file updates. int delay = 10; //milliseconds try { FileReader fipStream = new FileReader( "../log/CancerCenterClient.log"); buffReader = new BufferedReader(fipStream); ActionListener taskPerformer = new ActionListener() { public synchronized void actionPerformed(ActionEvent evt) { try{ while (buffReader.readLine() != null) { msgDispBox.addElement(buffReader.readLine()); } } catch (IOException e2) { logger.error("IOException" + e2.getLocalizedMessage()); } } }; new Timer(delay, taskPerformer).start(); }catch (FileNotFoundException e1) { logger .error("FileNotFoundException" + e1.getLocalizedMessage()); } } } /** * Updates the UI with the path of the browsed directory * @return fileChooser */ private JFileChooser fileChoose() { JFileChooser fileChooser = new JFileChooser() { public void updateUI() { putClientProperty("FileChooser.useShellFolder", Boolean.FALSE); super.updateUI(); } }; return fileChooser; } /** * Replaces the file separator from '\' to'/' for the user selected directories. 
* @param path * @return placeholder */ private String fileSeparatorReplace(String path) { String placeholder; placeholder = path.replace('\\', '/'); return placeholder; } /** * Creates the inProcessFolder, rawFilesBackUpFolder, errorFolder in the user selected * inProcess Directory * @return created */ private boolean createdInProcessFolders() { boolean created = true; inProcessFolder = new File(inProcessDirectory + "/inProcessFolder"); if (!inProcessFolder.exists()) created = inProcessFolder.mkdir(); rawFilesBackupFolder = new File(inProcessDirectory + "/rawFilesBackupFolder"); if (!rawFilesBackupFolder.exists()) created = rawFilesBackupFolder.mkdir(); errorFolder = new File(inProcessDirectory + "/errorFolder"); if (!errorFolder.exists()) created = errorFolder.mkdir(); return created; } /** * Saves the user selected values into DefaultProperties.properties file. * @return file DefaultProperties file */ private File saveDefaults() { String path =System.getProperty("DefaultProperties.File"); File file = new File(path); String txtVersion =(String)jcomboBoxVersion.getSelectedItem(); try{ if (createdInProcessFolders()) { FileWriter fstream = new FileWriter(file,false); fstream.write("rawFilesFolder=" + csvDirectory); fstream.write("\n"); fstream.write("mapFileName=" + mapDirectory); fstream.write("\n"); fstream.write("HL7V2Dir=" + hl7v2Directory); fstream.write("\n"); fstream.write("hl7v2mapFileName=" + hl7v2mapDirectory); fstream.write("\n"); fstream.write("inProcessDirectory=" + inProcessDirectory); fstream.write("\n"); fstream.write("inProcessFolder=" + inProcessFolder.getAbsolutePath().replace('\\', '/') + "/"); fstream.write("\n"); fstream.write("rawFilesBackupFolder=" + rawFilesBackupFolder.getAbsolutePath().replace('\\', '/') + "/"); fstream.write("\n"); fstream.write("errorFolder=" + errorFolder.getAbsolutePath().replace('\\', '/') + "/"); fstream.write("\n"); fstream.write("processedFolder=" + processedDirectory); fstream.write("\n"); 
fstream.write("pollingDelayInSeconds=" + jtxtPollingInterval.getText()); fstream.write("\n"); fstream.write("initialDelayInSeconds=" + jtxtInitialInterval.getText()); fstream.write("\n"); fstream.write("Location=" + jtxtLocation.getText()); fstream.write("\n"); fstream.write("V2Version=" + txtVersion); fstream.write("\n"); fstream.write("ORGANIZATIONNAME=" + jtxtOrgName.getText()); fstream.write("\n"); fstream.write("userName=" + jtxtUserName.getText()); fstream.write("\n"); fstream.write("userPassword=" + jtxtPassword.getText()); fstream.write("\n"); fstream.write("HubURL=" + jtxtHubURL.getText()); fstream.write("\n"); fstream.write("StudyLookUpServiceURL=" + jtxtstudyLookupServiceURL.getText()); fstream.write("\n"); fstream.write("preProcessorPropertiesFile=" + preProcessorFile); fstream.write("\n"); fstream.flush(); fstream.close(); } } catch (IOException e) { logger.error("File not found" + e.getLocalizedMessage()); } return file; } /** * Stops the threads which are polling the directories to check for a new * .CSV file or a HL7V2 message */ private void stopThreads() { if(!threadsList.isEmpty()){ for(ScheduledExecutorService se: threadsList) { se.shutdownNow(); } } } }
CancerCenterClient/src/java/main/gov/nih/nci/caxchange/client/TestCancerCenterClientUI.java
package gov.nih.nci.caxchange.client; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Component; import java.awt.Font; import java.awt.GridLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.FocusAdapter; import java.awt.event.FocusEvent; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Properties; import java.util.concurrent.ScheduledExecutorService; import javax.swing.BorderFactory; import javax.swing.Box; import javax.swing.BoxLayout; import javax.swing.DefaultListModel; import javax.swing.JButton; import javax.swing.JFileChooser; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JPanel; import javax.swing.JPasswordField; import javax.swing.JScrollPane; import javax.swing.JTabbedPane; import javax.swing.JTextField; import javax.swing.ListSelectionModel; import javax.swing.Timer; import javax.swing.WindowConstants; import javax.swing.border.Border; import org.apache.log4j.Logger; /** * @author asharma * */ public class TestCancerCenterClientUI extends JPanel implements ActionListener { private static final long serialVersionUID = 1L; private String csvDirectory; private String hl7v2Directory; private String mapDirectory; private String hl7v2mapDirectory; private String processedDirectory; private String inProcessDirectory; private String preProcessorFile; private File inProcessFolder; private File rawFilesBackupFolder; private File errorFolder; private static JFrame aWindow = new JFrame("Cancer Center Hub Client (CCHC)"); private JTextField jtxtHL7V2Dir = new JTextField(); private JTextField jtxtProcessedFilesDir = new JTextField(); private JTextField jtxtCSVDir = new 
JTextField(); private JTextField jtxtMAPDir = new JTextField(); private JTextField jtxtMAPHL7V2Dir = new JTextField(); private JTextField jtxtInProcessFilesDir = new JTextField(); private JTextField jtxtPollingInterval = new JTextField(); private JTextField jtxtInitialInterval = new JTextField(); private JTextField jtxtLocation = new JTextField(); //private JTextField jtxtIDPLocation = new JTextField(); private JTextField jtxtOrgName = new JTextField(); private JTextField jtxtUserName = new JTextField(); private JTextField jtxtPassword = new JPasswordField(); private JTextField jtxtHubURL = new JTextField(); private JTextField jtxtstudyLookupServiceURL = new JTextField(); private JTextField jtxtpreProcessorProFile = new JTextField(); private DefaultListModel msgDispBox = new DefaultListModel(); private JList dispList = new JList(); private static Logger logger = Logger .getLogger("gov.nih.nci.caxchange.client.TestCancerCenterClientUI"); private BufferedReader buffReader; private Border blackline = BorderFactory.createLineBorder(Color.black); private final ArrayList<ScheduledExecutorService>threadsList=new ArrayList<ScheduledExecutorService>(); /** * Default constructor. */ public TestCancerCenterClientUI() { init(); aWindow.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE); aWindow.addWindowListener(new WindowAdapter() { public void windowClosed(WindowEvent e) { stopThreads(); System.exit(0); } }); msgDispBox.addElement("Progress Shown Here"); //Add the tabs to the frame JTabbedPane jtab = new JTabbedPane(); JPanel jpanelMain = createMainPanel(); jtab.addTab("Main", jpanelMain); JPanel jpanelStatus = createStatusPanel(); jtab.addTab("Status", jpanelStatus); aWindow.getContentPane().add(jtab, BorderLayout.CENTER); aWindow.setSize(950, 650); aWindow.setVisible(true); } /** * The init() method is used to read the default properties file and initialize the * input text fields with the user selected preferences. 
If the Tool is run for the * first time; the text fields are blank. */ public void init() { try { /*InputStream is = TestCancerCenterClientUI.class.getClassLoader(). getResourceAsStream("./properties/DefaultProperties.properties"); */ String path = System.getProperty("DefaultProperties.File"); FileInputStream is = new FileInputStream(new File(path)); if (is!=null) { Properties props = new Properties(); //Read in the stored properties props.load(is); jtxtHL7V2Dir.setText(props.getProperty("HL7V2Dir")); jtxtCSVDir.setText(props.getProperty("rawFilesFolder")); jtxtMAPDir.setText(props.getProperty("mapFileName")); jtxtPollingInterval.setText(props .getProperty("pollingDelayInSeconds")); jtxtProcessedFilesDir.setText(props .getProperty("processedFolder")); jtxtMAPHL7V2Dir.setText(props.getProperty("hl7v2mapFileName")); jtxtInProcessFilesDir.setText(props .getProperty("inProcessDirectory")); jtxtInitialInterval.setText(props .getProperty("initialDelayInSeconds")); jtxtLocation.setText(props.getProperty("Location")); //jtxtIDPLocation.setText(props.getProperty("IDPLocation")); jtxtOrgName.setText(props.getProperty("ORGANIZATIONNAME")); jtxtUserName.setText(props.getProperty("userName")); jtxtPassword.setText(props.getProperty("userPasswd")); jtxtHubURL.setText(props.getProperty("HubURL")); jtxtstudyLookupServiceURL.setText(props.getProperty("StudyLookUpServiceURL")); jtxtpreProcessorProFile.setText(props .getProperty("preProcessorPropertiesFile")); csvDirectory = jtxtCSVDir.getText(); inProcessDirectory = jtxtInProcessFilesDir.getText(); mapDirectory = jtxtMAPDir.getText(); hl7v2Directory = jtxtHL7V2Dir.getText(); hl7v2mapDirectory = jtxtMAPHL7V2Dir.getText(); processedDirectory = jtxtProcessedFilesDir.getText(); preProcessorFile = jtxtpreProcessorProFile.getText(); } } catch (Exception e) { logger.error("Exception processing Cancer Center Properties File"); } } /** * Creates the Main Panel * @return jplPanel */ protected JPanel createMainPanel() { //Heading display box 
String text = "Accepts and saves the user entered values to perform"; String text1 = "the csv to HL7V3 and HL7V2 to HL7V3 conversion"; String star = "* Please select/enter all the form fields"; JPanel jplPanel = new JPanel(); jplPanel.setLayout(new BoxLayout(jplPanel, BoxLayout.PAGE_AXIS)); JLabel jlbDisplay = new JLabel(text); JLabel jlbDisplay1 = new JLabel(text1); jlbDisplay.setForeground(Color.BLUE); jlbDisplay1.setForeground(Color.BLUE); jlbDisplay.setFont(new Font("Serif", Font.BOLD, 15)); jlbDisplay1.setFont(new Font("Serif", Font.BOLD, 15)); Box topBox = Box.createVerticalBox(); topBox.add(Box.createVerticalStrut(10)); topBox.add(jlbDisplay); topBox.add(jlbDisplay1); Box csvBox = Box.createVerticalBox(); csvBox.setBorder(BorderFactory.createTitledBorder(blackline, "CSV")); csvBox.add(Box.createVerticalStrut(10)); //add the csv, map & HL7V3 file directory selection Box csvBox1 = Box.createHorizontalBox(); csvBox1.add(Box.createHorizontalStrut(10)); JLabel jlbCSVLabel = new JLabel("Select the raw files directory"); csvBox1.add(jlbCSVLabel); csvBox1.add(Box.createHorizontalStrut(10)); csvBox1.add(jtxtCSVDir); JButton jfileCSV = new JButton("Browse..."); jfileCSV.addActionListener(this); jfileCSV.setActionCommand("BrowseCSVFile"); jtxtCSVDir.add(jfileCSV); csvBox1.add(jfileCSV); csvBox.add(csvBox1); Box csvBox2 = Box.createHorizontalBox(); csvBox2.add(Box.createHorizontalStrut(10)); JLabel jlbMAPLabel = new JLabel("Select the MAP file"); csvBox2.add(jlbMAPLabel); csvBox2.add(Box.createHorizontalStrut(67)); csvBox2.add(jtxtMAPDir); JButton jfileMAP = new JButton("Browse..."); jfileMAP.addActionListener(this); jfileMAP.setActionCommand("BrowseMAPFile"); jtxtMAPDir.add(jfileMAP); csvBox2.add(jfileMAP); csvBox.add(csvBox2); //HL7V2 box Box HL7V2Box = Box.createVerticalBox(); HL7V2Box .setBorder(BorderFactory.createTitledBorder(blackline, "HL7V2")); HL7V2Box.add(Box.createVerticalStrut(10)); Box csvBox3 = Box.createHorizontalBox(); 
csvBox3.add(Box.createHorizontalStrut(10)); JLabel jlbHL7V2Label = new JLabel("Select the HL7V2 directory"); csvBox3.add(jlbHL7V2Label); csvBox3.add(Box.createHorizontalStrut(15)); csvBox3.add(jtxtHL7V2Dir); JButton jfileHL7V2 = new JButton("Browse..."); jfileHL7V2.addActionListener(this); jfileHL7V2.setActionCommand("BrowseHL7V2File"); jtxtHL7V2Dir.add(jfileHL7V2); csvBox3.add(jfileHL7V2); HL7V2Box.add(csvBox3); Box csvBox6 = Box.createHorizontalBox(); csvBox6.add(Box.createHorizontalStrut(10)); JLabel jlbMAPHL7V2Label = new JLabel("Select the MAP file"); csvBox6.add(jlbMAPHL7V2Label); csvBox6.add(Box.createHorizontalStrut(60)); csvBox6.add(jtxtMAPHL7V2Dir); JButton jfileMAPHL7V2 = new JButton("Browse..."); jfileMAPHL7V2.addActionListener(this); jfileMAPHL7V2.setActionCommand("BrowseMAPHL7V2File"); jtxtMAPHL7V2Dir.add(jfileMAPHL7V2); csvBox6.add(jfileMAPHL7V2); HL7V2Box.add(csvBox6); //1. Processed files & In process directory 2. polling delay & Initial delay Box csvBox4 = Box.createVerticalBox(); csvBox4.add(Box.createVerticalStrut(10)); csvBox4.setBorder(BorderFactory.createTitledBorder(blackline, "Common Settings")); JLabel jlbPreProcessedLabel = new JLabel( "Select the Pre Processed Property file"); Box secondBox = Box.createHorizontalBox(); secondBox.add(Box.createHorizontalStrut(10)); secondBox.add(jlbPreProcessedLabel); secondBox.add(Box.createHorizontalStrut(10)); secondBox.add(jtxtpreProcessorProFile); JButton jfilePreProcessed = new JButton("Browse..."); jfilePreProcessed.addActionListener(this); jfilePreProcessed.setActionCommand("BrowsePreProcessedFile"); jtxtpreProcessorProFile.add(jfilePreProcessed); secondBox.add(jfilePreProcessed); csvBox4.add(secondBox); JLabel jlbProcessedLabel = new JLabel( "Select the Processed file(s) directory"); Box thirdBox = Box.createHorizontalBox(); thirdBox.add(Box.createHorizontalStrut(10)); thirdBox.add(jlbProcessedLabel); thirdBox.add(Box.createHorizontalStrut(15)); thirdBox.add(jtxtProcessedFilesDir); JButton 
jfileProcessed = new JButton("Browse..."); jfileProcessed.addActionListener(this); jfileProcessed.setActionCommand("BrowseProcessedDir"); jtxtProcessedFilesDir.add(jfileProcessed); thirdBox.add(jfileProcessed); csvBox4.add(thirdBox); JLabel jlbInProcessLabel = new JLabel( "Select the InProcess file(s) directory"); Box fourthBox = Box.createHorizontalBox(); fourthBox.add(Box.createHorizontalStrut(10)); fourthBox.add(jlbInProcessLabel); fourthBox.add(Box.createHorizontalStrut(19)); fourthBox.add(jtxtInProcessFilesDir); JButton jfileInProcess = new JButton("Browse..."); jfileInProcess.addActionListener(this); jfileInProcess.setActionCommand("BrowseInProcessDir"); jtxtInProcessFilesDir.add(jfileInProcess); fourthBox.add(jfileInProcess); csvBox4.add(fourthBox); Box csvBox5 = Box.createHorizontalBox(); csvBox5.add(Box.createHorizontalStrut(10)); JLabel jlbPollingLabel = new JLabel( "Enter the polling delay in seconds"); csvBox5.add(jlbPollingLabel); csvBox5.add(Box.createHorizontalStrut(37)); jtxtPollingInterval.addFocusListener(new FocusAdapter() { public void focusLost(FocusEvent e) { JTextField textField = (JTextField) e.getSource(); String content = textField.getText(); if (content.length() != 0) { try { Integer.parseInt(content); } catch (NumberFormatException nfe) { getToolkit().beep(); textField.requestFocus(); } } } }); csvBox5.add(jtxtPollingInterval); csvBox4.add(csvBox5); csvBox5.add(Box.createHorizontalStrut(75)); JLabel jlbInitialLabel = new JLabel( "Enter the Initial delay in seconds"); csvBox5.add(jlbInitialLabel); csvBox5.add(Box.createHorizontalStrut(19)); jtxtInitialInterval.addFocusListener(new FocusAdapter() { public void focusLost(FocusEvent e) { JTextField textField = (JTextField) e.getSource(); String content = textField.getText(); if (content.length() != 0) { try { Integer.parseInt(content); } catch (NumberFormatException nfe) { getToolkit().beep(); textField.requestFocus(); } } } }); csvBox5.add(jtxtInitialInterval); csvBox4.add(csvBox5); 
//Location and IDPLocation Box csvBox8 = Box.createHorizontalBox(); csvBox8.add(Box.createHorizontalStrut(10)); JLabel jlbLocationLabel = new JLabel("Enter the Location name"); csvBox8.add(jlbLocationLabel); csvBox8.add(Box.createHorizontalStrut(88)); csvBox8.add(jtxtLocation); csvBox4.add(csvBox8); /* csvBox8.add(Box.createHorizontalStrut(125)); JLabel jlbIDPLocationLabel = new JLabel("Enter the IDP Location"); csvBox8.add(jlbIDPLocationLabel); csvBox8.add(Box.createHorizontalStrut(120)); csvBox8.add(jtxtIDPLocation);*/ csvBox4.add(csvBox8); //Organization name Box csvBox9 = Box.createHorizontalBox(); csvBox9.add(Box.createHorizontalStrut(10)); JLabel jlbOrgNameLabel = new JLabel("Enter the Organization name"); csvBox9.add(jlbOrgNameLabel); csvBox9.add(Box.createHorizontalStrut(65)); csvBox9.add(jtxtOrgName); csvBox4.add(csvBox9); //Hub URL Box csvBox10 = Box.createHorizontalBox(); csvBox10.add(Box.createHorizontalStrut(10)); JLabel jlbhubURLLabel = new JLabel("Enter the Hub URL"); csvBox10.add(jlbhubURLLabel); csvBox10.add(Box.createHorizontalStrut(124)); csvBox10.add(jtxtHubURL); csvBox4.add(csvBox10); Box csvBox12 = Box.createHorizontalBox(); csvBox12.add(Box.createHorizontalStrut(10)); JLabel jlbstudyLookupServiceURLLabel = new JLabel("Enter the StudyLookUpService URL"); csvBox12.add(jlbstudyLookupServiceURLLabel); csvBox12.add(Box.createHorizontalStrut(27)); csvBox12.add(jtxtstudyLookupServiceURL); csvBox4.add(csvBox12); //User Creds Box userBox = Box.createVerticalBox(); userBox.add(Box.createVerticalStrut(10)); userBox.setBorder(BorderFactory.createTitledBorder(blackline, "User Credentials")); Box csvBox11 = Box.createHorizontalBox(); csvBox11.add(Box.createHorizontalStrut(10)); JLabel jlbuserNameLabel = new JLabel("Enter the User Name"); csvBox11.add(jlbuserNameLabel); csvBox11.add(Box.createHorizontalStrut(105)); csvBox11.add(jtxtUserName); userBox.add(csvBox11); csvBox11.add(Box.createHorizontalStrut(10)); JLabel jlbpasswordLabel = new JLabel("Enter the 
Password"); csvBox11.add(jlbpasswordLabel); csvBox11.add(Box.createHorizontalStrut(10)); csvBox11.add(jtxtPassword); userBox.add(csvBox11); //Button box Box buttonBox = Box.createVerticalBox(); buttonBox.add(Box.createVerticalStrut(10)); Box csvBox7 = Box.createHorizontalBox(); csvBox7.add(Box.createHorizontalStrut(10)); JButton jbnClear = new JButton("Clear"); jbnClear.addActionListener(this); jbnClear.setActionCommand("Clear"); csvBox7.setAlignmentX(Component.CENTER_ALIGNMENT); csvBox7.add(jbnClear); JButton jbnAccept = new JButton("Accept"); jbnAccept.addActionListener(this); jbnAccept.setActionCommand("Accept"); csvBox7.add(jbnAccept); JButton jbnClose = new JButton("Close"); jbnClose.addActionListener(this); jbnClose.setActionCommand("Close"); csvBox7.setAlignmentX(Component.CENTER_ALIGNMENT); csvBox7.add(jbnClose); buttonBox.add(csvBox7); JLabel jlbStar = new JLabel(star); jlbStar.setForeground(Color.RED); jlbStar.setFont(new Font("Ariel", Font.ITALIC, 12)); jlbStar.setAlignmentX(Component.CENTER_ALIGNMENT); buttonBox.add(jlbStar); jplPanel.add(topBox); jplPanel.add(csvBox); jplPanel.add(HL7V2Box); jplPanel.add(csvBox4); jplPanel.add(userBox); jplPanel.add(buttonBox); return jplPanel; } /** * Creates the Status Panel * @return jplPanel */ protected JPanel createStatusPanel() { dispList = new JList(msgDispBox); dispList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); dispList.setSelectedIndex(0); dispList.setFont(new Font("Ariel", Font.PLAIN, 15)); dispList.setForeground(Color.BLUE); JScrollPane listScrollPane = new JScrollPane(dispList); JPanel jplPanel = new JPanel(); jplPanel.setLayout(new GridLayout(1, 1)); jplPanel.add(listScrollPane); return jplPanel; } /** * @param args */ public static void main(String[] args) { TestCancerCenterClientUI testUI = new TestCancerCenterClientUI(); } /** * actionPerformed is called whenever an action is executed on the swing * interface. 
* <P> * @param e The ActionEvent that was fired */ public void actionPerformed(ActionEvent e) { if ("BrowseCSVFile".equals(e.getActionCommand())) { msgDispBox.addElement("Selecting the CVS file directory"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { csvDirectory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtCSVDir.setText(csvDirectory); } } else if ("BrowseMAPFile".equals(e.getActionCommand())) { msgDispBox.addElement("Selecting the MAP file directory"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { mapDirectory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtMAPDir.setText(mapDirectory); } } else if ("BrowseMAPHL7V2File".equals(e.getActionCommand())) { msgDispBox.addElement("Selecting the HL7V2 MAP file directory"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { hl7v2mapDirectory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtMAPHL7V2Dir.setText(hl7v2mapDirectory); } } else if ("BrowseHL7V2File".equals(e.getActionCommand())) { msgDispBox.addElement("Selecting the HL7V2 file directory"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { hl7v2Directory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtHL7V2Dir.setText(hl7v2Directory); } } else if ("BrowseProcessedDir".equals(e.getActionCommand())) { msgDispBox .addElement("Selecting the directory for Processed files"); 
JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { processedDirectory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtProcessedFilesDir.setText(processedDirectory); } } else if ("BrowseInProcessDir".equals(e.getActionCommand())) { msgDispBox .addElement("Selecting the directory for In Processed files"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { inProcessDirectory = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtInProcessFilesDir.setText(inProcessDirectory); } } else if ("BrowsePreProcessedFile".equals(e.getActionCommand())) { msgDispBox.addElement("Selecting the Pre Processed property file"); JFileChooser fileChooser = fileChoose(); fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY); int result = fileChooser.showSaveDialog(this); if (result != JFileChooser.CANCEL_OPTION) { preProcessorFile = fileSeparatorReplace(fileChooser .getSelectedFile().getAbsolutePath()); jtxtpreProcessorProFile.setText(preProcessorFile); } } else if ("Clear".equals(e.getActionCommand())) { jtxtHL7V2Dir.setText(""); jtxtCSVDir.setText(""); jtxtMAPDir.setText(""); jtxtPollingInterval.setText(""); jtxtProcessedFilesDir.setText(""); jtxtMAPHL7V2Dir.setText(""); jtxtInProcessFilesDir.setText(""); jtxtInitialInterval.setText(""); jtxtLocation.setText(""); //jtxtIDPLocation.setText(""); jtxtOrgName.setText(""); jtxtUserName.setText(""); jtxtPassword.setText(""); jtxtHubURL.setText(""); jtxtpreProcessorProFile.setText(""); jtxtstudyLookupServiceURL.setText(""); }else if ("Close".equals(e.getActionCommand())) { stopThreads(); System.exit(0); }else if ("Accept".equals(e.getActionCommand())) { msgDispBox.addElement("Saving the selection"); 
File file = saveDefaults(); CancerCenterClient.getInstance().test(file,threadsList); //updating the status panel with the log file updates. int delay = 10; //milliseconds try { FileReader fipStream = new FileReader( "../log/CancerCenterClient.log"); buffReader = new BufferedReader(fipStream); ActionListener taskPerformer = new ActionListener() { public synchronized void actionPerformed(ActionEvent evt) { try{ while (buffReader.readLine() != null) { msgDispBox.addElement(buffReader.readLine()); } } catch (IOException e2) { logger.error("IOException" + e2.getLocalizedMessage()); } } }; new Timer(delay, taskPerformer).start(); }catch (FileNotFoundException e1) { logger .error("FileNotFoundException" + e1.getLocalizedMessage()); } } } /** * Updates the UI with the path of the browsed directory * @return fileChooser */ private JFileChooser fileChoose() { JFileChooser fileChooser = new JFileChooser() { public void updateUI() { putClientProperty("FileChooser.useShellFolder", Boolean.FALSE); super.updateUI(); } }; return fileChooser; } /** * Replaces the file separator from '\' to'/' for the user selected directories. 
* @param path * @return placeholder */ private String fileSeparatorReplace(String path) { String placeholder; placeholder = path.replace('\\', '/'); return placeholder; } /** * Creates the inProcessFolder, rawFilesBackUpFolder, errorFolder in the user selected * inProcess Directory * @return created */ private boolean createdInProcessFolders() { boolean created = true; inProcessFolder = new File(inProcessDirectory + "/inProcessFolder"); if (!inProcessFolder.exists()) created = inProcessFolder.mkdir(); rawFilesBackupFolder = new File(inProcessDirectory + "/rawFilesBackupFolder"); if (!rawFilesBackupFolder.exists()) created = rawFilesBackupFolder.mkdir(); errorFolder = new File(inProcessDirectory + "/errorFolder"); if (!errorFolder.exists()) created = errorFolder.mkdir(); return created; } /** * Saves the user selected values into DefaultProperties.properties file. * @return file DefaultProperties file */ private File saveDefaults() { String path =System.getProperty("DefaultProperties.File"); File file = new File(path); try{ if (createdInProcessFolders()) { FileWriter fstream = new FileWriter(file,false); fstream.write("rawFilesFolder=" + csvDirectory); fstream.write("\n"); fstream.write("mapFileName=" + mapDirectory); fstream.write("\n"); fstream.write("HL7V2Dir=" + hl7v2Directory); fstream.write("\n"); fstream.write("hl7v2mapFileName=" + hl7v2mapDirectory); fstream.write("\n"); fstream.write("inProcessDirectory=" + inProcessDirectory); fstream.write("\n"); fstream.write("inProcessFolder=" + inProcessFolder.getAbsolutePath().replace('\\', '/') + "/"); fstream.write("\n"); fstream.write("rawFilesBackupFolder=" + rawFilesBackupFolder.getAbsolutePath().replace('\\', '/') + "/"); fstream.write("\n"); fstream.write("errorFolder=" + errorFolder.getAbsolutePath().replace('\\', '/') + "/"); fstream.write("\n"); fstream.write("processedFolder=" + processedDirectory); fstream.write("\n"); fstream.write("pollingDelayInSeconds=" + jtxtPollingInterval.getText()); 
fstream.write("\n"); fstream.write("initialDelayInSeconds=" + jtxtInitialInterval.getText()); fstream.write("\n"); fstream.write("Location=" + jtxtLocation.getText()); fstream.write("\n"); /*fstream.write("IDPLocation=" + jtxtIDPLocation.getText()); fstream.write("\n");*/ fstream.write("ORGANIZATIONNAME=" + jtxtOrgName.getText()); fstream.write("\n"); fstream.write("userName=" + jtxtUserName.getText()); fstream.write("\n"); fstream.write("userPassword=" + jtxtPassword.getText()); fstream.write("\n"); fstream.write("HubURL=" + jtxtHubURL.getText()); fstream.write("\n"); fstream.write("StudyLookUpServiceURL=" + jtxtstudyLookupServiceURL.getText()); fstream.write("\n"); fstream.write("preProcessorPropertiesFile=" + preProcessorFile); fstream.write("\n"); fstream.flush(); fstream.close(); } } catch (IOException e) { logger.error("File not found" + e.getLocalizedMessage()); } return file; } /** * Stops the threads which are polling the directories to check for a new * .CSV file or a HL7V2 message */ private void stopThreads() { if(!threadsList.isEmpty()){ for(ScheduledExecutorService se: threadsList) { se.shutdownNow(); } } } }
added version combo box SVN-Revision: 519
CancerCenterClient/src/java/main/gov/nih/nci/caxchange/client/TestCancerCenterClientUI.java
added version combo box
<ide><path>ancerCenterClient/src/java/main/gov/nih/nci/caxchange/client/TestCancerCenterClientUI.java <ide> import javax.swing.BoxLayout; <ide> import javax.swing.DefaultListModel; <ide> import javax.swing.JButton; <add>import javax.swing.JComboBox; <ide> import javax.swing.JFileChooser; <ide> import javax.swing.JFrame; <ide> import javax.swing.JLabel; <ide> private JTextField jtxtstudyLookupServiceURL = new JTextField(); <ide> private JTextField jtxtpreProcessorProFile = new JTextField(); <ide> private DefaultListModel msgDispBox = new DefaultListModel(); <add> private String[] version = {"2.2","2.3","2.4","2.5"}; <add> private JComboBox jcomboBoxVersion=new JComboBox(version); <ide> private JList dispList = new JList(); <ide> private static Logger logger = Logger <ide> .getLogger("gov.nih.nci.caxchange.client.TestCancerCenterClientUI"); <ide> jtxtHubURL.setText(props.getProperty("HubURL")); <ide> jtxtstudyLookupServiceURL.setText(props.getProperty("StudyLookUpServiceURL")); <ide> jtxtpreProcessorProFile.setText(props <del> .getProperty("preProcessorPropertiesFile")); <del> <add> .getProperty("preProcessorPropertiesFile")); <add> jcomboBoxVersion.setSelectedItem(props.getProperty("V2Version")); <ide> csvDirectory = jtxtCSVDir.getText(); <ide> inProcessDirectory = jtxtInProcessFilesDir.getText(); <ide> mapDirectory = jtxtMAPDir.getText(); <ide> preProcessorFile = jtxtpreProcessorProFile.getText(); <ide> } <ide> } catch (Exception e) { <del> logger.error("Exception processing Cancer Center Properties File"); <add> logger.error("Exception processing Cancer Center Properties File"+e); <ide> } <ide> } <ide> <ide> jtxtMAPHL7V2Dir.add(jfileMAPHL7V2); <ide> csvBox6.add(jfileMAPHL7V2); <ide> HL7V2Box.add(csvBox6); <add> <add> Box csvBox13 = Box.createHorizontalBox(); <add> csvBox13.add(Box.createHorizontalStrut(10)); <add> JLabel jlbVersionLabel = new JLabel("Select the Version"); <add> csvBox13.add(jlbVersionLabel); <add> csvBox13.add(Box.createHorizontalStrut(60)); <add> 
jcomboBoxVersion.addActionListener(this); <add> csvBox13.add(jcomboBoxVersion); <add> HL7V2Box.add(csvBox13); <ide> <ide> //1. Processed files & In process directory 2. polling delay & Initial delay <ide> Box csvBox4 = Box.createVerticalBox(); <ide> private File saveDefaults() { <ide> String path =System.getProperty("DefaultProperties.File"); <ide> File file = new File(path); <add> String txtVersion =(String)jcomboBoxVersion.getSelectedItem(); <ide> try{ <ide> if (createdInProcessFolders()) { <ide> FileWriter fstream = new FileWriter(file,false); <ide> fstream.write("\n"); <ide> fstream.write("Location=" + jtxtLocation.getText()); <ide> fstream.write("\n"); <del> /*fstream.write("IDPLocation=" + jtxtIDPLocation.getText()); <del> fstream.write("\n");*/ <add> fstream.write("V2Version=" + txtVersion); <add> fstream.write("\n"); <ide> fstream.write("ORGANIZATIONNAME=" + jtxtOrgName.getText()); <ide> fstream.write("\n"); <ide> fstream.write("userName=" + jtxtUserName.getText());
JavaScript
mit
00d17eb056f618d2f7030fb57bcbf4acc6f4011c
0
bramakrishnan/hello.js,sjungwirth/hello.js,hubba/hello.js,osharifali/hello.js,sudheesh001/hello.js,acemaster/hello.js,Hadrien-DELAITRE/hello.js,scorphus/hello.js,artichokes/hello.js,gtalusan/hello.js,webmasteraxe/hello.js,MrSwitch/hello.js,HynesIP/hello.js,skion/hello.js,skion/hello.js,acemaster/hello.js,Gnolam/hello.js,flybaseio/hello.js,MrSwitch/hello.js,webmasteraxe/hello.js,MrSwitch/hello.js,Hadrien-DELAITRE/hello.js,HynesIP/hello.js,SoSocio/hello.js,rkorn86/hello.js,osharifali/hello.js,gtalusan/hello.js,sjungwirth/hello.js,sjungwirth/hello.js,nishant8BITS/hello.js,SoSocio/hello.js,vasken/hello.js,bramakrishnan/hello.js,SoSocio/hello.js,kelonye/hello,rkorn86/hello.js,scorphus/hello.js,RichardLitt/hello.js,flybaseio/hello.js,rkorn86/hello.js,nishant8BITS/hello.js,RichardLitt/hello.js,artichokes/hello.js,vasken/hello.js,hubba/hello.js,flybaseio/hello.js,Gnolam/hello.js,artichokes/hello.js,KaranSofat/hello.js,sudheesh001/hello.js,KaranSofat/hello.js
// // GOOGLE API // (function(){ // Format // Ensure each record contains a name, id etc. function formatItem(o){ if(o.error){ return; } if(!o.name){ o.name = o.title || o.message; } if(!o.picture){ o.picture = o.thumbnailLink; } if(!o.thumbnail){ o.thumbnail = o.thumbnailLink; } if(o.mimeType === "application/vnd.google-apps.folder"){ o.type = "folder"; o.files = "https://www.googleapis.com/drive/v2/files?q=%22"+o.id+"%22+in+parents"; } } // Google has a horrible JSON API function gEntry(o){ var entry = function(a){ var media = a['media$group']['media$content'].length ? a['media$group']['media$content'][0] : {}; var i=0, _a; var p = { id : a.id.$t, name : a.title.$t, description : a.summary.$t, updated_time : a.updated.$t, created_time : a.published.$t, picture : media ? media.url : null, thumbnail : media ? media.url : null, width : media.width, height : media.height // original : a }; // Get feed/children if("link" in a){ for(i=0;i<a.link.length;i++){ if(a.link[i].rel.match(/\#feed$/)){ p.photos = a.link[i].href; p.files = a.link[i].href; p.upload_location = a.link[i].href; break; } } } // Get images of different scales if('category' in a&&a['category'].length){ _a = a['category']; for(i=0;i<_a.length;i++){ if(_a[i].scheme&&_a[i].scheme.match(/\#kind$/)){ p.type = _a[i].term.replace(/^.*?\#/,''); } } } // Get images of different scales if('media$thumbnail' in a['media$group'] && a['media$group']['media$thumbnail'].length){ _a = a['media$group']['media$thumbnail']; p.thumbnail = a['media$group']['media$thumbnail'][0].url; p.images = []; for(i=0;i<_a.length;i++){ p.images.push({ source : _a[i].url, width : _a[i].width, height : _a[i].height }); } _a = a['media$group']['media$content'].length ? 
a['media$group']['media$content'][0] : null; if(_a){ p.images.push({ source : _a.url, width : _a.width, height : _a.height }); } } return p; }; var r = []; if("feed" in o && "entry" in o.feed){ for(i=0;i<o.feed.entry.length;i++){ r.push(entry(o.feed.entry[i])); } return { //name : o.feed.title.$t, //updated : o.feed.updated.$t, data : r }; } // Old style, picasa, etc... if( "entry" in o ){ return entry(o.entry); }else if( "items" in o ){ for(var i=0;i<o.items.length;i++){ formatItem( o.items[i] ); } return { data : o.items }; } else{ formatItem( o ); return o; } } // // Embed hello.init({ google : { name : "Google Plus", uri : { // REF: http://code.google.com/apis/accounts/docs/OAuth2UserAgent.html auth : "https://accounts.google.com/o/oauth2/auth", // me : "plus/v1/people/me?pp=1", me : 'oauth2/v1/userinfo?alt=json', base : "https://www.googleapis.com/", 'me/friends' : 'https://www.google.com/m8/feeds/contacts/default/full?alt=json&max-results=1000', 'me/share' : 'plus/v1/people/me/activities/public', 'me/feed' : 'plus/v1/people/me/activities/public', 'me/albums' : 'https://picasaweb.google.com/data/feed/api/user/default?alt=json', 'me/photos' : 'https://picasaweb.google.com/data/feed/api/user/default?alt=json&kind=photo&max-results=100', "me/files" : 'https://www.googleapis.com/drive/v2/files?q=%22root%22+in+parents' }, scope : { //, basic : "https://www.googleapis.com/auth/plus.me https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile", email : '', birthday : '', events : '', photos : 'https://picasaweb.google.com/data/', videos : 'http://gdata.youtube.com', friends : 'https://www.google.com/m8/feeds', files : 'https://www.googleapis.com/auth/drive.readonly', publish : '', publish_files : 'https://www.googleapis.com/auth/drive', create_event : '', offline_access : '' }, scope_delim : ' ', wrap : { me : function(o){ if(o.id){ o.last_name = o.family_name || (o.name? 
o.name.familyName : null); o.first_name = o.given_name || (o.name? o.name.givenName : null); // o.name = o.first_name + ' ' + o.last_name; o.picture = o.picture || ( o.image ? o.image.url : null); o.thumbnail = o.picture; o.name = o.displayName || o.name; } return o; }, 'me/friends' : function(o){ var r = []; if("feed" in o && "entry" in o.feed){ for(var i=0;i<o.feed.entry.length;i++){ var a = o.feed.entry[i]; r.push({ id : a.id.$t, name : a.title.$t, email : (a.gd$email&&a.gd$email.length>0)?a.gd$email[0].address:null, updated_time : a.updated.$t, picture : (a.link&&a.link.length>0)?a.link[0].href+'?access_token='+hello.getAuthResponse('google').access_token:null, thumbnail : (a.link&&a.link.length>0)?a.link[0].href+'?access_token='+hello.getAuthResponse('google').access_token:null }); } return { //name : o.feed.title.$t, //updated : o.feed.updated.$t, data : r }; } return o; }, 'me/share' : function(o){ o.data = o.items; try{ delete o.items; }catch(e){ o.items = null; } return o; }, 'me/feed' : function(o){ o.data = o.items; try{ delete o.items; }catch(e){ o.items = null; } return o; }, 'me/albums' : gEntry, 'me/photos' : gEntry, 'default' : gEntry }, xhr : function(p){ if(p.method==='post'){ return false; } return true; } } }); })();
src/modules/google.js
// // GOOGLE API // (function(){ // Format // Ensure each record contains a name, id etc. function formatItem(o){ if(o.error){ return; } if(!o.name){ o.name = o.title || o.message; } if(!o.picture){ o.picture = o.thumbnailLink; } if(!o.thumbnail){ o.thumbnail = o.thumbnailLink; } if(o.mimeType === "application/vnd.google-apps.folder"){ o.type = "folder"; o.files = "https://www.googleapis.com/drive/v2/files?q=%22"+o.id+"%22+in+parents"; } } // Google has a horrible JSON API function gEntry(o){ var entry = function(a){ var media = a['media$group']['media$content'].length ? a['media$group']['media$content'][0] : {}; var i=0, _a; var p = { id : a.id.$t, name : a.title.$t, description : a.summary.$t, updated_time : a.updated.$t, created_time : a.published.$t, picture : media ? media.url : null, thumbnail : media ? media.url : null, width : media.width, height : media.height // original : a }; // Get feed/children if("link" in a){ for(i=0;i<a.link.length;i++){ if(a.link[i].rel.match(/\#feed$/)){ p.photos = a.link[i].href; p.files = a.link[i].href; p.upload_location = a.link[i].href; break; } } } // Get images of different scales if('category' in a&&a['category'].length){ _a = a['category']; for(i=0;i<_a.length;i++){ if(_a[i].scheme&&_a[i].scheme.match(/\#kind$/)){ p.type = _a[i].term.replace(/^.*?\#/,''); } } } // Get images of different scales if('media$thumbnail' in a['media$group'] && a['media$group']['media$thumbnail'].length){ _a = a['media$group']['media$thumbnail']; p.thumbnail = a['media$group']['media$thumbnail'][0].url; p.images = []; for(i=0;i<_a.length;i++){ p.images.push({ source : _a[i].url, width : _a[i].width, height : _a[i].height }); } _a = a['media$group']['media$content'].length ? 
a['media$group']['media$content'][0] : null; if(_a){ p.images.push({ source : _a.url, width : _a.width, height : _a.height }); } } return p; }; var r = []; if("feed" in o && "entry" in o.feed){ for(i=0;i<o.feed.entry.length;i++){ r.push(entry(o.feed.entry[i])); } return { //name : o.feed.title.$t, //updated : o.feed.updated.$t, data : r }; } // Old style, picasa, etc... if( "entry" in o ){ return entry(o.entry); }else if( "items" in o ){ for(var i=0;i<o.items.length;i++){ formatItem( o.items[i] ); } return { data : o.items }; } else{ formatItem( o ); return o; } } // // Embed hello.init({ google : { name : "Google Plus", uri : { // REF: http://code.google.com/apis/accounts/docs/OAuth2UserAgent.html auth : "https://accounts.google.com/o/oauth2/auth", // me : "plus/v1/people/me?pp=1", me : 'oauth2/v1/userinfo?alt=json', base : "https://www.googleapis.com/", 'me/friends' : 'https://www.google.com/m8/feeds/contacts/default/full?alt=json&max-results=1000', 'me/share' : 'plus/v1/people/me/activities/public', 'me/feed' : 'plus/v1/people/me/activities/public', 'me/albums' : 'https://picasaweb.google.com/data/feed/api/user/default?alt=json', 'me/photos' : 'https://picasaweb.google.com/data/feed/api/user/default?alt=json&kind=photo&max-results=100', "me/files" : 'https://www.googleapis.com/drive/v2/files?q=%22root%22+in+parents' }, scope : { //, basic : "https://www.googleapis.com/auth/plus.me https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile", email : '', birthday : '', events : '', photos : 'https://picasaweb.google.com/data/', videos : 'http://gdata.youtube.com', friends : 'https://www.google.com/m8/feeds', files : 'https://www.googleapis.com/auth/drive.readonly', publish : '', publish_files : 'https://www.googleapis.com/auth/drive', create_event : '', offline_access : '' }, scope_delim : ' ', wrap : { me : function(o){ if(o.id){ o.last_name = o.family_name || o.name.familyName; o.first_name = o.given_name || o.name.givenName; 
// o.name = o.first_name + ' ' + o.last_name; o.picture = o.picture || o.image.url; o.thumbnail = o.picture; o.name = o.displayName || o.name; } return o; }, 'me/friends' : function(o){ var r = []; if("feed" in o && "entry" in o.feed){ for(var i=0;i<o.feed.entry.length;i++){ var a = o.feed.entry[i]; r.push({ id : a.id.$t, name : a.title.$t, email : (a.gd$email&&a.gd$email.length>0)?a.gd$email[0].address:null, updated_time : a.updated.$t, picture : (a.link&&a.link.length>0)?a.link[0].href+'?access_token='+hello.getAuthResponse('google').access_token:null, thumbnail : (a.link&&a.link.length>0)?a.link[0].href+'?access_token='+hello.getAuthResponse('google').access_token:null }); } return { //name : o.feed.title.$t, //updated : o.feed.updated.$t, data : r }; } return o; }, 'me/share' : function(o){ o.data = o.items; try{ delete o.items; }catch(e){ o.items = null; } return o; }, 'me/feed' : function(o){ o.data = o.items; try{ delete o.items; }catch(e){ o.items = null; } return o; }, 'me/albums' : gEntry, 'me/photos' : gEntry, 'default' : gEntry }, xhr : function(p){ if(p.method==='post'){ return false; } return true; } } }); })();
Fixing Google bug with no profile picture
src/modules/google.js
Fixing Google bug with no profile picture
<ide><path>rc/modules/google.js <ide> wrap : { <ide> me : function(o){ <ide> if(o.id){ <del> o.last_name = o.family_name || o.name.familyName; <del> o.first_name = o.given_name || o.name.givenName; <add> o.last_name = o.family_name || (o.name? o.name.familyName : null); <add> o.first_name = o.given_name || (o.name? o.name.givenName : null); <ide> // o.name = o.first_name + ' ' + o.last_name; <del> o.picture = o.picture || o.image.url; <add> o.picture = o.picture || ( o.image ? o.image.url : null); <ide> o.thumbnail = o.picture; <ide> o.name = o.displayName || o.name; <ide> }
Java
apache-2.0
10de2d45caac7bb7b7e100227502d7efd6107e45
0
jludvice/fabric8,punkhorn/fabric8,avano/fabric8,punkhorn/fabric8,jonathanchristison/fabric8,janstey/fabric8,jonathanchristison/fabric8,chirino/fabric8,chirino/fabric8,punkhorn/fabric8,janstey/fabric8,jludvice/fabric8,chirino/fabric8,avano/fabric8,janstey/fabric8,punkhorn/fabric8,jludvice/fabric8,avano/fabric8,chirino/fabric8,jludvice/fabric8,jonathanchristison/fabric8,jonathanchristison/fabric8,avano/fabric8
/** * Copyright 2005-2014 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.boot.commands; import io.fabric8.api.*; import io.fabric8.utils.PasswordEncoder; import io.fabric8.utils.Ports; import io.fabric8.utils.shell.ShellUtils; import io.fabric8.zookeeper.bootstrap.BootstrapConfiguration; import java.io.IOException; import java.nio.file.Path; import java.util.Arrays; import java.util.Dictionary; import java.util.Hashtable; import java.util.List; import java.util.Set; import org.apache.felix.gogo.commands.Argument; import org.apache.felix.gogo.commands.Command; import org.apache.felix.gogo.commands.Option; import org.apache.felix.utils.properties.Properties; import org.apache.karaf.jaas.modules.BackingEngine; import org.apache.karaf.shell.console.AbstractAction; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceReference; import org.osgi.service.cm.Configuration; import org.osgi.service.cm.ConfigurationAdmin; import com.google.common.base.Strings; @Command(name = "create", scope = "fabric", description = "Creates a new fabric ensemble (ZooKeeper ensemble)", detailedDescription = "classpath:create.txt") class CreateAction extends AbstractAction { private static final String GIT_REMOTE_URL = Constants.GIT_REMOTE_URL; private static final String GIT_REMOTE_USER = "gitRemoteUser"; private static final String GIT_REMOTE_PASSWORD = "gitRemotePassword"; @Option(name = "--clean", description = "Clean local 
zookeeper cluster and configurations") private boolean clean; @Option(name = "--no-import", description = "Disable the import of the sample registry data") private boolean noImport; @Option(name = "--import-dir", description = "Directory of files to import into the newly created ensemble") private String importDir; @Option(name = "-v", aliases = {"--verbose"}, description = "Flag to enable verbose output of files being imported") boolean verbose = false; @Option(name = "-f", aliases = "--force", multiValued = false, description = "Forces re-creating fabric") private boolean force; @Option(name = "-g", aliases = {"--global-resolver"}, description = "The global resolver policy, which becomes the default resolver policy applied to all new containers created in this fabric. Possible values are: localip, localhostname, publicip, publichostname, manualip. Default is localhostname.") String globalResolver; @Option(name = "-r", aliases = {"--resolver"}, description = "The local resolver policy. Possible values are: localip, localhostname, publicip, publichostname, manualip. 
Default is localhostname.") String resolver; @Option(name = "-m", aliases = {"--manual-ip"}, description = "An address to use, when using the manualip resolver.") String manualIp; @Option(name = "-b", aliases = {"--bind-address"}, description = "The default bind address.") String bindAddress; @Option(name = "-n", aliases = "--non-managed", multiValued = false, description = "Flag to keep the container non managed") private boolean nonManaged; @Option(name = "--wait-for-provisioning", multiValued = false, description = "Flag to wait for the initial container provisioning") private boolean waitForProvisioning=false; @Option(name = "--bootstrap-timeout", multiValued = false, description = "How long to wait (milliseconds) for the initial fabric bootstrap") private long bootstrapTimeout =120000L; @Option(name = "-t", aliases = {"--time"}, description = "How long to wait (milliseconds) for the ensemble to start up before trying to import the default data") long ensembleStartupTime = 2000L; @Option(name = "-p", aliases = "--profile", multiValued = true, description = "Chooses the profile of the container.") private Set<String> profiles = null; @Option(name = "-v", aliases = "--version", multiValued = false, description = "Chooses the default version.") private String version = ContainerOptions.DEFAULT_VERSION; @Option(name = "--min-port", multiValued = false, description = "The minimum port of the allowed port range") private int minimumPort = Ports.MIN_PORT_NUMBER; @Option(name = "--max-port", multiValued = false, description = "The maximum port of the allowed port range") private int maximumPort = Ports.MAX_PORT_NUMBER; @Option(name = "--zookeeper-ticktime", multiValued = false, description = "The length of a single tick, which is the basic time unit used by ZooKeeper, as measured in milliseconds. It is used to regulate heartbeats, and timeouts. 
For example, the minimum session timeout will be two ticks") private int zooKeeperTickTime = CreateEnsembleOptions.DEFAULT_TICKTIME; @Option(name = "--zookeeper-init-limit", multiValued = false, description = "The amount of time, in ticks (see tickTime), to allow followers to connect and sync to a leader") private int zooKeeperInitLimit = CreateEnsembleOptions.DEFAULT_INIT_LIMIT; @Option(name = "--zookeeper-sync-limit", multiValued = false, description = "The amount of time, in ticks (see tickTime), to allow followers to sync with ZooKeeper") private int zooKeeperSyncLimit = CreateEnsembleOptions.DEFAULT_SYNC_LIMIT; @Option(name = "--zookeeper-data-dir", multiValued = false, description = "The location where ZooKeeper will store the in-memory database snapshots and, unless specified otherwise, the transaction log of updates to the database.") private String zooKeeperDataDir = CreateEnsembleOptions.DEFAULT_DATA_DIR; @Option(name = "--zookeeper-password", multiValued = false, description = "The ensemble password to use (one will be generated if not given)") private String zookeeperPassword; @Option(name = "--zookeeper-server-port", multiValued = false, description = "The main port for ZooKeeper server") private int zooKeeperServerPort = -1; @Option(name = "--generate-zookeeper-password", multiValued = false, description = "Flag to enable automatic generation of password") private boolean generateZookeeperPassword = false; @Option(name = "--new-user", multiValued = false, description = "The username of a new user. The option refers to karaf user (ssh, http, jmx).") private String newUser; @Option(name = "--new-user-password", multiValued = false, description = "The password of the new user. 
The option refers to karaf user (ssh, http, jmx).") private String newUserPassword; @Option(name = "--external-git-url", multiValued = false, description = "Specify an external git URL.") private String externalGitUrl; @Option(name = "--external-git-user", multiValued = false, description = "Specify an external git user.") private String externalGitUser; @Option(name = "--external-git-password", multiValued = false, description = "Specify an external git password.") private String externalGitPassword; @Option(name = "--new-user-role", multiValued = false, description = "The role of the new user. The option refers to karaf user (ssh, http, jmx).") private String newUserRole = "admin"; @Argument(required = false, multiValued = true, description = "List of containers. Empty list assumes current container only.") private List<String> containers; private static final String ROLE_DELIMITER = ","; private final BundleContext bundleContext; private final ConfigurationAdmin configAdmin; private final ZooKeeperClusterBootstrap bootstrap; private final RuntimeProperties runtimeProperties; CreateAction(BundleContext bundleContext, ConfigurationAdmin configAdmin, ZooKeeperClusterBootstrap bootstrap, RuntimeProperties runtimeProperties) { this.bundleContext = bundleContext; this.configAdmin = configAdmin; this.bootstrap = bootstrap; this.runtimeProperties = runtimeProperties; Path homePath = runtimeProperties.getHomePath(); importDir = homePath.resolve("fabric").resolve("import").toFile().getAbsolutePath(); } protected Object doExecute() throws Exception { // prevent creating fabric if already created ServiceReference<FabricService> sref = bundleContext.getServiceReference(FabricService.class); FabricService fabricService = sref != null ? 
bundleContext.getService(sref) : null; if (!force && (fabricService != null && fabricService.getCurrentContainer().isEnsembleServer())) { System.out.println("Current container " + fabricService.getCurrentContainerName() + " is already in the current fabric ensemble. Cannot create fabric."); System.out.println("You can use the --force option, if you want to force re-create the fabric."); return null; } Configuration bootConfiguration = configAdmin.getConfiguration(BootstrapConfiguration.COMPONENT_PID, null); Dictionary<String, Object> bootProperties = bootConfiguration.getProperties(); if (bootProperties == null) { bootProperties = new Hashtable<>(); } String runtimeIdentity = runtimeProperties.getRuntimeIdentity(); CreateEnsembleOptions.Builder<?> builder = CreateEnsembleOptions.builder() .zooKeeperServerTickTime(zooKeeperTickTime) .zooKeeperServerInitLimit(zooKeeperInitLimit) .zooKeeperServerSyncLimit(zooKeeperSyncLimit) .zooKeeperServerDataDir(zooKeeperDataDir) .fromRuntimeProperties(runtimeProperties) .bootstrapTimeout(bootstrapTimeout) .waitForProvision(waitForProvisioning) .clean(clean); builder.version(version); if (containers == null || containers.isEmpty()) { containers = Arrays.asList(runtimeIdentity); } if (!noImport && importDir != null) { builder.autoImportEnabled(true); builder.importPath(importDir); } if (globalResolver != null) { builder.globalResolver(globalResolver); bootProperties.put(ZkDefs.GLOBAL_RESOLVER_PROPERTY, globalResolver); } if (resolver != null) { builder.resolver(resolver); bootProperties.put(ZkDefs.LOCAL_RESOLVER_PROPERTY, resolver); } if (manualIp != null) { builder.manualIp(manualIp); bootProperties.put(ZkDefs.MANUAL_IP, manualIp); } if (bindAddress != null) { if (!bindAddress.contains(":")) { builder.bindAddress(bindAddress); bootProperties.put(ZkDefs.BIND_ADDRESS, bindAddress); } else { String[] parts = bindAddress.split(":"); builder.bindAddress(parts[0]); builder.zooKeeperServerPort(Integer.parseInt(parts[1])); 
bootProperties.put(ZkDefs.BIND_ADDRESS, parts[0]); } } if (zooKeeperServerPort > 0) { // --zookeeper-server-port option has higher priority than // CreateEnsembleOptions.ZOOKEEPER_SERVER_PORT and CreateEnsembleOptions.ZOOKEEPER_SERVER_CONNECTION_PORT // system/runtime properties builder.setZooKeeperServerPort(zooKeeperServerPort); builder.setZooKeeperServerConnectionPort(zooKeeperServerPort); } //Configure External Git Repository. if (externalGitUrl != null) { builder.dataStoreProperty(GIT_REMOTE_URL, externalGitUrl); } if (externalGitUser != null) { builder.dataStoreProperty(GIT_REMOTE_USER, externalGitUser); } if (externalGitPassword != null) { builder.dataStoreProperty(GIT_REMOTE_PASSWORD, externalGitPassword); } if (profiles != null && profiles.size() > 0) { builder.profiles(profiles); } if (nonManaged) { builder.agentEnabled(false); } else { builder.agentEnabled(true); } builder.minimumPort(minimumPort); builder.maximumPort(maximumPort); bootProperties.put(ZkDefs.MINIMUM_PORT, String.valueOf(minimumPort)); bootProperties.put(ZkDefs.MAXIMUM_PORT, String.valueOf(maximumPort)); newUser = newUser != null ? newUser : ShellUtils.retrieveFabricUser(session); newUserPassword = newUserPassword != null ? 
newUserPassword : ShellUtils.retrieveFabricUserPassword(session); Path propsPath = runtimeProperties.getConfPath().resolve("users.properties"); Properties userProps = new Properties(propsPath.toFile()); if (userProps.isEmpty()) { String[] credentials = promptForNewUser(newUser, newUserPassword); newUser = credentials[0]; newUserPassword = credentials[1]; } else { if (newUser == null || newUserPassword == null) { newUser = "" + userProps.keySet().iterator().next(); newUserPassword = "" + userProps.get(newUser); if (newUserPassword.contains(ROLE_DELIMITER)) { newUserPassword = newUserPassword.substring(0, newUserPassword.indexOf(ROLE_DELIMITER)); } } String passwordWithroles = userProps.get(newUser); if (passwordWithroles != null && passwordWithroles.contains(ROLE_DELIMITER)) { String[] infos = passwordWithroles.split(","); String oldUserRole = newUserRole; for (int i = 1; i < infos.length; i++) { if (infos[i].trim().startsWith(BackingEngine.GROUP_PREFIX)) { // it's a group reference String groupInfo = (String) userProps.get(infos[i].trim()); if (groupInfo != null) { String[] roles = groupInfo.split(","); for (int j = 1; j < roles.length; j++) { if (!roles[j].trim().equals(oldUserRole)) { newUserRole = newUserRole + ROLE_DELIMITER + roles[j].trim(); } } } } else { // it's an user reference if (!infos[i].trim().equals(oldUserRole)) { newUserRole = newUserRole + ROLE_DELIMITER + infos[i].trim(); } } } } } if (Strings.isNullOrEmpty(newUser)) { System.out.println("No user specified. Cannot create a new fabric ensemble."); return null; } StringBuilder sb = new StringBuilder(); // session is unset when this is called from FMC if (session != null) { ShellUtils.storeFabricCredentials(session, newUser, newUserPassword); } if (generateZookeeperPassword) { //do nothing use the generated password. 
} else if (zookeeperPassword == null) { zookeeperPassword = PasswordEncoder.decode(runtimeProperties.getProperty(CreateEnsembleOptions.ZOOKEEPER_PASSWORD, PasswordEncoder.encode(newUserPassword))); builder.zookeeperPassword(zookeeperPassword); } else { builder.zookeeperPassword(zookeeperPassword); } bootConfiguration.update(bootProperties); CreateEnsembleOptions options = builder.users(userProps) .withUser(newUser, newUserPassword , newUserRole) .build(); if (containers.size() == 1 && containers.contains(runtimeIdentity)) { bootstrap.create(options); } else { ServiceProxy<ZooKeeperClusterService> serviceProxy = ServiceProxy.createServiceProxy(bundleContext, ZooKeeperClusterService.class); try { serviceProxy.getService().createCluster(containers, options); } finally { serviceProxy.close(); } } ShellUtils.storeZookeeperPassword(session, options.getZookeeperPassword()); if (zookeeperPassword == null && !generateZookeeperPassword) { sb.append("Zookeeper password: (reusing users ").append(newUser).append(" password:").append(options.getZookeeperPassword()).append(")\n"); sb.append("(You can use the --zookeeper-password / --generate-zookeeper-password option to specify one.)\n"); } else if (generateZookeeperPassword) { sb.append("Generated zookeeper password:").append(options.getZookeeperPassword()); } System.out.println(sb.toString()); if (!nonManaged && !waitForProvisioning) { System.out.println("It may take a couple of seconds for the container to provision..."); System.out.println("You can use the --wait-for-provisioning option, if you want this command to block until the container is provisioned."); } return null; } private String[] promptForNewUser(String user, String password) throws IOException { String[] response = new String[2]; // If the username was not configured via cli, then prompt the user for the values if (user == null || password == null) { System.out.println("No user found in etc/users.properties or specified as an option. 
Please specify one ..."); } while (user == null || user.isEmpty()) { user = ShellUtils.readLine(session, "New user name: ", false); if (user == null) { break; } } if (user != null && password == null) { String password1 = null; String password2 = null; while (password1 == null || !password1.equals(password2)) { password1 = ShellUtils.readLine(session, "Password for " + user + ": ", true); password2 = ShellUtils.readLine(session, "Verify password for " + user + ": ", true); if (password1 == null || password2 == null) { break; } if (password1 != null && password1.equals(password2)) { password = password1; } else { System.out.println("Passwords did not match. Please try again!"); } } } response[0] = user; response[1] = password; return response; } public String getBindAddress() { return bindAddress; } public void setBindAddress(String bindAddress) { this.bindAddress = bindAddress; } public boolean isClean() { return clean; } public void setClean(boolean clean) { this.clean = clean; } public boolean isNoImport() { return noImport; } public void setNoImport(boolean noImport) { this.noImport = noImport; } public String getImportDir() { return importDir; } public void setImportDir(String importDir) { this.importDir = importDir; } public boolean isVerbose() { return verbose; } public void setVerbose(boolean verbose) { this.verbose = verbose; } public long getEnsembleStartupTime() { return ensembleStartupTime; } public void setEnsembleStartupTime(long ensembleStartupTime) { this.ensembleStartupTime = ensembleStartupTime; } public List<String> getContainers() { return containers; } public void setContainers(List<String> containers) { this.containers = containers; } public int getMinimumPort() { return minimumPort; } public void setMinimumPort(int minimumPort) { this.minimumPort = minimumPort; } public int getMaximumPort() { return maximumPort; } public void setMaximumPort(int maximumPort) { this.maximumPort = maximumPort; } public String getZookeeperPassword() { return 
zookeeperPassword; } public void setZookeeperPassword(String zookeeperPassword) { this.zookeeperPassword = zookeeperPassword; } public String getNewUser() { return newUser; } public void setNewUser(String newUser) { this.newUser = newUser; } public String getNewUserPassword() { return newUserPassword; } public void setNewUserPassword(String newUserPassword) { this.newUserPassword = newUserPassword; } public String getNewUserRole() { return newUserRole; } public void setNewUserRole(String newUserRole) { this.newUserRole = newUserRole; } public Set<String> getProfiles() { return profiles; } public void setProfiles(Set<String> profiles) { this.profiles = profiles; } public boolean isNonManaged() { return nonManaged; } public void setNonManaged(boolean nonManaged) { this.nonManaged = nonManaged; } public String getGlobalResolver() { return globalResolver; } public void setGlobalResolver(String globalResolver) { this.globalResolver = globalResolver; } public String getResolver() { return resolver; } public void setResolver(String resolver) { this.resolver = resolver; } public String getManualIp() { return manualIp; } public void setManualIp(String manualIp) { this.manualIp = manualIp; } public boolean isGenerateZookeeperPassword() { return generateZookeeperPassword; } public void setGenerateZookeeperPassword(boolean generateZookeeperPassword) { this.generateZookeeperPassword = generateZookeeperPassword; } }
fabric/fabric-boot-commands/src/main/java/io/fabric8/boot/commands/CreateAction.java
/** * Copyright 2005-2014 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.boot.commands; import io.fabric8.api.*; import io.fabric8.utils.PasswordEncoder; import io.fabric8.utils.Ports; import io.fabric8.utils.shell.ShellUtils; import io.fabric8.zookeeper.bootstrap.BootstrapConfiguration; import java.io.IOException; import java.nio.file.Path; import java.util.Arrays; import java.util.Dictionary; import java.util.Hashtable; import java.util.List; import java.util.Set; import org.apache.felix.gogo.commands.Argument; import org.apache.felix.gogo.commands.Command; import org.apache.felix.gogo.commands.Option; import org.apache.felix.utils.properties.Properties; import org.apache.karaf.jaas.modules.BackingEngine; import org.apache.karaf.shell.console.AbstractAction; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceReference; import org.osgi.service.cm.Configuration; import org.osgi.service.cm.ConfigurationAdmin; import com.google.common.base.Strings; @Command(name = "create", scope = "fabric", description = "Creates a new fabric ensemble (ZooKeeper ensemble)", detailedDescription = "classpath:create.txt") class CreateAction extends AbstractAction { private static final String GIT_REMOTE_URL = Constants.GIT_REMOTE_URL; private static final String GIT_REMOTE_USER = "gitRemoteUser"; private static final String GIT_REMOTE_PASSWORD = "gitRemotePassword"; @Option(name = "--clean", description = "Clean local 
zookeeper cluster and configurations") private boolean clean; @Option(name = "--no-import", description = "Disable the import of the sample registry data") private boolean noImport; @Option(name = "--import-dir", description = "Directory of files to import into the newly created ensemble") private String importDir; @Option(name = "-v", aliases = {"--verbose"}, description = "Flag to enable verbose output of files being imported") boolean verbose = false; @Option(name = "-f", aliases = "--force", multiValued = false, description = "Forces re-creating fabric") private boolean force; @Option(name = "-g", aliases = {"--global-resolver"}, description = "The global resolver policy, which becomes the default resolver policy applied to all new containers created in this fabric. Possible values are: localip, localhostname, publicip, publichostname, manualip. Default is localhostname.") String globalResolver; @Option(name = "-r", aliases = {"--resolver"}, description = "The local resolver policy. Possible values are: localip, localhostname, publicip, publichostname, manualip. 
Default is localhostname.") String resolver; @Option(name = "-m", aliases = {"--manual-ip"}, description = "An address to use, when using the manualip resolver.") String manualIp; @Option(name = "-b", aliases = {"--bind-address"}, description = "The default bind address.") String bindAddress; @Option(name = "-n", aliases = "--non-managed", multiValued = false, description = "Flag to keep the container non managed") private boolean nonManaged; @Option(name = "--wait-for-provisioning", multiValued = false, description = "Flag to wait for the initial container provisioning") private boolean waitForProvisioning=false; @Option(name = "--bootstrap-timeout", multiValued = false, description = "How long to wait (milliseconds) for the initial fabric bootstrap") private long bootstrapTimeout =120000L; @Option(name = "-t", aliases = {"--time"}, description = "How long to wait (milliseconds) for the ensemble to start up before trying to import the default data") long ensembleStartupTime = 2000L; @Option(name = "-p", aliases = "--profile", multiValued = true, description = "Chooses the profile of the container.") private Set<String> profiles = null; @Option(name = "-v", aliases = "--version", multiValued = false, description = "Chooses the default version.") private String version = ContainerOptions.DEFAULT_VERSION; @Option(name = "--min-port", multiValued = false, description = "The minimum port of the allowed port range") private int minimumPort = Ports.MIN_PORT_NUMBER; @Option(name = "--max-port", multiValued = false, description = "The maximum port of the allowed port range") private int maximumPort = Ports.MAX_PORT_NUMBER; @Option(name = "--zookeeper-ticktime", multiValued = false, description = "The length of a single tick, which is the basic time unit used by ZooKeeper, as measured in milliseconds. It is used to regulate heartbeats, and timeouts. 
For example, the minimum session timeout will be two ticks") private int zooKeeperTickTime = CreateEnsembleOptions.DEFAULT_TICKTIME; @Option(name = "--zookeeper-init-limit", multiValued = false, description = "The amount of time, in ticks (see tickTime), to allow followers to connect and sync to a leader") private int zooKeeperInitLimit = CreateEnsembleOptions.DEFAULT_INIT_LIMIT; @Option(name = "--zookeeper-sync-limit", multiValued = false, description = "The amount of time, in ticks (see tickTime), to allow followers to sync with ZooKeeper") private int zooKeeperSyncLimit = CreateEnsembleOptions.DEFAULT_SYNC_LIMIT; @Option(name = "--zookeeper-data-dir", multiValued = false, description = "The location where ZooKeeper will store the in-memory database snapshots and, unless specified otherwise, the transaction log of updates to the database.") private String zooKeeperDataDir = CreateEnsembleOptions.DEFAULT_DATA_DIR; @Option(name = "--zookeeper-password", multiValued = false, description = "The ensemble password to use (one will be generated if not given)") private String zookeeperPassword; @Option(name = "--zookeeper-server-port", multiValued = false, description = "The main port for ZooKeeper server") private int zooKeeperServerPort = -1; @Option(name = "--generate-zookeeper-password", multiValued = false, description = "Flag to enable automatic generation of password") private boolean generateZookeeperPassword = false; @Option(name = "--new-user", multiValued = false, description = "The username of a new user. The option refers to karaf user (ssh, http, jmx).") private String newUser; @Option(name = "--new-user-password", multiValued = false, description = "The password of the new user. 
The option refers to karaf user (ssh, http, jmx).") private String newUserPassword; @Option(name = "--external-git-url", multiValued = false, description = "Specify an external git URL.") private String externalGitUrl; @Option(name = "--external-git-user", multiValued = false, description = "Specify an external git user.") private String externalGitUser; @Option(name = "--external-git-password", multiValued = false, description = "Specify an external git password.") private String externalGitPassword; @Option(name = "--new-user-role", multiValued = false, description = "The role of the new user. The option refers to karaf user (ssh, http, jmx).") private String newUserRole = "admin"; @Argument(required = false, multiValued = true, description = "List of containers. Empty list assumes current container only.") private List<String> containers; private static final String ROLE_DELIMITER = ","; private final BundleContext bundleContext; private final ConfigurationAdmin configAdmin; private final ZooKeeperClusterBootstrap bootstrap; private final RuntimeProperties runtimeProperties; CreateAction(BundleContext bundleContext, ConfigurationAdmin configAdmin, ZooKeeperClusterBootstrap bootstrap, RuntimeProperties runtimeProperties) { this.bundleContext = bundleContext; this.configAdmin = configAdmin; this.bootstrap = bootstrap; this.runtimeProperties = runtimeProperties; Path homePath = runtimeProperties.getHomePath(); importDir = homePath.resolve("fabric").resolve("import").toFile().getAbsolutePath(); } protected Object doExecute() throws Exception { // prevent creating fabric if already created ServiceReference<FabricService> sref = bundleContext.getServiceReference(FabricService.class); FabricService fabricService = sref != null ? 
bundleContext.getService(sref) : null; if (!force && (fabricService != null && fabricService.getCurrentContainer().isEnsembleServer())) { System.out.println("Current container " + fabricService.getCurrentContainerName() + " is already in the current fabric ensemble. Cannot create fabric."); System.out.println("You can use the --force option, if you want to force re-create the fabric."); return null; } Configuration bootConfiguration = configAdmin.getConfiguration(BootstrapConfiguration.COMPONENT_PID, null); Dictionary<String, Object> bootProperties = bootConfiguration.getProperties(); if (bootProperties == null) { bootProperties = new Hashtable<>(); } String runtimeIdentity = runtimeProperties.getRuntimeIdentity(); CreateEnsembleOptions.Builder<?> builder = CreateEnsembleOptions.builder() .zooKeeperServerTickTime(zooKeeperTickTime) .zooKeeperServerInitLimit(zooKeeperInitLimit) .zooKeeperServerSyncLimit(zooKeeperSyncLimit) .zooKeeperServerDataDir(zooKeeperDataDir) .fromRuntimeProperties(runtimeProperties) .bootstrapTimeout(bootstrapTimeout) .waitForProvision(waitForProvisioning) .clean(clean); builder.version(version); if (containers == null || containers.isEmpty()) { containers = Arrays.asList(runtimeIdentity); } if (!noImport && importDir != null) { builder.autoImportEnabled(true); builder.importPath(importDir); } if (globalResolver != null) { builder.globalResolver(globalResolver); bootProperties.put(ZkDefs.GLOBAL_RESOLVER_PROPERTY, globalResolver); } if (resolver != null) { builder.resolver(resolver); bootProperties.put(ZkDefs.LOCAL_RESOLVER_PROPERTY, resolver); } if (manualIp != null) { builder.manualIp(manualIp); bootProperties.put(ZkDefs.MANUAL_IP, manualIp); } if (bindAddress != null) { if (!bindAddress.contains(":")) { builder.bindAddress(bindAddress); bootProperties.put(ZkDefs.BIND_ADDRESS, bindAddress); } else { String[] parts = bindAddress.split(":"); builder.bindAddress(parts[0]); builder.zooKeeperServerPort(Integer.parseInt(parts[1])); 
bootProperties.put(ZkDefs.BIND_ADDRESS, parts[0]); } } if (zooKeeperServerPort > 0) { // --zookeeper-server-port option has higher priority than // CreateEnsembleOptions.ZOOKEEPER_SERVER_PORT and CreateEnsembleOptions.ZOOKEEPER_SERVER_CONNECTION_PORT // system/runtime properties builder.setZooKeeperServerPort(zooKeeperServerPort); builder.setZooKeeperServerConnectionPort(zooKeeperServerPort); } //Configure External Git Repository. if (externalGitUrl != null) { builder.dataStoreProperty(GIT_REMOTE_URL, externalGitUrl); } if (externalGitUser != null) { builder.dataStoreProperty(GIT_REMOTE_USER, externalGitUser); } if (externalGitPassword != null) { builder.dataStoreProperty(GIT_REMOTE_PASSWORD, externalGitPassword); } if (profiles != null && profiles.size() > 0) { builder.profiles(profiles); } if (nonManaged) { builder.agentEnabled(false); } else { builder.agentEnabled(true); } builder.minimumPort(minimumPort); builder.maximumPort(maximumPort); bootProperties.put(ZkDefs.MINIMUM_PORT, String.valueOf(minimumPort)); bootProperties.put(ZkDefs.MAXIMUM_PORT, String.valueOf(maximumPort)); newUser = newUser != null ? newUser : ShellUtils.retrieveFabricUser(session); newUserPassword = newUserPassword != null ? 
newUserPassword : ShellUtils.retrieveFabricUserPassword(session); Path propsPath = runtimeProperties.getConfPath().resolve("users.properties"); Properties userProps = new Properties(propsPath.toFile()); if (userProps.isEmpty()) { String[] credentials = promptForNewUser(newUser, newUserPassword); newUser = credentials[0]; newUserPassword = credentials[1]; } else { if (newUser == null || newUserPassword == null) { newUser = "" + userProps.keySet().iterator().next(); newUserPassword = "" + userProps.get(newUser); if (newUserPassword.contains(ROLE_DELIMITER)) { newUserPassword = newUserPassword.substring(0, newUserPassword.indexOf(ROLE_DELIMITER)); } } String passwordWithroles = userProps.get(newUser); if (passwordWithroles != null && passwordWithroles.contains(ROLE_DELIMITER)) { String[] infos = passwordWithroles.split(","); String oldUserRole = newUserRole; for (int i = 1; i < infos.length; i++) { if (infos[i].trim().startsWith(BackingEngine.GROUP_PREFIX)) { // it's a group reference String groupInfo = (String) userProps.get(infos[i].trim()); if (groupInfo != null) { String[] roles = groupInfo.split(","); for (int j = 1; j < roles.length; j++) { if (!roles[j].trim().equals(oldUserRole)) { newUserRole = newUserRole + ROLE_DELIMITER + roles[j].trim(); } } } } else { // it's an user reference if (!infos[i].trim().equals(oldUserRole)) { newUserRole = newUserRole + ROLE_DELIMITER + infos[i].trim(); } } } } } if (Strings.isNullOrEmpty(newUser)) { System.out.println("No user specified. Cannot create a new fabric ensemble."); return null; } StringBuilder sb = new StringBuilder(); // session is unset when this is called from FMC if (session != null) { ShellUtils.storeFabricCredentials(session, newUser, newUserPassword); } if (generateZookeeperPassword) { //do nothing use the generated password. 
} else if (zookeeperPassword == null) { zookeeperPassword = PasswordEncoder.decode(runtimeProperties.getProperty(CreateEnsembleOptions.ZOOKEEPER_PASSWORD, PasswordEncoder.encode(newUserPassword))); builder.zookeeperPassword(zookeeperPassword); } else { builder.zookeeperPassword(zookeeperPassword); } bootConfiguration.update(bootProperties); CreateEnsembleOptions options = builder.users(userProps) .withUser(newUser, newUserPassword , newUserRole) .build(); if (containers.size() == 1 && containers.contains(runtimeIdentity)) { bootstrap.create(options); } else { ServiceProxy<ZooKeeperClusterService> serviceProxy = ServiceProxy.createServiceProxy(bundleContext, ZooKeeperClusterService.class); try { serviceProxy.getService().createCluster(containers, options); } finally { serviceProxy.close(); } } ShellUtils.storeZookeeperPassword(session, options.getZookeeperPassword()); if (zookeeperPassword == null && !generateZookeeperPassword) { sb.append("Zookeeper password: (reusing users ").append(newUser).append(" password:").append(options.getZookeeperPassword()).append(")\n"); sb.append("(You can use the --zookeeper-password / --generate-zookeeper-password option to specify one.)\n"); } else if (generateZookeeperPassword) { sb.append("Generated zookeeper password:").append(options.getZookeeperPassword()); } else { sb.append("Using specified zookeeper password:").append(options.getZookeeperPassword()); } System.out.println(sb.toString()); if (!nonManaged && !waitForProvisioning) { System.out.println("It may take a couple of seconds for the container to provision..."); System.out.println("You can use the --wait-for-provisioning option, if you want this command to block until the container is provisioned."); } return null; } private String[] promptForNewUser(String user, String password) throws IOException { String[] response = new String[2]; // If the username was not configured via cli, then prompt the user for the values if (user == null || password == null) { 
System.out.println("No user found in etc/users.properties or specified as an option. Please specify one ..."); } while (user == null || user.isEmpty()) { user = ShellUtils.readLine(session, "New user name: ", false); if (user == null) { break; } } if (user != null && password == null) { String password1 = null; String password2 = null; while (password1 == null || !password1.equals(password2)) { password1 = ShellUtils.readLine(session, "Password for " + user + ": ", true); password2 = ShellUtils.readLine(session, "Verify password for " + user + ": ", true); if (password1 == null || password2 == null) { break; } if (password1 != null && password1.equals(password2)) { password = password1; } else { System.out.println("Passwords did not match. Please try again!"); } } } response[0] = user; response[1] = password; return response; } public String getBindAddress() { return bindAddress; } public void setBindAddress(String bindAddress) { this.bindAddress = bindAddress; } public boolean isClean() { return clean; } public void setClean(boolean clean) { this.clean = clean; } public boolean isNoImport() { return noImport; } public void setNoImport(boolean noImport) { this.noImport = noImport; } public String getImportDir() { return importDir; } public void setImportDir(String importDir) { this.importDir = importDir; } public boolean isVerbose() { return verbose; } public void setVerbose(boolean verbose) { this.verbose = verbose; } public long getEnsembleStartupTime() { return ensembleStartupTime; } public void setEnsembleStartupTime(long ensembleStartupTime) { this.ensembleStartupTime = ensembleStartupTime; } public List<String> getContainers() { return containers; } public void setContainers(List<String> containers) { this.containers = containers; } public int getMinimumPort() { return minimumPort; } public void setMinimumPort(int minimumPort) { this.minimumPort = minimumPort; } public int getMaximumPort() { return maximumPort; } public void setMaximumPort(int maximumPort) { 
this.maximumPort = maximumPort; } public String getZookeeperPassword() { return zookeeperPassword; } public void setZookeeperPassword(String zookeeperPassword) { this.zookeeperPassword = zookeeperPassword; } public String getNewUser() { return newUser; } public void setNewUser(String newUser) { this.newUser = newUser; } public String getNewUserPassword() { return newUserPassword; } public void setNewUserPassword(String newUserPassword) { this.newUserPassword = newUserPassword; } public String getNewUserRole() { return newUserRole; } public void setNewUserRole(String newUserRole) { this.newUserRole = newUserRole; } public Set<String> getProfiles() { return profiles; } public void setProfiles(Set<String> profiles) { this.profiles = profiles; } public boolean isNonManaged() { return nonManaged; } public void setNonManaged(boolean nonManaged) { this.nonManaged = nonManaged; } public String getGlobalResolver() { return globalResolver; } public void setGlobalResolver(String globalResolver) { this.globalResolver = globalResolver; } public String getResolver() { return resolver; } public void setResolver(String resolver) { this.resolver = resolver; } public String getManualIp() { return manualIp; } public void setManualIp(String manualIp) { this.manualIp = manualIp; } public boolean isGenerateZookeeperPassword() { return generateZookeeperPassword; } public void setGenerateZookeeperPassword(boolean generateZookeeperPassword) { this.generateZookeeperPassword = generateZookeeperPassword; } }
ENTESB-2089 - don't print the password if it was entered on the console
fabric/fabric-boot-commands/src/main/java/io/fabric8/boot/commands/CreateAction.java
ENTESB-2089 - don't print the password if it was entered on the console
<ide><path>abric/fabric-boot-commands/src/main/java/io/fabric8/boot/commands/CreateAction.java <ide> sb.append("(You can use the --zookeeper-password / --generate-zookeeper-password option to specify one.)\n"); <ide> } else if (generateZookeeperPassword) { <ide> sb.append("Generated zookeeper password:").append(options.getZookeeperPassword()); <del> } else { <del> sb.append("Using specified zookeeper password:").append(options.getZookeeperPassword()); <ide> } <ide> System.out.println(sb.toString()); <ide> if (!nonManaged && !waitForProvisioning) {
Java
apache-2.0
863814cb38a4766cb46c0d0cfad3286e74c6a1bf
0
warnerbros/cpe-manifest-android-experience
package com.wb.nextgenlibrary.activity; import android.content.Context; import android.content.Intent; import android.content.pm.ActivityInfo; import android.content.res.Configuration; import android.media.MediaPlayer; import android.net.Uri; import android.os.Bundle; import android.util.DisplayMetrics; import android.util.Size; import android.view.Display; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.VideoView; import com.bumptech.glide.Glide; import com.wb.nextgenlibrary.NextGenExperience; import com.wb.nextgenlibrary.R; import com.wb.nextgenlibrary.data.NextGenStyle; import com.wb.nextgenlibrary.util.TabletUtils; import com.wb.nextgenlibrary.util.utils.StringHelper; import java.util.Timer; import java.util.TimerTask; /** * Created by gzcheng on 1/7/16. */ public class NextGenActivity extends NextGenHideStatusBarActivity implements View.OnClickListener { // wrapper of ProfileViewFragment VideoView startupVideoView; ImageView startupImageView; ImageButton playMovieButton; ImageButton extraButton; View buttonsLayout; private int videoLoopPoint = 0; private int buttonAnimationStartTime = 0; private TimerTask startUpTimerTask; private Timer startUpTimer; private boolean isStartUp = true; @Override public void onCreate(Bundle savedState) { super.onCreate(savedState); setContentView(R.layout.next_gen_startup_view); startupVideoView = (VideoView)findViewById(R.id.startup_video_view); startupImageView = (ImageView) findViewById(R.id.startup_image_view); buttonsLayout = findViewById(R.id.startup_buttons_layout); if (buttonsLayout != null){ buttonsLayout.setVisibility(View.GONE); } playMovieButton = (ImageButton) findViewById(R.id.next_gen_startup_play_button); if (playMovieButton != null){ 
Glide.with(this).load(NextGenExperience.getMovieMetaData().getStyle().getButtonImageURL(NextGenStyle.NextGenAppearanceType.InMovie)).into(playMovieButton); playMovieButton.setOnClickListener(this); } extraButton = (ImageButton) findViewById(R.id.next_gen_startup_extra_button); if (extraButton != null){ Glide.with(this).load(NextGenExperience.getMovieMetaData().getStyle().getButtonImageURL(NextGenStyle.NextGenAppearanceType.OutOfMovie)).into(extraButton); extraButton.setOnClickListener(this); } if (!StringHelper.isEmpty(NextGenExperience.getMovieMetaData().getStyle().getBackgroundVideoURL())) { if (startupImageView != null) startupImageView.setVisibility(View.GONE); videoLoopPoint = (int) (NextGenExperience.getMovieMetaData().getStyle().getBackgroundVideoLoopTime() * 1000); buttonAnimationStartTime = (int) (NextGenExperience.getMovieMetaData().getStyle().getBackgroundVideoFadeTime() * 1000); } else{ buttonsLayout.setVisibility(View.VISIBLE); startupVideoView.setVisibility(View.GONE); String bgImageUrl = NextGenExperience.getMovieMetaData().getStyle().getStartupImageURL(); if (startupImageView != null && !StringHelper.isEmpty(bgImageUrl)){ startupImageView.setVisibility(View.VISIBLE); Glide.with(this).load(bgImageUrl).fitCenter().into(startupImageView); } } adjustButtonSizesAndPosition(); } @Override public void onStart(){ super.onStart(); if (TabletUtils.isTablet()) setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); else setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR); if (StringHelper.isEmpty(NextGenExperience.getMovieMetaData().getStyle().getBackgroundVideoURL())) { if (startupVideoView != null ){ startupVideoView.setVisibility(View.GONE); } } else if (startupVideoView != null ){ if (!isStartUp){ startupVideoView.seekTo(videoLoopPoint); startupVideoView.start(); return; } isStartUp = false; startupVideoView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() { @Override public void onPrepared(final MediaPlayer mp) { 
startupVideoView.start(); if (startUpTimer == null){ startUpTimer = new Timer(); } if (startUpTimerTask == null){ startUpTimerTask = new TimerTask() { @Override public void run() { runOnUiThread(new Runnable() { @Override public void run() { if (buttonsLayout != null){ buttonsLayout.setVisibility(View.VISIBLE); buttonsLayout.setAlpha(0.0f); // Start the animation buttonsLayout.animate().setDuration(1000).alpha(1.0f); } } }); } }; startUpTimer.schedule(startUpTimerTask, buttonAnimationStartTime); } } }); startupVideoView.setOnCompletionListener(new MediaPlayer.OnCompletionListener() { @Override public void onCompletion(MediaPlayer mp) { startupVideoView.seekTo(videoLoopPoint); startupVideoView.start(); } }); startupVideoView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { startupVideoView.stopPlayback(); } }); startupVideoView.requestFocus(); startupVideoView.setVideoURI(Uri.parse(NextGenExperience.getMovieMetaData().getStyle().getBackgroundVideoURL())); } } public void onStop(){ super.onStop(); setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); adjustButtonSizesAndPosition(); } private void adjustButtonSizesAndPosition(){ int orientation = NextGenHideStatusBarActivity.getCurrentScreenOrientation(); Size screenSize = NextGenExperience.getScreenSize(this); Size buttonFrameSize; double aspectRatio = (double)screenSize.getHeight() / (double)screenSize.getWidth(); if (aspectRatio < 0.75){ // use the height buttonFrameSize = new Size ( (int)((double)screenSize.getHeight() * 4.0 / 3.0), screenSize.getHeight()); }else if (aspectRatio > 0.75){ // user the width buttonFrameSize = new Size ( screenSize.getWidth(), (int)((double)screenSize.getWidth() * 3.0 / 4.0)); }else { buttonFrameSize = screenSize; } NextGenStyle movieStyle = NextGenExperience.getMovieMetaData().getStyle(); final ButtonParams 
mainMoiveParams = computeButtonParams(movieStyle.getButtonCenterOffset(NextGenStyle.NextGenAppearanceType.InMovie, orientation), movieStyle.getButtonSizeOffset(NextGenStyle.NextGenAppearanceType.InMovie, orientation), buttonFrameSize); final ButtonParams extraParams = computeButtonParams(movieStyle.getButtonCenterOffset(NextGenStyle.NextGenAppearanceType.OutOfMovie, orientation), movieStyle.getButtonSizeOffset(NextGenStyle.NextGenAppearanceType.OutOfMovie, orientation), buttonFrameSize); ViewGroup.LayoutParams mainMoiveBtnLayoutParams = playMovieButton.getLayoutParams(); if (mainMoiveBtnLayoutParams instanceof LinearLayout.LayoutParams) { ((LinearLayout.LayoutParams) mainMoiveBtnLayoutParams).setMargins(mainMoiveParams.x, mainMoiveParams.y, 0, 0); }else if (mainMoiveBtnLayoutParams instanceof RelativeLayout.LayoutParams) { ((RelativeLayout.LayoutParams) mainMoiveBtnLayoutParams).setMargins(mainMoiveParams.x, mainMoiveParams.y, 0, 0); } mainMoiveBtnLayoutParams.height = mainMoiveParams.height; mainMoiveBtnLayoutParams.width = mainMoiveParams.width; playMovieButton.setLayoutParams(mainMoiveBtnLayoutParams); ViewGroup.LayoutParams extraBtnLayoutParams = extraButton.getLayoutParams(); if (mainMoiveBtnLayoutParams instanceof LinearLayout.LayoutParams) { ((LinearLayout.LayoutParams) extraBtnLayoutParams).setMargins(extraParams.x, extraParams.y, 0, 0); }else if (mainMoiveBtnLayoutParams instanceof RelativeLayout.LayoutParams) { ((RelativeLayout.LayoutParams) extraBtnLayoutParams).setMargins(extraParams.x, extraParams.y, 0, 0); } extraBtnLayoutParams.height = extraParams.height; extraBtnLayoutParams.width = extraParams.width; extraButton.setLayoutParams(extraBtnLayoutParams); } @Override public void onClick(View v){ if (v.getId() == R.id.next_gen_startup_play_button) { Intent intent = new Intent(this, NextGenPlayer.class); intent.setDataAndType(Uri.parse(NextGenExperience.getMovieMetaData().getMainMovieUrl()), "video/*"); startActivity(intent); } else if (v.getId() == 
R.id.next_gen_startup_extra_button) { Intent extraIntent = new Intent(this, NextGenExtraActivity.class); startActivity(extraIntent); } } @Override public void onResume(){ super.onResume(); } class ButtonParams { int x, y, height, width; } private ButtonParams computeButtonParams(NextGenStyle.NGScreenOffSetRatio centerRatio, NextGenStyle.NGScreenOffSetRatio sizeRatio, Size videoSize){ ButtonParams resultParams = new ButtonParams(); WindowManager wm = (WindowManager) this.getSystemService(Context.WINDOW_SERVICE); Display display = wm.getDefaultDisplay(); DisplayMetrics screenMetrics = new DisplayMetrics(); display.getRealMetrics(screenMetrics); Size screenSize = new Size(screenMetrics.widthPixels, screenMetrics.heightPixels); double videoAspecRatio = (double)videoSize.getWidth() / (double)videoSize.getHeight(); double screenAspecRatio = (double)screenSize.getWidth() / (double)screenSize.getHeight(); int effectiveVideoWidth, effectiveVideoHeight; if (videoAspecRatio > screenAspecRatio){ // video is wider effectiveVideoWidth = screenSize.getWidth(); effectiveVideoHeight = (int)((double)screenSize.getWidth() / (double)videoSize.getWidth() * (double)videoSize.getHeight()); }else{ // screen is wider effectiveVideoHeight = screenSize.getHeight(); effectiveVideoWidth = (int)((double)screenSize.getHeight() / (double)videoSize.getHeight() * (double)videoSize.getWidth()); } resultParams.width = (int)(effectiveVideoWidth * sizeRatio.horizontalRatio); resultParams.height = (int)(effectiveVideoHeight * sizeRatio.verticalRatio); resultParams.x = (screenSize.getWidth() - effectiveVideoWidth) / 2 // the side pillow width + (int)(centerRatio.horizontalRatio * effectiveVideoWidth ) // ratio of the center - resultParams.width / 2; // half the width resultParams.y = (screenSize.getHeight() - effectiveVideoHeight) / 2 // the side pillow width + (int)(centerRatio.verticalRatio * effectiveVideoHeight ) // ratio of the center - resultParams.height / 2; // half the width return resultParams; 
} }
src/com/wb/nextgenlibrary/activity/NextGenActivity.java
package com.wb.nextgenlibrary.activity; import android.content.Context; import android.content.Intent; import android.content.res.Configuration; import android.media.MediaPlayer; import android.net.Uri; import android.os.Bundle; import android.util.DisplayMetrics; import android.util.Size; import android.view.Display; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.VideoView; import com.bumptech.glide.Glide; import com.wb.nextgenlibrary.NextGenExperience; import com.wb.nextgenlibrary.R; import com.wb.nextgenlibrary.data.NextGenStyle; import com.wb.nextgenlibrary.util.utils.StringHelper; import java.util.Timer; import java.util.TimerTask; /** * Created by gzcheng on 1/7/16. */ public class NextGenActivity extends NextGenHideStatusBarActivity implements View.OnClickListener { // wrapper of ProfileViewFragment VideoView startupVideoView; ImageView startupImageView; ImageButton playMovieButton; ImageButton extraButton; View buttonsLayout; private int videoLoopPoint = 0; private int buttonAnimationStartTime = 0; private TimerTask startUpTimerTask; private Timer startUpTimer; private boolean isStartUp = true; @Override public void onCreate(Bundle savedState) { super.onCreate(savedState); setContentView(R.layout.next_gen_startup_view); startupVideoView = (VideoView)findViewById(R.id.startup_video_view); startupImageView = (ImageView) findViewById(R.id.startup_image_view); buttonsLayout = findViewById(R.id.startup_buttons_layout); if (buttonsLayout != null){ buttonsLayout.setVisibility(View.GONE); } playMovieButton = (ImageButton) findViewById(R.id.next_gen_startup_play_button); if (playMovieButton != null){ Glide.with(this).load(NextGenExperience.getMovieMetaData().getStyle().getButtonImageURL(NextGenStyle.NextGenAppearanceType.InMovie)).into(playMovieButton); 
playMovieButton.setOnClickListener(this); } extraButton = (ImageButton) findViewById(R.id.next_gen_startup_extra_button); if (extraButton != null){ Glide.with(this).load(NextGenExperience.getMovieMetaData().getStyle().getButtonImageURL(NextGenStyle.NextGenAppearanceType.OutOfMovie)).into(extraButton); extraButton.setOnClickListener(this); } if (!StringHelper.isEmpty(NextGenExperience.getMovieMetaData().getStyle().getBackgroundVideoURL())) { if (startupImageView != null) startupImageView.setVisibility(View.GONE); videoLoopPoint = (int) (NextGenExperience.getMovieMetaData().getStyle().getBackgroundVideoLoopTime() * 1000); buttonAnimationStartTime = (int) (NextGenExperience.getMovieMetaData().getStyle().getBackgroundVideoFadeTime() * 1000); } else{ buttonsLayout.setVisibility(View.VISIBLE); startupVideoView.setVisibility(View.GONE); String bgImageUrl = NextGenExperience.getMovieMetaData().getStyle().getStartupImageURL(); if (startupImageView != null && !StringHelper.isEmpty(bgImageUrl)){ startupImageView.setVisibility(View.VISIBLE); Glide.with(this).load(bgImageUrl).fitCenter().into(startupImageView); } } adjustButtonSizesAndPosition(); } @Override public void onStart(){ super.onStart(); if (StringHelper.isEmpty(NextGenExperience.getMovieMetaData().getStyle().getBackgroundVideoURL())) { if (startupVideoView != null ){ startupVideoView.setVisibility(View.GONE); } } else if (startupVideoView != null ){ if (!isStartUp){ startupVideoView.seekTo(videoLoopPoint); startupVideoView.start(); return; } isStartUp = false; startupVideoView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() { @Override public void onPrepared(final MediaPlayer mp) { startupVideoView.start(); if (startUpTimer == null){ startUpTimer = new Timer(); } if (startUpTimerTask == null){ startUpTimerTask = new TimerTask() { @Override public void run() { runOnUiThread(new Runnable() { @Override public void run() { if (buttonsLayout != null){ buttonsLayout.setVisibility(View.VISIBLE); 
buttonsLayout.setAlpha(0.0f); // Start the animation buttonsLayout.animate().setDuration(1000).alpha(1.0f); } } }); } }; startUpTimer.schedule(startUpTimerTask, buttonAnimationStartTime); } } }); startupVideoView.setOnCompletionListener(new MediaPlayer.OnCompletionListener() { @Override public void onCompletion(MediaPlayer mp) { startupVideoView.seekTo(videoLoopPoint); startupVideoView.start(); } }); startupVideoView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { startupVideoView.stopPlayback(); } }); startupVideoView.requestFocus(); startupVideoView.setVideoURI(Uri.parse(NextGenExperience.getMovieMetaData().getStyle().getBackgroundVideoURL())); } } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); adjustButtonSizesAndPosition(); } private void adjustButtonSizesAndPosition(){ int orientation = NextGenHideStatusBarActivity.getCurrentScreenOrientation(); Size screenSize = NextGenExperience.getScreenSize(this); Size buttonFrameSize; double aspectRatio = (double)screenSize.getHeight() / (double)screenSize.getWidth(); if (aspectRatio < 0.75){ // use the height buttonFrameSize = new Size ( (int)((double)screenSize.getHeight() * 4.0 / 3.0), screenSize.getHeight()); }else if (aspectRatio > 0.75){ // user the width buttonFrameSize = new Size ( screenSize.getWidth(), (int)((double)screenSize.getWidth() * 3.0 / 4.0)); }else { buttonFrameSize = screenSize; } NextGenStyle movieStyle = NextGenExperience.getMovieMetaData().getStyle(); final ButtonParams mainMoiveParams = computeButtonParams(movieStyle.getButtonCenterOffset(NextGenStyle.NextGenAppearanceType.InMovie, orientation), movieStyle.getButtonSizeOffset(NextGenStyle.NextGenAppearanceType.InMovie, orientation), buttonFrameSize); final ButtonParams extraParams = computeButtonParams(movieStyle.getButtonCenterOffset(NextGenStyle.NextGenAppearanceType.OutOfMovie, orientation), 
movieStyle.getButtonSizeOffset(NextGenStyle.NextGenAppearanceType.OutOfMovie, orientation), buttonFrameSize); ViewGroup.LayoutParams mainMoiveBtnLayoutParams = playMovieButton.getLayoutParams(); if (mainMoiveBtnLayoutParams instanceof LinearLayout.LayoutParams) { ((LinearLayout.LayoutParams) mainMoiveBtnLayoutParams).setMargins(mainMoiveParams.x, mainMoiveParams.y, 0, 0); }else if (mainMoiveBtnLayoutParams instanceof RelativeLayout.LayoutParams) { ((RelativeLayout.LayoutParams) mainMoiveBtnLayoutParams).setMargins(mainMoiveParams.x, mainMoiveParams.y, 0, 0); } mainMoiveBtnLayoutParams.height = mainMoiveParams.height; mainMoiveBtnLayoutParams.width = mainMoiveParams.width; playMovieButton.setLayoutParams(mainMoiveBtnLayoutParams); ViewGroup.LayoutParams extraBtnLayoutParams = extraButton.getLayoutParams(); if (mainMoiveBtnLayoutParams instanceof LinearLayout.LayoutParams) { ((LinearLayout.LayoutParams) extraBtnLayoutParams).setMargins(extraParams.x, extraParams.y, 0, 0); }else if (mainMoiveBtnLayoutParams instanceof RelativeLayout.LayoutParams) { ((RelativeLayout.LayoutParams) extraBtnLayoutParams).setMargins(extraParams.x, extraParams.y, 0, 0); } extraBtnLayoutParams.height = extraParams.height; extraBtnLayoutParams.width = extraParams.width; extraButton.setLayoutParams(extraBtnLayoutParams); } @Override public void onClick(View v){ if (v.getId() == R.id.next_gen_startup_play_button) { Intent intent = new Intent(this, NextGenPlayer.class); intent.setDataAndType(Uri.parse(NextGenExperience.getMovieMetaData().getMainMovieUrl()), "video/*"); startActivity(intent); } else if (v.getId() == R.id.next_gen_startup_extra_button) { Intent extraIntent = new Intent(this, NextGenExtraActivity.class); startActivity(extraIntent); } } @Override public void onResume(){ super.onResume(); } class ButtonParams { int x, y, height, width; } private ButtonParams computeButtonParams(NextGenStyle.NGScreenOffSetRatio centerRatio, NextGenStyle.NGScreenOffSetRatio sizeRatio, Size videoSize){ 
ButtonParams resultParams = new ButtonParams(); WindowManager wm = (WindowManager) this.getSystemService(Context.WINDOW_SERVICE); Display display = wm.getDefaultDisplay(); DisplayMetrics screenMetrics = new DisplayMetrics(); display.getRealMetrics(screenMetrics); Size screenSize = new Size(screenMetrics.widthPixels, screenMetrics.heightPixels); double videoAspecRatio = (double)videoSize.getWidth() / (double)videoSize.getHeight(); double screenAspecRatio = (double)screenSize.getWidth() / (double)screenSize.getHeight(); int effectiveVideoWidth, effectiveVideoHeight; if (videoAspecRatio > screenAspecRatio){ // video is wider effectiveVideoWidth = screenSize.getWidth(); effectiveVideoHeight = (int)((double)screenSize.getWidth() / (double)videoSize.getWidth() * (double)videoSize.getHeight()); }else{ // screen is wider effectiveVideoHeight = screenSize.getHeight(); effectiveVideoWidth = (int)((double)screenSize.getHeight() / (double)videoSize.getHeight() * (double)videoSize.getWidth()); } resultParams.width = (int)(effectiveVideoWidth * sizeRatio.horizontalRatio); resultParams.height = (int)(effectiveVideoHeight * sizeRatio.verticalRatio); resultParams.x = (screenSize.getWidth() - effectiveVideoWidth) / 2 // the side pillow width + (int)(centerRatio.horizontalRatio * effectiveVideoWidth ) // ratio of the center - resultParams.width / 2; // half the width resultParams.y = (screenSize.getHeight() - effectiveVideoHeight) / 2 // the side pillow width + (int)(centerRatio.verticalRatio * effectiveVideoHeight ) // ratio of the center - resultParams.height / 2; // half the width return resultParams; } }
Lock startup page orientation to landscape if it's running on Tablet
src/com/wb/nextgenlibrary/activity/NextGenActivity.java
Lock startup page orientation to landscape if it's running on Tablet
<ide><path>rc/com/wb/nextgenlibrary/activity/NextGenActivity.java <ide> <ide> import android.content.Context; <ide> import android.content.Intent; <add>import android.content.pm.ActivityInfo; <ide> import android.content.res.Configuration; <ide> import android.media.MediaPlayer; <ide> import android.net.Uri; <ide> import com.wb.nextgenlibrary.NextGenExperience; <ide> import com.wb.nextgenlibrary.R; <ide> import com.wb.nextgenlibrary.data.NextGenStyle; <add>import com.wb.nextgenlibrary.util.TabletUtils; <ide> import com.wb.nextgenlibrary.util.utils.StringHelper; <ide> <ide> import java.util.Timer; <ide> @Override <ide> public void onStart(){ <ide> super.onStart(); <add> if (TabletUtils.isTablet()) <add> setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); <add> else <add> setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR); <ide> if (StringHelper.isEmpty(NextGenExperience.getMovieMetaData().getStyle().getBackgroundVideoURL())) { <ide> if (startupVideoView != null ){ <ide> startupVideoView.setVisibility(View.GONE); <ide> } <ide> } <ide> <add> public void onStop(){ <add> super.onStop(); <add> <add> setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR); <add> } <add> <ide> @Override <ide> public void onConfigurationChanged(Configuration newConfig) { <ide> super.onConfigurationChanged(newConfig);
JavaScript
agpl-3.0
58b338af19247d150ca45aed028c643a9354d5a7
0
vnc-biz/openerp-web,gisce/openerp-web,vnc-biz/openerp-web,splbio/openerp-web,MarkusTeufelberger/openerp-web,MarkusTeufelberger/openerp-web,MarkusTeufelberger/openerp-web,vnc-biz/openerp-web,vnc-biz/openerp-web,gisce/openerp-web,gisce/openerp-web,splbio/openerp-web,splbio/openerp-web
/*---------------------------------------------------------
 * OpenERP Web core
 *--------------------------------------------------------*/
// Shim: guarantee window.console and console.debug exist so logging calls
// never throw (old browsers without a devtools console).
var console;
if (!console) {
    console = {log: function () {}};
}
if (!console.debug) {
    console.debug = console.log;
}

openerp.web.core = function(openerp) {
/**
 * John Resig Class with factory improvement
 */
(function() {
    var initializing = false,
        // Only wrap overriding methods with _super plumbing when function
        // decompilation works (fnTest matches "_super" in the source);
        // otherwise wrap everything (/.*/ matches any function).
        fnTest = /xyz/.test(function(){xyz;}) ? /\b_super\b/ : /.*/;
    // The web Class implementation (does nothing)
    /**
     * Extended version of John Resig's Class pattern
     *
     * @class
     */
    openerp.web.Class = function(){};

    /**
     * Subclass an existing class
     *
     * @param {Object} prop class-level properties (class attributes and
     *                 instance methods) to set on the new class
     */
    openerp.web.Class.extend = function(prop) {
        var _super = this.prototype;

        // Instantiate a web class (but only create the instance,
        // don't run the init constructor)
        initializing = true;
        var prototype = new this();
        initializing = false;

        // Copy the properties over onto the new prototype
        for (var name in prop) {
            // Check if we're overwriting an existing function
            prototype[name] = typeof prop[name] == "function" &&
                              typeof _super[name] == "function" &&
                              fnTest.test(prop[name]) ?
                (function(name, fn) {
                    return function() {
                        var tmp = this._super;

                        // Add a new ._super() method that is the same
                        // method but on the super-class
                        this._super = _super[name];

                        // The method only need to be bound temporarily, so
                        // we remove it when we're done executing
                        var ret = fn.apply(this, arguments);
                        this._super = tmp;

                        return ret;
                    };
                })(name, prop[name]) :
                prop[name];
        }

        // The dummy class constructor
        function Class() {
            // All construction is actually done in the init method
            if (!initializing && this.init) {
                var ret = this.init.apply(this, arguments);
                if (ret) { return ret; }
            }
            return this;
        }
        // In-place extension: patches methods onto this class's *existing*
        // prototype (with _super chaining) instead of creating a subclass.
        Class.include = function (properties) {
            for (var name in properties) {
                if (typeof properties[name] !== 'function'
                        || !fnTest.test(properties[name])) {
                    // plain value, or a function that never calls _super
                    prototype[name] = properties[name];
                } else if (typeof prototype[name] === 'function'
                           && prototype.hasOwnProperty(name)) {
                    // overriding a method already defined on this prototype:
                    // chain _super to the previous version
                    prototype[name] = (function (name, fn, previous) {
                        return function () {
                            var tmp = this._super;
                            this._super = previous;
                            var ret = fn.apply(this, arguments);
                            this._super = tmp;
                            return ret;
                        }
                    })(name, properties[name], prototype[name]);
                } else if (typeof _super[name] === 'function') {
                    // overriding an inherited method: chain _super to the
                    // parent class's version
                    prototype[name] = (function (name, fn) {
                        return function () {
                            var tmp = this._super;
                            this._super = _super[name];
                            var ret = fn.apply(this, arguments);
                            this._super = tmp;
                            return ret;
                        }
                    })(name, properties[name]);
                }
            }
        };
        // Populate our constructed prototype object
        Class.prototype = prototype;
        // Enforce the constructor to be what we expect
        Class.constructor = Class;
        // And make this class extendable
        Class.extend = arguments.callee;
        return Class;
    };
})();

// Wraps `method` (invoked with `obj` as `this`) into a chainable callback:
// additional callbacks can be registered before/after the original method,
// and all of them run when the returned function is called. The returned
// function's result is the original method's result.
openerp.web.callback = function(obj, method) {
    var callback = function() {
        var args = Array.prototype.slice.call(arguments);
        var r;
        for(var i = 0; i < callback.callback_chain.length; i++) {
            var c = callback.callback_chain[i];
            if(c.unique) {
                // fire-once callback: remove it from the chain before firing
                callback.callback_chain.splice(i, 1);
                i -= 1;
            }
            var result = c.callback.apply(c.self, c.args.concat(args));
            if (c.callback === method) {
                // return the result of the original method
                r = result;
            }
            // TODO special value to stop the chain
            // openerp.web.callback_stop
        }
        return r;
    };
    callback.callback_chain = [];
    // f is either a bare function (optionally followed by bound args) or a
    // descriptor {callback, args, self, unique, position}.
    callback.add = function(f) {
        if(typeof(f) == 'function') {
            f = { callback: f,
                  args: Array.prototype.slice.call(arguments, 1) };
        }
        f.self = f.self || null;
        f.args = f.args || [];
        f.unique = !!f.unique;
        if(f.position == 'last') {
            callback.callback_chain.push(f);
        } else {
            callback.callback_chain.unshift(f);
        }
        return callback;
    };
    callback.add_first = function(f) {
        return callback.add.apply(null,arguments);
    };
    callback.add_last = function(f) {
        return callback.add({
            callback: f,
            args: Array.prototype.slice.call(arguments, 1),
            position: "last"
        });
    };
    callback.remove = function(f) {
        callback.callback_chain = _.difference(callback.callback_chain,
            _.filter(callback.callback_chain, function(el) {
                return el.callback === f;
            }));
        return callback;
    };

    return callback.add({
        callback: method, self:obj,
        args:Array.prototype.slice.call(arguments, 2) });
};

/**
 * Generates an inherited class that replaces all the methods by null methods
 * (methods that do nothing and always return undefined).
 *
 * @param {Class} claz
 * @param {Object} add Additional functions to override.
 * @return {Class}
 */
openerp.web.generate_null_object_class = function(claz, add) {
    var newer = {};
    // walk the whole prototype chain of claz and shadow every method with a
    // no-op
    var copy_proto = function(prototype) {
        for (var name in prototype) {
            if(typeof prototype[name] == "function") {
                newer[name] = function() {};
            }
        }
        if (prototype.prototype)
            copy_proto(prototype.prototype);
    };
    copy_proto(claz.prototype);
    // keep a functional init so widget parenting/life-cycle bookkeeping
    // still works on the null object
    newer.init = openerp.web.Widget.prototype.init;
    var tmpclass = claz.extend(newer);
    return tmpclass.extend(add || {});
};

/**
 * web error for lookup failure
 *
 * @class
 */
openerp.web.NotFound = openerp.web.Class.extend( /** @lends openerp.web.NotFound# */ {
});
openerp.web.KeyNotFound = openerp.web.NotFound.extend( /** @lends openerp.web.KeyNotFound# */ {
    /**
     * Thrown when a key could not be found in a mapping
     *
     * @constructs openerp.web.KeyNotFound
     * @extends openerp.web.NotFound
     * @param {String} key the key which could not be found
     */
    init: function (key) {
        this.key = key;
    },
    toString: function () {
        return "The key " + this.key + " was not found";
    }
});
openerp.web.ObjectNotFound = openerp.web.NotFound.extend( /** @lends openerp.web.ObjectNotFound# */ {
    /**
     * Thrown when an object path does not designate a valid class or object
     * in the openerp hierarchy.
     *
     * @constructs openerp.web.ObjectNotFound
     * @extends openerp.web.NotFound
     * @param {String} path the invalid object path
     */
    init: function (path) {
        this.path = path;
    },
    toString: function () {
        return "Could not find any object of path " + this.path;
    }
});
openerp.web.Registry = openerp.web.Class.extend( /** @lends openerp.web.Registry# */ {
    /**
     * Stores a mapping of arbitrary key (strings) to object paths (as strings
     * as well).
     *
     * Resolves those paths at query time in order to always fetch the correct
     * object, even if those objects have been overloaded/replaced after the
     * registry was created.
     *
     * An object path is simply a dotted name from the openerp root to the
     * object pointed to (e.g. ``"openerp.web.Connection"`` for an OpenERP
     * connection object).
     *
     * @constructs openerp.web.Registry
     * @param {Object} mapping a mapping of keys to object-paths
     */
    init: function (mapping) {
        this.map = mapping || {};
    },
    /**
     * Retrieves the object matching the provided key string.
     *
     * @param {String} key the key to fetch the object for
     * @param {Boolean} [silent_error=false] returns undefined if the key or
     *        object is not found, rather than throwing an exception
     * @returns {Class} the stored class, to initialize
     *
     * @throws {openerp.web.KeyNotFound} if the object was not in the mapping
     * @throws {openerp.web.ObjectNotFound} if the object path was invalid
     */
    get_object: function (key, silent_error) {
        var path_string = this.map[key];
        if (path_string === undefined) {
            if (silent_error) { return void 'nooo'; }
            throw new openerp.web.KeyNotFound(key);
        }

        var object_match = openerp;
        var path = path_string.split('.');
        // ignore first section (paths are rooted at `openerp`)
        for(var i=1; i<path.length; ++i) {
            object_match = object_match[path[i]];

            if (object_match === undefined) {
                if (silent_error) { return void 'noooooo'; }
                throw new openerp.web.ObjectNotFound(path_string);
            }
        }
        return object_match;
    },
    /**
     * Tries a number of keys, and returns the first object matching one of
     * the keys.
     *
     * @param {Array} keys a sequence of keys to fetch the object for
     * @returns {Class} the first class found matching an object
     *
     * @throws {openerp.web.KeyNotFound} if none of the keys was in the mapping
     * @throws {openerp.web.ObjectNotFound} if a found object path was invalid
     */
    get_any: function (keys) {
        for (var i=0; i<keys.length; ++i) {
            var key = keys[i];
            if (key === undefined || !(key in this.map)) {
                continue;
            }
            return this.get_object(key);
        }
        throw new openerp.web.KeyNotFound(keys.join(','));
    },
    /**
     * Adds a new key and value to the registry.
     *
     * This method can be chained.
     *
     * @param {String} key
     * @param {String} object_path fully qualified dotted object path
     * @returns {openerp.web.Registry} itself
     */
    add: function (key, object_path) {
        this.map[key] = object_path;
        return this;
    },
    /**
     * Creates and returns a copy of the current mapping, with the provided
     * mapping argument added in (replacing existing keys if needed)
     *
     * @param {Object} [mapping={}] a mapping of keys to object-paths
     */
    clone: function (mapping) {
        return new openerp.web.Registry(
            _.extend({}, this.map, mapping || {}));
    }
});
openerp.web.CallbackEnabled = openerp.web.Class.extend(/** @lends openerp.web.CallbackEnabled# */{
    /**
     * Mixin base class: wraps every on_*/do_* method of the instance into an
     * openerp.web.callback so external code can hook before/after it.
     *
     * @constructs openerp.web.CallbackEnabled
     * @extends openerp.web.Class
     */
    init: function() {
        // Transform on_* method into openerp.web.callbacks
        for (var name in this) {
            if(typeof(this[name]) == "function") {
                // tag each method with its name, for debugging
                this[name].debug_name = name;
                // bind ALL function to this not only on_and _do ?
                if((/^on_|^do_/).test(name)) {
                    this[name] = openerp.web.callback(this, this[name]);
                }
            }
        }
    },
    /**
     * Proxies a method of the object, in order to keep the right ``this`` on
     * method invocations.
     *
     * This method is similar to ``Function.prototype.bind`` or ``_.bind``, and
     * even more so to ``jQuery.proxy`` with a fundamental difference: its
     * resolution of the method being called is lazy, meaning it will use the
     * method as it is when the proxy is called, not when the proxy is created.
     *
     * Other methods will fix the bound method to what it is when creating the
     * binding/proxy, which is fine in most javascript code but problematic in
     * OpenERP Web where developers may want to replace existing callbacks with
     * theirs.
     *
     * The semantics of this precisely replace closing over the method call.
     *
     * @param {String} method_name name of the method to invoke
     * @returns {Function} proxied method
     */
    proxy: function (method_name) {
        var self = this;
        return function () {
            // lazy lookup: resolves the method at call time, not bind time
            return self[method_name].apply(self, arguments);
        }
    }
});
openerp.web.Connection = openerp.web.CallbackEnabled.extend( /** @lends openerp.web.Connection# */{
    /**
     * Represents a JSON-RPC session with an OpenERP server.
     *
     * @constructs openerp.web.Connection
     * @extends openerp.web.CallbackEnabled
     *
     * @param {String} [server] JSON-RPC endpoint hostname
     * @param {String} [port] JSON-RPC endpoint port
     */
    init: function() {
        this._super();
        this.server = null;
        // debug mode is driven by a ?debug query-string parameter
        this.debug = ($.deparam($.param.querystring()).debug != undefined);
        // TODO: session store in cookie should be optional
        this.name = openerp._session_id;
        this.qweb_mutex = new $.Mutex();
    },
    // Binds this session to an origin (defaults to the current page's own
    // origin), resets all session state and kicks off session_init().
    bind: function(origin) {
        var window_origin = location.protocol+"//"+location.host;
        this.origin = origin ? _.str.rtrim(origin,'/') : window_origin;
        this.prefix = this.origin;
        this.server = this.origin; // keep chs happy
        openerp.web.qweb.default_dict['_s'] = this.origin;
        // same-origin sessions use plain JSON POSTs, cross-origin ones JSONP
        this.rpc_function = (this.origin == window_origin) ? this.rpc_json : this.rpc_jsonp;
        this.session_id = false;
        this.uid = false;
        this.username = false;
        this.user_context= {};
        this.db = false;
        this.openerp_entreprise = false;
        this.module_list = [];
        this.module_loaded = {"web": true};
        this.context = {};
        this.shortcuts = [];
        this.active_id = null;
        return this.session_init();
    },
    /**
     * Executes an RPC call, registering the provided callbacks.
     *
     * Registers a default error callback if none is provided, and handles
     * setting the correct session id and session context in the parameter
     * objects
     *
     * @param {String} url RPC endpoint
     * @param {Object} params call parameters
     * @param {Function} success_callback function to execute on RPC call success
     * @param {Function} error_callback function to execute on RPC call failure
     * @returns {jQuery.Deferred} jquery-provided ajax deferred
     */
    rpc: function(url, params, success_callback, error_callback) {
        var self = this;
        // url can be an $.ajax option object
        if (_.isString(url)) {
            url = { url: url };
        }
        // Construct a JSON-RPC2 request, method is currently unused
        params.session_id = this.session_id;
        if (this.debug)
            params.debug = 1;
        var payload = {
            jsonrpc: '2.0',
            method: 'call',
            params: params,
            id: _.uniqueId('r')
        };
        var deferred = $.Deferred();
        this.on_rpc_request();
        this.rpc_function(url, payload).then(
            function (response, textStatus, jqXHR) {
                self.on_rpc_response();
                if (!response.error) {
                    deferred.resolve(response["result"], textStatus, jqXHR);
                } else if (response.error.data.type === "session_invalid") {
                    // session expired server-side: ask for a re-login, then
                    // transparently replay the original call
                    self.uid = false;
                    // TODO deprecate or use a deferred on login.do_ask_login()
                    self.on_session_invalid(function() {
                        self.rpc(url, payload.params,
                            function() {
                                deferred.resolve.apply(deferred, arguments);
                            },
                            function() {
                                deferred.reject.apply(deferred, arguments);
                            });
                    });
                } else {
                    deferred.reject(response.error, $.Event());
                }
            },
            function(jqXHR, textStatus, errorThrown) {
                // transport-level failure: synthesize a JSON-RPC-ish error
                self.on_rpc_response();
                var error = {
                    code: -32098,
                    message: "XmlHttpRequestError " + errorThrown,
                    data: {type: "xhr"+textStatus, debug: jqXHR.responseText, objects: [jqXHR, errorThrown] }
                };
                deferred.reject(error, $.Event());
            });
        // Allow deferred user to disable on_rpc_error in fail: the default
        // error handler is registered from within a fail handler, so it only
        // runs *after* user-provided fail callbacks have had a chance to
        // preventDefault() the event.
        deferred.fail(function() {
            deferred.fail(function(error, event) {
                if (!event.isDefaultPrevented()) {
                    self.on_rpc_error(error, event);
                }
            });
        }).then(success_callback, error_callback).promise();
        return deferred;
    },
    /**
Raw JSON-RPC call * * @returns {jQuery.Deferred} ajax-webd deferred object */ rpc_json: function(url, payload) { var self = this; var ajax = _.extend({ type: "POST", dataType: 'json', contentType: 'application/json', data: JSON.stringify(payload), processData: false }, url); if (this.synch) ajax.async = false; return $.ajax(ajax); }, rpc_jsonp: function(url, payload) { var self = this; // extracted from payload to set on the url var data = { session_id: this.session_id, id: payload.id }; url.url = this.get_url(url.url); var ajax = _.extend({ type: "GET", dataType: 'jsonp', jsonp: 'jsonp', cache: false, data: data }, url); if (this.synch) ajax.async = false; var payload_str = JSON.stringify(payload); var payload_url = $.param({r:payload_str}); if(payload_url.length < 2000) { // Direct jsonp request ajax.data.r = payload_str; return $.ajax(ajax); } else { // Indirect jsonp request var ifid = _.uniqueId('oe_rpc_iframe'); var display = options.openerp.debug ? 'block' : 'none'; var $iframe = $(_.str.sprintf("<iframe src='javascript:false;' name='%s' id='%s' style='display:%s'></iframe>", ifid, ifid, display)); var $form = $('<form>') .attr('method', 'POST') .attr('target', ifid) .attr('enctype', "multipart/form-data") .attr('action', ajax.url + '?' 
+ $.param(data)) .append($('<input type="hidden" name="r" />').attr('value', payload_str)) .hide() .appendTo($('body')); var cleanUp = function() { if ($iframe) { $iframe.unbind("load").attr("src", "javascript:false;").remove(); } $form.remove(); }; var deferred = $.Deferred(); // the first bind is fired up when the iframe is added to the DOM $iframe.bind('load', function() { // the second bind is fired up when the result of the form submission is received $iframe.unbind('load').bind('load', function() { $.ajax(ajax).always(function() { cleanUp(); }).then( function() { deferred.resolve.apply(deferred, arguments); }, function() { deferred.reject.apply(deferred, arguments); } ); }); // now that the iframe can receive data, we fill and submit the form $form.submit(); }); // append the iframe to the DOM (will trigger the first load) $form.after($iframe); return deferred; } }, on_rpc_request: function() { }, on_rpc_response: function() { }, on_rpc_error: function(error) { }, /** * Init a session, reloads from cookie, if it exists */ session_init: function () { var self = this; // TODO: session store in cookie should be optional this.session_id = this.get_cookie('session_id'); return this.session_reload().pipe(function(result) { var modules = openerp._modules.join(','); var deferred = self.rpc('/web/webclient/qweblist', {mods: modules}).pipe(self.do_load_qweb); if(self.session_is_valid()) { return deferred.pipe(function() { return self.load_modules(); }); } return deferred; }); }, /** * (re)loads the content of a session: db name, username, user id, session * context and status of the support contract * * @returns {$.Deferred} deferred indicating the session is done reloading */ session_reload: function () { var self = this; return this.rpc("/web/session/get_session_info", {}).then(function(result) { // If immediately follows a login (triggered by trying to restore // an invalid session or no session at all), refresh session data // (should not change, but just in 
case...) _.extend(self, { db: result.db, username: result.login, uid: result.uid, user_context: result.context, openerp_entreprise: result.openerp_entreprise }); }); }, session_is_valid: function() { return !!this.uid; }, /** * The session is validated either by login or by restoration of a previous session */ session_authenticate: function(db, login, password, _volatile) { var self = this; var base_location = document.location.protocol + '//' + document.location.host; var params = { db: db, login: login, password: password, base_location: base_location }; return this.rpc("/web/session/authenticate", params).pipe(function(result) { _.extend(self, { session_id: result.session_id, db: result.db, username: result.login, uid: result.uid, user_context: result.context, openerp_entreprise: result.openerp_entreprise }); if (!_volatile) { self.set_cookie('session_id', self.session_id); } return self.load_modules(); }); }, session_logout: function() { this.set_cookie('session_id', ''); }, on_session_valid: function() { }, /** * Called when a rpc call fail due to an invalid session. 
     * By default, it's a noop
     */
    on_session_invalid: function(retry_callback) {
    },
    /**
     * Fetches a cookie stored by an openerp session
     *
     * Cookies are namespaced per session name: stored as "<name>|<key>", with
     * JSON-encoded, URI-escaped values.
     *
     * @private
     * @param name the cookie's name
     */
    get_cookie: function (name) {
        if (!this.name) { return null; }
        var nameEQ = this.name + '|' + name + '=';
        var cookies = document.cookie.split(';');
        for(var i=0; i<cookies.length; ++i) {
            // strip the leading whitespace document.cookie puts after ';'
            var cookie = cookies[i].replace(/^\s*/, '');
            if(cookie.indexOf(nameEQ) === 0) {
                return JSON.parse(decodeURIComponent(cookie.substring(nameEQ.length)));
            }
        }
        return null;
    },
    /**
     * Create a new cookie with the provided name and value
     *
     * @private
     * @param name the cookie's name
     * @param value the cookie's value
     * @param ttl the cookie's time to live, 1 year by default, set to -1 to delete
     */
    set_cookie: function (name, value, ttl) {
        if (!this.name) { return; }
        ttl = ttl || 24*60*60*365;
        document.cookie = [
            this.name + '|' + name + '=' + encodeURIComponent(JSON.stringify(value)),
            'path=/',
            'max-age=' + ttl,
            'expires=' + new Date(new Date().getTime() + ttl*1000).toGMTString()
        ].join(';');
    },
    /**
     * Load additional web addons of that instance and init them
     *
     * @param {Boolean} [no_session_valid_signal=false] prevents load_module from triggering ``on_session_valid``.
*/ load_modules: function(no_session_valid_signal) { var self = this; return this.rpc('/web/session/modules', {}).pipe(function(result) { var lang = self.user_context.lang; var params = { mods: ["web"].concat(result), lang: lang}; var to_load = _.difference(result, self.module_list).join(','); self.module_list = result; return $.when( self.rpc('/web/webclient/csslist', {mods: to_load}, self.do_load_css), self.rpc('/web/webclient/qweblist', {mods: to_load}).pipe(self.do_load_qweb), self.rpc('/web/webclient/translations', params).pipe(function(trans) { openerp.web._t.database.set_bundle(trans); var file_list = ["/web/static/lib/datejs/globalization/" + lang.replace("_", "-") + ".js"]; return self.rpc('/web/webclient/jslist', {mods: to_load}).pipe(function(files) { return self.do_load_js(file_list.concat(files)); }); }) ).then(function() { self.on_modules_loaded(); if (!no_session_valid_signal) { self.on_session_valid(); } }); }); }, do_load_css: function (files) { var self = this; _.each(files, function (file) { $('head').append($('<link>', { 'href': self.get_url(file), 'rel': 'stylesheet', 'type': 'text/css' })); }); }, do_load_js: function(files) { var self = this; var d = $.Deferred(); if(files.length != 0) { var file = files.shift(); var tag = document.createElement('script'); tag.type = 'text/javascript'; tag.src = self.get_url(file); tag.onload = tag.onreadystatechange = function() { if ( (tag.readyState && tag.readyState != "loaded" && tag.readyState != "complete") || tag.onload_done ) return; tag.onload_done = true; self.do_load_js(files).then(function () { d.resolve(); }); }; var head = document.head || document.getElementsByTagName('head')[0]; head.appendChild(tag); } else { d.resolve(); } return d; }, do_load_qweb: function(files) { var self = this; _.each(files, function(file) { self.qweb_mutex.exec(function() { return self.rpc('/web/proxy/load', {path: file}).pipe(function(xml) { if (!xml) { return; } openerp.web.qweb.add_template(_.str.trim(xml)); }); 
}); }); return self.qweb_mutex.def; }, on_modules_loaded: function() { for(var j=0; j<this.module_list.length; j++) { var mod = this.module_list[j]; if(this.module_loaded[mod]) continue; openerp[mod] = {}; // init module mod if(openerp._openerp[mod] != undefined) { openerp._openerp[mod](openerp); this.module_loaded[mod] = true; } } }, get_url: function (file) { return this.prefix + file; }, /** * Cooperative file download implementation, for ajaxy APIs. * * Requires that the server side implements an httprequest correctly * setting the `fileToken` cookie to the value provided as the `token` * parameter. The cookie *must* be set on the `/` path and *must not* be * `httpOnly`. * * It would probably also be a good idea for the response to use a * `Content-Disposition: attachment` header, especially if the MIME is a * "known" type (e.g. text/plain, or for some browsers application/json * * @param {Object} options * @param {String} [options.url] used to dynamically create a form * @param {Object} [options.data] data to add to the form submission. If can be used without a form, in which case a form is created from scratch. Otherwise, added to form data * @param {HTMLFormElement} [options.form] the form to submit in order to fetch the file * @param {Function} [options.success] callback in case of download success * @param {Function} [options.error] callback in case of request error, provided with the error body * @param {Function} [options.complete] called after both ``success`` and ``error` callbacks have executed */ get_file: function (options) { // need to detect when the file is done downloading (not used // yet, but we'll need it to fix the UI e.g. 
with a throbber // while dump is being generated), iframe load event only fires // when the iframe content loads, so we need to go smarter: // http://geekswithblogs.net/GruffCode/archive/2010/10/28/detecting-the-file-download-dialog-in-the-browser.aspx var timer, token = new Date().getTime(), cookie_name = 'fileToken', cookie_length = cookie_name.length, CHECK_INTERVAL = 1000, id = _.uniqueId('get_file_frame'), remove_form = false; var $form, $form_data = $('<div>'); var complete = function () { if (options.complete) { options.complete(); } clearTimeout(timer); $form_data.remove(); $target.remove(); if (remove_form && $form) { $form.remove(); } }; var $target = $('<iframe style="display: none;">') .attr({id: id, name: id}) .appendTo(document.body) .load(function () { try { if (options.error) { options.error(JSON.parse( this.contentDocument.body.childNodes[1].textContent )); } } finally { complete(); } }); if (options.form) { $form = $(options.form); } else { remove_form = true; $form = $('<form>', { action: options.url, method: 'POST' }).appendTo(document.body); } _(_.extend({}, options.data || {}, {session_id: this.session_id, token: token})) .each(function (value, key) { $('<input type="hidden" name="' + key + '">') .val(value) .appendTo($form_data); }); $form .append($form_data) .attr('target', id) .get(0).submit(); var waitLoop = function () { var cookies = document.cookie.split(';'); // setup next check timer = setTimeout(waitLoop, CHECK_INTERVAL); for (var i=0; i<cookies.length; ++i) { var cookie = cookies[i].replace(/^\s*/, ''); if (!cookie.indexOf(cookie_name === 0)) { continue; } var cookie_val = cookie.substring(cookie_length + 1); if (parseInt(cookie_val, 10) !== token) { continue; } // clear cookie document.cookie = _.str.sprintf("%s=;expires=%s;path=/", cookie_name, new Date().toGMTString()); if (options.success) { options.success(); } complete(); return; } }; timer = setTimeout(waitLoop, CHECK_INTERVAL); }, synchronized_mode: function(to_execute) { 
var synch = this.synch; this.synch = true; try { return to_execute(); } finally { this.synch = synch; } } }); /** * Base class for all visual components. Provides a lot of functionalities helpful * for the management of a part of the DOM. * * Widget handles: * - Rendering with QWeb. * - Life-cycle management and parenting (when a parent is destroyed, all its children are * destroyed too). * - Insertion in DOM. * * Guide to create implementations of the Widget class: * ============================================== * * Here is a sample child class: * * MyWidget = openerp.base.Widget.extend({ * // the name of the QWeb template to use for rendering * template: "MyQWebTemplate", * * init: function(parent) { * this._super(parent); * // stuff that you want to init before the rendering * }, * start: function() { * // stuff you want to make after the rendering, `this.$element` holds a correct value * this.$element.find(".my_button").click(/* an example of event binding * /); * * // if you have some asynchronous operations, it's a good idea to return * // a promise in start() * var promise = this.rpc(...); * return promise; * } * }); * * Now this class can simply be used with the following syntax: * * var my_widget = new MyWidget(this); * my_widget.appendTo($(".some-div")); * * With these two lines, the MyWidget instance was inited, rendered, it was inserted into the * DOM inside the ".some-div" div and its events were binded. * * And of course, when you don't need that widget anymore, just do: * * my_widget.stop(); * * That will kill the widget in a clean way and erase its content from the dom. */ openerp.web.Widget = openerp.web.CallbackEnabled.extend(/** @lends openerp.web.Widget# */{ /** * The name of the QWeb template that will be used for rendering. Must be * redefined in subclasses or the default render() method can not be used. * * @type string */ template: null, /** * Tag name when creating a default $element. 
* @type string */ tag_name: 'div', /** * Constructs the widget and sets its parent if a parent is given. * * @constructs openerp.web.Widget * @extends openerp.web.CallbackEnabled * * @param {openerp.web.Widget} parent Binds the current instance to the given Widget instance. * When that widget is destroyed by calling stop(), the current instance will be * destroyed too. Can be null. * @param {String} element_id Deprecated. Sets the element_id. Only useful when you want * to bind the current Widget to an already existing part of the DOM, which is not compatible * with the DOM insertion methods provided by the current implementation of Widget. So * for new components this argument should not be provided any more. */ init: function(parent) { this._super(); this.session = openerp.connection; this.widget_parent = parent; this.widget_children = []; if(parent && parent.widget_children) { parent.widget_children.push(this); } // useful to know if the widget was destroyed and should not be used anymore this.widget_is_stopped = false; }, /** * Renders the current widget and appends it to the given jQuery object or Widget. * * @param target A jQuery object or a Widget instance. */ appendTo: function(target) { var self = this; return this._render_and_insert(function(t) { self.$element.appendTo(t); }, target); }, /** * Renders the current widget and prepends it to the given jQuery object or Widget. * * @param target A jQuery object or a Widget instance. */ prependTo: function(target) { var self = this; return this._render_and_insert(function(t) { self.$element.prependTo(t); }, target); }, /** * Renders the current widget and inserts it after to the given jQuery object or Widget. * * @param target A jQuery object or a Widget instance. */ insertAfter: function(target) { var self = this; return this._render_and_insert(function(t) { self.$element.insertAfter(t); }, target); }, /** * Renders the current widget and inserts it before to the given jQuery object or Widget. 
* * @param target A jQuery object or a Widget instance. */ insertBefore: function(target) { var self = this; return this._render_and_insert(function(t) { self.$element.insertBefore(t); }, target); }, /** * Renders the current widget and replaces the given jQuery object. * * @param target A jQuery object or a Widget instance. */ replace: function(target) { return this._render_and_insert(_.bind(function(t) { this.$element.replaceAll(t); }, this), target); }, _render_and_insert: function(insertion, target) { this.render_element(); if (target instanceof openerp.web.Widget) target = target.$element; insertion(target); this.on_inserted(this.$element, this); return this.start(); }, on_inserted: function(element, widget) {}, /** * Renders the element and insert the result of the render() method in this.$element. */ render_element: function() { var rendered = this.render(); if (rendered) { var elem = $(rendered); this.$element.replaceWith(elem); this.$element = elem; } return this; }, /** * Renders the widget using QWeb, `this.template` must be defined. * The context given to QWeb contains the "widget" key that references `this`. * * @param {Object} additional Additional context arguments to pass to the template. */ render: function (additional) { if (this.template) return openerp.web.qweb.render(this.template, _.extend({widget: this}, additional || {})); return null; }, /** * Method called after rendering. Mostly used to bind actions, perform asynchronous * calls, etc... * * By convention, the method should return a promise to inform the caller when * this widget has been initialized. * * @returns {jQuery.Deferred} */ start: function() { return $.Deferred().done().promise(); }, /** * Destroys the current widget, also destroys all its children before destroying itself. 
*/ stop: function() { _.each(_.clone(this.widget_children), function(el) { el.stop(); }); if(this.$element != null) { this.$element.remove(); } if (this.widget_parent && this.widget_parent.widget_children) { this.widget_parent.widget_children = _.without(this.widget_parent.widget_children, this); } this.widget_parent = null; this.widget_is_stopped = true; }, /** * Informs the action manager to do an action. This supposes that * the action manager can be found amongst the ancestors of the current widget. * If that's not the case this method will simply return `false`. */ do_action: function(action, on_finished) { if (this.widget_parent) { return this.widget_parent.do_action(action, on_finished); } return false; }, do_notify: function() { if (this.widget_parent) { return this.widget_parent.do_notify.apply(this,arguments); } return false; }, do_warn: function() { if (this.widget_parent) { return this.widget_parent.do_warn.apply(this,arguments); } return false; }, rpc: function(url, data, success, error) { var def = $.Deferred().then(success, error); var self = this; openerp.connection.rpc(url, data). then(function() { if (!self.widget_is_stopped) def.resolve.apply(def, arguments); }, function() { if (!self.widget_is_stopped) def.reject.apply(def, arguments); }); return def.promise(); } }); /** * Deprecated. Do not use any more. */ openerp.web.OldWidget = openerp.web.Widget.extend({ init: function(parent, element_id) { this._super(parent); this.element_id = element_id; this.element_id = this.element_id || _.uniqueId('widget-'); var tmp = document.getElementById(this.element_id); this.$element = tmp ? 
$(tmp) : $(document.createElement(this.tag_name)); }, });

// Client-side translation database: stores translated terms plus the
// language's formatting parameters, and builds the `_t` function.
openerp.web.TranslationDataBase = openerp.web.Class.extend(/** @lends openerp.web.TranslationDataBase# */{
    /**
     * @constructs openerp.web.TranslationDataBase
     * @extends openerp.web.Class
     */
    init: function() {
        // message id -> translated string
        this.db = {};
        // Default (ltr / en_US-style) formatting; replaced by the bundle's
        // lang_parameters in set_bundle() when the server provides them.
        this.parameters = {"direction": 'ltr', "date_format": '%m/%d/%Y',
            "time_format": '%H:%M:%S', "grouping": [],
            "decimal_point": ".", "thousands_sep": ","};
    },
    /**
     * Resets the database and loads a full translation bundle into it.
     *
     * Modules are loaded in sorted order with "web" forced first, so that
     * later (other-module) entries never override an existing id (see
     * add_module_translation: first writer wins).
     *
     * @param {Object} translation_bundle server bundle: `modules` maps
     *        module names to message lists; optional `lang_parameters`
     *        carries the locale's formatting options
     */
    set_bundle: function(translation_bundle) {
        var self = this;
        this.db = {};
        var modules = _.keys(translation_bundle.modules);
        modules.sort();
        if (_.include(modules, "web")) {
            modules = ["web"].concat(_.without(modules, "web"));
        }
        _.each(modules, function(name) {
            self.add_module_translation(translation_bundle.modules[name]);
        });
        if (translation_bundle.lang_parameters) {
            this.parameters = translation_bundle.lang_parameters;
            // `grouping` arrives as a python-syntax literal string;
            // evaluate it into an actual array.
            this.parameters.grouping = py.eval(
                this.parameters.grouping).toJSON();
        }
    },
    /**
     * Merges one module's messages into the database. A message id
     * already present is not overwritten.
     *
     * @param {Object} mod module entry holding a `messages` list
     */
    add_module_translation: function(mod) {
        var self = this;
        _.each(mod.messages, function(message) {
            if (self.db[message.id] === undefined) {
                self.db[message.id] = message.string;
            }
        });
    },
    /**
     * Builds the translation function: returns the stored translation of
     * its argument, or the argument itself when none is known.
     *
     * @returns {Function} translation function, with a `database`
     *          attribute pointing back to this instance
     */
    build_translation_function: function() {
        var self = this;
        var fcnt = function(str) {
            var tmp = self.get(str);
            return tmp === undefined ? str : tmp;
        };
        fcnt.database = this;
        return fcnt;
    },
    /**
     * Looks up a translated term.
     *
     * @param {String} key source term
     * @returns {String|undefined} translation, or undefined if absent
     */
    get: function(key) {
        if (this.db[key])
            return this.db[key];
        return undefined;
    }
});
/** Configure blockui */
if ($.blockUI) {
    $.blockUI.defaults.baseZ = 1100;
    $.blockUI.defaults.message = '<img src="/web/static/src/img/throbber2.gif">';
}
/** Configure default qweb */
openerp.web._t = new openerp.web.TranslationDataBase().build_translation_function();
/**
 * Lazy translation function, only performs the translation when actually
 * printed (e.g.
inserted into a template)
 *
 * Useful when defining translatable strings in code evaluated before the
 * translation database is loaded, as class attributes or at the top-level of
 * an OpenERP Web module
 *
 * @param {String} s string to translate
 * @returns {Object} lazy translation object
 */
openerp.web._lt = function (s) {
    // the returned object only translates when coerced to a string
    return {toString: function () { return openerp.web._t(s); }}
};
openerp.web.qweb = new QWeb2.Engine();
openerp.web.qweb.debug = (window.location.search.indexOf('?debug') !== -1);
openerp.web.qweb.default_dict = {
    '_' : _,
    '_t' : openerp.web._t
};
// Translates QWeb text nodes on the fly, unless the parent node carries
// t-translation="off". Surrounding whitespace is preserved.
openerp.web.qweb.format_text_node = function(s) {
    // Note that 'this' is the Qweb Node of the text
    var translation = this.node.parentNode.attributes['t-translation'];
    if (translation && translation.value === 'off') {
        return s;
    }
    var ts = _.str.trim(s);
    if (ts.length === 0) {
        return s;
    }
    var tr = openerp.web._t(ts);
    return tr === ts ? s : tr;
}
/** jQuery extensions */
// Serializes asynchronous actions: each action passed to exec() starts
// only after the previous action's deferred has settled.
$.Mutex = (function() {
    function Mutex() {
        this.def = $.Deferred().resolve();
    };
    Mutex.prototype.exec = function(action) {
        var current = this.def;
        var next = this.def = $.Deferred();
        return current.pipe(function() {
            return $.when(action()).always(function() {
                next.resolve();
            });
        });
    };
    return Mutex;
})();
/** Setup default connection */
openerp.connection = new openerp.web.Connection();
openerp.web.qweb.default_dict['__debug__'] = openerp.connection.debug;
// Like $.when, but guarantees callbacks run asynchronously (via
// setTimeout) when the source deferreds were already resolved.
$.async_when = function() {
    var async = false;
    var def = $.Deferred();
    $.when.apply($, arguments).then(function() {
        var args = arguments;
        var action = function() {
            def.resolve.apply(def, args);
        };
        if (async)
            action();
        else
            setTimeout(action, 0);
    }, function() {
        var args = arguments;
        var action = function() {
            def.reject.apply(def, args);
        };
        if (async)
            action();
        else
            setTimeout(action, 0);
    });
    async = true;
    return def;
};
// special tweak for the web client: in synchronous mode fall back to the
// plain (immediate) $.when behaviour
var old_async_when = $.async_when;
$.async_when = function() {
    if (openerp.connection.synch)
        return $.when.apply(this, arguments);
    else
return old_async_when.apply(this, arguments); }; }; // vim:et fdc=0 fdl=0 foldnestmax=3 fdm=syntax:
// addons/web/static/src/js/core.js
/*---------------------------------------------------------
 * OpenERP Web core
 *--------------------------------------------------------*/
// Stub out `console` on browsers that lack it (old IE) so that logging
// calls are safe no-ops.
var console;
if (!console) {
    console = {log: function () {}};
}
if (!console.debug) {
    console.debug = console.log;
}
openerp.web.core = function(openerp) {
/**
 * John Resig Class with factory improvement
 */
(function() {
    // `initializing` guards against running init() while building a
    // prototype; `fnTest` detects whether `_super` support is needed
    // (falls back to wrapping everything when functions can't be
    // decompiled via toString).
    var initializing = false,
        fnTest = /xyz/.test(function(){xyz;}) ? /\b_super\b/ : /.*/;
    // The web Class implementation (does nothing)
    /**
     * Extended version of John Resig's Class pattern
     *
     * @class
     */
    openerp.web.Class = function(){};
    /**
     * Subclass an existing class
     *
     * @param {Object} prop class-level properties (class attributes and instance methods) to set on the new class
     */
    openerp.web.Class.extend = function(prop) {
        var _super = this.prototype;
        // Instantiate a web class (but only create the instance,
        // don't run the init constructor)
        initializing = true;
        var prototype = new this();
        initializing = false;
        // Copy the properties over onto the new prototype
        for (var name in prop) {
            // Check if we're overwriting an existing function
            prototype[name] = typeof prop[name] == "function" &&
                typeof _super[name] == "function" &&
                fnTest.test(prop[name]) ?
(function(name, fn) { return function() { var tmp = this._super; // Add a new ._super() method that is the same // method but on the super-class this._super = _super[name]; // The method only need to be bound temporarily, so // we remove it when we're done executing var ret = fn.apply(this, arguments); this._super = tmp; return ret; }; })(name, prop[name]) : prop[name]; } // The dummy class constructor function Class() { // All construction is actually done in the init method if (!initializing && this.init) { var ret = this.init.apply(this, arguments); if (ret) { return ret; } } return this; } Class.include = function (properties) { for (var name in properties) { if (typeof properties[name] !== 'function' || !fnTest.test(properties[name])) { prototype[name] = properties[name]; } else if (typeof prototype[name] === 'function' && prototype.hasOwnProperty(name)) { prototype[name] = (function (name, fn, previous) { return function () { var tmp = this._super; this._super = previous; var ret = fn.apply(this, arguments); this._super = tmp; return ret; } })(name, properties[name], prototype[name]); } else if (typeof _super[name] === 'function') { prototype[name] = (function (name, fn) { return function () { var tmp = this._super; this._super = _super[name]; var ret = fn.apply(this, arguments); this._super = tmp; return ret; } })(name, properties[name]); } } }; // Populate our constructed prototype object Class.prototype = prototype; // Enforce the constructor to be what we expect Class.constructor = Class; // And make this class extendable Class.extend = arguments.callee; return Class; }; })(); openerp.web.callback = function(obj, method) { var callback = function() { var args = Array.prototype.slice.call(arguments); var r; for(var i = 0; i < callback.callback_chain.length; i++) { var c = callback.callback_chain[i]; if(c.unique) { callback.callback_chain.splice(i, 1); i -= 1; } var result = c.callback.apply(c.self, c.args.concat(args)); if (c.callback === method) { // 
return the result of the original method r = result; } // TODO special value to stop the chain // openerp.web.callback_stop } return r; }; callback.callback_chain = []; callback.add = function(f) { if(typeof(f) == 'function') { f = { callback: f, args: Array.prototype.slice.call(arguments, 1) }; } f.self = f.self || null; f.args = f.args || []; f.unique = !!f.unique; if(f.position == 'last') { callback.callback_chain.push(f); } else { callback.callback_chain.unshift(f); } return callback; }; callback.add_first = function(f) { return callback.add.apply(null,arguments); }; callback.add_last = function(f) { return callback.add({ callback: f, args: Array.prototype.slice.call(arguments, 1), position: "last" }); }; callback.remove = function(f) { callback.callback_chain = _.difference(callback.callback_chain, _.filter(callback.callback_chain, function(el) { return el.callback === f; })); return callback; }; return callback.add({ callback: method, self:obj, args:Array.prototype.slice.call(arguments, 2) }); }; /** * Generates an inherited class that replaces all the methods by null methods (methods * that does nothing and always return undefined). * * @param {Class} claz * @param {Object} add Additional functions to override. 
* @return {Class}
 */
openerp.web.generate_null_object_class = function(claz, add) {
    var newer = {};
    // Collect a no-op override for every function found anywhere on the
    // prototype chain of `claz`.
    var copy_proto = function(prototype) {
        for (var name in prototype) {
            if(typeof prototype[name] == "function") {
                newer[name] = function() {};
            }
        }
        if (prototype.prototype)
            copy_proto(prototype.prototype);
    };
    copy_proto(claz.prototype);
    // Keep the real Widget constructor so lifecycle/parenting still work.
    newer.init = openerp.web.Widget.prototype.init;
    var tmpclass = claz.extend(newer);
    return tmpclass.extend(add || {});
};
/**
 * web error for lookup failure
 *
 * @class
 */
openerp.web.NotFound = openerp.web.Class.extend( /** @lends openerp.web.NotFound# */ {
});
openerp.web.KeyNotFound = openerp.web.NotFound.extend( /** @lends openerp.web.KeyNotFound# */ {
    /**
     * Thrown when a key could not be found in a mapping
     *
     * @constructs openerp.web.KeyNotFound
     * @extends openerp.web.NotFound
     * @param {String} key the key which could not be found
     */
    init: function (key) {
        this.key = key;
    },
    toString: function () {
        return "The key " + this.key + " was not found";
    }
});
openerp.web.ObjectNotFound = openerp.web.NotFound.extend( /** @lends openerp.web.ObjectNotFound# */ {
    /**
     * Thrown when an object path does not designate a valid class or object
     * in the openerp hierarchy.
     *
     * @constructs openerp.web.ObjectNotFound
     * @extends openerp.web.NotFound
     * @param {String} path the invalid object path
     */
    init: function (path) {
        this.path = path;
    },
    toString: function () {
        return "Could not find any object of path " + this.path;
    }
});
openerp.web.Registry = openerp.web.Class.extend( /** @lends openerp.web.Registry# */ {
    /**
     * Stores a mapping of arbitrary key (strings) to object paths (as strings
     * as well).
     *
     * Resolves those paths at query time in order to always fetch the correct
     * object, even if those objects have been overloaded/replaced after the
     * registry was created.
     *
     * An object path is simply a dotted name from the openerp root to the
     * object pointed to (e.g. ``"openerp.web.Connection"`` for an OpenERP
     * connection object).
*
 * @constructs openerp.web.Registry
 * @param {Object} mapping a mapping of keys to object-paths
 */
init: function (mapping) {
    this.map = mapping || {};
},
/**
 * Retrieves the object matching the provided key string.
 *
 * @param {String} key the key to fetch the object for
 * @param {Boolean} [silent_error=false] returns undefined if the key or object is not found, rather than throwing an exception
 * @returns {Class} the stored class, to initialize
 *
 * @throws {openerp.web.KeyNotFound} if the object was not in the mapping
 * @throws {openerp.web.ObjectNotFound} if the object path was invalid
 */
get_object: function (key, silent_error) {
    var path_string = this.map[key];
    if (path_string === undefined) {
        if (silent_error) { return void 'nooo'; }
        throw new openerp.web.KeyNotFound(key);
    }
    var object_match = openerp;
    var path = path_string.split('.');
    // ignore first section: paths are rooted at `openerp` itself
    for(var i=1; i<path.length; ++i) {
        object_match = object_match[path[i]];
        if (object_match === undefined) {
            if (silent_error) { return void 'noooooo'; }
            throw new openerp.web.ObjectNotFound(path_string);
        }
    }
    return object_match;
},
/**
 * Tries a number of keys, and returns the first object matching one of
 * the keys.
 *
 * @param {Array} keys a sequence of keys to fetch the object for
 * @returns {Class} the first class found matching an object
 *
 * @throws {openerp.web.KeyNotFound} if none of the keys was in the mapping
 * @throws {openerp.web.ObjectNotFound} if a found object path was invalid
 */
get_any: function (keys) {
    for (var i=0; i<keys.length; ++i) {
        var key = keys[i];
        if (key === undefined || !(key in this.map)) {
            continue;
        }
        return this.get_object(key);
    }
    throw new openerp.web.KeyNotFound(keys.join(','));
},
/**
 * Adds a new key and value to the registry.
 *
 * This method can be chained.
* @param {String} key
 * @param {String} object_path fully qualified dotted object path
 * @returns {openerp.web.Registry} itself
 */
add: function (key, object_path) {
    this.map[key] = object_path;
    return this;
},
/**
 * Creates and returns a copy of the current mapping, with the provided
 * mapping argument added in (replacing existing keys if needed)
 *
 * @param {Object} [mapping={}] a mapping of keys to object-paths
 */
clone: function (mapping) {
    return new openerp.web.Registry(
        _.extend({}, this.map, mapping || {}));
}
});
// Base class whose constructor wraps every `on_*`/`do_*` method in an
// openerp.web.callback chain, so extra handlers can later be attached to
// those methods.
openerp.web.CallbackEnabled = openerp.web.Class.extend(/** @lends openerp.web.CallbackEnabled# */{
    /**
     * @constructs openerp.web.CallbackEnabled
     * @extends openerp.web.Class
     */
    init: function() {
        // Transform on_* method into openerp.web.callbacks
        for (var name in this) {
            if(typeof(this[name]) == "function") {
                this[name].debug_name = name;
                // bind ALL function to this not only on_and _do ?
                if((/^on_|^do_/).test(name)) {
                    this[name] = openerp.web.callback(this, this[name]);
                }
            }
        }
    },
    /**
     * Proxies a method of the object, in order to keep the right ``this`` on
     * method invocations.
     *
     * This method is similar to ``Function.prototype.bind`` or ``_.bind``, and
     * even more so to ``jQuery.proxy`` with a fundamental difference: its
     * resolution of the method being called is lazy, meaning it will use the
     * method as it is when the proxy is called, not when the proxy is created.
     *
     * Other methods will fix the bound method to what it is when creating the
     * binding/proxy, which is fine in most javascript code but problematic in
     * OpenERP Web where developers may want to replace existing callbacks with
     * theirs.
     *
     * The semantics of this precisely replace closing over the method call.
* @param {String} method_name name of the method to invoke
 * @returns {Function} proxied method
 */
proxy: function (method_name) {
    var self = this;
    return function () {
        // look the method up at call time so later overrides are honoured
        return self[method_name].apply(self, arguments);
    }
}
});
// Manages the client's session with an OpenERP server: JSON-RPC transport
// (direct XHR, or JSONP when cross-origin), authentication, and dynamic
// loading of module assets (css/js/qweb/translations).
openerp.web.Connection = openerp.web.CallbackEnabled.extend( /** @lends openerp.web.Connection# */{
    /**
     * @constructs openerp.web.Connection
     * @extends openerp.web.CallbackEnabled
     *
     * @param {String} [server] JSON-RPC endpoint hostname
     * @param {String} [port] JSON-RPC endpoint port
     */
    init: function() {
        this._super();
        this.server = null;
        // debug mode is driven by a ?debug query-string parameter
        this.debug = ($.deparam($.param.querystring()).debug != undefined);
        // TODO: session store in cookie should be optional
        this.name = openerp._session_id;
        // serializes QWeb template loading (see do_load_qweb)
        this.qweb_mutex = new $.Mutex();
    },
    /**
     * Points this connection at a server origin, resets all session state
     * and kicks off session_init().
     *
     * @param {String} [origin] server origin; defaults to the page's own
     *        origin, in which case plain JSON-RPC is used instead of JSONP
     */
    bind: function(origin) {
        var window_origin = location.protocol+"//"+location.host;
        this.origin = origin ? _.str.rtrim(origin,'/') : window_origin;
        this.prefix = this.origin;
        this.server = this.origin; // keep chs happy
        openerp.web.qweb.default_dict['_s'] = this.origin;
        // same-origin requests can use a plain XHR; cross-origin must JSONP
        this.rpc_function = (this.origin == window_origin) ?  this.rpc_json : this.rpc_jsonp;
        this.session_id = false;
        this.uid = false;
        this.username = false;
        this.user_context= {};
        this.db = false;
        this.openerp_entreprise = false;
        this.module_list = [];
        this.module_loaded = {"web": true};
        this.context = {};
        this.shortcuts = [];
        this.active_id = null;
        return this.session_init();
    },
    /**
     * Executes an RPC call, registering the provided callbacks.
* * Registers a default error callback if none is provided, and handles * setting the correct session id and session context in the parameter * objects * * @param {String} url RPC endpoint * @param {Object} params call parameters * @param {Function} success_callback function to execute on RPC call success * @param {Function} error_callback function to execute on RPC call failure * @returns {jQuery.Deferred} jquery-provided ajax deferred */ rpc: function(url, params, success_callback, error_callback) { var self = this; // url can be an $.ajax option object if (_.isString(url)) { url = { url: url }; } // Construct a JSON-RPC2 request, method is currently unused params.session_id = this.session_id; if (this.debug) params.debug = 1; var payload = { jsonrpc: '2.0', method: 'call', params: params, id: _.uniqueId('r') }; var deferred = $.Deferred(); this.on_rpc_request(); this.rpc_function(url, payload).then( function (response, textStatus, jqXHR) { self.on_rpc_response(); if (!response.error) { deferred.resolve(response["result"], textStatus, jqXHR); } else if (response.error.data.type === "session_invalid") { self.uid = false; // TODO deprecate or use a deferred on login.do_ask_login() self.on_session_invalid(function() { self.rpc(url, payload.params, function() { deferred.resolve.apply(deferred, arguments); }, function() { deferred.reject.apply(deferred, arguments); }); }); } else { deferred.reject(response.error, $.Event()); } }, function(jqXHR, textStatus, errorThrown) { self.on_rpc_response(); var error = { code: -32098, message: "XmlHttpRequestError " + errorThrown, data: {type: "xhr"+textStatus, debug: jqXHR.responseText, objects: [jqXHR, errorThrown] } }; deferred.reject(error, $.Event()); }); // Allow deferred user to disable on_rpc_error in fail deferred.fail(function() { deferred.fail(function(error, event) { if (!event.isDefaultPrevented()) { self.on_rpc_error(error, event); } }); }).then(success_callback, error_callback).promise(); return deferred; }, /** * 
Raw JSON-RPC call * * @returns {jQuery.Deferred} ajax-webd deferred object */ rpc_json: function(url, payload) { var self = this; var ajax = _.extend({ type: "POST", dataType: 'json', contentType: 'application/json', data: JSON.stringify(payload), processData: false }, url); if (this.synch) ajax.async = false; return $.ajax(ajax); }, rpc_jsonp: function(url, payload) { var self = this; // extracted from payload to set on the url var data = { session_id: this.session_id, id: payload.id }; url.url = this.get_url(url.url); var ajax = _.extend({ type: "GET", dataType: 'jsonp', jsonp: 'jsonp', cache: false, data: data }, url); if (this.synch) ajax.async = false; var payload_str = JSON.stringify(payload); var payload_url = $.param({r:payload_str}); if(payload_url.length < 2000) { // Direct jsonp request ajax.data.r = payload_str; return $.ajax(ajax); } else { // Indirect jsonp request var ifid = _.uniqueId('oe_rpc_iframe'); var display = options.openerp.debug ? 'block' : 'none'; var $iframe = $(_.str.sprintf("<iframe src='javascript:false;' name='%s' id='%s' style='display:%s'></iframe>", ifid, ifid, display)); var $form = $('<form>') .attr('method', 'POST') .attr('target', ifid) .attr('enctype', "multipart/form-data") .attr('action', ajax.url + '?' 
+ $.param(data)) .append($('<input type="hidden" name="r" />').attr('value', payload_str)) .hide() .appendTo($('body')); var cleanUp = function() { if ($iframe) { $iframe.unbind("load").attr("src", "javascript:false;").remove(); } $form.remove(); }; var deferred = $.Deferred(); // the first bind is fired up when the iframe is added to the DOM $iframe.bind('load', function() { // the second bind is fired up when the result of the form submission is received $iframe.unbind('load').bind('load', function() { $.ajax(ajax).always(function() { cleanUp(); }).then( function() { deferred.resolve.apply(deferred, arguments); }, function() { deferred.reject.apply(deferred, arguments); } ); }); // now that the iframe can receive data, we fill and submit the form $form.submit(); }); // append the iframe to the DOM (will trigger the first load) $form.after($iframe); return deferred; } }, on_rpc_request: function() { }, on_rpc_response: function() { }, on_rpc_error: function(error) { }, /** * Init a session, reloads from cookie, if it exists */ session_init: function () { var self = this; // TODO: session store in cookie should be optional this.session_id = this.get_cookie('session_id'); return this.session_reload().pipe(function(result) { var modules = openerp._modules.join(','); var deferred = self.rpc('/web/webclient/qweblist', {mods: modules}).pipe(self.do_load_qweb); if(self.session_is_valid()) { return deferred.pipe(function() { return self.load_modules(); }); } return deferred; }); }, /** * (re)loads the content of a session: db name, username, user id, session * context and status of the support contract * * @returns {$.Deferred} deferred indicating the session is done reloading */ session_reload: function () { var self = this; return this.rpc("/web/session/get_session_info", {}).then(function(result) { // If immediately follows a login (triggered by trying to restore // an invalid session or no session at all), refresh session data // (should not change, but just in 
case...) _.extend(self, { db: result.db, username: result.login, uid: result.uid, user_context: result.context, openerp_entreprise: result.openerp_entreprise }); }); }, session_is_valid: function() { return !!this.uid; }, /** * The session is validated either by login or by restoration of a previous session */ session_authenticate: function(db, login, password, _volatile) { var self = this; var base_location = document.location.protocol + '//' + document.location.host; var params = { db: db, login: login, password: password, base_location: base_location }; return this.rpc("/web/session/authenticate", params).pipe(function(result) { _.extend(self, { session_id: result.session_id, db: result.db, username: result.login, uid: result.uid, user_context: result.context, openerp_entreprise: result.openerp_entreprise }); if (!_volatile) { self.set_cookie('session_id', self.session_id); } return self.load_modules(); }); }, session_logout: function() { this.set_cookie('session_id', ''); }, on_session_valid: function() { }, /** * Called when a rpc call fail due to an invalid session. 
* By default, it's a noop */ on_session_invalid: function(retry_callback) { }, /** * Fetches a cookie stored by an openerp session * * @private * @param name the cookie's name */ get_cookie: function (name) { if (!this.name) { return null; } var nameEQ = this.name + '|' + name + '='; var cookies = document.cookie.split(';'); for(var i=0; i<cookies.length; ++i) { var cookie = cookies[i].replace(/^\s*/, ''); if(cookie.indexOf(nameEQ) === 0) { return JSON.parse(decodeURIComponent(cookie.substring(nameEQ.length))); } } return null; }, /** * Create a new cookie with the provided name and value * * @private * @param name the cookie's name * @param value the cookie's value * @param ttl the cookie's time to live, 1 year by default, set to -1 to delete */ set_cookie: function (name, value, ttl) { if (!this.name) { return; } ttl = ttl || 24*60*60*365; document.cookie = [ this.name + '|' + name + '=' + encodeURIComponent(JSON.stringify(value)), 'path=/', 'max-age=' + ttl, 'expires=' + new Date(new Date().getTime() + ttl*1000).toGMTString() ].join(';'); }, /** * Load additional web addons of that instance and init them * * @param {Boolean} [no_session_valid_signal=false] prevents load_module from triggering ``on_session_valid``. 
*/ load_modules: function(no_session_valid_signal) { var self = this; return this.rpc('/web/session/modules', {}).pipe(function(result) { var lang = self.user_context.lang; var params = { mods: ["web"].concat(result), lang: lang}; var to_load = _.difference(result, self.module_list).join(','); self.module_list = result; return $.when( self.rpc('/web/webclient/csslist', {mods: to_load}, self.do_load_css), self.rpc('/web/webclient/qweblist', {mods: to_load}).pipe(self.do_load_qweb), self.rpc('/web/webclient/translations', params).pipe(function(trans) { openerp.web._t.database.set_bundle(trans); var file_list = ["/web/static/lib/datejs/globalization/" + lang.replace("_", "-") + ".js"]; return self.rpc('/web/webclient/jslist', {mods: to_load}).pipe(function(files) { return self.do_load_js(file_list.concat(files)); }); }) ).then(function() { self.on_modules_loaded(); if (!no_session_valid_signal) { self.on_session_valid(); } }); }); }, do_load_css: function (files) { var self = this; _.each(files, function (file) { $('head').append($('<link>', { 'href': self.get_url(file), 'rel': 'stylesheet', 'type': 'text/css' })); }); }, do_load_js: function(files) { var self = this; var d = $.Deferred(); if(files.length != 0) { var file = files.shift(); var tag = document.createElement('script'); tag.type = 'text/javascript'; tag.src = self.get_url(file); tag.onload = tag.onreadystatechange = function() { if ( (tag.readyState && tag.readyState != "loaded" && tag.readyState != "complete") || tag.onload_done ) return; tag.onload_done = true; self.do_load_js(files).then(function () { d.resolve(); }); }; var head = document.head || document.getElementsByTagName('head')[0]; head.appendChild(tag); } else { d.resolve(); } return d; }, do_load_qweb: function(files) { var self = this; _.each(files, function(file) { self.qweb_mutex.exec(function() { return self.rpc('/web/proxy/load', {path: file}).pipe(function(xml) { if (!xml) { return; } openerp.web.qweb.add_template(_.str.trim(xml)); }); 
}); }); return self.qweb_mutex.def; }, on_modules_loaded: function() { for(var j=0; j<this.module_list.length; j++) { var mod = this.module_list[j]; if(this.module_loaded[mod]) continue; openerp[mod] = {}; // init module mod if(openerp._openerp[mod] != undefined) { openerp._openerp[mod](openerp); this.module_loaded[mod] = true; } } }, get_url: function (file) { return this.prefix + file; }, /** * Cooperative file download implementation, for ajaxy APIs. * * Requires that the server side implements an httprequest correctly * setting the `fileToken` cookie to the value provided as the `token` * parameter. The cookie *must* be set on the `/` path and *must not* be * `httpOnly`. * * It would probably also be a good idea for the response to use a * `Content-Disposition: attachment` header, especially if the MIME is a * "known" type (e.g. text/plain, or for some browsers application/json * * @param {Object} options * @param {String} [options.url] used to dynamically create a form * @param {Object} [options.data] data to add to the form submission. If can be used without a form, in which case a form is created from scratch. Otherwise, added to form data * @param {HTMLFormElement} [options.form] the form to submit in order to fetch the file * @param {Function} [options.success] callback in case of download success * @param {Function} [options.error] callback in case of request error, provided with the error body * @param {Function} [options.complete] called after both ``success`` and ``error` callbacks have executed */ get_file: function (options) { // need to detect when the file is done downloading (not used // yet, but we'll need it to fix the UI e.g. 
with a throbber // while dump is being generated), iframe load event only fires // when the iframe content loads, so we need to go smarter: // http://geekswithblogs.net/GruffCode/archive/2010/10/28/detecting-the-file-download-dialog-in-the-browser.aspx var timer, token = new Date().getTime(), cookie_name = 'fileToken', cookie_length = cookie_name.length, CHECK_INTERVAL = 1000, id = _.uniqueId('get_file_frame'), remove_form = false; var $form, $form_data = $('<div>'); var complete = function () { if (options.complete) { options.complete(); } clearTimeout(timer); $form_data.remove(); $target.remove(); if (remove_form && $form) { $form.remove(); } }; var $target = $('<iframe style="display: none;">') .attr({id: id, name: id}) .appendTo(document.body) .load(function () { try { if (options.error) { options.error(JSON.parse( this.contentDocument.body.childNodes[1].textContent )); } } finally { complete(); } }); if (options.form) { $form = $(options.form); } else { remove_form = true; $form = $('<form>', { action: options.url, method: 'POST' }).appendTo(document.body); } _(_.extend({}, options.data || {}, {session_id: this.session_id, token: token})) .each(function (value, key) { $('<input type="hidden" name="' + key + '">') .val(value) .appendTo($form_data); }); $form .append($form_data) .attr('target', id) .get(0).submit(); var waitLoop = function () { var cookies = document.cookie.split(';'); // setup next check timer = setTimeout(waitLoop, CHECK_INTERVAL); for (var i=0; i<cookies.length; ++i) { var cookie = cookies[i].replace(/^\s*/, ''); if (!cookie.indexOf(cookie_name === 0)) { continue; } var cookie_val = cookie.substring(cookie_length + 1); if (parseInt(cookie_val, 10) !== token) { continue; } // clear cookie document.cookie = _.str.sprintf("%s=;expires=%s;path=/", cookie_name, new Date().toGMTString()); if (options.success) { options.success(); } complete(); return; } }; timer = setTimeout(waitLoop, CHECK_INTERVAL); }, synchronized_mode: function(to_execute) { 
var synch = this.synch; this.synch = true; try { return to_execute(); } finally { this.synch = synch; } } }); /** * Base class for all visual components. Provides a lot of functionalities helpful * for the management of a part of the DOM. * * Widget handles: * - Rendering with QWeb. * - Life-cycle management and parenting (when a parent is destroyed, all its children are * destroyed too). * - Insertion in DOM. * * Guide to create implementations of the Widget class: * ============================================== * * Here is a sample child class: * * MyWidget = openerp.base.Widget.extend({ * // the name of the QWeb template to use for rendering * template: "MyQWebTemplate", * * init: function(parent) { * this._super(parent); * // stuff that you want to init before the rendering * }, * start: function() { * // stuff you want to make after the rendering, `this.$element` holds a correct value * this.$element.find(".my_button").click(/* an example of event binding * /); * * // if you have some asynchronous operations, it's a good idea to return * // a promise in start() * var promise = this.rpc(...); * return promise; * } * }); * * Now this class can simply be used with the following syntax: * * var my_widget = new MyWidget(this); * my_widget.appendTo($(".some-div")); * * With these two lines, the MyWidget instance was inited, rendered, it was inserted into the * DOM inside the ".some-div" div and its events were binded. * * And of course, when you don't need that widget anymore, just do: * * my_widget.stop(); * * That will kill the widget in a clean way and erase its content from the dom. */ openerp.web.Widget = openerp.web.CallbackEnabled.extend(/** @lends openerp.web.Widget# */{ /** * The name of the QWeb template that will be used for rendering. Must be * redefined in subclasses or the default render() method can not be used. * * @type string */ template: null, /** * Tag name when creating a default $element. 
* @type string */ tag_name: 'div', /** * Constructs the widget and sets its parent if a parent is given. * * @constructs openerp.web.Widget * @extends openerp.web.CallbackEnabled * * @param {openerp.web.Widget} parent Binds the current instance to the given Widget instance. * When that widget is destroyed by calling stop(), the current instance will be * destroyed too. Can be null. * @param {String} element_id Deprecated. Sets the element_id. Only useful when you want * to bind the current Widget to an already existing part of the DOM, which is not compatible * with the DOM insertion methods provided by the current implementation of Widget. So * for new components this argument should not be provided any more. */ init: function(parent) { this._super(); this.session = openerp.connection; this.widget_parent = parent; this.widget_children = []; if(parent && parent.widget_children) { parent.widget_children.push(this); } // useful to know if the widget was destroyed and should not be used anymore this.widget_is_stopped = false; }, /** * Renders the current widget and appends it to the given jQuery object or Widget. * * @param target A jQuery object or a Widget instance. */ appendTo: function(target) { var self = this; return this._render_and_insert(function(t) { self.$element.appendTo(t); }, target); }, /** * Renders the current widget and prepends it to the given jQuery object or Widget. * * @param target A jQuery object or a Widget instance. */ prependTo: function(target) { var self = this; return this._render_and_insert(function(t) { self.$element.prependTo(t); }, target); }, /** * Renders the current widget and inserts it after to the given jQuery object or Widget. * * @param target A jQuery object or a Widget instance. */ insertAfter: function(target) { var self = this; return this._render_and_insert(function(t) { self.$element.insertAfter(t); }, target); }, /** * Renders the current widget and inserts it before to the given jQuery object or Widget. 
* * @param target A jQuery object or a Widget instance. */ insertBefore: function(target) { var self = this; return this._render_and_insert(function(t) { self.$element.insertBefore(t); }, target); }, /** * Renders the current widget and replaces the given jQuery object. * * @param target A jQuery object or a Widget instance. */ replace: function(target) { return this._render_and_insert(_.bind(function(t) { this.$element.replaceAll(t); }, this), target); }, _render_and_insert: function(insertion, target) { this.render_element(); if (target instanceof openerp.web.Widget) target = target.$element; insertion(target); this.on_inserted(this.$element, this); return this.start(); }, on_inserted: function(element, widget) {}, /** * Renders the element and insert the result of the render() method in this.$element. */ render_element: function() { var rendered = this.render(); if (rendered) { var elem = $(rendered); this.$element.replaceWith(elem); this.$element = elem; } return this; }, /** * Renders the widget using QWeb, `this.template` must be defined. * The context given to QWeb contains the "widget" key that references `this`. * * @param {Object} additional Additional context arguments to pass to the template. */ render: function (additional) { if (this.template) return openerp.web.qweb.render(this.template, _.extend({widget: this}, additional || {})); return null; }, /** * Method called after rendering. Mostly used to bind actions, perform asynchronous * calls, etc... * * By convention, the method should return a promise to inform the caller when * this widget has been initialized. * * @returns {jQuery.Deferred} */ start: function() { return $.Deferred().done().promise(); }, /** * Destroys the current widget, also destroys all its children before destroying itself. 
*/ stop: function() { _.each(_.clone(this.widget_children), function(el) { el.stop(); }); if(this.$element != null) { this.$element.remove(); } if (this.widget_parent && this.widget_parent.widget_children) { this.widget_parent.widget_children = _.without(this.widget_parent.widget_children, this); } this.widget_parent = null; this.widget_is_stopped = true; }, /** * Informs the action manager to do an action. This supposes that * the action manager can be found amongst the ancestors of the current widget. * If that's not the case this method will simply return `false`. */ do_action: function(action, on_finished) { if (this.widget_parent) { return this.widget_parent.do_action(action, on_finished); } return false; }, do_notify: function() { if (this.widget_parent) { return this.widget_parent.do_notify.apply(this,arguments); } return false; }, do_warn: function() { if (this.widget_parent) { return this.widget_parent.do_warn.apply(this,arguments); } return false; }, rpc: function(url, data, success, error) { var def = $.Deferred().then(success, error); var self = this; openerp.connection.rpc(url, data). then(function() { if (!self.widget_is_stopped) def.resolve.apply(def, arguments); }, function() { if (!self.widget_is_stopped) def.reject.apply(def, arguments); }); return def.promise(); } }); openerp.web.OldWidget = openerp.web.Widget.extend({ init: function(parent, /** @deprecated */ element_id) { this._super(parent); this.element_id = element_id; this.element_id = this.element_id || _.uniqueId('widget-'); var tmp = document.getElementById(this.element_id); this.$element = tmp ? 
$(tmp) : $(document.createElement(this.tag_name)); }, }); openerp.web.TranslationDataBase = openerp.web.Class.extend(/** @lends openerp.web.TranslationDataBase# */{ /** * @constructs openerp.web.TranslationDataBase * @extends openerp.web.Class */ init: function() { this.db = {}; this.parameters = {"direction": 'ltr', "date_format": '%m/%d/%Y', "time_format": '%H:%M:%S', "grouping": [], "decimal_point": ".", "thousands_sep": ","}; }, set_bundle: function(translation_bundle) { var self = this; this.db = {}; var modules = _.keys(translation_bundle.modules); modules.sort(); if (_.include(modules, "web")) { modules = ["web"].concat(_.without(modules, "web")); } _.each(modules, function(name) { self.add_module_translation(translation_bundle.modules[name]); }); if (translation_bundle.lang_parameters) { this.parameters = translation_bundle.lang_parameters; this.parameters.grouping = py.eval( this.parameters.grouping).toJSON(); } }, add_module_translation: function(mod) { var self = this; _.each(mod.messages, function(message) { if (self.db[message.id] === undefined) { self.db[message.id] = message.string; } }); }, build_translation_function: function() { var self = this; var fcnt = function(str) { var tmp = self.get(str); return tmp === undefined ? str : tmp; }; fcnt.database = this; return fcnt; }, get: function(key) { if (this.db[key]) return this.db[key]; return undefined; } }); /** Configure blockui */ if ($.blockUI) { $.blockUI.defaults.baseZ = 1100; $.blockUI.defaults.message = '<img src="/web/static/src/img/throbber2.gif">'; } /** Configure default qweb */ openerp.web._t = new openerp.web.TranslationDataBase().build_translation_function(); /** * Lazy translation function, only performs the translation when actually * printed (e.g. 
inserted into a template) * * Useful when defining translatable strings in code evaluated before the * translation database is loaded, as class attributes or at the top-level of * an OpenERP Web module * * @param {String} s string to translate * @returns {Object} lazy translation object */ openerp.web._lt = function (s) { return {toString: function () { return openerp.web._t(s); }} }; openerp.web.qweb = new QWeb2.Engine(); openerp.web.qweb.debug = (window.location.search.indexOf('?debug') !== -1); openerp.web.qweb.default_dict = { '_' : _, '_t' : openerp.web._t }; openerp.web.qweb.format_text_node = function(s) { // Note that 'this' is the Qweb Node of the text var translation = this.node.parentNode.attributes['t-translation']; if (translation && translation.value === 'off') { return s; } var ts = _.str.trim(s); if (ts.length === 0) { return s; } var tr = openerp.web._t(ts); return tr === ts ? s : tr; } /** Jquery extentions */ $.Mutex = (function() { function Mutex() { this.def = $.Deferred().resolve(); }; Mutex.prototype.exec = function(action) { var current = this.def; var next = this.def = $.Deferred(); return current.pipe(function() { return $.when(action()).always(function() { next.resolve(); }); }); }; return Mutex; })(); /** Setup default connection */ openerp.connection = new openerp.web.Connection(); openerp.web.qweb.default_dict['__debug__'] = openerp.connection.debug; $.async_when = function() { var async = false; var def = $.Deferred(); $.when.apply($, arguments).then(function() { var args = arguments; var action = function() { def.resolve.apply(def, args); }; if (async) action(); else setTimeout(action, 0); }, function() { var args = arguments; var action = function() { def.reject.apply(def, args); }; if (async) action(); else setTimeout(action, 0); }); async = true; return def; }; // special tweak for the web client var old_async_when = $.async_when; $.async_when = function() { if (openerp.connection.synch) return $.when.apply(this, arguments); else 
return old_async_when.apply(this, arguments); }; }; // vim:et fdc=0 fdl=0 foldnestmax=3 fdm=syntax:
[imp] added documentation
addons/web/static/src/js/core.js
[imp] added documentation
<ide><path>ddons/web/static/src/js/core.js <ide> } <ide> }); <ide> <add>/** <add> * Deprecated. Do not use any more. <add> */ <ide> openerp.web.OldWidget = openerp.web.Widget.extend({ <del> init: function(parent, /** @deprecated */ element_id) { <add> init: function(parent, element_id) { <ide> this._super(parent); <ide> this.element_id = element_id; <ide> this.element_id = this.element_id || _.uniqueId('widget-');
Java
mit
9bb43be4b2fa5695b54b5d90797ca86334db89e5
0
Leviter/datafactory
package factory; import data.Addresses; import data.impl.DEAddressesImpl; import data.impl.NLAddressesImpl; import lombok.Getter; import java.util.Locale; public class AddressesFactory { @Getter private Addresses addresses; public AddressesFactory(Locale locale) { if (locale != null) { // Use the country based on the ISO 3166-2 code switch (locale.getCountry()) { case "DE": addresses = new DEAddressesImpl(); break; case "NL": addresses = new NLAddressesImpl(); break; default: addresses = new NLAddressesImpl(); break; } } else { addresses = new NLAddressesImpl(); } } }
src/main/java/factory/AddressesFactory.java
package factory; import data.Addresses; import data.impl.NLAddressesImpl; import lombok.Getter; import java.util.Locale; public class AddressesFactory { @Getter private Addresses addresses; public AddressesFactory(Locale locale) { if (locale != null) { // Use the country based on the ISO 3166-2 code switch (locale.getCountry()) { case "NL": addresses = new NLAddressesImpl(); break; default: addresses = new NLAddressesImpl(); break; } } else { addresses = new NLAddressesImpl(); } } }
Added German to the address factory
src/main/java/factory/AddressesFactory.java
Added German to the address factory
<ide><path>rc/main/java/factory/AddressesFactory.java <ide> package factory; <ide> <ide> import data.Addresses; <add>import data.impl.DEAddressesImpl; <ide> import data.impl.NLAddressesImpl; <ide> import lombok.Getter; <ide> <ide> if (locale != null) { <ide> // Use the country based on the ISO 3166-2 code <ide> switch (locale.getCountry()) { <add> case "DE": <add> addresses = new DEAddressesImpl(); <add> break; <ide> case "NL": <ide> addresses = new NLAddressesImpl(); <ide> break;
Java
apache-2.0
1acb9a795ac2c89ee79bdd27bf8960b19e232e0c
0
matteobertozzi/hbase,francisliu/hbase_namespace,matteobertozzi/hbase,francisliu/hbase_namespace,francisliu/hbase_namespace,francisliu/hbase_namespace,jyates/hbase,matteobertozzi/hbase,francisliu/hbase_namespace,jyates/hbase,francisliu/hbase_namespace,matteobertozzi/hbase,matteobertozzi/hbase,jyates/hbase,matteobertozzi/hbase,jyates/hbase,jyates/hbase,jyates/hbase,jyates/hbase,matteobertozzi/hbase,jyates/hbase,francisliu/hbase_namespace,matteobertozzi/hbase,matteobertozzi/hbase,francisliu/hbase_namespace,jyates/hbase,francisliu/hbase_namespace
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import java.io.Closeable; import java.io.IOException; import java.util.List; import java.util.Map; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; /** * Used to communicate with a single HBase table. * * @since 0.21.0 */ @InterfaceAudience.Public @InterfaceStability.Stable public interface HTableInterface extends Closeable { /** * Gets the name of this table. * * @return the table name. */ byte[] getTableName(); /** * Returns the {@link Configuration} object used by this instance. * <p> * The reference returned is not a copy, so any change made to it will * affect this instance. */ Configuration getConfiguration(); /** * Gets the {@link HTableDescriptor table descriptor} for this table. 
* @throws IOException if a remote or network exception occurs. */ HTableDescriptor getTableDescriptor() throws IOException; /** * Test for the existence of columns in the table, as specified in the Get. * <p> * * This will return true if the Get matches one or more keys, false if not. * <p> * * This is a server-side call so it prevents any data from being transfered to * the client. * * @param get the Get * @return true if the specified Get matches one or more keys, false if not * @throws IOException e */ boolean exists(Get get) throws IOException; /** * Method that does a batch call on Deletes, Gets and Puts. The ordering of * execution of the actions is not defined. Meaning if you do a Put and a * Get in the same {@link #batch} call, you will not necessarily be * guaranteed that the Get returns what the Put had put. * * @param actions list of Get, Put, Delete objects * @param results Empty Object[], same size as actions. Provides access to partial * results, in case an exception is thrown. A null in the result array means that * the call for that action failed, even after retries * @throws IOException * @since 0.90.0 */ void batch(final List<?extends Row> actions, final Object[] results) throws IOException, InterruptedException; /** * Same as {@link #batch(List, Object[])}, but returns an array of * results instead of using a results parameter reference. * * @param actions list of Get, Put, Delete objects * @return the results from the actions. A null in the return array means that * the call for that action failed, even after retries * @throws IOException * @since 0.90.0 */ Object[] batch(final List<? extends Row> actions) throws IOException, InterruptedException; /** * Same as {@link #batch(List, Object[])}, but with a callback. * @since 0.96.0 */ public <R> void batchCallback( final List<? 
extends Row> actions, final Object[] results, final Batch.Callback<R> callback) throws IOException, InterruptedException; /** * Same as {@link #batch(List)}, but with a callback. * @since 0.96.0 */ public <R> Object[] batchCallback( List<? extends Row> actions, Batch.Callback<R> callback) throws IOException, InterruptedException; /** * Extracts certain cells from a given row. * @param get The object that specifies what data to fetch and from which row. * @return The data coming from the specified row, if it exists. If the row * specified doesn't exist, the {@link Result} instance returned won't * contain any {@link KeyValue}, as indicated by {@link Result#isEmpty()}. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ Result get(Get get) throws IOException; /** * Extracts certain cells from the given rows, in batch. * * @param gets The objects that specify what data to fetch and from which rows. * * @return The data coming from the specified rows, if it exists. If the row * specified doesn't exist, the {@link Result} instance returned won't * contain any {@link KeyValue}, as indicated by {@link Result#isEmpty()}. * If there are any failures even after retries, there will be a null in * the results array for those Gets, AND an exception will be thrown. * @throws IOException if a remote or network exception occurs. * * @since 0.90.0 */ Result[] get(List<Get> gets) throws IOException; /** * Return the row that matches <i>row</i> exactly, * or the one that immediately precedes it. * * @param row A row key. * @param family Column family to include in the {@link Result}. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 * * @deprecated As of version 0.92 this method is deprecated without * replacement. * getRowOrBefore is used internally to find entries in .META. and makes * various assumptions about the table (which are true for .META. but not * in general) to be efficient. 
*/ Result getRowOrBefore(byte[] row, byte[] family) throws IOException; /** * Returns a scanner on the current table as specified by the {@link Scan} * object. * Note that the passed {@link Scan}'s start row and caching properties * maybe changed. * * @param scan A configured {@link Scan} object. * @return A scanner. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ ResultScanner getScanner(Scan scan) throws IOException; /** * Gets a scanner on the current table for the given family. * * @param family The column family to scan. * @return A scanner. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ ResultScanner getScanner(byte[] family) throws IOException; /** * Gets a scanner on the current table for the given family and qualifier. * * @param family The column family to scan. * @param qualifier The column qualifier to scan. * @return A scanner. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ ResultScanner getScanner(byte[] family, byte[] qualifier) throws IOException; /** * Puts some data in the table. * <p> * If {@link #isAutoFlush isAutoFlush} is false, the update is buffered * until the internal buffer is full. * @param put The data to put. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ void put(Put put) throws IOException; /** * Puts some data in the table, in batch. * <p> * If {@link #isAutoFlush isAutoFlush} is false, the update is buffered * until the internal buffer is full. * <p> * This can be used for group commit, or for submitting user defined * batches. The writeBuffer will be periodically inspected while the List * is processed, so depending on the List size the writeBuffer may flush * not at all, or more than once. * @param puts The list of mutations to apply. The batch put is done by * aggregating the iteration of the Puts over the write buffer * at the client-side for a single RPC call. 
* @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ void put(List<Put> puts) throws IOException; /** * Atomically checks if a row/family/qualifier value matches the expected * value. If it does, it adds the put. If the passed value is null, the check * is for the lack of column (ie: non-existance) * * @param row to check * @param family column family to check * @param qualifier column qualifier to check * @param value the expected value * @param put data to put if check succeeds * @throws IOException e * @return true if the new put was executed, false otherwise */ boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, byte[] value, Put put) throws IOException; /** * Deletes the specified cells/row. * * @param delete The object that specifies what to delete. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ void delete(Delete delete) throws IOException; /** * Deletes the specified cells/rows in bulk. * @param deletes List of things to delete. List gets modified by this * method (in particular it gets re-ordered, so the order in which the elements * are inserted in the list gives no guarantee as to the order in which the * {@link Delete}s are executed). * @throws IOException if a remote or network exception occurs. In that case * the {@code deletes} argument will contain the {@link Delete} instances * that have not be successfully applied. * @since 0.20.1 */ void delete(List<Delete> deletes) throws IOException; /** * Atomically checks if a row/family/qualifier value matches the expected * value. If it does, it adds the delete. 
If the passed value is null, the * check is for the lack of column (ie: non-existance) * * @param row to check * @param family column family to check * @param qualifier column qualifier to check * @param value the expected value * @param delete data to delete if check succeeds * @throws IOException e * @return true if the new delete was executed, false otherwise */ boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value, Delete delete) throws IOException; /** * Performs multiple mutations atomically on a single row. Currently * {@link Put} and {@link Delete} are supported. * * @param arm object that specifies the set of mutations to perform * atomically * @throws IOException */ public void mutateRow(final RowMutations rm) throws IOException; /** * Appends values to one or more columns within a single row. * <p> * This operation does not appear atomic to readers. Appends are done * under a single row lock, so write operations to a row are synchronized, but * readers do not take row locks so get and scan operations can see this * operation partially completed. * * @param append object that specifies the columns and amounts to be used * for the increment operations * @throws IOException e * @return values of columns after the append operation (maybe null) */ public Result append(final Append append) throws IOException; /** * Increments one or more columns within a single row. * <p> * This operation does not appear atomic to readers. Increments are done * under a single row lock, so write operations to a row are synchronized, but * readers do not take row locks so get and scan operations can see this * operation partially completed. * * @param increment object that specifies the columns and amounts to be used * for the increment operations * @throws IOException e * @return values of columns after the increment */ public Result increment(final Increment increment) throws IOException; /** * Atomically increments a column value. 
* <p> * Equivalent to {@link #incrementColumnValue(byte[], byte[], byte[], * long, boolean) incrementColumnValue}(row, family, qualifier, amount, * <b>true</b>)} * @param row The row that contains the cell to increment. * @param family The column family of the cell to increment. * @param qualifier The column qualifier of the cell to increment. * @param amount The amount to increment the cell with (or decrement, if the * amount is negative). * @return The new value, post increment. * @throws IOException if a remote or network exception occurs. */ long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException; /** * Atomically increments a column value. If the column value already exists * and is not a big-endian long, this could throw an exception. If the column * value does not yet exist it is initialized to <code>amount</code> and * written to the specified column. * * <p>Setting writeToWAL to false means that in a fail scenario, you will lose * any increments that have not been flushed. * @param row The row that contains the cell to increment. * @param family The column family of the cell to increment. * @param qualifier The column qualifier of the cell to increment. * @param amount The amount to increment the cell with (or decrement, if the * amount is negative). * @param writeToWAL if {@code true}, the operation will be applied to the * Write Ahead Log (WAL). This makes the operation slower but safer, as if * the call returns successfully, it is guaranteed that the increment will * be safely persisted. When set to {@code false}, the call may return * successfully before the increment is safely persisted, so it's possible * that the increment be lost in the event of a failure happening before the * operation gets persisted. * @return The new value, post increment. * @throws IOException if a remote or network exception occurs. 
*/ long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL) throws IOException; /** * Tells whether or not 'auto-flush' is turned on. * * @return {@code true} if 'auto-flush' is enabled (default), meaning * {@link Put} operations don't get buffered/delayed and are immediately * executed. */ boolean isAutoFlush(); /** * Executes all the buffered {@link Put} operations. * <p> * This method gets called once automatically for every {@link Put} or batch * of {@link Put}s (when <code>put(List<Put>)</code> is used) when * {@link #isAutoFlush} is {@code true}. * @throws IOException if a remote or network exception occurs. */ void flushCommits() throws IOException; /** * Releases any resources held or pending changes in internal buffers. * * @throws IOException if a remote or network exception occurs. */ void close() throws IOException; /** * Obtains a lock on a row. * * @param row The row to lock. * @return A {@link RowLock} containing the row and lock id. * @throws IOException if a remote or network exception occurs. * @see RowLock * @see #unlockRow */ RowLock lockRow(byte[] row) throws IOException; /** * Releases a row lock. * * @param rl The row lock to release. * @throws IOException if a remote or network exception occurs. * @see RowLock * @see #unlockRow */ void unlockRow(RowLock rl) throws IOException; /** * Creates and returns a proxy to the CoprocessorProtocol instance running in the * region containing the specified row. The row given does not actually have * to exist. Whichever region would contain the row based on start and end keys will * be used. Note that the {@code row} parameter is also not passed to the * coprocessor handler registered for this protocol, unless the {@code row} * is separately passed as an argument in a proxy method call. The parameter * here is just used to locate the region used to handle the call. 
* * @param protocol The class or interface defining the remote protocol * @param row The row key used to identify the remote region location * @return A CoprocessorProtocol instance * @deprecated since 0.96. Use {@link HTableInterface#coprocessorService(byte[])} instead. */ @Deprecated <T extends CoprocessorProtocol> T coprocessorProxy(Class<T> protocol, byte[] row); /** * Invoke the passed * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call} against * the {@link CoprocessorProtocol} instances running in the selected regions. * All regions beginning with the region containing the <code>startKey</code> * row, through to the region containing the <code>endKey</code> row (inclusive) * will be used. If <code>startKey</code> or <code>endKey</code> is * <code>null</code>, the first and last regions in the table, respectively, * will be used in the range selection. * * @param protocol the CoprocessorProtocol implementation to call * @param startKey start region selection with region containing this row * @param endKey select regions up to and including the region containing * this row * @param callable wraps the CoprocessorProtocol implementation method calls * made per-region * @param <T> CoprocessorProtocol subclass for the remote invocation * @param <R> Return type for the * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} * method * @return a <code>Map</code> of region names to * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} return values * * @deprecated since 0.96. Use * {@link HTableInterface#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)} instead. 
*/ @Deprecated <T extends CoprocessorProtocol, R> Map<byte[],R> coprocessorExec( Class<T> protocol, byte[] startKey, byte[] endKey, Batch.Call<T,R> callable) throws IOException, Throwable; /** * Invoke the passed * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call} against * the {@link CoprocessorProtocol} instances running in the selected regions. * All regions beginning with the region containing the <code>startKey</code> * row, through to the region containing the <code>endKey</code> row * (inclusive) * will be used. If <code>startKey</code> or <code>endKey</code> is * <code>null</code>, the first and last regions in the table, respectively, * will be used in the range selection. * * <p> * For each result, the given * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)} * method will be called. *</p> * * @param protocol the CoprocessorProtocol implementation to call * @param startKey start region selection with region containing this row * @param endKey select regions up to and including the region containing * this row * @param callable wraps the CoprocessorProtocol implementation method calls * made per-region * @param callback an instance upon which * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)} with the * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} * return value for each region * @param <T> CoprocessorProtocol subclass for the remote invocation * @param <R> Return type for the * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} * method * * @deprecated since 0.96. * Use {@link HTableInterface#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)} instead. 
*/ @Deprecated <T extends CoprocessorProtocol, R> void coprocessorExec( Class<T> protocol, byte[] startKey, byte[] endKey, Batch.Call<T,R> callable, Batch.Callback<R> callback) throws IOException, Throwable; /** * Creates and returns a {@link com.google.protobuf.RpcChannel} instance connected to the * table region containing the specified row. The row given does not actually have * to exist. Whichever region would contain the row based on start and end keys will * be used. Note that the {@code row} parameter is also not passed to the * coprocessor handler registered for this protocol, unless the {@code row} * is separately passed as an argument in the service request. The parameter * here is only used to locate the region used to handle the call. * * <p> * The obtained {@link com.google.protobuf.RpcChannel} instance can be used to access a published * coprocessor {@link com.google.protobuf.Service} using standard protobuf service invocations: * </p> * * <div style="background-color: #cccccc; padding: 2px"> * <blockquote><pre> * CoprocessorRpcChannel channel = myTable.coprocessorService(rowkey); * MyService.BlockingInterface service = MyService.newBlockingStub(channel); * MyCallRequest request = MyCallRequest.newBuilder() * ... * .build(); * MyCallResponse response = service.myCall(null, request); * </pre></blockquote></div> * * @param row The row key used to identify the remote region location * @return A CoprocessorRpcChannel instance */ CoprocessorRpcChannel coprocessorService(byte[] row); /** * Creates an instance of the given {@link com.google.protobuf.Service} subclass for each table * region spanning the range from the {@code startKey} row to {@code endKey} row (inclusive), * and invokes the passed {@link Batch.Call#call(Object)} method with each {@link Service} * instance. * * @param service the protocol buffer {@code Service} implementation to call * @param startKey start region selection with region containing this row. 
If {@code null}, the * selection will start with the first table region. * @param endKey select regions up to and including the region containing this row. * If {@code null}, selection will continue through the last table region. * @param callable this instance's {@link Batch.Call#call(Object)} method will be invoked once * per table region, using the {@link Service} instance connected to that region. * @param <T> the {@link Service} subclass to connect to * @param <R> Return type for the {@code callable} parameter's * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} method * @return a map of result values keyed by region name */ <T extends Service, R> Map<byte[],R> coprocessorService(final Class<T> service, byte[] startKey, byte[] endKey, final Batch.Call<T,R> callable) throws ServiceException, Throwable; /** * Creates an instance of the given {@link com.google.protobuf.Service} subclass for each table * region spanning the range from the {@code startKey} row to {@code endKey} row (inclusive), * and invokes the passed {@link Batch.Call#call(Object)} method with each {@link Service} * instance. * * <p> * The given * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)} * method will be called with the return value from each region's {@link Batch.Call#call(Object)} * invocation. *</p> * * @param service the protocol buffer {@code Service} implementation to call * @param startKey start region selection with region containing this row. If {@code null}, the * selection will start with the first table region. * @param endKey select regions up to and including the region containing this row. * If {@code null}, selection will continue through the last table region. * @param callable this instance's {@link Batch.Call#call(Object)} method will be invoked once * per table region, using the {@link Service} instance connected to that region. 
* @param callback * @param <T> the {@link Service} subclass to connect to * @param <R> Return type for the {@code callable} parameter's * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} method */ <T extends Service, R> void coprocessorService(final Class<T> service, byte[] startKey, byte[] endKey, final Batch.Call<T,R> callable, final Batch.Callback<R> callback) throws ServiceException, Throwable; /** * See {@link #setAutoFlush(boolean, boolean)} * * @param autoFlush * Whether or not to enable 'auto-flush'. */ public void setAutoFlush(boolean autoFlush); /** * Turns 'auto-flush' on or off. * <p> * When enabled (default), {@link Put} operations don't get buffered/delayed * and are immediately executed. Failed operations are not retried. This is * slower but safer. * <p> * Turning off {@link #autoFlush} means that multiple {@link Put}s will be * accepted before any RPC is actually sent to do the write operations. If the * application dies before pending writes get flushed to HBase, data will be * lost. * <p> * When you turn {@link #autoFlush} off, you should also consider the * {@link #clearBufferOnFail} option. By default, asynchronous {@link Put} * requests will be retried on failure until successful. However, this can * pollute the writeBuffer and slow down batching performance. Additionally, * you may want to issue a number of Put requests and call * {@link #flushCommits()} as a barrier. In both use cases, consider setting * clearBufferOnFail to true to erase the buffer after {@link #flushCommits()} * has been called, regardless of success. * * @param autoFlush * Whether or not to enable 'auto-flush'. * @param clearBufferOnFail * Whether to keep Put failures in the writeBuffer * @see #flushCommits */ public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail); /** * Returns the maximum size in bytes of the write buffer for this HTable. 
* <p> * The default value comes from the configuration parameter * {@code hbase.client.write.buffer}. * @return The size of the write buffer in bytes. */ public long getWriteBufferSize(); /** * Sets the size of the buffer in bytes. * <p> * If the new size is less than the current amount of data in the * write buffer, the buffer gets flushed. * @param writeBufferSize The new write buffer size, in bytes. * @throws IOException if a remote or network exception occurs. */ public void setWriteBufferSize(long writeBufferSize) throws IOException; }
hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import java.io.Closeable; import java.io.IOException; import java.util.List; import java.util.Map; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.ipc.CoprocessorProtocol; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; /** * Used to communicate with a single HBase table. * * @since 0.21.0 */ @InterfaceAudience.Public @InterfaceStability.Stable public interface HTableInterface extends Closeable { /** * Gets the name of this table. * * @return the table name. */ byte[] getTableName(); /** * Returns the {@link Configuration} object used by this instance. * <p> * The reference returned is not a copy, so any change made to it will * affect this instance. */ Configuration getConfiguration(); /** * Gets the {@link HTableDescriptor table descriptor} for this table. 
* @throws IOException if a remote or network exception occurs. */ HTableDescriptor getTableDescriptor() throws IOException; /** * Test for the existence of columns in the table, as specified in the Get. * <p> * * This will return true if the Get matches one or more keys, false if not. * <p> * * This is a server-side call so it prevents any data from being transfered to * the client. * * @param get the Get * @return true if the specified Get matches one or more keys, false if not * @throws IOException e */ boolean exists(Get get) throws IOException; /** * Method that does a batch call on Deletes, Gets and Puts. The ordering of * execution of the actions is not defined. Meaning if you do a Put and a * Get in the same {@link #batch} call, you will not necessarily be * guaranteed that the Get returns what the Put had put. * * @param actions list of Get, Put, Delete objects * @param results Empty Object[], same size as actions. Provides access to partial * results, in case an exception is thrown. A null in the result array means that * the call for that action failed, even after retries * @throws IOException * @since 0.90.0 */ void batch(final List<?extends Row> actions, final Object[] results) throws IOException, InterruptedException; /** * Same as {@link #batch(List, Object[])}, but returns an array of * results instead of using a results parameter reference. * * @param actions list of Get, Put, Delete objects * @return the results from the actions. A null in the return array means that * the call for that action failed, even after retries * @throws IOException * @since 0.90.0 */ Object[] batch(final List<? extends Row> actions) throws IOException, InterruptedException; /** * Same as {@link #batch(List, Object[])}, but with a callback. * @since 0.96.0 */ public <R> void batchCallback( final List<? 
extends Row> actions, final Object[] results, final Batch.Callback<R> callback) throws IOException, InterruptedException; /** * Same as {@link #batch(List)}, but with a callback. * @since 0.96.0 */ public <R> Object[] batchCallback( List<? extends Row> actions, Batch.Callback<R> callback) throws IOException, InterruptedException; /** * Extracts certain cells from a given row. * @param get The object that specifies what data to fetch and from which row. * @return The data coming from the specified row, if it exists. If the row * specified doesn't exist, the {@link Result} instance returned won't * contain any {@link KeyValue}, as indicated by {@link Result#isEmpty()}. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ Result get(Get get) throws IOException; /** * Extracts certain cells from the given rows, in batch. * * @param gets The objects that specify what data to fetch and from which rows. * * @return The data coming from the specified rows, if it exists. If the row * specified doesn't exist, the {@link Result} instance returned won't * contain any {@link KeyValue}, as indicated by {@link Result#isEmpty()}. * If there are any failures even after retries, there will be a null in * the results array for those Gets, AND an exception will be thrown. * @throws IOException if a remote or network exception occurs. * * @since 0.90.0 */ Result[] get(List<Get> gets) throws IOException; /** * Return the row that matches <i>row</i> exactly, * or the one that immediately precedes it. * * @param row A row key. * @param family Column family to include in the {@link Result}. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 * * @deprecated As of version 0.92 this method is deprecated without * replacement. * getRowOrBefore is used internally to find entries in .META. and makes * various assumptions about the table (which are true for .META. but not * in general) to be efficient. 
*/ Result getRowOrBefore(byte[] row, byte[] family) throws IOException; /** * Returns a scanner on the current table as specified by the {@link Scan} * object. * Note that the passed {@link Scan}'s start row and caching properties * maybe changed. * * @param scan A configured {@link Scan} object. * @return A scanner. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ ResultScanner getScanner(Scan scan) throws IOException; /** * Gets a scanner on the current table for the given family. * * @param family The column family to scan. * @return A scanner. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ ResultScanner getScanner(byte[] family) throws IOException; /** * Gets a scanner on the current table for the given family and qualifier. * * @param family The column family to scan. * @param qualifier The column qualifier to scan. * @return A scanner. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ ResultScanner getScanner(byte[] family, byte[] qualifier) throws IOException; /** * Puts some data in the table. * <p> * If {@link #isAutoFlush isAutoFlush} is false, the update is buffered * until the internal buffer is full. * @param put The data to put. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ void put(Put put) throws IOException; /** * Puts some data in the table, in batch. * <p> * If {@link #isAutoFlush isAutoFlush} is false, the update is buffered * until the internal buffer is full. * <p> * This can be used for group commit, or for submitting user defined * batches. The writeBuffer will be periodically inspected while the List * is processed, so depending on the List size the writeBuffer may flush * not at all, or more than once. * @param puts The list of mutations to apply. The batch put is done by * aggregating the iteration of the Puts over the write buffer * at the client-side for a single RPC call. 
* @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ void put(List<Put> puts) throws IOException; /** * Atomically checks if a row/family/qualifier value matches the expected * value. If it does, it adds the put. If the passed value is null, the check * is for the lack of column (ie: non-existance) * * @param row to check * @param family column family to check * @param qualifier column qualifier to check * @param value the expected value * @param put data to put if check succeeds * @throws IOException e * @return true if the new put was executed, false otherwise */ boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, byte[] value, Put put) throws IOException; /** * Deletes the specified cells/row. * * @param delete The object that specifies what to delete. * @throws IOException if a remote or network exception occurs. * @since 0.20.0 */ void delete(Delete delete) throws IOException; /** * Deletes the specified cells/rows in bulk. * @param deletes List of things to delete. List gets modified by this * method (in particular it gets re-ordered, so the order in which the elements * are inserted in the list gives no guarantee as to the order in which the * {@link Delete}s are executed). * @throws IOException if a remote or network exception occurs. In that case * the {@code deletes} argument will contain the {@link Delete} instances * that have not be successfully applied. * @since 0.20.1 */ void delete(List<Delete> deletes) throws IOException; /** * Atomically checks if a row/family/qualifier value matches the expected * value. If it does, it adds the delete. 
If the passed value is null, the * check is for the lack of column (ie: non-existance) * * @param row to check * @param family column family to check * @param qualifier column qualifier to check * @param value the expected value * @param delete data to delete if check succeeds * @throws IOException e * @return true if the new delete was executed, false otherwise */ boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value, Delete delete) throws IOException; /** * Performs multiple mutations atomically on a single row. Currently * {@link Put} and {@link Delete} are supported. * * @param arm object that specifies the set of mutations to perform * atomically * @throws IOException */ public void mutateRow(final RowMutations rm) throws IOException; /** * Appends values to one or more columns within a single row. * <p> * This operation does not appear atomic to readers. Appends are done * under a single row lock, so write operations to a row are synchronized, but * readers do not take row locks so get and scan operations can see this * operation partially completed. * * @param append object that specifies the columns and amounts to be used * for the increment operations * @throws IOException e * @return values of columns after the append operation (maybe null) */ public Result append(final Append append) throws IOException; /** * Increments one or more columns within a single row. * <p> * This operation does not appear atomic to readers. Increments are done * under a single row lock, so write operations to a row are synchronized, but * readers do not take row locks so get and scan operations can see this * operation partially completed. * * @param increment object that specifies the columns and amounts to be used * for the increment operations * @throws IOException e * @return values of columns after the increment */ public Result increment(final Increment increment) throws IOException; /** * Atomically increments a column value. 
* <p> * Equivalent to {@link #incrementColumnValue(byte[], byte[], byte[], * long, boolean) incrementColumnValue}(row, family, qualifier, amount, * <b>true</b>)} * @param row The row that contains the cell to increment. * @param family The column family of the cell to increment. * @param qualifier The column qualifier of the cell to increment. * @param amount The amount to increment the cell with (or decrement, if the * amount is negative). * @return The new value, post increment. * @throws IOException if a remote or network exception occurs. */ long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount) throws IOException; /** * Atomically increments a column value. If the column value already exists * and is not a big-endian long, this could throw an exception. If the column * value does not yet exist it is initialized to <code>amount</code> and * written to the specified column. * * <p>Setting writeToWAL to false means that in a fail scenario, you will lose * any increments that have not been flushed. * @param row The row that contains the cell to increment. * @param family The column family of the cell to increment. * @param qualifier The column qualifier of the cell to increment. * @param amount The amount to increment the cell with (or decrement, if the * amount is negative). * @param writeToWAL if {@code true}, the operation will be applied to the * Write Ahead Log (WAL). This makes the operation slower but safer, as if * the call returns successfully, it is guaranteed that the increment will * be safely persisted. When set to {@code false}, the call may return * successfully before the increment is safely persisted, so it's possible * that the increment be lost in the event of a failure happening before the * operation gets persisted. * @return The new value, post increment. * @throws IOException if a remote or network exception occurs. 
*/ long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier, long amount, boolean writeToWAL) throws IOException; /** * Tells whether or not 'auto-flush' is turned on. * * @return {@code true} if 'auto-flush' is enabled (default), meaning * {@link Put} operations don't get buffered/delayed and are immediately * executed. */ boolean isAutoFlush(); /** * Executes all the buffered {@link Put} operations. * <p> * This method gets called once automatically for every {@link Put} or batch * of {@link Put}s (when <code>put(List<Put>)</code> is used) when * {@link #isAutoFlush} is {@code true}. * @throws IOException if a remote or network exception occurs. */ void flushCommits() throws IOException; /** * Releases any resources help or pending changes in internal buffers. * * @throws IOException if a remote or network exception occurs. */ void close() throws IOException; /** * Obtains a lock on a row. * * @param row The row to lock. * @return A {@link RowLock} containing the row and lock id. * @throws IOException if a remote or network exception occurs. * @see RowLock * @see #unlockRow */ RowLock lockRow(byte[] row) throws IOException; /** * Releases a row lock. * * @param rl The row lock to release. * @throws IOException if a remote or network exception occurs. * @see RowLock * @see #unlockRow */ void unlockRow(RowLock rl) throws IOException; /** * Creates and returns a proxy to the CoprocessorProtocol instance running in the * region containing the specified row. The row given does not actually have * to exist. Whichever region would contain the row based on start and end keys will * be used. Note that the {@code row} parameter is also not passed to the * coprocessor handler registered for this protocol, unless the {@code row} * is separately passed as an argument in a proxy method call. The parameter * here is just used to locate the region used to handle the call. 
* * @param protocol The class or interface defining the remote protocol * @param row The row key used to identify the remote region location * @return A CoprocessorProtocol instance * @deprecated since 0.96. Use {@link HTableInterface#coprocessorService(byte[])} instead. */ @Deprecated <T extends CoprocessorProtocol> T coprocessorProxy(Class<T> protocol, byte[] row); /** * Invoke the passed * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call} against * the {@link CoprocessorProtocol} instances running in the selected regions. * All regions beginning with the region containing the <code>startKey</code> * row, through to the region containing the <code>endKey</code> row (inclusive) * will be used. If <code>startKey</code> or <code>endKey</code> is * <code>null</code>, the first and last regions in the table, respectively, * will be used in the range selection. * * @param protocol the CoprocessorProtocol implementation to call * @param startKey start region selection with region containing this row * @param endKey select regions up to and including the region containing * this row * @param callable wraps the CoprocessorProtocol implementation method calls * made per-region * @param <T> CoprocessorProtocol subclass for the remote invocation * @param <R> Return type for the * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} * method * @return a <code>Map</code> of region names to * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} return values * * @deprecated since 0.96. Use * {@link HTableInterface#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)} instead. 
*/ @Deprecated <T extends CoprocessorProtocol, R> Map<byte[],R> coprocessorExec( Class<T> protocol, byte[] startKey, byte[] endKey, Batch.Call<T,R> callable) throws IOException, Throwable; /** * Invoke the passed * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call} against * the {@link CoprocessorProtocol} instances running in the selected regions. * All regions beginning with the region containing the <code>startKey</code> * row, through to the region containing the <code>endKey</code> row * (inclusive) * will be used. If <code>startKey</code> or <code>endKey</code> is * <code>null</code>, the first and last regions in the table, respectively, * will be used in the range selection. * * <p> * For each result, the given * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)} * method will be called. *</p> * * @param protocol the CoprocessorProtocol implementation to call * @param startKey start region selection with region containing this row * @param endKey select regions up to and including the region containing * this row * @param callable wraps the CoprocessorProtocol implementation method calls * made per-region * @param callback an instance upon which * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)} with the * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} * return value for each region * @param <T> CoprocessorProtocol subclass for the remote invocation * @param <R> Return type for the * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} * method * * @deprecated since 0.96. * Use {@link HTableInterface#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)} instead. 
*/ @Deprecated <T extends CoprocessorProtocol, R> void coprocessorExec( Class<T> protocol, byte[] startKey, byte[] endKey, Batch.Call<T,R> callable, Batch.Callback<R> callback) throws IOException, Throwable; /** * Creates and returns a {@link com.google.protobuf.RpcChannel} instance connected to the * table region containing the specified row. The row given does not actually have * to exist. Whichever region would contain the row based on start and end keys will * be used. Note that the {@code row} parameter is also not passed to the * coprocessor handler registered for this protocol, unless the {@code row} * is separately passed as an argument in the service request. The parameter * here is only used to locate the region used to handle the call. * * <p> * The obtained {@link com.google.protobuf.RpcChannel} instance can be used to access a published * coprocessor {@link com.google.protobuf.Service} using standard protobuf service invocations: * </p> * * <div style="background-color: #cccccc; padding: 2px"> * <blockquote><pre> * CoprocessorRpcChannel channel = myTable.coprocessorService(rowkey); * MyService.BlockingInterface service = MyService.newBlockingStub(channel); * MyCallRequest request = MyCallRequest.newBuilder() * ... * .build(); * MyCallResponse response = service.myCall(null, request); * </pre></blockquote></div> * * @param row The row key used to identify the remote region location * @return A CoprocessorRpcChannel instance */ CoprocessorRpcChannel coprocessorService(byte[] row); /** * Creates an instance of the given {@link com.google.protobuf.Service} subclass for each table * region spanning the range from the {@code startKey} row to {@code endKey} row (inclusive), * and invokes the passed {@link Batch.Call#call(Object)} method with each {@link Service} * instance. * * @param service the protocol buffer {@code Service} implementation to call * @param startKey start region selection with region containing this row. 
If {@code null}, the * selection will start with the first table region. * @param endKey select regions up to and including the region containing this row. * If {@code null}, selection will continue through the last table region. * @param callable this instance's {@link Batch.Call#call(Object)} method will be invoked once * per table region, using the {@link Service} instance connected to that region. * @param <T> the {@link Service} subclass to connect to * @param <R> Return type for the {@code callable} parameter's * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} method * @return a map of result values keyed by region name */ <T extends Service, R> Map<byte[],R> coprocessorService(final Class<T> service, byte[] startKey, byte[] endKey, final Batch.Call<T,R> callable) throws ServiceException, Throwable; /** * Creates an instance of the given {@link com.google.protobuf.Service} subclass for each table * region spanning the range from the {@code startKey} row to {@code endKey} row (inclusive), * and invokes the passed {@link Batch.Call#call(Object)} method with each {@link Service} * instance. * * <p> * The given * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)} * method will be called with the return value from each region's {@link Batch.Call#call(Object)} * invocation. *</p> * * @param service the protocol buffer {@code Service} implementation to call * @param startKey start region selection with region containing this row. If {@code null}, the * selection will start with the first table region. * @param endKey select regions up to and including the region containing this row. * If {@code null}, selection will continue through the last table region. * @param callable this instance's {@link Batch.Call#call(Object)} method will be invoked once * per table region, using the {@link Service} instance connected to that region. 
* @param callback * @param <T> the {@link Service} subclass to connect to * @param <R> Return type for the {@code callable} parameter's * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} method */ <T extends Service, R> void coprocessorService(final Class<T> service, byte[] startKey, byte[] endKey, final Batch.Call<T,R> callable, final Batch.Callback<R> callback) throws ServiceException, Throwable; /** * See {@link #setAutoFlush(boolean, boolean)} * * @param autoFlush * Whether or not to enable 'auto-flush'. */ public void setAutoFlush(boolean autoFlush); /** * Turns 'auto-flush' on or off. * <p> * When enabled (default), {@link Put} operations don't get buffered/delayed * and are immediately executed. Failed operations are not retried. This is * slower but safer. * <p> * Turning off {@link #autoFlush} means that multiple {@link Put}s will be * accepted before any RPC is actually sent to do the write operations. If the * application dies before pending writes get flushed to HBase, data will be * lost. * <p> * When you turn {@link #autoFlush} off, you should also consider the * {@link #clearBufferOnFail} option. By default, asynchronous {@link Put} * requests will be retried on failure until successful. However, this can * pollute the writeBuffer and slow down batching performance. Additionally, * you may want to issue a number of Put requests and call * {@link #flushCommits()} as a barrier. In both use cases, consider setting * clearBufferOnFail to true to erase the buffer after {@link #flushCommits()} * has been called, regardless of success. * * @param autoFlush * Whether or not to enable 'auto-flush'. * @param clearBufferOnFail * Whether to keep Put failures in the writeBuffer * @see #flushCommits */ public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail); /** * Returns the maximum size in bytes of the write buffer for this HTable. 
* <p> * The default value comes from the configuration parameter * {@code hbase.client.write.buffer}. * @return The size of the write buffer in bytes. */ public long getWriteBufferSize(); /** * Sets the size of the buffer in bytes. * <p> * If the new size is less than the current amount of data in the * write buffer, the buffer gets flushed. * @param writeBufferSize The new write buffer size, in bytes. * @throws IOException if a remote or network exception occurs. */ public void setWriteBufferSize(long writeBufferSize) throws IOException; }
HBASE-6885 Typo in the Javadoc for close method of HTableInterface class git-svn-id: 949c06ec81f1cb709fd2be51dd530a930344d7b3@1390673 13f79535-47bb-0310-9956-ffa450edef68
hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
HBASE-6885 Typo in the Javadoc for close method of HTableInterface class
<ide><path>base-server/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java <ide> void flushCommits() throws IOException; <ide> <ide> /** <del> * Releases any resources help or pending changes in internal buffers. <add> * Releases any resources held or pending changes in internal buffers. <ide> * <ide> * @throws IOException if a remote or network exception occurs. <ide> */
Java
apache-2.0
9203a0b29ca0e33fce55b0a6ddd95b211a061a2d
0
Netflix/photon,Netflix/photon
/* * * Copyright 2015 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.imflibrary.st2067_2; import com.netflix.imflibrary.IMFConstraints; import com.netflix.imflibrary.IMFErrorLogger; import com.netflix.imflibrary.IMFErrorLoggerImpl; import com.netflix.imflibrary.KLVPacket; import com.netflix.imflibrary.MXFOperationalPattern1A; import com.netflix.imflibrary.RESTfulInterfaces.IMPValidator; import com.netflix.imflibrary.exceptions.IMFException; import com.netflix.imflibrary.exceptions.MXFException; import com.netflix.imflibrary.utils.DOMNodeObjectModel; import com.netflix.imflibrary.st0377.HeaderPartition; import com.netflix.imflibrary.st0377.PrimerPack; import com.netflix.imflibrary.st0377.header.GenericPackage; import com.netflix.imflibrary.st0377.header.InterchangeObject; import com.netflix.imflibrary.st0377.header.Preface; import com.netflix.imflibrary.st0377.header.SourcePackage; import com.netflix.imflibrary.st2067_2.CompositionModels.CompositionModel_st2067_2_2013; import com.netflix.imflibrary.st2067_2.CompositionModels.IMFCoreConstraintsChecker_st2067_2_2013; import com.netflix.imflibrary.utils.ByteArrayDataProvider; import com.netflix.imflibrary.utils.ByteProvider; import com.netflix.imflibrary.utils.ErrorLogger; import com.netflix.imflibrary.utils.FileByteRangeProvider; import com.netflix.imflibrary.utils.ResourceByteRangeProvider; import com.netflix.imflibrary.utils.UUIDHelper; import com.netflix.imflibrary.utils.Utilities; import 
com.netflix.imflibrary.writerTools.RegXMLLibHelper; import com.netflix.imflibrary.writerTools.utils.ValidationEventHandlerImpl; import com.sandflow.smpte.klv.Triplet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.DocumentFragment; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; import javax.xml.XMLConstants; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBElement; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.Validator; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; import java.util.*; import java.util.List; /** * This class represents a canonical model of the XML type 'CompositionPlaylistType' defined by SMPTE st2067-3, * A Composition object can be constructed from an XML file only if it satisfies all the constraints specified * in st2067-3 and st2067-2. This object model is intended to be agnostic of specific versions of the definitions of a * CompositionPlaylist(st2067-3) and its accompanying Core constraints(st2067-2). 
*/ @Immutable public final class Composition { private static final Logger logger = LoggerFactory.getLogger(Composition.class); private static final String dcmlTypes_schema_path = "org/smpte_ra/schemas/st0433_2008/dcmlTypes/dcmlTypes.xsd"; private static final String xmldsig_core_schema_path = "org/w3/_2000_09/xmldsig/xmldsig-core-schema.xsd"; public static final Set<String> supportedCPLSchemaURIs = Collections.unmodifiableSet(new HashSet<String>(){{ add("http://www.smpte-ra.org/schemas/2067-3/2013");}}); private static class CoreConstraintsSchemas{ private final String coreConstraintsSchemaPath; private final String coreConstraintsContext; private CoreConstraintsSchemas(String coreConstraintsSchemaPath, String coreConstraintsContext){ this.coreConstraintsSchemaPath = coreConstraintsSchemaPath; this.coreConstraintsContext = coreConstraintsContext; } private String getCoreConstraintsSchemaPath(){ return this.coreConstraintsSchemaPath; } private String getCoreConstraintsContext(){ return this.coreConstraintsContext; } } public static final List<CoreConstraintsSchemas> supportedIMFCoreConstraintsSchemas = Collections.unmodifiableList (new ArrayList<CoreConstraintsSchemas>() {{ add( new CoreConstraintsSchemas("org/smpte_ra/schemas/st2067_2_2013/imf-core-constraints-20130620-pal.xsd", "org.smpte_ra.schemas.st2067_2_2013")); add( new CoreConstraintsSchemas("org/smpte_ra/schemas/st2067_2_2016/imf-core-constraints.xsd", "org.smpte_ra.schemas.st2067_2_2016"));}}); private final JAXBElement compositionPlaylistTypeJAXBElement; private final String coreConstraintsVersion; private final UUID uuid; private final EditRate editRate; private final Map<UUID, ? 
extends VirtualTrack> virtualTrackMap; /** * Constructor for a {@link Composition Composition} object from a XML file * @param compositionPlaylistXMLFile the input XML file that is conformed to schema and constraints specified in st2067-3:2013 and st2067-2:2013 * @param imfErrorLogger an error logger for recording any errors - cannot be null * @throws IOException - any I/O related error is exposed through an IOException * @throws SAXException - exposes any issues with instantiating a {@link javax.xml.validation.Schema Schema} object * @throws JAXBException - any issues in serializing the XML document using JAXB are exposed through a JAXBException * @throws URISyntaxException exposes any issues instantiating a {@link java.net.URI URI} object */ public Composition(File compositionPlaylistXMLFile, @Nonnull IMFErrorLogger imfErrorLogger) throws IOException, SAXException, JAXBException, URISyntaxException { this(new FileByteRangeProvider(compositionPlaylistXMLFile), imfErrorLogger); } /** * Constructor for a {@link Composition Composition} object from a XML file * @param resourceByteRangeProvider corresponding to the Composition XML file. 
* @param imfErrorLogger an error logger for recording any errors - cannot be null * @throws IOException - any I/O related error is exposed through an IOException * @throws SAXException - exposes any issues with instantiating a {@link javax.xml.validation.Schema Schema} object * @throws JAXBException - any issues in serializing the XML document using JAXB are exposed through a JAXBException * @throws URISyntaxException exposes any issues instantiating a {@link java.net.URI URI} object */ public Composition(ResourceByteRangeProvider resourceByteRangeProvider, @Nonnull IMFErrorLogger imfErrorLogger) throws IOException, SAXException, JAXBException, URISyntaxException { int numErrors = imfErrorLogger.getNumberOfErrors(); String cplNameSpaceURI = getCompositionNamespaceURI(resourceByteRangeProvider, imfErrorLogger); String namespaceVersion = getCPLNamespaceVersion(cplNameSpaceURI); String imf_cpl_schema_path = getIMFCPLSchemaPath(namespaceVersion); CoreConstraintsSchemas coreConstraintsSchema = this.supportedIMFCoreConstraintsSchemas.get(0); JAXBElement jaxbElement = null; for(int i=0; i<supportedIMFCoreConstraintsSchemas.size(); i++) { ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); try (InputStream inputStream = resourceByteRangeProvider.getByteRangeAsStream(0, resourceByteRangeProvider.getResourceSize() - 1); InputStream xmldsig_core_is = contextClassLoader.getResourceAsStream(Composition.xmldsig_core_schema_path); InputStream dcmlTypes_is = contextClassLoader.getResourceAsStream(Composition.dcmlTypes_schema_path); InputStream imf_cpl_is = contextClassLoader.getResourceAsStream(imf_cpl_schema_path); InputStream imf_core_constraints_is = contextClassLoader.getResourceAsStream(supportedIMFCoreConstraintsSchemas.get(i).coreConstraintsSchemaPath);) { StreamSource[] streamSources = new StreamSource[4]; streamSources[0] = new StreamSource(xmldsig_core_is); streamSources[1] = new StreamSource(dcmlTypes_is); streamSources[2] = new 
StreamSource(imf_cpl_is); streamSources[3] = new StreamSource(imf_core_constraints_is); SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); Schema schema = schemaFactory.newSchema(streamSources); ValidationEventHandlerImpl validationEventHandlerImpl = new ValidationEventHandlerImpl(true); JAXBContext jaxbContext = JAXBContext.newInstance(supportedIMFCoreConstraintsSchemas.get(i).coreConstraintsContext); Unmarshaller unmarshaller = jaxbContext.createUnmarshaller(); unmarshaller.setEventHandler(validationEventHandlerImpl); unmarshaller.setSchema(schema); jaxbElement = (JAXBElement) unmarshaller.unmarshal(inputStream); coreConstraintsSchema = supportedIMFCoreConstraintsSchemas.get(i); if (validationEventHandlerImpl.hasErrors()) { throw new IMFException(validationEventHandlerImpl.toString()); } //CompositionPlaylistType compositionPlaylistType = compositionPlaylistTypeJAXBElement.getValue(); //this.compositionPlaylistType = compositionPlaylistType; break; //No errors so we can break out without trying other Core constraints schema namespaces. 
} catch (SAXException | JAXBException e) { if(i == supportedIMFCoreConstraintsSchemas.size()-1) { throw e; } } } this.compositionPlaylistTypeJAXBElement = jaxbElement; this.coreConstraintsVersion = coreConstraintsSchema.getCoreConstraintsContext(); switch(coreConstraintsVersion){ case "org.smpte_ra.schemas.st2067_2_2013": org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType compositionPlaylistType = (org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType) this.compositionPlaylistTypeJAXBElement.getValue(); this.virtualTrackMap = CompositionModel_st2067_2_2013.getVirtualTracksMap(compositionPlaylistType, imfErrorLogger); if(!IMFCoreConstraintsChecker_st2067_2_2013.checkVirtualTracks(compositionPlaylistType, this.virtualTrackMap, imfErrorLogger)){ StringBuilder stringBuilder = new StringBuilder(); for(int i=numErrors; i<imfErrorLogger.getErrors().size() ; i++){ stringBuilder.append(String.format("%n")); stringBuilder.append(imfErrorLogger.getErrors().get(i)); } throw new IMFException(String.format("Found following errors while validating the virtual tracks in the Composition %n %s", stringBuilder.toString())); } this.uuid = UUIDHelper.fromUUIDAsURNStringToUUID(compositionPlaylistType.getId()); this.editRate = new EditRate(compositionPlaylistType.getEditRate()); break; case "org.smpte_ra.schemas.st2067_2_2016": throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI")); default: throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas))); } if ((imfErrorLogger != null) && (imfErrorLogger.getNumberOfErrors() > numErrors)) { throw new IMFException(String.format("Found %d errors in CompositionPlaylist XML file", imfErrorLogger.getNumberOfErrors() - numErrors)); } } private static final String 
getIMFCPLSchemaPath(String namespaceVersion){ String imf_cpl_schema_path; switch(namespaceVersion){ case "2013": imf_cpl_schema_path = "org/smpte_ra/schemas/st2067_3_2013/imf-cpl.xsd"; break; case "2016": imf_cpl_schema_path = "org/smpte_ra/schemas/st2067_3_2016/imf-cpl.xsd"; break; default: throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the following schema URIs %s", Utilities.serializeObjectCollectionToString(supportedCPLSchemaURIs))); } return imf_cpl_schema_path; } @Nullable private static final String getCompositionNamespaceURI(ResourceByteRangeProvider resourceByteRangeProvider, @Nonnull IMFErrorLogger imfErrorLogger) throws IOException { String result = ""; try(InputStream inputStream = resourceByteRangeProvider.getByteRangeAsStream(0, resourceByteRangeProvider.getResourceSize()-1);) { DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); documentBuilderFactory.setNamespaceAware(true); DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); documentBuilder.setErrorHandler(new ErrorHandler() { @Override public void warning(SAXParseException exception) throws SAXException { imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.WARNING, exception.getMessage())); } @Override public void error(SAXParseException exception) throws SAXException { imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.NON_FATAL, exception.getMessage())); } @Override public void fatalError(SAXParseException exception) throws SAXException { imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, exception.getMessage())); } }); Document document = documentBuilder.parse(inputStream); //obtain root node NodeList 
nodeList = null; for(String cplNamespaceURI : Composition.supportedCPLSchemaURIs) { nodeList = document.getElementsByTagNameNS(cplNamespaceURI, "CompositionPlaylist"); if (nodeList != null && nodeList.getLength() == 1) { result = cplNamespaceURI; break; } } } catch(ParserConfigurationException | SAXException e) { throw new IMFException(String.format("Error occurred while trying to determine the Composition Playlist Namespace URI, XML document appears to be invalid. Error Message : %s", e.getMessage())); } if(result.isEmpty()) { throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the following schema URIs %s", Utilities.serializeObjectCollectionToString(supportedCPLSchemaURIs))); } return result; } private static final String getCPLNamespaceVersion(String namespaceURI){ String[] uriComponents = namespaceURI.split("/"); String namespaceVersion = uriComponents[uriComponents.length - 1]; return namespaceVersion; } private final String serializeIMFCoreConstaintsSchemasToString(List<CoreConstraintsSchemas> coreConstraintsSchemas){ StringBuilder stringBuilder = new StringBuilder(); for(CoreConstraintsSchemas coreConstraintsSchema : coreConstraintsSchemas){ stringBuilder.append(String.format("%n")); stringBuilder.append(coreConstraintsSchema.getCoreConstraintsContext()); } return stringBuilder.toString(); } /** * A stateless method that verifies if the raw data represented by the ResourceByteRangeProvider corresponds to a valid * IMF Composition Playlist document * @param resourceByteRangeProvider - a byte range provider for the document that needs to be verified * @return - a boolean indicating if the document represented is an IMF Composition or not * @throws IOException - any I/O related error is exposed through an IOException */ public static boolean isFileOfSupportedSchema(ResourceByteRangeProvider resourceByteRangeProvider) throws IOException{ try(InputStream inputStream = 
resourceByteRangeProvider.getByteRangeAsStream(0, resourceByteRangeProvider.getResourceSize()-1);) { DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); documentBuilderFactory.setNamespaceAware(true); DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); Document document = documentBuilder.parse(inputStream); NodeList nodeList = null; for(String supportedSchemaURI : supportedCPLSchemaURIs) { //obtain root node nodeList = document.getElementsByTagNameNS(supportedSchemaURI, "CompositionPlaylist"); if (nodeList != null && nodeList.getLength() == 1) { return true; } } } catch(ParserConfigurationException | SAXException e) { return false; } return false; } /** * A method that returns a string representation of a Composition object * * @return string representing the object */ public String toString() { StringBuilder sb = new StringBuilder(); sb.append(String.format("======= Composition : %s =======%n", this.uuid)); sb.append(this.editRate.toString()); return sb.toString(); } /** * A method that confirms if the inputStream corresponds to a Composition document instance. * @param resourceByteRangeProvider corresponding to the Composition XML file. 
* @return a boolean indicating if the input file is a Composition document * @throws IOException - any I/O related error is exposed through an IOException */ public static boolean isCompositionPlaylist(ResourceByteRangeProvider resourceByteRangeProvider) throws IOException{ try(InputStream inputStream = resourceByteRangeProvider.getByteRangeAsStream(0, resourceByteRangeProvider.getResourceSize()-1);) { DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); documentBuilderFactory.setNamespaceAware(true); DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); Document document = documentBuilder.parse(inputStream); //obtain root node NodeList nodeList = null; for(String cplNamespaceURI : Composition.supportedCPLSchemaURIs) { nodeList = document.getElementsByTagNameNS(cplNamespaceURI, "CompositionPlaylist"); if (nodeList != null && nodeList.getLength() == 1) { return true; } } } catch(ParserConfigurationException | SAXException e) { return false; } return false; } /** * Getter for the composition edit rate as specified in the Composition XML file * @return the edit rate associated with the Composition */ public EditRate getEditRate() { return this.editRate; } /** * Getter for the virtual track map associated with this Composition * @return {@link java.util.Map Map}&lt;{@link java.util.UUID UUID},{@link Composition.VirtualTrack VirtualTrack}&gt;. The UUID key corresponds to VirtualTrackID */ public Map<UUID, ? extends VirtualTrack> getVirtualTrackMap() { return Collections.unmodifiableMap(this.virtualTrackMap); } /** * Getter for the UUID corresponding to this Composition document * @return the uuid of this Composition object */ public UUID getUUID() { return this.uuid; } /** * Getter for the CompositionPlaylistType object model of the Composition defined by the st2067-3 schema. * @return the composition playlist type object model. 
*/ private JAXBElement getCompositionPlaylistTypeJAXBElement(){ return this.compositionPlaylistTypeJAXBElement; } /** * Getter for the CoreConstraintsURI corresponding to this CompositionPlaylist * @return the uri for the CoreConstraints schema for this CompositionPlaylist */ public String getCoreConstraintsVersion(){ return this.coreConstraintsVersion; } /** * Getter for the video VirtualTrack in this Composition * @return the video virtual track that is a part of this composition or null if there is not video virtual track */ @Nullable public VirtualTrack getVideoVirtualTrack(){ switch(coreConstraintsVersion) { case "org.smpte_ra.schemas.st2067_2_2013": Iterator iterator = this.virtualTrackMap.entrySet().iterator(); while (iterator != null && iterator.hasNext()) { Composition.VirtualTrack virtualTrack = ((Map.Entry<UUID, ? extends Composition.VirtualTrack>) iterator.next()).getValue(); if (virtualTrack.getSequenceTypeEnum().equals(SequenceTypeEnum.MainImageSequence)) { return virtualTrack; } } break; case "org.smpte_ra.schemas.st2067_2_2016": throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI")); default: throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas))); } return null; } /** * Getter for the audio VirtualTracks in this Composition * @return a list of audio virtual tracks that are a part of this composition or an empty list if there are none */ public List<? extends VirtualTrack> getAudioVirtualTracks(){ List<VirtualTrack> audioVirtualTracks = new ArrayList<>(); Iterator iterator = this.getVirtualTrackMap().entrySet().iterator(); while(iterator != null && iterator.hasNext()) { Composition.VirtualTrack virtualTrack = ((Map.Entry<UUID, ? 
extends Composition.VirtualTrack>) iterator.next()).getValue(); if (virtualTrack.getSequenceTypeEnum().equals(SequenceTypeEnum.MainAudioSequence)) { audioVirtualTracks.add(virtualTrack); } } return Collections.unmodifiableList(audioVirtualTracks); } public static void validateCompositionPlaylistSchema(ResourceByteRangeProvider resourceByteRangeProvider, IMFErrorLogger imfErrorLogger) throws IOException, SAXException { String cplNameSpaceURI = getCompositionNamespaceURI(resourceByteRangeProvider, imfErrorLogger); String namespaceVersion = getCPLNamespaceVersion(cplNameSpaceURI); String imf_cpl_schema_path = getIMFCPLSchemaPath(namespaceVersion); for (int i=0; i<supportedIMFCoreConstraintsSchemas.size(); i++) { ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); try (InputStream inputStream = resourceByteRangeProvider.getByteRangeAsStream(0, resourceByteRangeProvider.getResourceSize() - 1); InputStream xmldsig_core_is = contextClassLoader.getResourceAsStream(Composition.xmldsig_core_schema_path); InputStream dcmlTypes_is = contextClassLoader.getResourceAsStream(Composition.dcmlTypes_schema_path); InputStream imf_cpl_is = contextClassLoader.getResourceAsStream(imf_cpl_schema_path); InputStream imf_core_constraints_is = contextClassLoader.getResourceAsStream(supportedIMFCoreConstraintsSchemas.get(i).coreConstraintsSchemaPath);) { StreamSource inputSource = new StreamSource(inputStream); StreamSource[] streamSources = new StreamSource[4]; streamSources[0] = new StreamSource(xmldsig_core_is); streamSources[1] = new StreamSource(dcmlTypes_is); streamSources[2] = new StreamSource(imf_cpl_is); streamSources[3] = new StreamSource(imf_core_constraints_is); SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); Schema schema = schemaFactory.newSchema(streamSources); Validator validator = schema.newValidator(); validator.setErrorHandler(new ErrorHandler() { @Override public void warning(SAXParseException 
exception) throws SAXException { imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.WARNING, exception.getMessage())); } @Override public void error(SAXParseException exception) throws SAXException { imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.NON_FATAL, exception.getMessage())); } @Override public void fatalError(SAXParseException exception) throws SAXException { imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, exception.getMessage())); } }); validator.validate(inputSource); break;//No errors so we can break out without trying other Core constraints schema namespaces. } catch (SAXException e) { if(i == supportedIMFCoreConstraintsSchemas.size()-1) { throw e; } } } } /** * This class is an immutable implementation of a rational number described as a ratio of two longs and used to hold * non-integer frame rate values */ @Immutable public static final class EditRate { private final Long numerator; private final Long denominator; /** * Constructor for the rational frame rate number. * @param numbers the input list of numbers. The first number in the list is treated as the numerator and the second as * the denominator. 
Construction succeeds only if the list has exactly two numbers */ public EditRate(List<Long> numbers) { if (numbers.size() != 2) { throw new IMFException(String.format( "Input list is expected to contain 2 numbers representing numerator and denominator respectively, found %d numbers in list %s", numbers.size(), Arrays.toString(numbers.toArray()))); } else if(numbers.get(0) == 0 || numbers.get(1) == 0){ throw new IMFException(String.format( "Input list is expected to contain 2 non-zero numbers representing numerator and denominator of the EditRate respectively, found Numerator %d, Denominator %d", numbers.get(0), numbers.get(1))); } this.numerator = numbers.get(0); this.denominator = numbers.get(1); } /** * Getter for the frame rate numerator * @return a long value corresponding to the frame rate numerator */ public Long getNumerator() { return this.numerator; } /** * Getter for the frame rate denominator * @return a long value corresponding to the frame rate denominator */ public Long getDenominator() { return this.denominator; } /** * A method that returns a string representation of a Composition object * * @return string representing the object */ public String toString() { StringBuilder sb = new StringBuilder(); sb.append("=================== EditRate =====================\n"); sb.append(String.format("numerator = %d, denominator = %d%n", this.numerator, this.denominator)); return sb.toString(); } /** * Overridden equals method. * @param object the EditRate to be compared with. * @return boolean false if the object is null or is not an instance of the EditRate class. 
*/ @Override public boolean equals(Object object){ if(object == null || !(object instanceof EditRate)){ return false; } EditRate other = (EditRate) object; return ((this.getNumerator().equals(other.getNumerator())) && (this.getDenominator().equals(other.getDenominator()))); } /** * A Java compliant implementation of the hashCode() method * @return integer containing the hash code corresponding to this object */ @Override public int hashCode(){ int hash = 1; hash = hash * 31 + this.numerator.hashCode(); /*Numerator can be used since it is non-null*/ hash = hash * 31 + this.denominator.hashCode();/*Another field that is indicated to be non-null*/ return hash; } } /** * This class enumerates various types of {@link org.smpte_ra.schemas.st2067_2_2013.SequenceType Sequence} that are valid in * Composition document that is compliant with st2067-2:2013. Such types are mostly defined in Section 6.3 of st2067-2:2013 */ public static enum SequenceTypeEnum { MarkerSequence("MarkerSequence"), MainImageSequence("MainImageSequence"), MainAudioSequence("MainAudioSequence"), SubtitlesSequence("SubtitlesSequence"), HearingImpairedCaptionsSequence("HearingImpairedCaptionsSequence"), VisuallyImpairedTextSequence("VisuallyImpairedTextSequence"), CommentarySequence("CommentarySequence"), KaraokeSequence("KaraokeSequence"), AncillaryDataSequence("AncillaryDataSequence"), Unknown("Unknown"); private final String name; private SequenceTypeEnum(String name) { this.name = name; } /** * A getter for the SequenceTypeEnum given a string that represents the name of a SequenceTypeEnum * @param name the string that should represent the SequenceTypeEnum * @return the SequenceTypeEnum value corresponding to the name that was passed */ public static SequenceTypeEnum getSequenceTypeEnum(String name) { switch (name) { case "MainImageSequence": return MainImageSequence; case "MainAudioSequence": return MainAudioSequence; case "SubtitlesSequence": return SubtitlesSequence; case 
"HearingImpairedCaptionsSequence": return HearingImpairedCaptionsSequence; case "VisuallyImpairedTextSequence": return VisuallyImpairedTextSequence; case "CommentarySequence": return CommentarySequence; case "KaraokeSequence": return KaraokeSequence; case "AncillaryDataSequence": return AncillaryDataSequence; default: return Unknown; } } /** * An override of the toString() method * @return a string representing the SequenceTypeEnum */ @Override public String toString(){ return this.name; } } /** * The class is an immutable implementation of the virtual track concept defined in Section 6.9.3 of st2067-3:2013. A * virtual track is characterized by its UUID and the type of sequence it holds */ @Immutable public abstract static class VirtualTrack { protected final UUID trackID; protected final SequenceTypeEnum sequenceTypeEnum; protected final List<UUID> resourceIds = new ArrayList<>(); /** * Constructor for a VirtualTrack object * @param trackID the UUID associated with this VirtualTrack object * @param sequenceTypeEnum the type of the associated sequence */ public VirtualTrack(UUID trackID, SequenceTypeEnum sequenceTypeEnum) { this.trackID = trackID; this.sequenceTypeEnum = sequenceTypeEnum; } /** * Getter for the sequence type associated with this VirtualTrack object * @return the sequence type associated with this VirtualTrack object as an enum */ public SequenceTypeEnum getSequenceTypeEnum() { return this.sequenceTypeEnum; } /** * Getter for the UUID associated with this VirtualTrack object * @return the UUID associated with the Virtual track */ public UUID getTrackID(){ return this.trackID; } /** * Getter for the UUIDs of the resources that are a part of this virtual track * @return an unmodifiable list of UUIDs of resources that are a part of this virtual track */ public List<UUID> getTrackResourceIds(){ return Collections.unmodifiableList(this.resourceIds); } /** * A method to determine the equivalence of any 2 virtual tracks. 
 * @param other - the object to compare against
 * @return boolean indicating if the 2 virtual tracks are equivalent or represent the same timeline
 */
public abstract boolean equivalent(VirtualTrack other);
}

/**
 * A utility method to retrieve the VirtualTracks within a Composition.
 * @return A list of VirtualTracks in the Composition.
 * @throws IOException - any I/O related error is exposed through an IOException.
 * @throws IMFException - any non compliant CPL documents will be signalled through an IMFException
 * @throws SAXException - exposes any issues with instantiating a {@link javax.xml.validation.Schema Schema} object
 * @throws JAXBException - any issues in serializing the XML document using JAXB are exposed through a JAXBException
 * @throws URISyntaxException exposes any issues instantiating a {@link java.net.URI URI} object
 */
@Nonnull
public List<? extends Composition.VirtualTrack> getVirtualTracks() throws IOException, IMFException, SAXException, JAXBException, URISyntaxException {
    Map<UUID, ? extends Composition.VirtualTrack> virtualTrackMap = this.getVirtualTrackMap();
    // Copy into a fresh list so callers never hold a live view of the internal track map.
    return new ArrayList<>(virtualTrackMap.values());
}

/**
 * A utility method to retrieve the UUIDs of the Track files referenced by a Virtual track within a Composition.
 * @param virtualTrack - object model of an IMF virtual track {@link Composition.VirtualTrack}
 * @return A list of TrackFileResourceType objects corresponding to the virtual track in the Composition.
 * @throws IOException - any I/O related error is exposed through an IOException.
 * @throws IMFException - any non compliant CPL documents will be signalled through an IMFException
 * @throws SAXException - exposes any issues with instantiating a {@link javax.xml.validation.Schema Schema} object
 * @throws JAXBException - any issues in serializing the XML document using JAXB are exposed through a JAXBException
 * @throws URISyntaxException exposes any issues instantiating a {@link java.net.URI URI} object
 */
@Nonnull
public List<ResourceIdTuple> getVirtualTrackResourceIDs(@Nonnull Composition.VirtualTrack virtualTrack) throws IOException, IMFException, SAXException, JAXBException, URISyntaxException {
    List<ResourceIdTuple> virtualTrackResourceIDs = new ArrayList<>();
    // Dispatch on the CoreConstraints schema context the CPL was unmarshalled against;
    // only the 2013 namespace is implemented at this point.
    switch(coreConstraintsVersion){
        case "org.smpte_ra.schemas.st2067_2_2013":
            CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013 virtualTrack_st2067_2_2013 = CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013.class.cast(virtualTrack);
            List<org.smpte_ra.schemas.st2067_2_2013.TrackFileResourceType> resourceList = virtualTrack_st2067_2_2013.getResourceList();
            if(resourceList != null && resourceList.size() > 0) {
                for (org.smpte_ra.schemas.st2067_2_2013.TrackFileResourceType trackFileResourceType : resourceList) {
                    // Each resource contributes a (TrackFileId, SourceEncoding) pair; both come in as
                    // URN-formatted UUID strings and are converted to java.util.UUIDs here.
                    virtualTrackResourceIDs.add(new ResourceIdTuple(UUIDHelper.fromUUIDAsURNStringToUUID(trackFileResourceType.getTrackFileId())
                            , UUIDHelper.fromUUIDAsURNStringToUUID(trackFileResourceType.getSourceEncoding())));
                }
            }
            break;
        case "org.smpte_ra.schemas.st2067_2_2016":
            throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI"));
        default:
            throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas)));
    }
    return Collections.unmodifiableList(virtualTrackResourceIDs);
}

/**
 * A utility method that will analyze the EssenceDescriptorList in a Composition and construct a HashMap mapping
 * a UUID to a EssenceDescriptor.
 * @return a HashMap mapping the UUID to its corresponding EssenceDescriptor in the Composition
 */
public Map<UUID, DOMNodeObjectModel> getEssenceDescriptorListMap(){
    Map<UUID, DOMNodeObjectModel> essenceDescriptorMap = new HashMap<>();
    switch(this.coreConstraintsVersion) {
        case "org.smpte_ra.schemas.st2067_2_2013":
            org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType compositionPlaylistType = (org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType) this.compositionPlaylistTypeJAXBElement.getValue();
            // NOTE(review): getEssenceDescriptorList() can be null for CPLs without an EDL (main() guards
            // for that case); this dereference would NPE here — confirm callers only reach this with an EDL present.
            List<org.smpte_ra.schemas.st2067_2_2013.EssenceDescriptorBaseType> essenceDescriptors = compositionPlaylistType.getEssenceDescriptorList().getEssenceDescriptor();
            for (org.smpte_ra.schemas.st2067_2_2013.EssenceDescriptorBaseType essenceDescriptorBaseType : essenceDescriptors) {
                UUID uuid = UUIDHelper.fromUUIDAsURNStringToUUID(essenceDescriptorBaseType.getId());
                DOMNodeObjectModel domNodeObjectModel = null;
                // If the descriptor carries multiple xs:any children, only the LAST one is retained.
                for (Object object : essenceDescriptorBaseType.getAny()) {
                    domNodeObjectModel = new DOMNodeObjectModel((Node) object);
                }
                if (domNodeObjectModel != null) {
                    essenceDescriptorMap.put(uuid, domNodeObjectModel);
                }
            }
            break;
        case "org.smpte_ra.schemas.st2067_2_2016":
            throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI"));
        default:
            throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas)));
    }
    return Collections.unmodifiableMap(essenceDescriptorMap);
}

/**
 * Groups the audio virtual tracks of this Composition by the set of EssenceDescriptors that their
 * resources reference (via SourceEncoding) in the EssenceDescriptorList.
 * @return an unmodifiable map from a set of EssenceDescriptor object models to the audio virtual
 * track whose resources reference exactly those descriptors
 */
public Map<Set<DOMNodeObjectModel>, ? extends Composition.VirtualTrack> getAudioVirtualTracksMap() {
    List<? extends Composition.VirtualTrack> audioVirtualTracks = this.getAudioVirtualTracks();
    Map<UUID, DOMNodeObjectModel> essenceDescriptorListMap = this.getEssenceDescriptorListMap();
    switch(this.coreConstraintsVersion) {
        case "org.smpte_ra.schemas.st2067_2_2013":
            Map<Set<DOMNodeObjectModel>, CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013> audioVirtualTrackMap = new HashMap<>();
            for (Composition.VirtualTrack audioVirtualTrack : audioVirtualTracks) {
                Set<DOMNodeObjectModel> set = new HashSet<>();
                CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013 audioVirtualTrack_st2067_2_2013 = CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013.class.cast(audioVirtualTrack);
                List<org.smpte_ra.schemas.st2067_2_2013.TrackFileResourceType> resources = audioVirtualTrack_st2067_2_2013.getResourceList();
                for (org.smpte_ra.schemas.st2067_2_2013.TrackFileResourceType resource : resources) {
                    set.add(essenceDescriptorListMap.get(UUIDHelper.fromUUIDAsURNStringToUUID(resource.getSourceEncoding())));//Fetch and add the EssenceDescriptor referenced by the resource via the SourceEncoding element to the ED set.
                }
                // NOTE(review): two audio tracks referencing an identical descriptor set would collide on
                // this map key and the later one would win — presumably descriptor sets are unique per track; confirm.
                audioVirtualTrackMap.put(set, audioVirtualTrack_st2067_2_2013);
            }
            return Collections.unmodifiableMap(audioVirtualTrackMap);
        case "org.smpte_ra.schemas.st2067_2_2016":
            throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI"));
        default:
            throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas)));
    }
}

/**
 * This class is a representation of a Resource SourceEncoding element and trackFileId tuple.
*/ public static final class ResourceIdTuple{ private final UUID trackFileId; private final UUID sourceEncoding; private ResourceIdTuple(UUID trackFileId, UUID sourceEncoding){ this.trackFileId = trackFileId; this.sourceEncoding = sourceEncoding; } /** * A getter for the trackFileId referenced by the resource corresponding to this ResourceIdTuple * @return the trackFileId associated with this ResourceIdTuple */ public UUID getTrackFileId(){ return this.trackFileId; } /** * A getter for the source encoding element referenced by the resource corresponding to this ResourceIdTuple * @return the source encoding element associated with this ResourceIdTuple */ public UUID getSourceEncoding(){ return this.sourceEncoding; } } /** * This method can be used to determine if a Composition is conformant. Conformance checks * perform deeper inspection of the Composition and the EssenceDescriptors corresponding to the * resources referenced by the Composition. * @param headerPartitionTuples list of HeaderPartitionTuples corresponding to the IMF essences referenced in the Composition * @param imfErrorLogger an error logging object * @return boolean to indicate of the Composition is conformant or not * @throws IOException - any I/O related error is exposed through an IOException. 
 * @throws IMFException - any non compliant CPL documents will be signalled through an IMFException
 * @throws SAXException - exposes any issues with instantiating a {@link javax.xml.validation.Schema Schema} object
 * @throws JAXBException - any issues in serializing the XML document using JAXB are exposed through a JAXBException
 * @throws URISyntaxException exposes any issues instantiating a {@link java.net.URI URI} object
 */
public boolean conformVirtualTrackInComposition(List<IMPValidator.HeaderPartitionTuple> headerPartitionTuples, IMFErrorLogger imfErrorLogger, boolean conformAllVirtualTracks) throws IOException, IMFException, SAXException, JAXBException, URISyntaxException{
    boolean result = true;
    /*
     * The algorithm for conformance checking a Composition (CPL) would be
     * 1) Verify that every EssenceDescriptor element in the EssenceDescriptor list (EDL) is referenced through its id element if conformAllVirtualTracks is enabled
     * by at least one TrackFileResource within the Virtual tracks in the Composition (see section 6.1.10 of SMPTE st2067-3:2013).
     * 2) Verify that all track file resources within a virtual track have a corresponding essence descriptor in the essence descriptor list.
     * 3) Verify that the EssenceDescriptors in the EssenceDescriptorList element in the Composition are present in
     * the physical essence files referenced by the resources of a virtual track and are equal.
     */
    /*The following check simultaneously verifies 1) and 2) from above.*/
    Set<UUID> resourceEssenceDescriptorIDsSet = getResourceEssenceDescriptorIdsSet();
    Iterator resourceEssenceDescriptorIDs = resourceEssenceDescriptorIDsSet.iterator();
    Set<UUID> cplEssenceDescriptorIDsSet = getEssenceDescriptorIdsSet();
    Iterator cplEssenceDescriptorIDs = cplEssenceDescriptorIDsSet.iterator();
    // All missing descriptors are reported (no early return) so the error logger captures every violation.
    while(resourceEssenceDescriptorIDs.hasNext()){
        UUID resourceEssenceDescriptorUUID = (UUID) resourceEssenceDescriptorIDs.next();
        if(!cplEssenceDescriptorIDsSet.contains(resourceEssenceDescriptorUUID)) {
            result &= false;
            imfErrorLogger.addError(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, String.format("EssenceDescriptor ID %s referenced by a VirtualTrack Resource does not have a corresponding EssenceDescriptor in the EssenceDescriptorList in the CPL", resourceEssenceDescriptorUUID.toString()));
        }
    }
    /*
     * The following checks that at least one of the Virtual Tracks references an EssenceDescriptor in the EDL. This
     * check should be performed only when we need to conform all the Virtual Tracks in the CPL.
     */
    if(conformAllVirtualTracks) {
        while (cplEssenceDescriptorIDs.hasNext()) {
            UUID cplEssenceDescriptorUUID = (UUID) cplEssenceDescriptorIDs.next();
            if (!resourceEssenceDescriptorIDsSet.contains(cplEssenceDescriptorUUID)) {
                result &= false;
                imfErrorLogger.addError(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, String.format("EssenceDescriptorID %s in the CPL EssenceDescriptorList is not referenced by any resource in any of the Virtual tracks in the CPL, this violates the constraint in st2067-3:2013 section 6.1.10.1", cplEssenceDescriptorUUID.toString()));
            }
        }
    }
    // Skip the expensive physical-essence comparison if the ID-level checks already failed.
    if(!result){
        return result;
    }
    /*The following check verifies 3) from above.*/
    result &= compareEssenceDescriptors(getResourcesEssenceDescriptorMap(headerPartitionTuples), this.getEssenceDescriptorListMap(), imfErrorLogger);
    return result;
}

/**
 * Collects the Id of every EssenceDescriptorBaseType in the CPL's EssenceDescriptorList,
 * preserving document order (LinkedHashSet).
 */
private Set<UUID> getEssenceDescriptorIdsSet () {
    HashSet<UUID> essenceDescriptorIdsSet = new LinkedHashSet<>();
    switch(this.coreConstraintsVersion) {
        case "org.smpte_ra.schemas.st2067_2_2013":
            org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType compositionPlaylistType = (org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType)this.getCompositionPlaylistTypeJAXBElement().getValue();
            List<org.smpte_ra.schemas.st2067_2_2013.EssenceDescriptorBaseType> essenceDescriptorList = compositionPlaylistType.getEssenceDescriptorList().getEssenceDescriptor();
            for (org.smpte_ra.schemas.st2067_2_2013.EssenceDescriptorBaseType essenceDescriptorBaseType : essenceDescriptorList) {
                UUID sourceEncodingElement = UUIDHelper.fromUUIDAsURNStringToUUID(essenceDescriptorBaseType.getId());
                /*Construct a set of SourceEncodingElements/IDs corresponding to every EssenceDescriptorBaseType in the EssenceDescriptorList*/
                essenceDescriptorIdsSet.add(sourceEncodingElement);
            }
            break;
        case "org.smpte_ra.schemas.st2067_2_2016":
            throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI"));
        default:
            throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas)));
    }
    return essenceDescriptorIdsSet;
}

/**
 * Collects the SourceEncoding UUID of every TrackFileResource across all virtual tracks,
 * i.e. the set of EssenceDescriptors the tracks actually reference.
 */
private Set<UUID> getResourceEssenceDescriptorIdsSet () throws IOException, SAXException, JAXBException, URISyntaxException{
    List<Composition.VirtualTrack> virtualTracks = new ArrayList<>(this.getVirtualTrackMap().values());
    LinkedHashSet<UUID> resourceSourceEncodingElementsSet = new LinkedHashSet<>();
    for(Composition.VirtualTrack virtualTrack : virtualTracks){
        List<Composition.ResourceIdTuple> resourceIdTuples = this.getVirtualTrackResourceIDs(virtualTrack);
        for(Composition.ResourceIdTuple resourceIdTuple : resourceIdTuples){
            /*Construct a set of SourceEncodingElements corresponding to every TrackFileResource of this VirtualTrack*/
            resourceSourceEncodingElementsSet.add(resourceIdTuple.getSourceEncoding());
        }
    }
    return resourceSourceEncodingElementsSet;
}

/**
 * Builds a map from each resource's SourceEncoding UUID to the DOM object models of the
 * EssenceDescriptors found in the physical track file's MXF header partition.
 * Header partitions whose package UUID matches no resource are silently ignored;
 * an MXFException is raised if NO resource matched any of the supplied header partitions.
 */
private Map<UUID, List<DOMNodeObjectModel>> getResourcesEssenceDescriptorMap(List<IMPValidator.HeaderPartitionTuple> headerPartitionTuples) throws IOException, SAXException, JAXBException, URISyntaxException{
    Map<UUID, List<DOMNodeObjectModel>> resourcesEssenceDescriptorMap = new LinkedHashMap<>();
    /*Create a Map of FilePackage UUID which should be equal to the TrackFileId of the resource in the Composition if the asset is referenced and the HeaderPartitionTuple, Map<UUID, HeaderPartitionTuple>*/
    Map<UUID, IMPValidator.HeaderPartitionTuple> resourceUUIDHeaderPartitionMap = new HashMap<>();
    for(IMPValidator.HeaderPartitionTuple headerPartitionTuple : headerPartitionTuples) {
        //validate header partition for OP1A and IMF compliance before trusting its contents
        MXFOperationalPattern1A.HeaderPartitionOP1A headerPartitionOP1A = MXFOperationalPattern1A.checkOperationalPattern1ACompliance(headerPartitionTuple.getHeaderPartition());
        IMFConstraints.HeaderPartitionIMF headerPartitionIMF = IMFConstraints.checkIMFCompliance(headerPartitionOP1A);
        Preface preface = headerPartitionIMF.getHeaderPartitionOP1A().getHeaderPartition().getPreface();
        // NOTE(review): only the first EssenceContainerData entry is consulted — presumably IMF track
        // files carry exactly one; confirm against IMFConstraints.
        GenericPackage genericPackage = preface.getContentStorage().getEssenceContainerDataList().get(0).getLinkedPackage();
        SourcePackage filePackage = (SourcePackage)genericPackage;
        UUID packageUUID = filePackage.getPackageMaterialNumberasUUID();
        resourceUUIDHeaderPartitionMap.put(packageUUID, headerPartitionTuple);
    }
    List<Composition.VirtualTrack> virtualTracks = new ArrayList<>(this.getVirtualTrackMap().values());
    /*Go through all the Virtual Tracks in the Composition and construct a map of Resource Source Encoding Element and a list of DOM nodes representing every EssenceDescriptor in the HeaderPartition corresponding to that Resource*/
    for(Composition.VirtualTrack virtualTrack : virtualTracks){
        List<Composition.ResourceIdTuple> resourceIdTuples = this.getVirtualTrackResourceIDs(virtualTrack);/*Retrieve a list of ResourceIDTuples corresponding to this virtual track*/
        for(Composition.ResourceIdTuple resourceIdTuple : resourceIdTuples){
            IMPValidator.HeaderPartitionTuple headerPartitionTuple = resourceUUIDHeaderPartitionMap.get(resourceIdTuple.getTrackFileId());
            if(headerPartitionTuple != null){
                /*Create a DOM Node representation of the EssenceDescriptors present in this header partition corresponding to an IMFTrackFile*/
                List<Node> essenceDescriptorDOMNodes = getEssenceDescriptorDOMNodes(headerPartitionTuple);
                List<DOMNodeObjectModel> domNodeObjectModels = new ArrayList<>();
                for(Node node : essenceDescriptorDOMNodes){
                    domNodeObjectModels.add(new DOMNodeObjectModel(node));
                }
                resourcesEssenceDescriptorMap.put(resourceIdTuple.getSourceEncoding(), domNodeObjectModels);
            }
        }
    }
    if(resourcesEssenceDescriptorMap.entrySet().size() == 0){
        throw new MXFException(String.format("Composition does not refer to a single IMFEssence represented by the HeaderPartitions that were passed in."));
    }
    return Collections.unmodifiableMap(resourcesEssenceDescriptorMap);
}

/**
 * Serializes every EssenceDescriptor (with its SubDescriptors) in the given header partition to a
 * DOM Node via RegXMLLib. ParserConfigurationExceptions are wrapped in an IMFException.
 */
private List<Node> getEssenceDescriptorDOMNodes(IMPValidator.HeaderPartitionTuple headerPartitionTuple) throws IOException {
    try {
        List<InterchangeObject.InterchangeObjectBO> essenceDescriptors = headerPartitionTuple.getHeaderPartition().getEssenceDescriptors();
        List<Node> essenceDescriptorNodes = new ArrayList<>();
        for (InterchangeObject.InterchangeObjectBO essenceDescriptor : essenceDescriptors) {
            KLVPacket.Header essenceDescriptorHeader = essenceDescriptor.getHeader();
            List<KLVPacket.Header> subDescriptorHeaders = this.getSubDescriptorKLVHeader(headerPartitionTuple.getHeaderPartition(), essenceDescriptor);
            /*Create a dom per descriptor; the fragment's first child is the descriptor element itself*/
            DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
            DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
            Document document = docBuilder.newDocument();
            DocumentFragment documentFragment = this.getEssenceDescriptorAsDocumentFragment(document, headerPartitionTuple, essenceDescriptorHeader, subDescriptorHeaders);
            Node node = documentFragment.getFirstChild();
            essenceDescriptorNodes.add(node);
        }
        return essenceDescriptorNodes;
    }
    catch(ParserConfigurationException e){
        throw new IMFException(e);
    }
}

/**
 * Returns the KLV headers of the non-null SubDescriptors attached to the given essence descriptor.
 */
private List<KLVPacket.Header> getSubDescriptorKLVHeader(HeaderPartition headerPartition, InterchangeObject.InterchangeObjectBO essenceDescriptor) {
    List<KLVPacket.Header> subDescriptorHeaders = new ArrayList<>();
    List<InterchangeObject.InterchangeObjectBO> subDescriptors = headerPartition.getSubDescriptors(essenceDescriptor);
    for (InterchangeObject.InterchangeObjectBO subDescriptorBO : subDescriptors) {
        if (subDescriptorBO != null) {
            subDescriptorHeaders.add(subDescriptorBO.getHeader());
        }
    }
    return Collections.unmodifiableList(subDescriptorHeaders);
}

/**
 * Renders one essence descriptor (plus its sub-descriptors) into a DocumentFragment using RegXMLLib.
 */
private DocumentFragment getEssenceDescriptorAsDocumentFragment(Document document, IMPValidator.HeaderPartitionTuple headerPartitionTuple, KLVPacket.Header essenceDescriptor, List<KLVPacket.Header>subDescriptors
        throws MXFException, IOException {
    document.setXmlStandalone(true);
    PrimerPack primerPack = headerPartitionTuple.getHeaderPartition().getPrimerPack();
    ResourceByteRangeProvider resourceByteRangeProvider = headerPartitionTuple.getResourceByteRangeProvider();
    RegXMLLibHelper regXMLLibHelper = new RegXMLLibHelper(primerPack.getHeader(), getByteProvider(resourceByteRangeProvider, primerPack.getHeader()));
    Triplet essenceDescriptorTriplet = regXMLLibHelper.getTripletFromKLVHeader(essenceDescriptor, getByteProvider(resourceByteRangeProvider, essenceDescriptor));
    //DocumentFragment documentFragment = this.regXMLLibHelper.getDocumentFragment(essenceDescriptorTriplet, document);
    /*Get the Triplets corresponding to the SubDescriptors*/
    List<Triplet> subDescriptorTriplets = new ArrayList<>();
    for(KLVPacket.Header subDescriptorHeader : subDescriptors){
        subDescriptorTriplets.add(regXMLLibHelper.getTripletFromKLVHeader(subDescriptorHeader, this.getByteProvider(resourceByteRangeProvider, subDescriptorHeader)));
    }
    return regXMLLibHelper.getEssenceDescriptorDocumentFragment(essenceDescriptorTriplet, subDescriptorTriplets, document);
}

/**
 * Reads the byte range covering one KLV packet (key+length+value) out of the resource and wraps it
 * in a ByteProvider for RegXMLLib consumption.
 */
private ByteProvider getByteProvider(ResourceByteRangeProvider resourceByteRangeProvider, KLVPacket.Header header) throws IOException {
    // NOTE(review): the end offset byteOffset + KLSize + VSize looks one past the packet's last byte —
    // whether getByteRangeAsBytes treats the end as inclusive or exclusive should be confirmed against
    // ResourceByteRangeProvider's contract.
    byte[] bytes = resourceByteRangeProvider.getByteRangeAsBytes(header.getByteOffset(), header.getByteOffset() + header.getKLSize() + header.getVSize());
    ByteProvider byteProvider = new ByteArrayDataProvider(bytes);
    return byteProvider;
}

/**
 * Deep-compares the descriptors extracted from the physical essence files (essenceDescriptorsMap)
 * with the descriptors declared in the CPL's EssenceDescriptorList (eDLMap). Returns false on the
 * FIRST mismatch (after logging it); returns true only if every EDL descriptor is matched.
 */
private boolean compareEssenceDescriptors(Map<UUID, List<DOMNodeObjectModel>> essenceDescriptorsMap, Map<UUID, DOMNodeObjectModel> eDLMap, IMFErrorLogger imfErrorLogger){
    /*
     * An exhaustive compare of the eDLMap and essenceDescriptorsMap is required to ensure that the essence descriptors
     * in the EssenceDescriptorList and the EssenceDescriptors in the physical essence files corresponding to the
     * same source encoding element as indicated in the TrackFileResource and EDL are a good match.
     */
    /*
     * The Maps passed in have the DOMObjectModel for every EssenceDescriptor in the EssenceDescriptorList in the CPL and
     * the essence descriptor in each of the essences referenced from every track file resource within each virtual track.
     */
    /*
     * The following check ensures that we do not have a Track Resource that does not have a corresponding EssenceDescriptor element in the CPL's EDL
     */
    Iterator<Map.Entry<UUID, List<DOMNodeObjectModel>>> essenceDescriptorsMapIterator = essenceDescriptorsMap.entrySet().iterator();
    while(essenceDescriptorsMapIterator.hasNext()){
        UUID sourceEncodingElement = essenceDescriptorsMapIterator.next().getKey();
        if(!eDLMap.keySet().contains(sourceEncodingElement)){
            imfErrorLogger.addError(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, String.format("EssenceDescriptor with Source Encoding Element %s in a track does not have a corresponding entry in the CPL's EDL", sourceEncodingElement.toString()));
            return false;
        }
    }
    /*
     * The following check ensures that we have at least one EssenceDescriptor in a TrackFile that equals the corresponding EssenceDescriptor element in the CPL's EDL
     */
    Iterator<Map.Entry<UUID, DOMNodeObjectModel>> eDLMapIterator = eDLMap.entrySet().iterator();
    while(eDLMapIterator.hasNext()){
        Map.Entry<UUID, DOMNodeObjectModel> entry = (Map.Entry<UUID, DOMNodeObjectModel>) eDLMapIterator.next();
        List<DOMNodeObjectModel> domNodeObjectModels = essenceDescriptorsMap.get(entry.getKey());
        if(domNodeObjectModels == null){
            //This implies we did not find a single VirtualTrack that referenced this particular EssenceDescriptor in the EDL
            imfErrorLogger.addError(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, String.format("EssenceDescriptor with Id %s in the CPL's EDL is not referenced by a single resource within any of the VirtualTracks in the CPL, this violates the constraint in st2067-3:2013 section 6.1.10.1", entry.getKey().toString()));
            return false;
        }
        DOMNodeObjectModel referenceDOMNodeObjectModel = entry.getValue();
        // It suffices for ANY of the track file's descriptors to equal the EDL's descriptor.
        boolean intermediateResult = false;
        for(DOMNodeObjectModel domNodeObjectModel : domNodeObjectModels){
            intermediateResult |= referenceDOMNodeObjectModel.equals(domNodeObjectModel);
        }
        if(!intermediateResult){
            imfErrorLogger.addError(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, String.format("EssenceDescriptor with Id %s in the CPL's EDL doesn't match any EssenceDescriptors within the IMFTrackFile that references it", entry.getKey().toString()));
            return false;
        }
    }
    return true;
}

/**
 * Builds the command-line usage string for main().
 */
private static String usage() {
    StringBuilder sb = new StringBuilder();
    sb.append(String.format("Usage:%n"));
    sb.append(String.format("%s <inputFile>%n", Composition.class.getName()));
    return sb.toString();
}

/**
 * CLI entry point: parses the given CPL file, dumps the object model of every EssenceDescriptor in
 * its EssenceDescriptorList, and verifies that no virtual track has an empty resource list.
 * All accumulated errors are logged in the finally block even when parsing throws.
 */
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        logger.error(usage());
        throw new IllegalArgumentException("Invalid parameters");
    }
    File inputFile = new File(args[0]);
    logger.info(String.format("File Name is %s", inputFile.getName()));
    IMFErrorLogger imfErrorLogger = new IMFErrorLoggerImpl();
    try {
        Composition composition = new Composition(inputFile, imfErrorLogger);
        logger.info(composition.toString());
        List<? extends Composition.VirtualTrack> virtualTracks = composition.getVirtualTracks();
        List<DOMNodeObjectModel> domNodeObjectModels = new ArrayList<>();
        switch(composition.getCoreConstraintsVersion()) {
            case "org.smpte_ra.schemas.st2067_2_2013":
                org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType compositionPlaylistType = (org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType) composition.getCompositionPlaylistTypeJAXBElement().getValue();
                if (compositionPlaylistType.getEssenceDescriptorList() != null) {
                    for (org.smpte_ra.schemas.st2067_2_2013.EssenceDescriptorBaseType essenceDescriptorBaseType : compositionPlaylistType.getEssenceDescriptorList().getEssenceDescriptor()) {
                        for (Object object : essenceDescriptorBaseType.getAny()) {
                            Node node = (Node) object;
                            domNodeObjectModels.add(new DOMNodeObjectModel(node));
                        }
                    }
                }
                else {
                    logger.error("No essence descriptor list was found in CPL");
                }
                // A virtual track with zero resources violates the CPL model; fail fast.
                for(Composition.VirtualTrack virtualTrack : virtualTracks) {
                    CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013 virtualTrack_st2067_2_2013 = (CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013) virtualTrack;
                    List<org.smpte_ra.schemas.st2067_2_2013.TrackFileResourceType> resourceList = virtualTrack_st2067_2_2013.getResourceList();
                    if (resourceList.size() == 0) {
                        throw new Exception(String.format("CPL file has a VirtualTrack with no resources which is invalid"));
                    }
                }
                break;
            case "org.smpte_ra.schemas.st2067_2_2016":
                throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI"));
            default:
                throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", composition.serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas)));
        }
        for(int i=0; i<domNodeObjectModels.size(); i++) {
            logger.info(String.format("ObjectModel of EssenceDescriptor-%d in the EssenceDescriptorList in the CPL: %n%s", i, domNodeObjectModels.get(i).toString()));
        }
    }
    finally {
        for (ErrorLogger.ErrorObject errorObject : imfErrorLogger.getErrors()) {
            logger.error(errorObject.toString());
        }
    }
}
}
src/main/java/com/netflix/imflibrary/st2067_2/Composition.java
/* * * Copyright 2015 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.imflibrary.st2067_2; import com.netflix.imflibrary.IMFConstraints; import com.netflix.imflibrary.IMFErrorLogger; import com.netflix.imflibrary.IMFErrorLoggerImpl; import com.netflix.imflibrary.KLVPacket; import com.netflix.imflibrary.MXFOperationalPattern1A; import com.netflix.imflibrary.RESTfulInterfaces.IMPValidator; import com.netflix.imflibrary.exceptions.IMFException; import com.netflix.imflibrary.exceptions.MXFException; import com.netflix.imflibrary.utils.DOMNodeObjectModel; import com.netflix.imflibrary.st0377.HeaderPartition; import com.netflix.imflibrary.st0377.PrimerPack; import com.netflix.imflibrary.st0377.header.GenericPackage; import com.netflix.imflibrary.st0377.header.InterchangeObject; import com.netflix.imflibrary.st0377.header.Preface; import com.netflix.imflibrary.st0377.header.SourcePackage; import com.netflix.imflibrary.st2067_2.CompositionModels.CompositionModel_st2067_2_2013; import com.netflix.imflibrary.st2067_2.CompositionModels.IMFCoreConstraintsChecker_st2067_2_2013; import com.netflix.imflibrary.utils.ByteArrayDataProvider; import com.netflix.imflibrary.utils.ByteProvider; import com.netflix.imflibrary.utils.ErrorLogger; import com.netflix.imflibrary.utils.FileByteRangeProvider; import com.netflix.imflibrary.utils.ResourceByteRangeProvider; import com.netflix.imflibrary.utils.UUIDHelper; import com.netflix.imflibrary.utils.Utilities; import 
com.netflix.imflibrary.writerTools.RegXMLLibHelper; import com.netflix.imflibrary.writerTools.utils.ValidationEventHandlerImpl; import com.sandflow.smpte.klv.Triplet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.DocumentFragment; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.concurrent.Immutable; import javax.xml.XMLConstants; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBElement; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.Validator; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; import java.util.*; import java.util.List; /** * This class represents a canonical model of the XML type 'CompositionPlaylistType' defined by SMPTE st2067-3, * A Composition object can be constructed from an XML file only if it satisfies all the constraints specified * in st2067-3 and st2067-2. This object model is intended to be agnostic of specific versions of the definitions of a * CompositionPlaylist(st2067-3) and its accompanying Core constraints(st2067-2). 
 */
@Immutable
public final class Composition {
    private static final Logger logger = LoggerFactory.getLogger(Composition.class);

    // Classpath locations of the schemas needed to validate a CPL document.
    private static final String dcmlTypes_schema_path = "org/smpte_ra/schemas/st0433_2008/dcmlTypes/dcmlTypes.xsd";
    private static final String xmldsig_core_schema_path = "org/w3/_2000_09/xmldsig/xmldsig-core-schema.xsd";
    // CPL namespaces this implementation accepts (2013 only at this point).
    public static final Set<String> supportedCPLSchemaURIs = Collections.unmodifiableSet(new HashSet<String>(){{ add("http://www.smpte-ra.org/schemas/2067-3/2013");}});

    /** Pairs a CoreConstraints schema classpath location with its JAXB context package name. */
    private static class CoreConstraintsSchemas{
        private final String coreConstraintsSchemaPath;
        private final String coreConstraintsContext;
        private CoreConstraintsSchemas(String coreConstraintsSchemaPath, String coreConstraintsContext){
            this.coreConstraintsSchemaPath = coreConstraintsSchemaPath;
            this.coreConstraintsContext = coreConstraintsContext;
        }
        private String getCoreConstraintsSchemaPath(){
            return this.coreConstraintsSchemaPath;
        }
        private String getCoreConstraintsContext(){
            return this.coreConstraintsContext;
        }
    }

    // Ordered list of CoreConstraints schemas to try during unmarshalling (2013 first, then 2016).
    public static final List<CoreConstraintsSchemas> supportedIMFCoreConstraintsSchemas = Collections.unmodifiableList
            (new ArrayList<CoreConstraintsSchemas>() {{
                add( new CoreConstraintsSchemas("org/smpte_ra/schemas/st2067_2_2013/imf-core-constraints-20130620-pal.xsd", "org.smpte_ra.schemas.st2067_2_2013"));
                add( new CoreConstraintsSchemas("org/smpte_ra/schemas/st2067_2_2016/imf-core-constraints.xsd", "org.smpte_ra.schemas.st2067_2_2016"));}});

    // Raw JAXB element of the unmarshalled CPL; its value type depends on coreConstraintsVersion.
    private final JAXBElement compositionPlaylistTypeJAXBElement;
    // JAXB context package name of the CoreConstraints schema the CPL validated against.
    private final String coreConstraintsVersion;
    private final UUID uuid;
    private final EditRate editRate;
    private final Map<UUID, ? extends VirtualTrack> virtualTrackMap;

    /**
     * Constructor for a {@link Composition Composition} object from a XML file
     * @param compositionPlaylistXMLFile the input XML file that is conformed to schema and constraints specified in st2067-3:2013 and st2067-2:2013
     * @param imfErrorLogger an error logger for recording any errors - cannot be null
     * @throws IOException - any I/O related error is exposed through an IOException
     * @throws SAXException - exposes any issues with instantiating a {@link javax.xml.validation.Schema Schema} object
     * @throws JAXBException - any issues in serializing the XML document using JAXB are exposed through a JAXBException
     * @throws URISyntaxException exposes any issues instantiating a {@link java.net.URI URI} object
     */
    public Composition(File compositionPlaylistXMLFile, @Nonnull IMFErrorLogger imfErrorLogger) throws IOException, SAXException, JAXBException, URISyntaxException {
        this(new FileByteRangeProvider(compositionPlaylistXMLFile), imfErrorLogger);
    }

    /**
     * Constructor for a {@link Composition Composition} object from a XML file
     * @param resourceByteRangeProvider corresponding to the Composition XML file.
     * @param imfErrorLogger an error logger for recording any errors - cannot be null
     * @throws IOException - any I/O related error is exposed through an IOException
     * @throws SAXException - exposes any issues with instantiating a {@link javax.xml.validation.Schema Schema} object
     * @throws JAXBException - any issues in serializing the XML document using JAXB are exposed through a JAXBException
     * @throws URISyntaxException exposes any issues instantiating a {@link java.net.URI URI} object
     */
    public Composition(ResourceByteRangeProvider resourceByteRangeProvider, @Nonnull IMFErrorLogger imfErrorLogger) throws IOException, SAXException, JAXBException, URISyntaxException {
        // Remember the logger's error count so only errors added by THIS constructor are counted at the end.
        int numErrors = imfErrorLogger.getNumberOfErrors();
        String cplNameSpaceURI = getCompositionNamespaceURI(resourceByteRangeProvider, imfErrorLogger);
        String namespaceVersion = getCPLNamespaceVersion(cplNameSpaceURI);
        String imf_cpl_schema_path = getIMFCPLSchemaPath(namespaceVersion);
        CoreConstraintsSchemas coreConstraintsSchema = this.supportedIMFCoreConstraintsSchemas.get(0);
        JAXBElement jaxbElement = null;
        // Try each supported CoreConstraints schema in order; the first one that validates wins.
        // SAX/JAXB failures are swallowed unless this was the last candidate.
        for(int i=0; i<supportedIMFCoreConstraintsSchemas.size(); i++) {
            ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
            try (InputStream inputStream = resourceByteRangeProvider.getByteRangeAsStream(0, resourceByteRangeProvider.getResourceSize() - 1);
                 InputStream xmldsig_core_is = contextClassLoader.getResourceAsStream(Composition.xmldsig_core_schema_path);
                 InputStream dcmlTypes_is = contextClassLoader.getResourceAsStream(Composition.dcmlTypes_schema_path);
                 InputStream imf_cpl_is = contextClassLoader.getResourceAsStream(imf_cpl_schema_path);
                 InputStream imf_core_constraints_is = contextClassLoader.getResourceAsStream(supportedIMFCoreConstraintsSchemas.get(i).coreConstraintsSchemaPath);) {
                StreamSource[] streamSources = new StreamSource[4];
                streamSources[0] = new StreamSource(xmldsig_core_is);
                streamSources[1] = new StreamSource(dcmlTypes_is);
                streamSources[2] = new StreamSource(imf_cpl_is);
                streamSources[3] = new StreamSource(imf_core_constraints_is);
                SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
                Schema schema = schemaFactory.newSchema(streamSources);
                ValidationEventHandlerImpl validationEventHandlerImpl = new ValidationEventHandlerImpl(true);
                JAXBContext jaxbContext = JAXBContext.newInstance(supportedIMFCoreConstraintsSchemas.get(i).coreConstraintsContext);
                Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
                unmarshaller.setEventHandler(validationEventHandlerImpl);
                unmarshaller.setSchema(schema);
                jaxbElement = (JAXBElement) unmarshaller.unmarshal(inputStream);
                coreConstraintsSchema = supportedIMFCoreConstraintsSchemas.get(i);
                if (validationEventHandlerImpl.hasErrors()) {
                    throw new IMFException(validationEventHandlerImpl.toString());
                }
                //CompositionPlaylistType compositionPlaylistType = compositionPlaylistTypeJAXBElement.getValue();
                //this.compositionPlaylistType = compositionPlaylistType;
                break; //No errors so we can break out without trying other Core constraints schema namespaces.
            }
            catch (SAXException | JAXBException e) {
                if(i == supportedIMFCoreConstraintsSchemas.size()-1) {
                    throw e;
                }
            }
        }
        this.compositionPlaylistTypeJAXBElement = jaxbElement;
        this.coreConstraintsVersion = coreConstraintsSchema.getCoreConstraintsContext();
        switch(coreConstraintsVersion){
            case "org.smpte_ra.schemas.st2067_2_2013":
                org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType compositionPlaylistType = (org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType) this.compositionPlaylistTypeJAXBElement.getValue();
                this.virtualTrackMap = CompositionModel_st2067_2_2013.getVirtualTracksMap(compositionPlaylistType, imfErrorLogger);
                if(!IMFCoreConstraintsChecker_st2067_2_2013.checkVirtualTracks(compositionPlaylistType, this.virtualTrackMap, imfErrorLogger)){
                    // Aggregate only the errors produced during this validation pass into the exception message.
                    StringBuilder stringBuilder = new StringBuilder();
                    for(int i=numErrors; i<imfErrorLogger.getErrors().size() ; i++){
                        stringBuilder.append(String.format("%n"));
                        stringBuilder.append(imfErrorLogger.getErrors().get(i));
                    }
                    throw new IMFException(String.format("Found following errors while validating the virtual tracks in the Composition %n %s", stringBuilder.toString()));
                }
                this.uuid = UUIDHelper.fromUUIDAsURNStringToUUID(compositionPlaylistType.getId());
                this.editRate = new EditRate(compositionPlaylistType.getEditRate());
                break;
            case "org.smpte_ra.schemas.st2067_2_2016":
                throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI"));
            default:
                throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas)));
        }
        // NOTE(review): imfErrorLogger is annotated @Nonnull yet null-checked here — the check is
        // redundant if the annotation is enforced; confirm which contract is intended.
        if ((imfErrorLogger != null) && (imfErrorLogger.getNumberOfErrors() > numErrors)) {
            throw new IMFException(String.format("Found %d errors in CompositionPlaylist XML file", imfErrorLogger.getNumberOfErrors() - numErrors));
        }
    }

    private static final String
getIMFCPLSchemaPath(String namespaceVersion){
    // Maps a CPL namespace version token ("2013"/"2016") to the bundled st2067-3 XSD classpath resource.
    String imf_cpl_schema_path;
    switch(namespaceVersion){
        case "2013":
            imf_cpl_schema_path = "org/smpte_ra/schemas/st2067_3_2013/imf-cpl.xsd";
            break;
        case "2016":
            imf_cpl_schema_path = "org/smpte_ra/schemas/st2067_3_2016/imf-cpl.xsd";
            break;
        default:
            throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the following schema URIs %s", Utilities.serializeObjectCollectionToString(supportedCPLSchemaURIs)));
    }
    return imf_cpl_schema_path;
}

/**
 * Determines the CPL schema namespace URI of the document by locating the root CompositionPlaylist
 * element under each supported namespace in turn.
 * @param resourceByteRangeProvider provider for the raw CPL document bytes
 * @param imfErrorLogger logger that accumulates SAX parse warnings/errors encountered while parsing
 * @return the first supported namespace URI under which a single root CompositionPlaylist element is found
 * @throws IOException - any I/O related error is exposed through an IOException
 */
@Nullable
private static final String getCompositionNamespaceURI(ResourceByteRangeProvider resourceByteRangeProvider, IMFErrorLogger imfErrorLogger) throws IOException {
    String result = "";
    try(InputStream inputStream = resourceByteRangeProvider.getByteRangeAsStream(0, resourceByteRangeProvider.getResourceSize()-1);) {
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        documentBuilderFactory.setNamespaceAware(true);
        DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
        // Route parser diagnostics into the IMF error logger instead of the default stderr handler.
        documentBuilder.setErrorHandler(new ErrorHandler() {
            @Override
            public void warning(SAXParseException exception) throws SAXException {
                imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.WARNING, exception.getMessage()));
            }

            @Override
            public void error(SAXParseException exception) throws SAXException {
                imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.NON_FATAL, exception.getMessage()));
            }

            @Override
            public void fatalError(SAXParseException exception) throws SAXException {
                imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, exception.getMessage()));
            }
        });
        Document document = documentBuilder.parse(inputStream);
        //obtain root node
        NodeList nodeList = null;
        for(String cplNamespaceURI : Composition.supportedCPLSchemaURIs) {
            nodeList = document.getElementsByTagNameNS(cplNamespaceURI, "CompositionPlaylist");
            if (nodeList != null && nodeList.getLength() == 1) {
                result = cplNamespaceURI;
                break;
            }
        }
    } catch(ParserConfigurationException | SAXException e) {
        throw new IMFException(String.format("Error occurred while trying to determine the Composition Playlist Namespace URI, invalid CPL document Error Message : %s", e.getMessage()));
    }
    if(result.isEmpty()) {
        throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the following schema URIs %s", Utilities.serializeObjectCollectionToString(supportedCPLSchemaURIs)));
    }
    return result;
}

/**
 * Extracts the version token from a CPL namespace URI.
 * @param namespaceURI the CPL namespace URI
 * @return the trailing path segment of the URI, e.g. "2013"
 */
private static final String getCPLNamespaceVersion(String namespaceURI){
    String[] uriComponents = namespaceURI.split("/");
    String namespaceVersion = uriComponents[uriComponents.length - 1];
    return namespaceVersion;
}

/**
 * Serializes the context names of the supported CoreConstraints schemas, one per line.
 * @param coreConstraintsSchemas the schemas whose contexts should be serialized
 * @return a newline-separated string of CoreConstraints context names
 */
private final String serializeIMFCoreConstaintsSchemasToString(List<CoreConstraintsSchemas> coreConstraintsSchemas){
    StringBuilder stringBuilder = new StringBuilder();
    for(CoreConstraintsSchemas coreConstraintsSchema : coreConstraintsSchemas){
        stringBuilder.append(String.format("%n"));
        stringBuilder.append(coreConstraintsSchema.getCoreConstraintsContext());
    }
    return stringBuilder.toString();
}

/**
 * A stateless method that verifies if the raw data represented by the ResourceByteRangeProvider corresponds to a valid
 * IMF Composition Playlist document
 * @param resourceByteRangeProvider - a byte range provider for the document that needs to be verified
 * @return - a boolean indicating if the document represented is an IMF Composition or not
 * @throws IOException - any I/O related error is exposed through an IOException
 */
public static boolean isFileOfSupportedSchema(ResourceByteRangeProvider resourceByteRangeProvider) throws IOException{
    try(InputStream inputStream = resourceByteRangeProvider.getByteRangeAsStream(0,
resourceByteRangeProvider.getResourceSize()-1);) {
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        documentBuilderFactory.setNamespaceAware(true);
        DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
        Document document = documentBuilder.parse(inputStream);
        NodeList nodeList = null;
        for(String supportedSchemaURI : supportedCPLSchemaURIs) {
            //obtain root node
            nodeList = document.getElementsByTagNameNS(supportedSchemaURI, "CompositionPlaylist");
            if (nodeList != null && nodeList.getLength() == 1) {
                return true;
            }
        }
    } catch(ParserConfigurationException | SAXException e) {
        // A document that cannot be parsed is by definition not of a supported schema.
        return false;
    }
    return false;
}

/**
 * A method that returns a string representation of a Composition object
 *
 * @return string representing the object
 */
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(String.format("======= Composition : %s =======%n", this.uuid));
    sb.append(this.editRate.toString());
    return sb.toString();
}

/**
 * A method that confirms if the inputStream corresponds to a Composition document instance.
 * @param resourceByteRangeProvider corresponding to the Composition XML file.
 * @return a boolean indicating if the input file is a Composition document
 * @throws IOException - any I/O related error is exposed through an IOException
 */
public static boolean isCompositionPlaylist(ResourceByteRangeProvider resourceByteRangeProvider) throws IOException{
    try(InputStream inputStream = resourceByteRangeProvider.getByteRangeAsStream(0, resourceByteRangeProvider.getResourceSize()-1);) {
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        documentBuilderFactory.setNamespaceAware(true);
        DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
        Document document = documentBuilder.parse(inputStream);
        //obtain root node
        NodeList nodeList = null;
        for(String cplNamespaceURI : Composition.supportedCPLSchemaURIs) {
            nodeList = document.getElementsByTagNameNS(cplNamespaceURI, "CompositionPlaylist");
            if (nodeList != null && nodeList.getLength() == 1) {
                return true;
            }
        }
    } catch(ParserConfigurationException | SAXException e) {
        // Not parseable as XML under a supported namespace: not a Composition document.
        return false;
    }
    return false;
}

/**
 * Getter for the composition edit rate as specified in the Composition XML file
 * @return the edit rate associated with the Composition
 */
public EditRate getEditRate() {
    return this.editRate;
}

/**
 * Getter for the virtual track map associated with this Composition
 * @return {@link java.util.Map Map}&lt;{@link java.util.UUID UUID},{@link Composition.VirtualTrack VirtualTrack}&gt;. The UUID key corresponds to VirtualTrackID
 */
public Map<UUID, ? extends VirtualTrack> getVirtualTrackMap() {
    return Collections.unmodifiableMap(this.virtualTrackMap);
}

/**
 * Getter for the UUID corresponding to this Composition document
 * @return the uuid of this Composition object
 */
public UUID getUUID() {
    return this.uuid;
}

/**
 * Getter for the CompositionPlaylistType object model of the Composition defined by the st2067-3 schema.
 * @return the composition playlist type object model.
 */
private JAXBElement getCompositionPlaylistTypeJAXBElement(){
    return this.compositionPlaylistTypeJAXBElement;
}

/**
 * Getter for the CoreConstraintsURI corresponding to this CompositionPlaylist
 * @return the uri for the CoreConstraints schema for this CompositionPlaylist
 */
public String getCoreConstraintsVersion(){
    return this.coreConstraintsVersion;
}

/**
 * Getter for the video VirtualTrack in this Composition
 * @return the video virtual track that is a part of this composition or null if there is not video virtual track
 */
@Nullable
public VirtualTrack getVideoVirtualTrack(){
    switch(coreConstraintsVersion) {
        case "org.smpte_ra.schemas.st2067_2_2013":
            // Scan the virtual track map for the (single) MainImageSequence track.
            Iterator iterator = this.virtualTrackMap.entrySet().iterator();
            while (iterator != null && iterator.hasNext()) {
                Composition.VirtualTrack virtualTrack = ((Map.Entry<UUID, ? extends Composition.VirtualTrack>) iterator.next()).getValue();
                if (virtualTrack.getSequenceTypeEnum().equals(SequenceTypeEnum.MainImageSequence)) {
                    return virtualTrack;
                }
            }
            break;
        case "org.smpte_ra.schemas.st2067_2_2016":
            throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI"));
        default:
            throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas)));
    }
    return null;
}

/**
 * Getter for the audio VirtualTracks in this Composition
 * @return a list of audio virtual tracks that are a part of this composition or an empty list if there are none
 */
public List<? extends VirtualTrack> getAudioVirtualTracks(){
    List<VirtualTrack> audioVirtualTracks = new ArrayList<>();
    Iterator iterator = this.getVirtualTrackMap().entrySet().iterator();
    while(iterator != null && iterator.hasNext()) {
        Composition.VirtualTrack virtualTrack = ((Map.Entry<UUID, ? extends Composition.VirtualTrack>) iterator.next()).getValue();
        if (virtualTrack.getSequenceTypeEnum().equals(SequenceTypeEnum.MainAudioSequence)) {
            audioVirtualTracks.add(virtualTrack);
        }
    }
    return Collections.unmodifiableList(audioVirtualTracks);
}

/**
 * A stateless method that validates the CPL document against the st2067-3 CPL schema together with each
 * supported CoreConstraints schema in turn, routing validation diagnostics into the supplied error logger.
 * @param resourceByteRangeProvider - a byte range provider for the CPL document to validate
 * @param imfErrorLogger an error logger for recording validation warnings and errors
 * @throws IOException - any I/O related error is exposed through an IOException
 * @throws SAXException - raised when the document fails to validate against every supported CoreConstraints schema
 */
public static void validateCompositionPlaylistSchema(ResourceByteRangeProvider resourceByteRangeProvider, IMFErrorLogger imfErrorLogger) throws IOException, SAXException {
    String cplNameSpaceURI = getCompositionNamespaceURI(resourceByteRangeProvider, imfErrorLogger);
    String namespaceVersion = getCPLNamespaceVersion(cplNameSpaceURI);
    String imf_cpl_schema_path = getIMFCPLSchemaPath(namespaceVersion);
    for (int i=0; i<supportedIMFCoreConstraintsSchemas.size(); i++) {
        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
        try (InputStream inputStream = resourceByteRangeProvider.getByteRangeAsStream(0, resourceByteRangeProvider.getResourceSize() - 1);
             InputStream xmldsig_core_is = contextClassLoader.getResourceAsStream(Composition.xmldsig_core_schema_path);
             InputStream dcmlTypes_is = contextClassLoader.getResourceAsStream(Composition.dcmlTypes_schema_path);
             InputStream imf_cpl_is = contextClassLoader.getResourceAsStream(imf_cpl_schema_path);
             InputStream imf_core_constraints_is = contextClassLoader.getResourceAsStream(supportedIMFCoreConstraintsSchemas.get(i).coreConstraintsSchemaPath);) {
            StreamSource inputSource = new StreamSource(inputStream);
            // Compose the validating schema from the four XSDs bundled on the classpath.
            StreamSource[] streamSources = new StreamSource[4];
            streamSources[0] = new StreamSource(xmldsig_core_is);
            streamSources[1] = new StreamSource(dcmlTypes_is);
            streamSources[2] = new StreamSource(imf_cpl_is);
            streamSources[3] = new StreamSource(imf_core_constraints_is);
            SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
            Schema schema = schemaFactory.newSchema(streamSources);
            Validator validator = schema.newValidator();
            // Collect diagnostics in the IMF error logger rather than aborting on the first issue.
            validator.setErrorHandler(new ErrorHandler() {
                @Override
                public void warning(SAXParseException exception) throws SAXException {
                    imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.WARNING, exception.getMessage()));
                }

                @Override
                public void error(SAXParseException exception) throws SAXException {
                    imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.NON_FATAL, exception.getMessage()));
                }

                @Override
                public void fatalError(SAXParseException exception) throws SAXException {
                    imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, exception.getMessage()));
                }
            });
            validator.validate(inputSource);
            break;//No errors so we can break out without trying other Core constraints schema namespaces.
        } catch (SAXException e) {
            // Only surface the failure once every supported schema namespace has been tried.
            if(i == supportedIMFCoreConstraintsSchemas.size()-1) {
                throw e;
            }
        }
    }
}

/**
 * This class is an immutable implementation of a rational number described as a ratio of two longs and used to hold
 * non-integer frame rate values
 */
@Immutable
public static final class EditRate {
    private final Long numerator;   // frame rate numerator, non-zero and non-null by construction
    private final Long denominator; // frame rate denominator, non-zero and non-null by construction

    /**
     * Constructor for the rational frame rate number.
     * @param numbers the input list of numbers. The first number in the list is treated as the numerator and the second as
     * the denominator. Construction succeeds only if the list has exactly two numbers
     */
    public EditRate(List<Long> numbers) {
        if (numbers.size() != 2) {
            throw new IMFException(String.format(
                    "Input list is expected to contain 2 numbers representing numerator and denominator respectively, found %d numbers in list %s",
                    numbers.size(), Arrays.toString(numbers.toArray())));
        } else if(numbers.get(0) == 0 || numbers.get(1) == 0){
            throw new IMFException(String.format(
                    "Input list is expected to contain 2 non-zero numbers representing numerator and denominator of the EditRate respectively, found Numerator %d, Denominator %d",
                    numbers.get(0), numbers.get(1)));
        }
        this.numerator = numbers.get(0);
        this.denominator = numbers.get(1);
    }

    /**
     * Getter for the frame rate numerator
     * @return a long value corresponding to the frame rate numerator
     */
    public Long getNumerator() {
        return this.numerator;
    }

    /**
     * Getter for the frame rate denominator
     * @return a long value corresponding to the frame rate denominator
     */
    public Long getDenominator() {
        return this.denominator;
    }

    /**
     * A method that returns a string representation of a Composition object
     *
     * @return string representing the object
     */
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("=================== EditRate =====================\n");
        sb.append(String.format("numerator = %d, denominator = %d%n", this.numerator, this.denominator));
        return sb.toString();
    }

    /**
     * Overridden equals method.
     * @param object the EditRate to be compared with.
     * @return boolean false if the object is null or is not an instance of the EditRate class.
*/ @Override public boolean equals(Object object){ if(object == null || !(object instanceof EditRate)){ return false; } EditRate other = (EditRate) object; return ((this.getNumerator().equals(other.getNumerator())) && (this.getDenominator().equals(other.getDenominator()))); } /** * A Java compliant implementation of the hashCode() method * @return integer containing the hash code corresponding to this object */ @Override public int hashCode(){ int hash = 1; hash = hash * 31 + this.numerator.hashCode(); /*Numerator can be used since it is non-null*/ hash = hash * 31 + this.denominator.hashCode();/*Another field that is indicated to be non-null*/ return hash; } } /** * This class enumerates various types of {@link org.smpte_ra.schemas.st2067_2_2013.SequenceType Sequence} that are valid in * Composition document that is compliant with st2067-2:2013. Such types are mostly defined in Section 6.3 of st2067-2:2013 */ public static enum SequenceTypeEnum { MarkerSequence("MarkerSequence"), MainImageSequence("MainImageSequence"), MainAudioSequence("MainAudioSequence"), SubtitlesSequence("SubtitlesSequence"), HearingImpairedCaptionsSequence("HearingImpairedCaptionsSequence"), VisuallyImpairedTextSequence("VisuallyImpairedTextSequence"), CommentarySequence("CommentarySequence"), KaraokeSequence("KaraokeSequence"), AncillaryDataSequence("AncillaryDataSequence"), Unknown("Unknown"); private final String name; private SequenceTypeEnum(String name) { this.name = name; } /** * A getter for the SequenceTypeEnum given a string that represents the name of a SequenceTypeEnum * @param name the string that should represent the SequenceTypeEnum * @return the SequenceTypeEnum value corresponding to the name that was passed */ public static SequenceTypeEnum getSequenceTypeEnum(String name) { switch (name) { case "MainImageSequence": return MainImageSequence; case "MainAudioSequence": return MainAudioSequence; case "SubtitlesSequence": return SubtitlesSequence; case 
"HearingImpairedCaptionsSequence": return HearingImpairedCaptionsSequence; case "VisuallyImpairedTextSequence": return VisuallyImpairedTextSequence; case "CommentarySequence": return CommentarySequence; case "KaraokeSequence": return KaraokeSequence; case "AncillaryDataSequence": return AncillaryDataSequence; default: return Unknown; } } /** * An override of the toString() method * @return a string representing the SequenceTypeEnum */ @Override public String toString(){ return this.name; } } /** * The class is an immutable implementation of the virtual track concept defined in Section 6.9.3 of st2067-3:2013. A * virtual track is characterized by its UUID and the type of sequence it holds */ @Immutable public abstract static class VirtualTrack { protected final UUID trackID; protected final SequenceTypeEnum sequenceTypeEnum; protected final List<UUID> resourceIds = new ArrayList<>(); /** * Constructor for a VirtualTrack object * @param trackID the UUID associated with this VirtualTrack object * @param sequenceTypeEnum the type of the associated sequence */ public VirtualTrack(UUID trackID, SequenceTypeEnum sequenceTypeEnum) { this.trackID = trackID; this.sequenceTypeEnum = sequenceTypeEnum; } /** * Getter for the sequence type associated with this VirtualTrack object * @return the sequence type associated with this VirtualTrack object as an enum */ public SequenceTypeEnum getSequenceTypeEnum() { return this.sequenceTypeEnum; } /** * Getter for the UUID associated with this VirtualTrack object * @return the UUID associated with the Virtual track */ public UUID getTrackID(){ return this.trackID; } /** * Getter for the UUIDs of the resources that are a part of this virtual track * @return an unmodifiable list of UUIDs of resources that are a part of this virtual track */ public List<UUID> getTrackResourceIds(){ return Collections.unmodifiableList(this.resourceIds); } /** * A method to determine the equivalence of any 2 virtual tracks. 
* @param other - the object to compare against * @return boolean indicating if the 2 virtual tracks are equivalent or represent the same timeline */ public abstract boolean equivalent(VirtualTrack other); } /** * A utility method to retrieve the VirtualTracks within a Composition. * @return A list of VirtualTracks in the Composition. * @throws IOException - any I/O related error is exposed through an IOException. * @throws IMFException - any non compliant CPL documents will be signalled through an IMFException * @throws SAXException - exposes any issues with instantiating a {@link javax.xml.validation.Schema Schema} object * @throws JAXBException - any issues in serializing the XML document using JAXB are exposed through a JAXBException * @throws URISyntaxException exposes any issues instantiating a {@link java.net.URI URI} object */ @Nonnull public List<? extends Composition.VirtualTrack> getVirtualTracks() throws IOException, IMFException, SAXException, JAXBException, URISyntaxException { Map<UUID, ? extends Composition.VirtualTrack> virtualTrackMap = this.getVirtualTrackMap(); return new ArrayList<>(virtualTrackMap.values()); } /** * A utility method to retrieve the UUIDs of the Track files referenced by a Virtual track within a Composition. * @param virtualTrack - object model of an IMF virtual track {@link Composition.VirtualTrack} * @return A list of TrackFileResourceType objects corresponding to the virtual track in the Composition. * @throws IOException - any I/O related error is exposed through an IOException. 
* @throws IMFException - any non compliant CPL documents will be signalled through an IMFException
* @throws SAXException - exposes any issues with instantiating a {@link javax.xml.validation.Schema Schema} object
* @throws JAXBException - any issues in serializing the XML document using JAXB are exposed through a JAXBException
* @throws URISyntaxException exposes any issues instantiating a {@link java.net.URI URI} object
*/
@Nonnull
public List<ResourceIdTuple> getVirtualTrackResourceIDs(@Nonnull Composition.VirtualTrack virtualTrack) throws IOException, IMFException, SAXException, JAXBException, URISyntaxException {
    List<ResourceIdTuple> virtualTrackResourceIDs = new ArrayList<>();
    switch(coreConstraintsVersion){
        case "org.smpte_ra.schemas.st2067_2_2013":
            CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013 virtualTrack_st2067_2_2013 = CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013.class.cast(virtualTrack);
            List<org.smpte_ra.schemas.st2067_2_2013.TrackFileResourceType> resourceList = virtualTrack_st2067_2_2013.getResourceList();
            if(resourceList != null && resourceList.size() > 0) {
                // Collect the (TrackFileId, SourceEncoding) pair of every resource of this track.
                for (org.smpte_ra.schemas.st2067_2_2013.TrackFileResourceType trackFileResourceType : resourceList) {
                    virtualTrackResourceIDs.add(new ResourceIdTuple(UUIDHelper.fromUUIDAsURNStringToUUID(trackFileResourceType.getTrackFileId())
                            , UUIDHelper.fromUUIDAsURNStringToUUID(trackFileResourceType.getSourceEncoding())));
                }
            }
            break;
        case "org.smpte_ra.schemas.st2067_2_2016":
            throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI"));
        default:
            throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas)));
    }
    return Collections.unmodifiableList(virtualTrackResourceIDs);
}

/**
 * A utility method that will analyze the EssenceDescriptorList in a Composition and construct a HashMap mapping
 * a UUID to a EssenceDescriptor.
 * @return a HashMap mapping the UUID to its corresponding EssenceDescriptor in the Composition
 */
public Map<UUID, DOMNodeObjectModel> getEssenceDescriptorListMap(){
    Map<UUID, DOMNodeObjectModel> essenceDescriptorMap = new HashMap<>();
    switch(this.coreConstraintsVersion) {
        case "org.smpte_ra.schemas.st2067_2_2013":
            org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType compositionPlaylistType = (org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType) this.compositionPlaylistTypeJAXBElement.getValue();
            List<org.smpte_ra.schemas.st2067_2_2013.EssenceDescriptorBaseType> essenceDescriptors = compositionPlaylistType.getEssenceDescriptorList().getEssenceDescriptor();
            for (org.smpte_ra.schemas.st2067_2_2013.EssenceDescriptorBaseType essenceDescriptorBaseType : essenceDescriptors) {
                UUID uuid = UUIDHelper.fromUUIDAsURNStringToUUID(essenceDescriptorBaseType.getId());
                DOMNodeObjectModel domNodeObjectModel = null;
                // NOTE(review): if getAny() holds multiple nodes only the last one is kept — presumably a single node is expected; confirm.
                for (Object object : essenceDescriptorBaseType.getAny()) {
                    domNodeObjectModel = new DOMNodeObjectModel((Node) object);
                }
                if (domNodeObjectModel != null) {
                    essenceDescriptorMap.put(uuid, domNodeObjectModel);
                }
            }
            break;
        case "org.smpte_ra.schemas.st2067_2_2016":
            throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI"));
        default:
            throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas)));
    }
    return Collections.unmodifiableMap(essenceDescriptorMap);
}

/**
 * A utility method that groups the audio virtual tracks of this Composition by the set of
 * EssenceDescriptor object models referenced by their resources.
 * @return an unmodifiable map from a set of EssenceDescriptor DOM object models to the audio
 *         virtual track whose resources reference exactly those descriptors
 */
public Map<Set<DOMNodeObjectModel>, ? extends Composition.VirtualTrack> getAudioVirtualTracksMap() {
    List<? extends Composition.VirtualTrack> audioVirtualTracks = this.getAudioVirtualTracks();
    Map<UUID, DOMNodeObjectModel> essenceDescriptorListMap = this.getEssenceDescriptorListMap();
    switch(this.coreConstraintsVersion) {
        case "org.smpte_ra.schemas.st2067_2_2013":
            Map<Set<DOMNodeObjectModel>, CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013> audioVirtualTrackMap = new HashMap<>();
            for (Composition.VirtualTrack audioVirtualTrack : audioVirtualTracks) {
                Set<DOMNodeObjectModel> set = new HashSet<>();
                CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013 audioVirtualTrack_st2067_2_2013 = CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013.class.cast(audioVirtualTrack);
                List<org.smpte_ra.schemas.st2067_2_2013.TrackFileResourceType> resources = audioVirtualTrack_st2067_2_2013.getResourceList();
                for (org.smpte_ra.schemas.st2067_2_2013.TrackFileResourceType resource : resources) {
                    set.add(essenceDescriptorListMap.get(UUIDHelper.fromUUIDAsURNStringToUUID(resource.getSourceEncoding())));//Fetch and add the EssenceDescriptor referenced by the resource via the SourceEncoding element to the ED set.
                }
                audioVirtualTrackMap.put(set, audioVirtualTrack_st2067_2_2013);
            }
            return Collections.unmodifiableMap(audioVirtualTrackMap);
        case "org.smpte_ra.schemas.st2067_2_2016":
            throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI"));
        default:
            throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas)));
    }
}

/**
 * This class is a representation of a Resource SourceEncoding element and trackFileId tuple.
 */
public static final class ResourceIdTuple{
    private final UUID trackFileId;    // the TrackFileId referenced by the resource
    private final UUID sourceEncoding; // the SourceEncoding element referenced by the resource

    private ResourceIdTuple(UUID trackFileId, UUID sourceEncoding){
        this.trackFileId = trackFileId;
        this.sourceEncoding = sourceEncoding;
    }

    /**
     * A getter for the trackFileId referenced by the resource corresponding to this ResourceIdTuple
     * @return the trackFileId associated with this ResourceIdTuple
     */
    public UUID getTrackFileId(){
        return this.trackFileId;
    }

    /**
     * A getter for the source encoding element referenced by the resource corresponding to this ResourceIdTuple
     * @return the source encoding element associated with this ResourceIdTuple
     */
    public UUID getSourceEncoding(){
        return this.sourceEncoding;
    }
}

/**
 * This method can be used to determine if a Composition is conformant. Conformance checks
 * perform deeper inspection of the Composition and the EssenceDescriptors corresponding to the
 * resources referenced by the Composition.
 * @param headerPartitionTuples list of HeaderPartitionTuples corresponding to the IMF essences referenced in the Composition
 * @param imfErrorLogger an error logging object
 * @param conformAllVirtualTracks when true, additionally requires every EssenceDescriptor in the EDL to be
 *                                referenced by at least one virtual track resource
 * @return boolean to indicate of the Composition is conformant or not
 * @throws IOException - any I/O related error is exposed through an IOException.
 * @throws IMFException - any non compliant CPL documents will be signalled through an IMFException
 * @throws SAXException - exposes any issues with instantiating a {@link javax.xml.validation.Schema Schema} object
 * @throws JAXBException - any issues in serializing the XML document using JAXB are exposed through a JAXBException
 * @throws URISyntaxException exposes any issues instantiating a {@link java.net.URI URI} object
 */
public boolean conformVirtualTrackInComposition(List<IMPValidator.HeaderPartitionTuple> headerPartitionTuples, IMFErrorLogger imfErrorLogger, boolean conformAllVirtualTracks) throws IOException, IMFException, SAXException, JAXBException, URISyntaxException{
    boolean result = true;
    /*
     * The algorithm for conformance checking a Composition (CPL) would be
     * 1) Verify that every EssenceDescriptor element in the EssenceDescriptor list (EDL) is referenced through its id element if conformAllVirtualTracks is enabled
     * by at least one TrackFileResource within the Virtual tracks in the Composition (see section 6.1.10 of SMPTE st2067-3:2-13).
     * 2) Verify that all track file resources within a virtual track have a corresponding essence descriptor in the essence descriptor list.
     * 3) Verify that the EssenceDescriptors in the EssenceDescriptorList element in the Composition are present in
     * the physical essence files referenced by the resources of a virtual track and are equal.
     */
    /*The following check simultaneously verifies 1) and 2) from above.*/
    Set<UUID> resourceEssenceDescriptorIDsSet = getResourceEssenceDescriptorIdsSet();
    Iterator resourceEssenceDescriptorIDs = resourceEssenceDescriptorIDsSet.iterator();
    Set<UUID> cplEssenceDescriptorIDsSet = getEssenceDescriptorIdsSet();
    Iterator cplEssenceDescriptorIDs = cplEssenceDescriptorIDsSet.iterator();
    while(resourceEssenceDescriptorIDs.hasNext()){
        UUID resourceEssenceDescriptorUUID = (UUID) resourceEssenceDescriptorIDs.next();
        if(!cplEssenceDescriptorIDsSet.contains(resourceEssenceDescriptorUUID)) {
            // A resource references a descriptor that is absent from the EDL: log and keep checking.
            result &= false;
            imfErrorLogger.addError(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, String.format("EssenceDescriptor ID %s referenced by a VirtualTrack Resource does not have a corresponding EssenceDescriptor in the EssenceDescriptorList in the CPL", resourceEssenceDescriptorUUID.toString()));
        }
    }
    /**
     * The following checks that at least one of the Virtual Tracks references an EssenceDescriptor in the EDL. This
     * check should be performed only when we need to conform all the Virtual Tracks in the CPL.
     */
    if(conformAllVirtualTracks) {
        while (cplEssenceDescriptorIDs.hasNext()) {
            UUID cplEssenceDescriptorUUID = (UUID) cplEssenceDescriptorIDs.next();
            if (!resourceEssenceDescriptorIDsSet.contains(cplEssenceDescriptorUUID)) {
                // An EDL entry is never referenced by any virtual track resource: log and keep checking.
                result &= false;
                imfErrorLogger.addError(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, String.format("EssenceDescriptorID %s in the CPL EssenceDescriptorList is not referenced by any resource in any of the Virtual tracks in the CPL, this violates the constraint in st2067-3:2013 section 6.1.10.1", cplEssenceDescriptorUUID.toString()));
            }
        }
    }
    if(!result){
        return result;
    }
    /*The following check verifies 3) from above.*/
    result &= compareEssenceDescriptors(getResourcesEssenceDescriptorMap(headerPartitionTuples), this.getEssenceDescriptorListMap(), imfErrorLogger);
    return result;
}

/**
 * Collects the id of every EssenceDescriptor in this Composition's EssenceDescriptorList.
 * @return an insertion-ordered set of EssenceDescriptor UUIDs from the CPL's EDL
 */
private Set<UUID> getEssenceDescriptorIdsSet () {
    HashSet<UUID> essenceDescriptorIdsSet = new LinkedHashSet<>();
    switch(this.coreConstraintsVersion) {
        case "org.smpte_ra.schemas.st2067_2_2013":
            org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType compositionPlaylistType = (org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType)this.getCompositionPlaylistTypeJAXBElement().getValue();
            List<org.smpte_ra.schemas.st2067_2_2013.EssenceDescriptorBaseType> essenceDescriptorList = compositionPlaylistType.getEssenceDescriptorList().getEssenceDescriptor();
            for (org.smpte_ra.schemas.st2067_2_2013.EssenceDescriptorBaseType essenceDescriptorBaseType : essenceDescriptorList) {
                UUID sourceEncodingElement = UUIDHelper.fromUUIDAsURNStringToUUID(essenceDescriptorBaseType.getId());
                /*Construct a set of SourceEncodingElements/IDs corresponding to every EssenceDescriptorBaseType in the EssenceDescriptorList*/
                essenceDescriptorIdsSet.add(sourceEncodingElement);
            }
            break;
        case "org.smpte_ra.schemas.st2067_2_2016":
            throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI"));
        default:
            throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas)));
    }
    return essenceDescriptorIdsSet;
}

/**
 * Collects the SourceEncoding id referenced by every TrackFileResource of every virtual track in this Composition.
 * @return an insertion-ordered set of SourceEncoding element UUIDs
 * @throws IOException - any I/O related error is exposed through an IOException
 * @throws SAXException - exposes any issues with instantiating a {@link javax.xml.validation.Schema Schema} object
 * @throws JAXBException - any issues in serializing the XML document using JAXB are exposed through a JAXBException
 * @throws URISyntaxException exposes any issues instantiating a {@link java.net.URI URI} object
 */
private Set<UUID> getResourceEssenceDescriptorIdsSet () throws IOException, SAXException, JAXBException, URISyntaxException{
    List<Composition.VirtualTrack> virtualTracks = new ArrayList<>(this.getVirtualTrackMap().values());
    LinkedHashSet<UUID> resourceSourceEncodingElementsSet = new LinkedHashSet<>();
    for(Composition.VirtualTrack virtualTrack : virtualTracks){
        List<Composition.ResourceIdTuple> resourceIdTuples = this.getVirtualTrackResourceIDs(virtualTrack);
        for(Composition.ResourceIdTuple resourceIdTuple : resourceIdTuples){
            /*Construct a set of SourceEncodingElements corresponding to every TrackFileResource of this VirtualTrack*/
            resourceSourceEncodingElementsSet.add(resourceIdTuple.getSourceEncoding());
        }
    }
    return resourceSourceEncodingElementsSet;
}

private Map<UUID, List<DOMNodeObjectModel>> getResourcesEssenceDescriptorMap(List<IMPValidator.HeaderPartitionTuple> headerPartitionTuples) throws IOException, SAXException, JAXBException, URISyntaxException{
    Map<UUID, List<DOMNodeObjectModel>> resourcesEssenceDescriptorMap = new LinkedHashMap<>();
    /*Create a Map of FilePackage UUID which should be equal to the TrackFileId of the resource in the Composition if the asset is referenced and the HeaderPartitionTuple, Map<UUID, HeaderPartitionTuple>*/
    Map<UUID, IMPValidator.HeaderPartitionTuple> resourceUUIDHeaderPartitionMap = new HashMap<>();
    for(IMPValidator.HeaderPartitionTuple headerPartitionTuple : headerPartitionTuples) {
        //validate header partition
        MXFOperationalPattern1A.HeaderPartitionOP1A headerPartitionOP1A = MXFOperationalPattern1A.checkOperationalPattern1ACompliance(headerPartitionTuple.getHeaderPartition());
        IMFConstraints.HeaderPartitionIMF headerPartitionIMF =
IMFConstraints.checkIMFCompliance(headerPartitionOP1A); Preface preface = headerPartitionIMF.getHeaderPartitionOP1A().getHeaderPartition().getPreface(); GenericPackage genericPackage = preface.getContentStorage().getEssenceContainerDataList().get(0).getLinkedPackage(); SourcePackage filePackage = (SourcePackage)genericPackage; UUID packageUUID = filePackage.getPackageMaterialNumberasUUID(); resourceUUIDHeaderPartitionMap.put(packageUUID, headerPartitionTuple); } List<Composition.VirtualTrack> virtualTracks = new ArrayList<>(this.getVirtualTrackMap().values()); /*Go through all the Virtual Tracks in the Composition and construct a map of Resource Source Encoding Element and a list of DOM nodes representing every EssenceDescriptor in the HeaderPartition corresponding to that Resource*/ for(Composition.VirtualTrack virtualTrack : virtualTracks){ List<Composition.ResourceIdTuple> resourceIdTuples = this.getVirtualTrackResourceIDs(virtualTrack);/*Retrieve a list of ResourceIDTuples corresponding to this virtual track*/ for(Composition.ResourceIdTuple resourceIdTuple : resourceIdTuples){ IMPValidator.HeaderPartitionTuple headerPartitionTuple = resourceUUIDHeaderPartitionMap.get(resourceIdTuple.getTrackFileId()); if(headerPartitionTuple != null){ /*Create a DOM Node representation of the EssenceDescriptors present in this header partition corresponding to an IMFTrackFile*/ List<Node> essenceDescriptorDOMNodes = getEssenceDescriptorDOMNodes(headerPartitionTuple); List<DOMNodeObjectModel> domNodeObjectModels = new ArrayList<>(); for(Node node : essenceDescriptorDOMNodes){ domNodeObjectModels.add(new DOMNodeObjectModel(node)); } resourcesEssenceDescriptorMap.put(resourceIdTuple.getSourceEncoding(), domNodeObjectModels); } } } if(resourcesEssenceDescriptorMap.entrySet().size() == 0){ throw new MXFException(String.format("Composition does not refer to a single IMFEssence represented by the HeaderPartitions that were passed in.")); } return 
Collections.unmodifiableMap(resourcesEssenceDescriptorMap); } private List<Node> getEssenceDescriptorDOMNodes(IMPValidator.HeaderPartitionTuple headerPartitionTuple) throws IOException { try { List<InterchangeObject.InterchangeObjectBO> essenceDescriptors = headerPartitionTuple.getHeaderPartition().getEssenceDescriptors(); List<Node> essenceDescriptorNodes = new ArrayList<>(); for (InterchangeObject.InterchangeObjectBO essenceDescriptor : essenceDescriptors) { KLVPacket.Header essenceDescriptorHeader = essenceDescriptor.getHeader(); List<KLVPacket.Header> subDescriptorHeaders = this.getSubDescriptorKLVHeader(headerPartitionTuple.getHeaderPartition(), essenceDescriptor); /*Create a dom*/ DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder = docFactory.newDocumentBuilder(); Document document = docBuilder.newDocument(); DocumentFragment documentFragment = this.getEssenceDescriptorAsDocumentFragment(document, headerPartitionTuple, essenceDescriptorHeader, subDescriptorHeaders); Node node = documentFragment.getFirstChild(); essenceDescriptorNodes.add(node); } return essenceDescriptorNodes; } catch(ParserConfigurationException e){ throw new IMFException(e); } } private List<KLVPacket.Header> getSubDescriptorKLVHeader(HeaderPartition headerPartition, InterchangeObject.InterchangeObjectBO essenceDescriptor) { List<KLVPacket.Header> subDescriptorHeaders = new ArrayList<>(); List<InterchangeObject.InterchangeObjectBO> subDescriptors = headerPartition.getSubDescriptors(essenceDescriptor); for (InterchangeObject.InterchangeObjectBO subDescriptorBO : subDescriptors) { if (subDescriptorBO != null) { subDescriptorHeaders.add(subDescriptorBO.getHeader()); } } return Collections.unmodifiableList(subDescriptorHeaders); } private DocumentFragment getEssenceDescriptorAsDocumentFragment(Document document, IMPValidator.HeaderPartitionTuple headerPartitionTuple, KLVPacket.Header essenceDescriptor, List<KLVPacket.Header>subDescriptors) 
throws MXFException, IOException { document.setXmlStandalone(true); PrimerPack primerPack = headerPartitionTuple.getHeaderPartition().getPrimerPack(); ResourceByteRangeProvider resourceByteRangeProvider = headerPartitionTuple.getResourceByteRangeProvider(); RegXMLLibHelper regXMLLibHelper = new RegXMLLibHelper(primerPack.getHeader(), getByteProvider(resourceByteRangeProvider, primerPack.getHeader())); Triplet essenceDescriptorTriplet = regXMLLibHelper.getTripletFromKLVHeader(essenceDescriptor, getByteProvider(resourceByteRangeProvider, essenceDescriptor)); //DocumentFragment documentFragment = this.regXMLLibHelper.getDocumentFragment(essenceDescriptorTriplet, document); /*Get the Triplets corresponding to the SubDescriptors*/ List<Triplet> subDescriptorTriplets = new ArrayList<>(); for(KLVPacket.Header subDescriptorHeader : subDescriptors){ subDescriptorTriplets.add(regXMLLibHelper.getTripletFromKLVHeader(subDescriptorHeader, this.getByteProvider(resourceByteRangeProvider, subDescriptorHeader))); } return regXMLLibHelper.getEssenceDescriptorDocumentFragment(essenceDescriptorTriplet, subDescriptorTriplets, document); } private ByteProvider getByteProvider(ResourceByteRangeProvider resourceByteRangeProvider, KLVPacket.Header header) throws IOException { byte[] bytes = resourceByteRangeProvider.getByteRangeAsBytes(header.getByteOffset(), header.getByteOffset() + header.getKLSize() + header.getVSize()); ByteProvider byteProvider = new ByteArrayDataProvider(bytes); return byteProvider; } private boolean compareEssenceDescriptors(Map<UUID, List<DOMNodeObjectModel>> essenceDescriptorsMap, Map<UUID, DOMNodeObjectModel> eDLMap, IMFErrorLogger imfErrorLogger){ /** * An exhaustive compare of the eDLMap and essenceDescriptorsMap is required to ensure that the essence descriptors * in the EssenceDescriptorList and the EssenceDescriptors in the physical essence files corresponding to the * same source encoding element as indicated in the TrackFileResource and EDL are a good 
match. */ /** * The Maps passed in have the DOMObjectModel for every EssenceDescriptor in the EssenceDescriptorList in the CPL and * the essence descriptor in each of the essences referenced from every track file resource within each virtual track. */ /** * The following check ensures that we do not have a Track Resource that does not have a corresponding EssenceDescriptor element in the CPL's EDL */ Iterator<Map.Entry<UUID, List<DOMNodeObjectModel>>> essenceDescriptorsMapIterator = essenceDescriptorsMap.entrySet().iterator(); while(essenceDescriptorsMapIterator.hasNext()){ UUID sourceEncodingElement = essenceDescriptorsMapIterator.next().getKey(); if(!eDLMap.keySet().contains(sourceEncodingElement)){ imfErrorLogger.addError(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, String.format("EssenceDescriptor with Source Encoding Element %s in a track does not have a corresponding entry in the CPL's EDL", sourceEncodingElement.toString())); return false; } } /** * The following check ensures that we have atleast one EssenceDescriptor in a TrackFile that equals the corresponding EssenceDescriptor element in the CPL's EDL */ Iterator<Map.Entry<UUID, DOMNodeObjectModel>> eDLMapIterator = eDLMap.entrySet().iterator(); while(eDLMapIterator.hasNext()){ Map.Entry<UUID, DOMNodeObjectModel> entry = (Map.Entry<UUID, DOMNodeObjectModel>) eDLMapIterator.next(); List<DOMNodeObjectModel> domNodeObjectModels = essenceDescriptorsMap.get(entry.getKey()); if(domNodeObjectModels == null){ //This implies we did not find a single VirtualTrack that referenced this particular EssenceDescriptor in the EDL imfErrorLogger.addError(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, String.format("EssenceDescriptor with Id %s in the CPL's EDL is not referenced by a single resource within any of the VirtualTracks in the CPL, this violates the constraint in st2067-3:2013 section 6.1.10.1", 
entry.getKey().toString())); return false; } DOMNodeObjectModel referenceDOMNodeObjectModel = entry.getValue(); boolean intermediateResult = false; for(DOMNodeObjectModel domNodeObjectModel : domNodeObjectModels){ intermediateResult |= referenceDOMNodeObjectModel.equals(domNodeObjectModel); } if(!intermediateResult){ imfErrorLogger.addError(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, String.format("EssenceDescriptor with Id %s in the CPL's EDL doesn't match any EssenceDescriptors within the IMFTrackFile that references it", entry.getKey().toString())); return false; } } return true; } private static String usage() { StringBuilder sb = new StringBuilder(); sb.append(String.format("Usage:%n")); sb.append(String.format("%s <inputFile>%n", Composition.class.getName())); return sb.toString(); } public static void main(String[] args) throws Exception { if (args.length != 1) { logger.error(usage()); throw new IllegalArgumentException("Invalid parameters"); } File inputFile = new File(args[0]); logger.info(String.format("File Name is %s", inputFile.getName())); try { IMFErrorLogger imfErrorLogger = new IMFErrorLoggerImpl(); Composition composition = new Composition(inputFile, imfErrorLogger); logger.info(composition.toString()); for (ErrorLogger.ErrorObject errorObject : imfErrorLogger.getErrors()) { logger.error(errorObject.toString()); } List<? 
extends Composition.VirtualTrack> virtualTracks = composition.getVirtualTracks(); List<DOMNodeObjectModel> domNodeObjectModels = new ArrayList<>(); switch(composition.getCoreConstraintsVersion()) { case "org.smpte_ra.schemas.st2067_2_2013": org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType compositionPlaylistType = (org.smpte_ra.schemas.st2067_2_2013.CompositionPlaylistType) composition.getCompositionPlaylistTypeJAXBElement().getValue(); if (compositionPlaylistType.getEssenceDescriptorList() != null) { for (org.smpte_ra.schemas.st2067_2_2013.EssenceDescriptorBaseType essenceDescriptorBaseType : compositionPlaylistType.getEssenceDescriptorList().getEssenceDescriptor()) { for (Object object : essenceDescriptorBaseType.getAny()) { Node node = (Node) object; domNodeObjectModels.add(new DOMNodeObjectModel(node)); } } } else { logger.error("No essence descriptor list was found in CPL"); } for(Composition.VirtualTrack virtualTrack : virtualTracks) { CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013 virtualTrack_st2067_2_2013 = (CompositionModel_st2067_2_2013.VirtualTrack_st2067_2_2013) virtualTrack; List<org.smpte_ra.schemas.st2067_2_2013.TrackFileResourceType> resourceList = virtualTrack_st2067_2_2013.getResourceList(); if (resourceList.size() == 0) { throw new Exception(String.format("CPL file has a VirtualTrack with no resources which is invalid")); } } break; case "org.smpte_ra.schemas.st2067_2_2016": throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the 2013 CoreConstraints schema URI")); default: throw new IMFException(String.format("Please check the CPL document, currently we only support the following CoreConstraints schema URIs %s", composition.serializeIMFCoreConstaintsSchemasToString(supportedIMFCoreConstraintsSchemas))); } for(int i=0; i<domNodeObjectModels.size(); i++) { logger.info(String.format("ObjectModel of EssenceDescriptor-%d in the EssenceDescriptorList in the CPL: %n%s", 
i, domNodeObjectModels.get(i).toString())); } } catch(Exception e) { throw new Exception(e); } } }
cleanup
src/main/java/com/netflix/imflibrary/st2067_2/Composition.java
cleanup
<ide><path>rc/main/java/com/netflix/imflibrary/st2067_2/Composition.java <ide> } <ide> <ide> @Nullable <del> private static final String getCompositionNamespaceURI(ResourceByteRangeProvider resourceByteRangeProvider, IMFErrorLogger imfErrorLogger) throws IOException { <add> private static final String getCompositionNamespaceURI(ResourceByteRangeProvider resourceByteRangeProvider, @Nonnull IMFErrorLogger imfErrorLogger) throws IOException { <ide> <ide> String result = ""; <ide> <ide> DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); <ide> documentBuilderFactory.setNamespaceAware(true); <ide> DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); <del> documentBuilder.setErrorHandler(new ErrorHandler() { <add> documentBuilder.setErrorHandler(new ErrorHandler() <add> { <ide> @Override <del> public void warning(SAXParseException exception) throws SAXException { <add> public void warning(SAXParseException exception) throws SAXException <add> { <ide> imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.WARNING, exception.getMessage())); <ide> } <ide> <ide> @Override <del> public void error(SAXParseException exception) throws SAXException { <add> public void error(SAXParseException exception) throws SAXException <add> { <ide> imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.NON_FATAL, exception.getMessage())); <ide> } <ide> <ide> @Override <del> public void fatalError(SAXParseException exception) throws SAXException { <add> public void fatalError(SAXParseException exception) throws SAXException <add> { <ide> imfErrorLogger.addError(new ErrorLogger.ErrorObject(IMFErrorLogger.IMFErrors.ErrorCodes.IMF_CPL_ERROR, IMFErrorLogger.IMFErrors.ErrorLevels.FATAL, exception.getMessage())); <ide> } <ide> }); <ide> NodeList nodeList = null; <ide> 
for(String cplNamespaceURI : Composition.supportedCPLSchemaURIs) { <ide> nodeList = document.getElementsByTagNameNS(cplNamespaceURI, "CompositionPlaylist"); <del> if (nodeList != null <del> && nodeList.getLength() == 1) <add> if (nodeList != null && nodeList.getLength() == 1) <ide> { <ide> result = cplNamespaceURI; <ide> break; <ide> } <ide> catch(ParserConfigurationException | SAXException e) <ide> { <del> throw new IMFException(String.format("Error occurred while trying to determine the Composition Playlist Namespace URI, invalid CPL document Error Message : %s", e.getMessage())); <add> throw new IMFException(String.format("Error occurred while trying to determine the Composition Playlist Namespace URI, XML document appears to be invalid. Error Message : %s", e.getMessage())); <ide> } <ide> if(result.isEmpty()) { <ide> throw new IMFException(String.format("Please check the CPL document and namespace URI, currently we only support the following schema URIs %s", Utilities.serializeObjectCollectionToString(supportedCPLSchemaURIs))); <ide> <ide> logger.info(String.format("File Name is %s", inputFile.getName())); <ide> <add> IMFErrorLogger imfErrorLogger = new IMFErrorLoggerImpl(); <ide> try <ide> { <del> IMFErrorLogger imfErrorLogger = new IMFErrorLoggerImpl(); <ide> Composition composition = new Composition(inputFile, imfErrorLogger); <ide> logger.info(composition.toString()); <del> for (ErrorLogger.ErrorObject errorObject : imfErrorLogger.getErrors()) <del> { <del> logger.error(errorObject.toString()); <del> } <ide> <ide> List<? 
extends Composition.VirtualTrack> virtualTracks = composition.getVirtualTracks(); <ide> List<DOMNodeObjectModel> domNodeObjectModels = new ArrayList<>(); <ide> logger.info(String.format("ObjectModel of EssenceDescriptor-%d in the EssenceDescriptorList in the CPL: %n%s", i, domNodeObjectModels.get(i).toString())); <ide> } <ide> } <del> catch(Exception e) <del> { <del> throw new Exception(e); <add> finally <add> { <add> for (ErrorLogger.ErrorObject errorObject : imfErrorLogger.getErrors()) <add> { <add> logger.error(errorObject.toString()); <add> } <ide> } <ide> } <ide>
JavaScript
mit
ae7ef24196cf16abb0e14cd2207572c01835f752
0
naoina/lodash,r14r/fork_javascript_lodash,xiwc/lodash,timruffles/lodash,tquetano-r7/lodash,msmorgan/lodash,samuelbeek/lodash,rtorr/lodash,Andrey-Pavlov/lodash,zhangguangyong/lodash,tgriesser/lodash,neouser99/lodash,tejokumar/lodash,krahman/lodash,mjosh954/lodash,jasnell/lodash,phillipalexander/lodash,nsamarcos/lodash,AndBicScadMedia/lodash,greyhwndz/lodash,AneesMohammed/lodash,timruffles/lodash,nbellowe/lodash,chrootsu/lodash,chrootsu/lodash,imjerrybao/lodash,prawnsalad/lodash,stewx/lodash,joshuaprior/lodash,lekoaf/lodash,naoina/lodash,stewx/lodash,gutenye/lodash,r14r/fork_javascript_lodash,jshanson7/lodash,andersonaguiar/lodash,zhangguangyong/lodash,schnerd/lodash,hafeez-syed/lodash,hitesh97/lodash,steelsojka/lodash,ricardohbin/lodash,timruffles/lodash,krrg/lodash,greyhwndz/lodash,julianocomg/lodash,dgoncalves1/lodash,benweet/lodash,xiwc/lodash,jacwright/lodash,jacwright/lodash,tonyonodi/lodash,phillipalexander/lodash,steelsojka/lodash,Droogans/lodash,reggi/lodash,jshanson7/lodash,studiowangfei/lodash,Xotic750/lodash,leolin1229/lodash,developer-prosenjit/lodash,msmorgan/lodash,Jaspersoft/lodash,boneskull/lodash,mshoaibraja/lodash,enng0227/lodash,samuelbeek/lodash,nbellowe/lodash,AndBicScadMedia/lodash,Moykn/lodash,enng0227/lodash,Andrey-Pavlov/lodash,ricardohbin/lodash,jzning-martian/lodash,leolin1229/lodash,naoina/lodash,imjerrybao/lodash,af7/lodash,javiosyc/lodash,r14r-work/fork_javascript_lodash,transGLUKator/lodash,PhiLhoSoft/lodash,tquetano-r7/lodash,lekoaf/lodash,gdi2290/lodash,PhiLhoSoft/lodash,stewx/lodash,woldie/lodash,schnerd/lodash,tonyonodi/lodash,jshanson7/lodash,lekkas/lodash,mshoaibraja/lodash,neouser99/lodash,Droogans/lodash,ror/lodash,r14r/fork_javascript_lodash,greyhwndz/lodash,rlugojr/lodash,beaugunderson/lodash,javiosyc/lodash,ajefremovs/lodash,mjosh954/lodash,huyinghuan/lodash,MaxPRafferty/lodash,phillipalexander/lodash,andersonaguiar/lodash,felixshu/lodash,woldie/lodash,krahman/lodash,tejokumar/lodash,tgriesser/lodash,Droogans/lodash,rlugojr/l
odash,krrg/lodash,IveWong/lodash,lzheng571/lodash,af7/lodash,xixilive/lodash,ajefremovs/lodash,rtorr/lodash,hitesh97/lodash,hafeez-syed/lodash,studiowangfei/lodash,hafeez-syed/lodash,IveWong/lodash,codydaig/lodash,joshuaprior/lodash,af7/lodash,mshoaibraja/lodash,leolin1229/lodash,huyinghuan/lodash,studiowangfei/lodash,xixilive/lodash,Moykn/lodash,Andrey-Pavlov/lodash,schnerd/lodash,polarbird/lodash,julianocomg/lodash,Lottid/lodash,andersonaguiar/lodash,BernhardRode/lodash,tgriesser/lodash,jasnell/lodash,benweet/lodash,dgoncalves1/lodash,tquetano-r7/lodash,bnicart/lodash,prawnsalad/lodash,lzheng571/lodash,polarbird/lodash,MaxPRafferty/lodash,reggi/lodash,woldie/lodash,AndBicScadMedia/lodash,lekkas/lodash,jacwright/lodash,lekkas/lodash,krrg/lodash,Xotic750/lodash,BernhardRode/lodash,zestia/lodash,jzning-martian/lodash,shwaydogg/lodash,Lottid/lodash,rtorr/lodash,Xotic750/lodash,shwaydogg/lodash,therebelbeta/lodash,javiosyc/lodash,gdi2290/lodash,zhangguangyong/lodash,jzning-martian/lodash,bnicart/lodash,gutenye/lodash,ror/lodash,krahman/lodash,youprofit/lodash,justintung/lodash,PhiLhoSoft/lodash,shwaydogg/lodash,huyinghuan/lodash,mjosh954/lodash,justintung/lodash,polarbird/lodash,gutenye/lodash,felixshu/lodash,justintung/lodash,Moykn/lodash,xixilive/lodash,IveWong/lodash,jasnell/lodash,BernhardRode/lodash,therebelbeta/lodash,MaxPRafferty/lodash,zestia/lodash,bnicart/lodash,ror/lodash,prawnsalad/lodash,Jaspersoft/lodash,reggi/lodash,transGLUKator/lodash,xiwc/lodash,neouser99/lodash,Jaspersoft/lodash,developer-prosenjit/lodash,chrootsu/lodash,zestia/lodash,developer-prosenjit/lodash,codydaig/lodash,julianocomg/lodash,nsamarcos/lodash,AneesMohammed/lodash,imjerrybao/lodash,hitesh97/lodash,nbellowe/lodash,benweet/lodash,beaugunderson/lodash,nsamarcos/lodash,tejokumar/lodash,lekoaf/lodash,AneesMohammed/lodash,transGLUKator/lodash,Lottid/lodash,dgoncalves1/lodash,r14r-work/fork_javascript_lodash,youprofit/lodash,felixshu/lodash,ricardohbin/lodash,ajefremovs/lodash,joshuaprior
/lodash,youprofit/lodash,boneskull/lodash,r14r-work/fork_javascript_lodash,therebelbeta/lodash,enng0227/lodash,lzheng571/lodash,samuelbeek/lodash,codydaig/lodash
;(function() { /** Used as a safe reference for `undefined` in pre ES5 environments */ var undefined; /** Used as the size to cover large array optimizations */ var largeArraySize = 200; /** Used as the maximum length an array-like object */ var maxSafeInteger = Math.pow(2, 53) - 1; /** Used as a reference to the global object */ var root = (typeof global == 'object' && global) || this; /** Used to store Lo-Dash to test for bad extensions/shims */ var lodashBizarro = root.lodashBizarro; /** Method and object shortcuts */ var phantom = root.phantom, amd = root.define && define.amd, argv = root.process && process.argv, document = !phantom && root.document, body = root.document && root.document.body, create = Object.create, freeze = Object.freeze, JSON = root.JSON, noop = function() {}, params = root.arguments, push = Array.prototype.push, slice = Array.prototype.slice, system = root.system, toString = Object.prototype.toString; /** The file path of the Lo-Dash file to test */ var filePath = (function() { var min = 0, result = []; if (phantom) { result = params = phantom.args; } else if (system) { min = 1; result = params = system.args; } else if (argv) { min = 2; result = params = argv; } else if (params) { result = params; } var last = result[result.length - 1]; result = (result.length > min && !/test(?:\.js)?$/.test(last)) ? 
last : '../lodash.js'; if (!amd) { try { result = require('fs').realpathSync(result); } catch(e) { } try { result = require.resolve(result); } catch(e) { } } return result; }()); /** The `ui` object */ var ui = root.ui || (root.ui = { 'buildPath': filePath, 'loaderPath': '', 'isModularize': /\b(?:commonjs|(index|main)\.js|lodash-(?:amd|node)|modularize|npm)\b/.test(filePath), 'urlParams': {} }); /** The basename of the Lo-Dash file to test */ var basename = /[\w.-]+$/.exec(filePath)[0]; /** Detect if in a Java environment */ var isJava = !document && !!root.java; /** Used to indicate testing a modularized build */ var isModularize = ui.isModularize; /** Detect if testing `npm` modules */ var isNpm = isModularize && /\bnpm\b/.test([ui.buildPath, ui.urlParams.build]); /** Detects if running in PhantomJS */ var isPhantom = phantom || typeof callPhantom == 'function'; /** Detect if running in Rhino */ var isRhino = isJava && typeof global == 'function' && global().Array === root.Array; /** Used to test Web Workers */ var Worker = !(ui.isForeign || isModularize) && document && root.Worker; /** Used to test host objects in IE */ try { var xml = new ActiveXObject('Microsoft.XMLDOM'); } catch(e) { } /** Use a single "load" function */ var load = (typeof require == 'function' && !amd) ? 
require : (isJava && root.load) || noop; /** The unit testing framework */ var QUnit = (function() { return root.QUnit || ( root.addEventListener || (root.addEventListener = noop), root.setTimeout || (root.setTimeout = noop), root.QUnit = load('../vendor/qunit/qunit/qunit.js') || root.QUnit, addEventListener === noop && delete root.addEventListener, root.QUnit ); }()); /** Load and install QUnit Extras */ var qe = load('../vendor/qunit-extras/qunit-extras.js'); if (qe) { qe.runInContext(root); } /*--------------------------------------------------------------------------*/ // log params provided to `test.js` if (params) { console.log('test.js invoked with arguments: ' + JSON.stringify(slice.call(params))); } // exit early if going to run tests in a PhantomJS web page if (phantom && isModularize) { var page = require('webpage').create(); page.open(filePath, function(status) { if (status != 'success') { console.log('PhantomJS failed to load page: ' + filePath); phantom.exit(1); } }); page.onCallback = function(details) { var coverage = details.coverage; if (coverage) { var fs = require('fs'), cwd = fs.workingDirectory, sep = fs.separator; fs.write([cwd, 'coverage', 'coverage.json'].join(sep), JSON.stringify(coverage)); } phantom.exit(details.failed ? 1 : 0); }; page.onConsoleMessage = function(message) { console.log(message); }; page.onInitialized = function() { page.evaluate(function() { document.addEventListener('DOMContentLoaded', function() { QUnit.done(function(details) { details.coverage = window.__coverage__; callPhantom(details); }); }); }); }; return; } /*--------------------------------------------------------------------------*/ /** The `lodash` function to test */ var _ = root._ || (root._ = ( _ = load(filePath) || root._, _ = _._ || _, (_.runInContext ? 
_.runInContext(root) : _) )); /** Used to pass falsey values to methods */ var falsey = [, '', 0, false, NaN, null, undefined]; /** Used to pass empty values to methods */ var empties = [[], {}].concat(falsey.slice(1)); /** Used as the property name for wrapper metadata */ var expando = '__lodash@' + _.VERSION + '__'; /** Used to set property descriptors */ var defineProperty = (function() { try { var o = {}, func = Object.defineProperty, result = func(o, o, o) && func; } catch(e) { } return result; }()); /** Used to check problem JScript properties (a.k.a. the `[[DontEnum]]` bug) */ var shadowedProps = [ 'constructor', 'hasOwnProperty', 'isPrototypeOf', 'propertyIsEnumerable', 'toLocaleString', 'toString', 'valueOf' ]; /** Used to check problem JScript properties too */ var shadowedObject = _.invert(shadowedProps); /** Used to check whether methods support typed arrays */ var typedArrays = [ 'Float32Array', 'Int8Array', 'Int16Array', 'Int32Array', 'Uint8Array', 'Uint8ClampedArray', 'Uint16Array', 'Uint32Array' ]; /** Used to check for problems removing whitespace */ var whitespace = ' \t\x0B\f\xA0\ufeff\n\r\u2028\u2029\u1680\u180E\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000'; /** * Removes all own enumerable properties from a given object. * * @private * @param {Object} object The object to empty. */ function emptyObject(object) { _.forOwn(object, function(value, key, object) { delete object[key]; }); } /** * Sets a non-enumerable property value on `object`. * * Note: This function is used to avoid a bug in older versions of V8 where * overwriting non-enumerable built-ins makes them enumerable. * See https://code.google.com/p/v8/issues/detail?id=1623 * * @private * @param {Object} object The object augment. * @param {string} key The name of the property to set. * @param {*} value The property value. 
*/ function setProperty(object, key, value) { try { defineProperty(object, key, { 'configurable': true, 'enumerable': false, 'writable': true, 'value': value }); } catch(e) { object[key] = value; } } /** * Skips a given number of tests with a passing result. * * @private * @param {number} [count=1] The number of tests to skip. */ function skipTest(count) { count || (count = 1); while (count--) { ok(true, 'test skipped'); } } /*--------------------------------------------------------------------------*/ // setup values for Node.js (function() { if (amd) { return; } try { // add values from a different realm _.extend(_, require('vm').runInNewContext([ '({', "'_arguments': (function() { return arguments; }(1, 2, 3)),", "'_array': [1, 2, 3],", "'_boolean': new Boolean(false),", "'_date': new Date,", "'_errors': [new Error, new EvalError, new RangeError, new ReferenceError, new SyntaxError, new TypeError, new URIError],", "'_function': function() {},", "'_nan': NaN,", "'_null': null,", "'_number': new Number(0),", "'_object': { 'a': 1, 'b': 2, 'c': 3 },", "'_regexp': /x/,", "'_string': new String('a'),", "'_undefined': undefined", '})' ].join('\n'))); } catch(e) { return; } // load ES6 Set shim require('./asset/set'); // expose `baseEach` for better code coverage if (isModularize && !isNpm) { var path = require('path'), baseEach = require(path.join(path.dirname(filePath), 'internals', 'baseEach.js')); _._baseEach = baseEach.baseEach || baseEach; } // allow bypassing native checks var _fnToString = Function.prototype.toString; setProperty(Function.prototype, 'toString', function wrapper() { setProperty(Function.prototype, 'toString', _fnToString); var result = this === Set ? 
this.toString() : _fnToString.call(this); setProperty(Function.prototype, 'toString', wrapper); return result; }); // fake DOM setProperty(global, 'window', {}); setProperty(global.window, 'document', {}); setProperty(global.window.document, 'createDocumentFragment', function() { return { 'nodeType': 11 }; }); // fake `WinRTError` setProperty(global, 'WinRTError', Error); // add extensions Function.prototype._method = function() {}; // set bad shims var _isArray = Array.isArray; setProperty(Array, 'isArray', function() {}); var _now = Date.now; setProperty(Date, 'now', function() {}); var _create = create; setProperty(Object, 'create', function() {}); var _defineProperty = Object.defineProperty; setProperty(Object, 'defineProperty', function() {}); var _getPrototypeOf = Object.getPrototypeOf; setProperty(Object, 'getPrototypeOf', function() {}); var _keys = Object.keys; setProperty(Object, 'keys', function() {}); var _hasOwnProperty = Object.prototype.hasOwnProperty; setProperty(Object.prototype, 'hasOwnProperty', function(key) { if (key == '1' && _.isArguments(this) && _.isEqual(_.values(this), [0, 0])) { throw new Error; } return _hasOwnProperty.call(this, key); }); var _contains = String.prototype.contains; setProperty(String.prototype, 'contains', _contains ? 
function() {} : Boolean); // clear cache so Lo-Dash can be reloaded emptyObject(require.cache); // load Lo-Dash and expose it to the bad extensions/shims lodashBizarro = (lodashBizarro = require(filePath))._ || lodashBizarro; // restore native methods setProperty(Array, 'isArray', _isArray); setProperty(Date, 'now', _now); setProperty(Object, 'create', _create); setProperty(Object, 'defineProperty', _defineProperty); setProperty(Object, 'getPrototypeOf', _getPrototypeOf); setProperty(Object, 'keys', _keys); setProperty(Object.prototype, 'hasOwnProperty', _hasOwnProperty); setProperty(Function.prototype, 'toString', _fnToString); if (_contains) { setProperty(String.prototype, 'contains', _contains); } else { delete String.prototype.contains; } delete global.window; delete global.WinRTError; delete Function.prototype._method; }()); // add values from an iframe (function() { if (_._object || !document) { return; } var iframe = document.createElement('iframe'); iframe.frameBorder = iframe.height = iframe.width = 0; body.appendChild(iframe); var idoc = (idoc = iframe.contentDocument || iframe.contentWindow).document || idoc; idoc.write([ '<script>', 'parent._._arguments = (function() { return arguments; }(1, 2, 3));', 'parent._._array = [1, 2, 3];', 'parent._._boolean = new Boolean(false);', 'parent._._date = new Date;', "parent._._element = document.createElement('div');", 'parent._._errors = [new Error, new EvalError, new RangeError, new ReferenceError, new SyntaxError, new TypeError, new URIError];', 'parent._._function = function() {};', 'parent._._nan = NaN;', 'parent._._null = null;', 'parent._._number = new Number(0);', "parent._._object = { 'a': 1, 'b': 2, 'c': 3 };", 'parent._._regexp = /x/;', "parent._._string = new String('a');", 'parent._._undefined = undefined;', '<\/script>' ].join('\n')); idoc.close(); }()); // add web worker (function() { if (!Worker) { return; } var worker = new Worker('./asset/worker.js?t=' + (+new Date)); 
worker.addEventListener('message', function(e) { _._VERSION = e.data || ''; }, false); worker.postMessage(ui.buildPath); }()); /*--------------------------------------------------------------------------*/ // explicitly call `QUnit.module()` instead of `module()` // in case we are in a CLI environment QUnit.module(basename); (function() { test('supports loading ' + basename + ' as the "lodash" module', 1, function() { if (amd) { strictEqual((lodashModule || {}).moduleName, 'lodash'); } else { skipTest(); } }); test('supports loading ' + basename + ' with the Require.js "shim" configuration option', 1, function() { if (amd && /requirejs/.test(ui.loaderPath)) { strictEqual((shimmedModule || {}).moduleName, 'shimmed'); } else { skipTest(); } }); test('supports loading ' + basename + ' as the "underscore" module', 1, function() { if (amd) { strictEqual((underscoreModule || {}).moduleName, 'underscore'); } else { skipTest(); } }); asyncTest('supports loading ' + basename + ' in a web worker', 1, function() { if (Worker) { var limit = 15000, start = +new Date; var attempt = function() { var actual = _._VERSION; if ((new Date - start) < limit && typeof actual != 'string') { setTimeout(attempt, 16); return; } strictEqual(actual, _.VERSION); QUnit.start(); }; attempt(); } else { skipTest(); QUnit.start(); } }); test('should not add `Function.prototype` extensions to lodash', 1, function() { if (lodashBizarro) { ok(!('_method' in lodashBizarro)); } else { skipTest(); } }); test('should avoid overwritten native methods', 9, function() { function Foo() {} function message(methodName) { return '`_.' 
+ methodName + '` should avoid overwritten native methods'; } var object = { 'a': 1 }, otherObject = { 'b': 2 }, largeArray = _.times(largeArraySize, _.constant(object)); if (lodashBizarro) { try { var actual = [lodashBizarro.isArray([]), lodashBizarro.isArray({ 'length': 0 })]; } catch(e) { actual = null; } deepEqual(actual, [true, false], message('Array.isArray')); try { actual = lodashBizarro.now(); } catch(e) { actual = null; } ok(typeof actual == 'number', message('Date.now')); try { actual = [lodashBizarro.create(Foo.prototype, object), lodashBizarro.create()]; } catch(e) { actual = null; } ok(actual[0] instanceof Foo, message('Object.create')); deepEqual(actual[1], {}, message('Object.create')); try { actual = lodashBizarro.bind(function() { return this.a; }, object); } catch(e) { actual = null; } ok(!(expando in actual), message('Object.defineProperty')); try { actual = [lodashBizarro.isPlainObject({}), lodashBizarro.isPlainObject([])]; } catch(e) { actual = null; } deepEqual(actual, [true, false], message('Object.getPrototypeOf')); try { actual = [lodashBizarro.keys(object), lodashBizarro.keys()]; } catch(e) { actual = null; } deepEqual(actual, [['a'], []], message('Object.keys')); try { actual = [ lodashBizarro.difference([object, otherObject], largeArray), lodashBizarro.intersection(largeArray, [object]), lodashBizarro.uniq(largeArray) ]; } catch(e) { actual = null; } deepEqual(actual, [[otherObject], [object], [object]], message('Set')); try { actual = lodashBizarro.contains('abc', 'c'); } catch(e) { actual = null; } strictEqual(actual, true, message('String#contains')); } else { skipTest(9); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash constructor'); (function() { test('creates a new instance when called without the `new` operator', 1, function() { ok(_() instanceof _); }); test('should return provided `lodash` instances', 1,function() { var wrapped = _(false); strictEqual(_(wrapped), 
wrapped); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.after'); (function() { function after(n, times) { var count = 0; _.times(times, _.after(n, function() { count++; })); return count; } test('should create a function that executes `func` after `n` calls', 4, function() { strictEqual(after(5, 5), 1, 'after(n) should execute `func` after being called `n` times'); strictEqual(after(5, 4), 0, 'after(n) should not execute `func` unless called `n` times'); strictEqual(after(0, 0), 0, 'after(0) should not execute `func` immediately'); strictEqual(after(0, 1), 1, 'after(0) should execute `func` when called once'); }); test('should coerce non-finite `n` values to `0`', 3, function() { _.each([-Infinity, NaN, Infinity], function(n) { strictEqual(after(n, 1), 1); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.assign'); (function() { test('should assign properties of a source object to the destination object', 1, function() { deepEqual(_.assign({ 'a': 1 }, { 'b': 2 }), { 'a': 1, 'b': 2 }); }); test('should assign own source properties', 1, function() { function Foo() { this.a = 1; this.c = 3; } Foo.prototype.b = 2; deepEqual(_.assign({}, new Foo), { 'a': 1, 'c': 3 }); }); test('should accept multiple source objects', 2, function() { var expected = { 'a': 1, 'b': 2, 'c': 3 }; deepEqual(_.assign({ 'a': 1 }, { 'b': 2 }, { 'c': 3 }), expected); deepEqual(_.assign({ 'a': 1 }, { 'b': 2, 'c': 2 }, { 'c': 3 }), expected); }); test('should overwrite source properties', 1, function() { var expected = { 'a': 3, 'b': 2, 'c': 1 }; deepEqual(_.assign({ 'a': 1, 'b': 2 }, expected), expected); }); test('should assign source properties with `null` and `undefined` values', 1, function() { var expected = { 'a': null, 'b': undefined, 'c': null }; deepEqual(_.assign({ 'a': 1, 'b': 2 }, expected), expected); }); test('should work with a callback', 1, function() { 
var actual = _.assign({ 'a': 1, 'b': 2 }, { 'a': 3, 'c': 3 }, function(a, b) { return typeof a == 'undefined' ? b : a; }); deepEqual(actual, { 'a': 1, 'b': 2, 'c': 3 }); }); test('should be aliased', 1, function() { strictEqual(_.extend, _.assign); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.at'); (function() { var args = arguments; test('should return `undefined` for nonexistent keys', 1, function() { var actual = _.at(['a', 'b', 'c'], [2, 4, 0]); deepEqual(actual, ['c', undefined, 'a']); }); test('should return an empty array when no keys are provided', 1, function() { deepEqual(_.at(['a', 'b', 'c']), []); }); test('should accept multiple key arguments', 1, function() { var actual = _.at(['a', 'b', 'c', 'd'], 3, 0, 2); deepEqual(actual, ['d', 'a', 'c']); }); test('should work with an `arguments` object for `collection`', 1, function() { var actual = _.at(args, [2, 0]); deepEqual(actual, ['c', 'a']); }); test('should work with an object for `collection`', 1, function() { var actual = _.at({ 'a': 1, 'b': 2, 'c': 3 }, ['c', 'a']); deepEqual(actual, [3, 1]); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection`', 1, function() { deepEqual(_.at(collection, [2, 0]), ['c', 'a']); }); }); }('a', 'b', 'c')); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.bind'); (function() { function fn() { var args = [this]; push.apply(args, arguments); return args; } test('should bind a function to an object', 1, function() { var object = {}, bound = _.bind(fn, object); deepEqual(bound('a'), [object, 'a']); }); test('should accept a falsey `thisArg` argument', 1, function() { var values = _.reject(falsey.slice(1), function(value) { return value == null; }), expected = _.map(values, function(value) { return [value]; }); var actual = _.map(values, function(value) { try { var 
bound = _.bind(fn, value); return bound(); } catch(e) { } }); ok(_.every(actual, function(value, index) { return _.isEqual(value, expected[index]); })); }); test('should bind a function to `null` or `undefined`', 6, function() { var bound = _.bind(fn, null), actual = bound('a'); ok(actual[0] === null || actual[0] && actual[0].Array); strictEqual(actual[1], 'a'); _.times(2, function(index) { bound = index ? _.bind(fn, undefined) : _.bind(fn); actual = bound('b'); ok(actual[0] === undefined || actual[0] && actual[0].Array); strictEqual(actual[1], 'b'); }); }); test('should partially apply arguments ', 4, function() { var object = {}, bound = _.bind(fn, object, 'a'); deepEqual(bound(), [object, 'a']); bound = _.bind(fn, object, 'a'); deepEqual(bound('b'), [object, 'a', 'b']); bound = _.bind(fn, object, 'a', 'b'); deepEqual(bound(), [object, 'a', 'b']); deepEqual(bound('c', 'd'), [object, 'a', 'b', 'c', 'd']); }); test('should support placeholders', 4, function() { if (!isModularize) { var object = {}, bound = _.bind(fn, object, _, 'b', _); deepEqual(bound('a', 'c'), [object, 'a', 'b', 'c']); deepEqual(bound('a'), [object, 'a', 'b', undefined]); deepEqual(bound('a', 'c', 'd'), [object, 'a', 'b', 'c', 'd']); deepEqual(bound(), [object, undefined, 'b', undefined]); } else { skipTest(4); } }); test('should create a function with a `length` of `0`', 2, function() { var fn = function(a, b, c) {}, bound = _.bind(fn, {}); strictEqual(bound.length, 0); bound = _.bind(fn, {}, 1); strictEqual(bound.length, 0); }); test('should ignore binding when called with the `new` operator', 3, function() { function Foo() { return this; } var bound = _.bind(Foo, { 'a': 1 }), newBound = new bound; strictEqual(newBound.a, undefined); strictEqual(bound().a, 1); ok(newBound instanceof Foo); }); test('ensure `new bound` is an instance of `func`', 2, function() { function Foo(value) { return value && object; } var bound = _.bind(Foo), object = {}; ok(new bound instanceof Foo); strictEqual(new 
bound(true), object); }); test('should append array arguments to partially applied arguments (test in IE < 9)', 1, function() { var object = {}, bound = _.bind(fn, object, 'a'); deepEqual(bound(['b'], 'c'), [object, 'a', ['b'], 'c']); }); test('should return a wrapped value when chaining', 2, function() { if (!isNpm) { var object = {}, bound = _(fn).bind({}, 'a', 'b'); ok(bound instanceof _); var actual = bound.value()('c'); deepEqual(actual, [object, 'a', 'b', 'c']); } else { skipTest(2); } }); test('should rebind functions correctly', 3, function() { var object1 = {}, object2 = {}, object3 = {}; var bound1 = _.bind(fn, object1), bound2 = _.bind(bound1, object2, 'a'), bound3 = _.bind(bound1, object3, 'b'); deepEqual(bound1(), [object1]); deepEqual(bound2(), [object1, 'a']); deepEqual(bound3(), [object1, 'b']); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.bindAll'); (function() { var args = arguments; test('should bind all methods of `object`', 1, function() { function Foo() { this._a = 1; this._b = 2; this.a = function() { return this._a; }; } Foo.prototype.b = function() { return this._b; }; var object = new Foo; _.bindAll(object); var actual = _.map(_.functions(object), function(methodName) { return object[methodName].call({}); }); deepEqual(actual, [1, 2]); }); test('should accept individual method names', 1, function() { var object = { '_a': 1, '_b': 2, '_c': 3, 'a': function() { return this._a; }, 'b': function() { return this._b; }, 'c': function() { return this._c; } }; _.bindAll(object, 'a', 'b'); var actual = _.map(_.functions(object), function(methodName) { return object[methodName].call({}); }); deepEqual(actual, [1, 2, undefined]); }); test('should accept arrays of method names', 1, function() { var object = { '_a': 1, '_b': 2, '_c': 3, '_d': 4, 'a': function() { return this._a; }, 'b': function() { return this._b; }, 'c': function() { return this._c; }, 'd': function() { return this._d; 
} }; _.bindAll(object, ['a', 'b'], ['c']); var actual = _.map(_.functions(object), function(methodName) { return object[methodName].call({}); }); deepEqual(actual, [1, 2, 3, undefined]); }); test('should work with an array `object` argument', 1, function() { var array = ['push', 'pop']; _.bindAll(array); strictEqual(array.pop, Array.prototype.pop); }); test('should work with `arguments` objects as secondary arguments', 1, function() { var object = { '_a': 1, 'a': function() { return this._a; } }; _.bindAll(object, args); var actual = _.map(_.functions(object), function(methodName) { return object[methodName].call({}); }); deepEqual(actual, [1]); }); }('a')); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.bindKey'); (function() { test('should work when the target function is overwritten', 2, function() { var object = { 'name': 'fred', 'greet': function(greeting) { return this.name + ' says: ' + greeting; } }; var bound = _.bindKey(object, 'greet', 'hi'); strictEqual(bound(), 'fred says: hi'); object.greet = function(greeting) { return this.name + ' says: ' + greeting + '!'; }; strictEqual(bound(), 'fred says: hi!'); }); test('should support placeholders', 4, function() { var object = { 'fn': function fn(a, b, c, d) { return slice.call(arguments); } }; if (!isModularize) { var bound = _.bindKey(object, 'fn', _, 'b', _); deepEqual(bound('a', 'c'), ['a', 'b', 'c']); deepEqual(bound('a'), ['a', 'b', undefined]); deepEqual(bound('a', 'c', 'd'), ['a', 'b', 'c', 'd']); deepEqual(bound(), [undefined, 'b', undefined]); } else { skipTest(4); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('case methods'); _.each(['camel', 'kebab', 'snake'], function(caseName) { var methodName = caseName + 'Case', func = _[methodName]; var expected = (function() { switch (caseName) { case 'camel': return 'helloWorld'; case 'kebab': return 'hello-world'; case 'snake': return 
'hello_world'; } }()); var burredLetters = [ '\xC0', '\xC1', '\xC2', '\xC3', '\xC4', '\xC5', '\xC6', '\xC7', '\xC8', '\xC9', '\xCA', '\xCB', '\xCC', '\xCD', '\xCE', '\xCF', '\xD0', '\xD1', '\xD2', '\xD3', '\xD4', '\xD5', '\xD6', '\xD7', '\xD8', '\xD9', '\xDA', '\xDB', '\xDC', '\xDD', '\xDE', '\xDF', '\xE0', '\xE1', '\xE2', '\xE3', '\xE4', '\xE5', '\xE6', '\xE7', '\xE8', '\xE9', '\xEA', '\xEB', '\xEC', '\xED', '\xEE', '\xEF', '\xF0', '\xF1', '\xF2', '\xF3', '\xF4', '\xF5', '\xF6', '\xF7', '\xF8', '\xF9', '\xFA', '\xFB', '\xFC', '\xFD', '\xFE', '\xFF' ]; var deburredLetters = [ 'A', 'A', 'A', 'A', 'A', 'A', 'AE', 'C', 'E', 'E', 'E', 'E', 'I', 'I', 'I', 'I', 'D', 'N', 'O', 'O', 'O', 'O', 'O', '', 'O', 'U', 'U', 'U', 'U', 'Y', 'Th', 'ss', 'a', 'a', 'a', 'a', 'a', 'a', 'ae', 'c', 'e', 'e', 'e', 'e', 'i', 'i', 'i', 'i', 'd', 'n', 'o', 'o', 'o', 'o', 'o', '', 'o', 'u', 'u', 'u', 'u', 'y', 'th', 'y' ]; test('`_.' + methodName + '` should convert `string` to ' + caseName + ' case', 4, function() { _.each(['Hello world', 'helloWorld', '--hello-world', '__hello_world__'], function(string) { strictEqual(func(string), expected); }); }); test('`_.' + methodName + '` should handle double-converting strings', 4, function() { _.each(['Hello world', 'helloWorld', '--hello-world', '__hello_world__'], function(string) { strictEqual(func(func(string)), expected); }); }); test('`_.' + methodName + '` should deburr letters', 1, function() { var actual = _.map(burredLetters, function(burred, index) { var isCamel = caseName == 'camel', deburrLetter = deburredLetters[index]; var string = isCamel ? func('z' + burred) : func(burred); var deburredString = isCamel ? 'z' + deburrLetter : deburrLetter.toLowerCase(); return string == deburredString; }); ok(_.every(actual, _.identity)); }); test('`_.' 
+ methodName + '` should coerce `string` to a string', 2, function() { var string = 'Hello world'; strictEqual(func(Object(string)), expected); strictEqual(func({ 'toString': _.constant(string) }), expected); }); test('`_.' + methodName + '` should return an unwrapped value when chaining', 1, function() { if (!isNpm) { var actual = _('hello world')[methodName](); strictEqual(actual, expected); } else { skipTest(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.camelCase'); (function() { test('should work with numbers', 3, function() { strictEqual(_.camelCase('too legit 2 quit'), 'tooLegit2Quit'); strictEqual(_.camelCase('walk 500 miles'), 'walk500Miles'); strictEqual(_.camelCase('xhr2 request'), 'xhr2Request'); }); test('should handle acronyms', 3, function() { strictEqual(_.camelCase('safe HTML'), 'safeHTML'); strictEqual(_.camelCase('escape HTML entities'), 'escapeHTMLEntities'); strictEqual(_.camelCase('XMLHttpRequest'), 'xmlHttpRequest'); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.capitalize'); (function() { test('should capitalize the first character of a string', 3, function() { strictEqual(_.capitalize('fred'), 'Fred'); strictEqual(_.capitalize('Fred'), 'Fred'); strictEqual(_.capitalize(' fred'), ' fred'); }); test('should return an unwrapped value when chaining', 1, function() { if (!isNpm) { var actual = _('fred').capitalize(); strictEqual(actual, 'Fred'); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.chain'); (function() { test('should return a wrapped value', 1, function() { if (!isNpm) { var actual = _.chain({ 'a': 0 }); ok(actual instanceof _); } else { skipTest(); } }); test('should return the existing wrapper when chaining', 1, function() { if (!isNpm) { var wrapper = _({ 'a': 0 }); strictEqual(wrapper.chain(), wrapper); } else { 
skipTest(); } }); test('should enable chaining of methods that return unwrapped values by default', 6, function() { if (!isNpm) { var array = ['c', 'b', 'a']; ok(_.chain(array).first() instanceof _); ok(_(array).chain().first() instanceof _); ok(_.chain(array).isArray() instanceof _); ok(_(array).chain().isArray() instanceof _); ok(_.chain(array).sortBy().first() instanceof _); ok(_(array).chain().sortBy().first() instanceof _); } else { skipTest(6); } }); test('should chain multiple methods', 6, function() { if (!isNpm) { _.times(2, function(index) { var array = ['one two three four', 'five six seven eight', 'nine ten eleven twelve'], expected = { ' ': 9, 'e': 14, 'f': 2, 'g': 1, 'h': 2, 'i': 4, 'l': 2, 'n': 6, 'o': 3, 'r': 2, 's': 2, 't': 5, 'u': 1, 'v': 4, 'w': 2, 'x': 1 }, wrapper = index ? _(array).chain() : _.chain(array); var actual = wrapper .chain() .map(function(value) { return value.split(''); }) .flatten() .reduce(function(object, chr) { object[chr] || (object[chr] = 0); object[chr]++; return object; }, {}) .value(); deepEqual(actual, expected); array = [1, 2, 3, 4, 5, 6]; wrapper = index ? _(array).chain() : _.chain(array); actual = wrapper .chain() .filter(function(n) { return n % 2; }) .reject(function(n) { return n % 3 == 0; }) .sortBy(function(n) { return -n; }) .value(); deepEqual(actual, [5, 1]); array = [3, 4]; wrapper = index ? 
_(array).chain() : _.chain(array); actual = wrapper .reverse() .concat([2, 1]) .unshift(5) .tap(function(value) { value.pop(); }) .map(function(n) { return n * n; }) .value(); deepEqual(actual,[25, 16, 9, 4]); }); } else { skipTest(6); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('cloning'); (function() { function Klass() { this.a = 1; } Klass.prototype = { 'b': 1 }; var nonCloneable = { 'a DOM element': body, 'a function': Klass }; var objects = { 'an `arguments` object': arguments, 'an array': ['a', 'b', 'c', ''], 'an array-like-object': { '0': 'a', '1': 'b', '2': 'c', '3': '', 'length': 5 }, 'boolean': false, 'boolean object': Object(false), 'an Error object': new Error('text'), 'an EvalError object': new EvalError('text'), 'a RangeError object': new RangeError('text'), 'a ReferenceError object': new ReferenceError('text'), 'a SyntaxError object': new SyntaxError('text'), 'a TypeError object': new TypeError('text'), 'a URIError object': new URIError('text'), 'a Klass instance': new Klass, 'an object': { 'a': 0, 'b': 1, 'c': 3 }, 'an object with object values': { 'a': /a/, 'b': ['B'], 'c': { 'C': 1 } }, 'an object from another document': _._object || {}, 'null': null, 'a number': 3, 'a number object': Object(3), 'a regexp': /a/gim, 'a string': 'a', 'a string object': Object('a'), 'undefined': undefined }; objects['an array'].length = 5; test('`_.clone` should shallow clone by default', 2, function() { var expected = [{ 'a': 0 }, { 'b': 1 }], actual = _.clone(expected); deepEqual(actual, expected); ok(actual !== expected && actual[0] === expected[0]); }); test('`_.cloneDeep` should deep clone objects with circular references', 1, function() { var object = { 'foo': { 'b': { 'foo': { 'c': { } } } }, 'bar': { } }; object.foo.b.foo.c = object; object.bar.b = object.foo.b; var clone = _.cloneDeep(object); ok(clone.bar.b === clone.foo.b && clone === clone.foo.b.foo.c && clone !== object); }); _.each(['clone', 
'cloneDeep'], function(methodName) { var func = _[methodName], isDeep = methodName == 'cloneDeep', klass = new Klass; _.forOwn(objects, function(object, key) { test('`_.' + methodName + '` should clone ' + key, 2, function() { var clone = func(object); ok(_.isEqual(object, clone)); if (_.isObject(object)) { notStrictEqual(clone, object); } else { strictEqual(clone, object); } }); }); _.forOwn(nonCloneable, function(object, key) { test('`_.' + methodName + '` should not clone ' + key, 1, function() { strictEqual(func(object), object); }); }); _.each(typedArrays, function(type) { test('`_.' + methodName + '` should clone ' + type + ' arrays', 2, function() { var Ctor = root[type] || Array, buffer = Ctor == Array ? 4 : new ArrayBuffer(4), array = new Ctor(buffer), actual = func(array); deepEqual(actual, array); notStrictEqual(actual, array); }); }); test('`_.' + methodName + '` should clone problem JScript properties (test in IE < 9)', 2, function() { var actual = func(shadowedObject); deepEqual(actual, shadowedObject); notStrictEqual(actual, shadowedObject); }); test('`_.' + methodName + '` should perform a ' + (isDeep ? 'deep' : 'shallow') + ' clone when used as a callback for `_.map`', 2, function() { var expected = [{ 'a': [0] }, { 'b': [1] }], actual = _.map(expected, func); deepEqual(actual, expected); if (isDeep) { ok(actual[0] !== expected[0] && actual[0].a !== expected[0].a && actual[1].b !== expected[1].b); } else { ok(actual[0] !== expected[0] && actual[0].a === expected[0].a && actual[1].b === expected[1].b); } }); test('`_.' + methodName + '` should pass the correct `callback` arguments', 1, function() { var argsList = []; func(klass, function() { argsList.push(slice.call(arguments)); }); deepEqual(argsList, isDeep ? [[klass], [1, 'a']] : [[klass]]); }); test('`_.' + methodName + '` should support the `thisArg` argument', 1, function() { var actual = func('a', function(value) { return this[value]; }, { 'a': 'A' }); strictEqual(actual, 'A'); }); test('`_.' 
+ methodName + '` should handle cloning if `callback` returns `undefined`', 1, function() { var actual = func({ 'a': { 'b': 'c' } }, _.noop); deepEqual(actual, { 'a': { 'b': 'c' } }); }); test('`_.' + methodName + '` should clone `index` and `input` array properties', 2, function() { var array = /x/.exec('vwxyz'), actual = func(array); strictEqual(actual.index, 2); strictEqual(actual.input, 'vwxyz'); }); test('`_.' + methodName + '` should clone `lastIndex` regexp property', 1, function() { // avoid a regexp literal for older Opera and use `exec` for older Safari var regexp = RegExp('x', 'g'); regexp.exec('vwxyz'); var actual = func(regexp); strictEqual(actual.lastIndex, 3); }); test('`_.' + methodName + '` should not error on DOM elements', 1, function() { if (document) { var element = document.createElement('div'); try { strictEqual(func(element), element); } catch(e) { ok(false); } } else { skipTest(); } }); test('`_.' + methodName + '` should return a unwrapped value when chaining', 2, function() { if (!isNpm) { var object = objects['an object'], actual = _(object)[methodName](); deepEqual(actual, object); notStrictEqual(actual, object); } else { skipTest(2); } }); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.compact'); (function() { test('should filter falsey values', 1, function() { var array = ['0', '1', '2']; deepEqual(_.compact(falsey.concat(array)), array); }); test('should return a wrapped value when chaining', 1, function() { if (!isNpm) { var actual = _(falsey).compact(); ok(actual instanceof _); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.compose'); (function() { test('should create a function that is the composition of the provided functions', 1, function() { var realNameMap = { 'pebbles': 'penelope' }; var format = function(name) { name = realNameMap[name.toLowerCase()] || name; return 
name.charAt(0).toUpperCase() + name.slice(1).toLowerCase(); }; var greet = function(formatted) { return 'Hiya ' + formatted + '!'; }; var welcome = _.compose(greet, format); strictEqual(welcome('pebbles'), 'Hiya Penelope!'); }); test('should return a new function', 1, function() { notStrictEqual(_.compose(_.noop), _.noop); }); test('should return a wrapped value when chaining', 1, function() { if (!isNpm) { var actual = _(_.noop).compose(); ok(actual instanceof _); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.constant'); (function() { test('should create a function that always returns `value`', 1, function() { var object = { 'a': 1 }, values = falsey.concat(null, null, 1, 'a'), constant = _.constant(object), expected = _.map(values, function() { return true; }); var actual = _.map(values, function(value, index) { if (index == 0) { var result = constant(); } else if (index == 1) { result = constant.call({}); } else { result = constant(value); } return result === object; }); deepEqual(actual, expected); }); test('should work with falsey values', 1, function() { var expected = _.map(falsey, function() { return true; }); var actual = _.map(falsey, function(value, index) { var constant = index ? 
_.constant(value) : _.constant(); return constant() === value || _.isNaN(value); }); deepEqual(actual, expected); }); test('should return a wrapped value when chaining', 1, function() { if (!isNpm) { var actual = _(true).constant(); ok(actual instanceof _); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.contains'); (function() { _.each({ 'an `arguments` object': arguments, 'an array': [1, 2, 3, 4], 'an object': { 'a': 1, 'b': 2, 'c': 3, 'd': 4 }, 'a string': '1234' }, function(collection, key) { var values = _.toArray(collection); test('should work with ' + key + ' and return `true` for matched values', 1, function() { strictEqual(_.contains(collection, 3), true); }); test('should work with ' + key + ' and return `false` for unmatched values', 1, function() { strictEqual(_.contains(collection, 5), false); }); test('should work with ' + key + ' and a positive `fromIndex`', 2, function() { strictEqual(_.contains(collection, values[2], 2), true); strictEqual(_.contains(collection, values[1], 2), false); }); test('should work with ' + key + ' and a `fromIndex` >= `collection.length`', 12, function() { _.each([6, 8, Math.pow(2, 32), Infinity], function(fromIndex) { strictEqual(_.contains(collection, 1, fromIndex), false); strictEqual(_.contains(collection, undefined, fromIndex), false); strictEqual(_.contains(collection, '', fromIndex), false); }); }); test('should work with ' + key + ' and treat falsey `fromIndex` values as `0`', 1, function() { var expected = _.map(falsey, _.constant(true)); var actual = _.map(falsey, function(fromIndex) { return _.contains(collection, values[0], fromIndex); }); deepEqual(actual, expected); }); test('should work with ' + key + ' and treat non-number `fromIndex` values as `0`', 1, function() { strictEqual(_.contains(collection, values[0], '1'), true); }); test('should work with ' + key + ' and a negative `fromIndex`', 2, function() { 
strictEqual(_.contains(collection, values[2], -2), true); strictEqual(_.contains(collection, values[1], -2), false); }); test('should work with ' + key + ' and a negative `fromIndex` <= negative `collection.length`', 3, function() { _.each([-4, -6, -Infinity], function(fromIndex) { strictEqual(_.contains(collection, values[0], fromIndex), true); }); }); test('should work with ' + key + ' and return an unwrapped value when chaining', 1, function() { if (!isNpm) { strictEqual(_(collection).contains(3), true); } else { skipTest(); } }); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection`', 2, function() { strictEqual(_.contains(collection, 'bc'), true); strictEqual(_.contains(collection, 'd'), false); }); }); test('should not be possible to perform a binary search', 1, function() { strictEqual(_.contains([3, 2, 1], 3, true), true); }); test('should be aliased', 1, function() { strictEqual(_.include, _.contains); }); }(1, 2, 3, 4)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.countBy'); (function() { var array = [4.2, 6.1, 6.4]; test('should work with a callback', 1, function() { var actual = _.countBy(array, function(num) { return Math.floor(num); }, Math); deepEqual(actual, { '4': 1, '6': 2 }); }); test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.countBy([4, 6, 6]); deepEqual(actual, { '4': 1, '6': 2 }); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.countBy(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [4.2, 0, array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.countBy(array, function(num) { return this.floor(num); }, Math); deepEqual(actual, { '4': 1, '6': 2 }); }); test('should only add values to own, not inherited, properties', 2, function() { var actual = 
_.countBy([4.2, 6.1, 6.4], function(num) { return Math.floor(num) > 4 ? 'hasOwnProperty' : 'constructor'; }); deepEqual(actual.constructor, 1); deepEqual(actual.hasOwnProperty, 2); }); test('should work with a string for `callback`', 1, function() { var actual = _.countBy(['one', 'two', 'three'], 'length'); deepEqual(actual, { '3': 2, '5': 1 }); }); test('should work with an object for `collection`', 1, function() { var actual = _.countBy({ 'a': 4.2, 'b': 6.1, 'c': 6.4 }, function(num) { return Math.floor(num); }); deepEqual(actual, { '4': 1, '6': 2 }); }); test('should work with a number for `callback`', 2, function() { var array = [ [1, 'a'], [2, 'a'], [2, 'b'] ]; deepEqual(_.countBy(array, 0), { '1': 1, '2': 2 }); deepEqual(_.countBy(array, 1), { 'a': 2, 'b': 1 }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.create'); (function() { test('should create an object that inherits from the given `prototype` object', 3, function() { function Shape() { this.x = 0; this.y = 0; } function Circle() { Shape.call(this); } Circle.prototype = _.create(Shape.prototype); Circle.prototype.constructor = Circle; var actual = new Circle; ok(actual instanceof Circle); ok(actual instanceof Shape); notStrictEqual(Circle.prototype, Shape.prototype); }); test('should assign `properties` to the created object', 3, function() { function Shape() { this.x = 0; this.y = 0; } function Circle() { Shape.call(this); } var expected = { 'constructor': Circle, 'radius': 0 }; Circle.prototype = _.create(Shape.prototype, expected); var actual = new Circle; ok(actual instanceof Circle); ok(actual instanceof Shape); deepEqual(Circle.prototype, expected); }); test('should accept a falsey `prototype` argument', 1, function() { var expected = _.map(falsey, function() { return {}; }); var actual = _.map(falsey, function(value, index) { return index ? 
// Tail of `lodash.create` (expression continues from the previous line), then
// the `lodash.callback` module: falsey `thisArg`, nullish `func` → `_.identity`,
// `_.matches`/`_.property` shorthands, and bound/decompiled-function metadata
// behavior gated on `_.support.funcDecomp` / `_.support.funcNames`.
_.create(value) : _.create(); }); deepEqual(actual, expected); }); test('should ignore primitive `prototype` arguments and use an empty object instead', 1, function() { var primitives = [true, null, 1, 'a', undefined], expected = _.map(primitives, _.constant(true)); var actual = _.map(primitives, function(value, index) { return _.isPlainObject(index ? _.create(value) : _.create()); }); deepEqual(actual, expected); }); }());
/*--------------------------------------------------------------------------*/
// NOTE(review): `root`, `slice`, `push`, `defineProperty`, `expando`,
// `lodashBizarro` and `skipTest` are suite-local fixtures/helpers defined
// earlier in the file.
QUnit.module('lodash.callback'); (function() { test('should create a callback with a falsey `thisArg`', 1, function() { var values = _.map(falsey, function(value) { return Object(value == null ? root : value); }); var actual = _.map(values, function(value) { var callback = _.callback(function() { return this; }, value); return callback(); }); deepEqual(actual, values); }); test('should return `_.identity` when `func` is nullish', 2, function() { var object = {}; _.each([null, undefined], function(value) { var callback = _.callback(value); strictEqual(callback(object), object); }); }); test('should not error when `func` is nullish and a `thisArg` is provided', 2, function() { var object = {}; _.each([null, undefined], function(value) { try { var callback = _.callback(value, {}); strictEqual(callback(object), object); } catch(e) { ok(false); } }); }); test('should return a callback created by `_.matches` when `func` is an object', 2, function() { var callback = _.callback({ 'a': 1 }); strictEqual(callback({ 'a': 1, 'b': 2 }), true); strictEqual(callback({}), false); }); test('should return a callback created by `_.property` when `func` is a number or string', 2, function() { var array = ['a'], callback = _.callback(0); strictEqual(callback(array), 'a'); callback = _.callback('0'); strictEqual(callback(array), 'a'); }); test('should work without an `argCount`', 1, function() { var args, expected = ['a', 'b', 'c', 'd', 'e']; var callback = _.callback(function() { args =
// (continuation) callback interop with `_.partial`/`_.partialRight`,
// native `Function#bind` detection, and `expando` metadata writes.
slice.call(arguments); }); callback.apply(null, expected); deepEqual(args, expected); }); test('should work with functions created by `_.partial` and `_.partialRight`', 2, function() { function fn() { var result = [this.a]; push.apply(result, arguments); return result; } var expected = [1, 2, 3], object = { 'a': 1 }, callback = _.callback(_.partial(fn, 2), object); deepEqual(callback(3), expected); callback = _.callback(_.partialRight(fn, 3), object); deepEqual(callback(2), expected); }); test('should return the function provided if already bound with `Function#bind`', 1, function() { function a() {} var object = {}, bound = a.bind && a.bind(object); if (bound && !('prototype' in bound)) { var bound = a.bind(object); strictEqual(_.callback(bound, object), bound); } else { skipTest(); } }); test('should return the function provided when there is no `this` reference', 2, function() { function a() {} function b() { return this.b; } var object = {}; if (_.support.funcDecomp) { strictEqual(_.callback(a, object), a); notStrictEqual(_.callback(b, object), b); } else { skipTest(2); } }); test('should only write metadata to named functions', 3, function() { function a() {}; var b = function() {}; function c() {}; var object = {}; if (defineProperty && _.support.funcDecomp) { _.callback(a, object); ok(expando in a); _.callback(b, object); ok(!(expando in b)); if (_.support.funcNames) { _.support.funcNames = false; _.callback(c, object); ok(expando in c); _.support.funcNames = true; } else { skipTest(); } } else { skipTest(3); } }); test('should not write metadata when `_.support.funcDecomp` is `false`', 1, function() { function a() {}; if (defineProperty && lodashBizarro) { lodashBizarro.callback(a, {}); ok(!(expando in a)); } else { skipTest(); } }); }());
/*--------------------------------------------------------------------------*/
// `lodash.curry` module setup: `fn` echoes its arguments; test body continues
// on the next source line.
QUnit.module('lodash.curry'); (function() { function fn(a, b, c, d) { return slice.call(arguments); } test('should curry based on the number
// `lodash.curry` tests (test name continues from the previous line): arity-based
// currying, interop with partials, `_` placeholders (skipped for modularized
// builds), reported `length` of 0, `new curried` instance semantics, and
// `this`-binding behavior.
of arguments provided', 3, function() { var curried = _.curry(fn), expected = [1, 2, 3, 4]; deepEqual(curried(1)(2)(3)(4), expected); deepEqual(curried(1, 2)(3, 4), expected); deepEqual(curried(1, 2, 3, 4), expected); }); test('should work with partialed methods', 2, function() { var curried = _.curry(fn), expected = [1, 2, 3, 4]; var a = _.partial(curried, 1), b = _.bind(a, null, 2), c = _.partialRight(b, 4), d = _.partialRight(b(3), 4); deepEqual(c(3), expected); deepEqual(d(), expected); }); test('should support placeholders', 4, function() { if (!isModularize) { var curried = _.curry(fn); deepEqual(curried(1)(_, 3)(_, 4)(2), [1, 2, 3, 4]); deepEqual(curried(_, 2)(1)(_, 4)(3), [1, 2, 3, 4]); deepEqual(curried(_, _, 3)(_, 2)(_, 4)(1), [1, 2, 3, 4]); deepEqual(curried(_, _, _, 4)(_, _, 3)(_, 2)(1), [1, 2, 3, 4]); } else { skipTest(4); } }); test('should return a function with a `length` of `0`', 6, function() { _.times(2, function(index) { var curried = index ? _.curry(fn, 4) : _.curry(fn); strictEqual(curried.length, 0); strictEqual(curried(1).length, 0); strictEqual(curried(1, 2).length, 0); }); }); test('ensure `new curried` is an instance of `func`', 2, function() { function Foo(value) { return value && object; } var curried = _.curry(Foo), object = {}; ok(new curried(false) instanceof Foo); strictEqual(new curried(true), object); }); test('should not alter the `this` binding', 9, function() { function fn(a, b, c) { var value = this || {}; return [value[a], value[b], value[c]]; } var object = { 'a': 1, 'b': 2, 'c': 3 }, expected = [1, 2, 3]; deepEqual(_.curry(_.bind(fn, object), 3)('a')('b')('c'), expected); deepEqual(_.curry(_.bind(fn, object), 3)('a', 'b')('c'), expected); deepEqual(_.curry(_.bind(fn, object), 3)('a', 'b', 'c'), expected); deepEqual(_.bind(_.curry(fn), object)('a')('b')('c'), Array(3)); deepEqual(_.bind(_.curry(fn), object)('a', 'b')('c'), Array(3)); deepEqual(_.bind(_.curry(fn), object)('a', 'b', 'c'), expected); object.curried =
// (continuation) `this` binding via a method call on `object.curried`.
_.curry(fn); deepEqual(object.curried('a')('b')('c'), Array(3)); deepEqual(object.curried('a', 'b')('c'), Array(3)); deepEqual(object.curried('a', 'b', 'c'), expected); }); }());
/*--------------------------------------------------------------------------*/
// `lodash.debounce` module: timer-driven async tests; all are skipped under
// Rhino modularized builds (`isRhino && isModularize`). Delays are in ms.
QUnit.module('lodash.debounce'); (function() { asyncTest('should debounce a function', 2, function() { if (!(isRhino && isModularize)) { var count = 0, debounced = _.debounce(function() { count++; }, 32); debounced(); debounced(); debounced(); strictEqual(count, 0); setTimeout(function() { strictEqual(count, 1); QUnit.start(); }, 96); } else { skipTest(2); QUnit.start(); } }); asyncTest('subsequent debounced calls return the last `func` result', 2, function() { if (!(isRhino && isModularize)) { var debounced = _.debounce(_.identity, 32); debounced('x'); setTimeout(function() { notEqual(debounced('y'), 'y'); }, 64); setTimeout(function() { notEqual(debounced('z'), 'z'); QUnit.start(); }, 128); } else { skipTest(2); QUnit.start(); } }); asyncTest('subsequent "immediate" debounced calls return the last `func` result', 2, function() { if (!(isRhino && isModularize)) { var debounced = _.debounce(_.identity, 32, true), result = [debounced('x'), debounced('y')]; deepEqual(result, ['x', 'x']); setTimeout(function() { var result = [debounced('a'), debounced('b')]; deepEqual(result, ['a', 'a']); QUnit.start(); }, 64); } else { skipTest(2); QUnit.start(); } }); asyncTest('should apply default options correctly', 2, function() { if (!(isRhino && isModularize)) { var count = 0; var debounced = _.debounce(function(value) { count++; return value; }, 32, {}); strictEqual(debounced('x'), undefined); setTimeout(function() { strictEqual(count, 1); QUnit.start(); }, 64); } else { skipTest(2); QUnit.start(); } }); asyncTest('should support a `leading` option', 7, function() { if (!(isRhino && isModularize)) { var withLeading, counts = [0, 0, 0]; _.each([true, { 'leading': true }], function(options, index) { var debounced =
// `lodash.debounce` continued (statement continues from the previous line):
// `leading`/`trailing` option variants and their invocation counts.
_.debounce(function(value) { counts[index]++; return value; }, 32, options); if (index == 1) { withLeading = debounced; } strictEqual(debounced('x'), 'x'); }); _.each([false, { 'leading': false }], function(options) { var withoutLeading = _.debounce(_.identity, 32, options); strictEqual(withoutLeading('x'), undefined); }); var withLeadingAndTrailing = _.debounce(function() { counts[2]++; }, 32, { 'leading': true }); withLeadingAndTrailing(); withLeadingAndTrailing(); strictEqual(counts[2], 1); setTimeout(function() { deepEqual(counts, [1, 1, 2]); withLeading('x'); strictEqual(counts[1], 2); QUnit.start(); }, 64); } else { skipTest(7); QUnit.start(); } }); asyncTest('should support a `trailing` option', 4, function() { if (!(isRhino && isModularize)) { var withCount = 0, withoutCount = 0; var withTrailing = _.debounce(function(value) { withCount++; return value; }, 32, { 'trailing': true }); var withoutTrailing = _.debounce(function(value) { withoutCount++; return value; }, 32, { 'trailing': false }); strictEqual(withTrailing('x'), undefined); strictEqual(withoutTrailing('x'), undefined); setTimeout(function() { strictEqual(withCount, 1); strictEqual(withoutCount, 0); QUnit.start(); }, 64); } else { skipTest(4); QUnit.start(); } }); test('should support a `maxWait` option', 2, function() { if (!(isRhino && isModularize)) { var limit = (argv || isPhantom) ?
// (continuation) `maxWait` busy-loop test (longer limit under CLI/PhantomJS),
// `maxDelayed` cancellation, and trailing-call argument/`this` checks; then the
// start of the `lodash.defaults` module.
1000 : 320, withCount = 0, withoutCount = 0; var withMaxWait = _.debounce(function() { withCount++; }, 64, { 'maxWait': 128 }); var withoutMaxWait = _.debounce(function() { withoutCount++; }, 96); var start = +new Date; while ((new Date - start) < limit) { withMaxWait(); withoutMaxWait(); } ok(withCount > 0); ok(!withoutCount); } else { skipTest(2); } }); asyncTest('should cancel `maxDelayed` when `delayed` is executed', 1, function() { if (!(isRhino && isModularize)) { var count = 0; var debounced = _.debounce(function() { count++; }, 32, { 'maxWait': 64 }); debounced(); setTimeout(function() { strictEqual(count, 1); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); asyncTest('should execute the `trailing` call with the correct arguments and `this` binding', 2, function() { if (!(isRhino && isModularize)) { var args, count = 0, object = {}; var debounced = _.debounce(function(value) { args = [this]; push.apply(args, arguments); return ++count != 2; }, 32, { 'leading': true, 'maxWait': 64 }); while (true) { if (!debounced.call(object, 'a')) { break; } } setTimeout(function() { strictEqual(count, 2); deepEqual(args, [object, 'a']); QUnit.start(); }, 64); } else { skipTest(2); QUnit.start(); } }); }());
/*--------------------------------------------------------------------------*/
// `lodash.defaults` module: missing-key assignment, own-property-only copying,
// and multiple source objects (first-wins semantics).
QUnit.module('lodash.defaults'); (function() { test('should assign properties of a source object if missing on the destination object', 1, function() { deepEqual(_.defaults({ 'a': 1 }, { 'a': 2, 'b': 2 }), { 'a': 1, 'b': 2 }); }); test('should assign own source properties', 1, function() { function Foo() { this.a = 1; this.c = 3; } Foo.prototype.b = 2; deepEqual(_.defaults({ 'c': 2 }, new Foo), { 'a': 1, 'c': 2 }); }); test('should accept multiple source objects', 2, function() { var expected = { 'a': 1, 'b': 2, 'c': 3 }; deepEqual(_.defaults({ 'a': 1, 'b': 2 }, { 'b': 3 }, { 'c': 3 }), expected); deepEqual(_.defaults({ 'a': 1, 'b': 2 }, { 'b': 3, 'c': 3 }, { 'c': 2 }),
// `lodash.defaults` tail (call continues from the previous line): `null` is
// preserved, `undefined` is overwritten. Then the `lodash.defer` module
// (deferred execution, extra args, cancelation via clearTimeout) and the start
// of `lodash.delay`.
expected); }); test('should not overwrite `null` values', 1, function() { var actual = _.defaults({ 'a': null }, { 'a': 1 }); strictEqual(actual.a, null); }); test('should overwrite `undefined` values', 1, function() { var actual = _.defaults({ 'a': undefined }, { 'a': 1 }); strictEqual(actual.a, 1); }); }());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.defer'); (function() { asyncTest('should defer `func` execution', 1, function() { if (!(isRhino && isModularize)) { var pass = false; _.defer(function(){ pass = true; }); setTimeout(function() { ok(pass); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); asyncTest('should accept additional arguments', 1, function() { if (!(isRhino && isModularize)) { var args; _.defer(function() { args = slice.call(arguments); }, 1, 2, 3); setTimeout(function() { deepEqual(args, [1, 2, 3]); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); asyncTest('should be cancelable', 1, function() { if (!(isRhino && isModularize)) { var pass = true; var timerId = _.defer(function() { pass = false; }); clearTimeout(timerId); setTimeout(function() { ok(pass); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); }());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.delay'); (function() { asyncTest('should delay `func` execution', 2, function() { if (!(isRhino && isModularize)) { var pass = false; _.delay(function(){ pass = true; }, 96); setTimeout(function() { ok(!pass); }, 32); setTimeout(function() { ok(pass); QUnit.start(); }, 160); } else { skipTest(2); QUnit.start(); } }); asyncTest('should accept additional arguments', 1, function() { if (!(isRhino && isModularize)) { var args; _.delay(function() { args = slice.call(arguments); }, 32, 1, 2, 3); setTimeout(function() { deepEqual(args, [1, 2, 3]); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); asyncTest('should be
// (continuation) `lodash.delay` cancelation; the `lodash.difference` module
// (incl. large-array fast path via `largeArraySize`, a suite-local constant)
// whose IIFE is invoked with (1, 2, 3) to populate `arguments`; and the start
// of `lodash.endsWith`.
cancelable', 1, function() { if (!(isRhino && isModularize)) { var pass = true; var timerId = _.delay(function() { pass = false; }, 32); clearTimeout(timerId); setTimeout(function() { ok(pass); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); }());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.difference'); (function() { var args = arguments; test('should return the difference of the given arrays', 2, function() { var actual = _.difference([1, 2, 3, 4, 5], [5, 2, 10]); deepEqual(actual, [1, 3, 4]); actual = _.difference([1, 2, 3, 4, 5], [5, 2, 10], [8, 4]); deepEqual(actual, [1, 3]); }); test('should work with large arrays', 1, function() { var array1 = _.range(largeArraySize + 1), array2 = _.range(largeArraySize), a = {}, b = {}, c = {}; array1.push(a, b, c); array2.push(b, c, a); deepEqual(_.difference(array1, array2), [largeArraySize]); }); test('should work with large arrays of objects', 1, function() { var object1 = {}, object2 = {}, largeArray = _.times(largeArraySize, _.constant(object1)); deepEqual(_.difference([object1, object2], largeArray), [object2]); }); test('should ignore values that are not arrays or `arguments` objects', 3, function() { var array = [0, 1, null, 3]; deepEqual(_.difference(array, 3, null, { '0': 1 }), array); deepEqual(_.difference(null, array, null, [2, 1]), [0, null, 3]); deepEqual(_.difference(null, array, null, args), [0, null]); }); }(1, 2, 3));
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.endsWith'); (function() { var string = 'abc'; test('should return `true` if a string ends with `target`', 1, function() { strictEqual(_.endsWith(string, 'c'), true); }); test('should return `false` if a string does not end with `target`', 1, function() { strictEqual(_.endsWith(string, 'b'), false); }); test('should work with a `position` argument', 1, function() { strictEqual(_.endsWith(string, 'b', 2), true); });
// `lodash.endsWith` continued: `position` clamping (>= length, falsey,
// negative) and the empty-`target` always-true case. Then `lodash.escape`
// (HTML entity escaping; "/" intentionally left unescaped).
test('should work with `position` >= `string.length`', 4, function() { _.each([3, 5, maxSafeInteger, Infinity], function(position) { strictEqual(_.endsWith(string, 'c', position), true); }); }); test('should treat falsey `position` values, except `undefined`, as `0`', 1, function() { var expected = _.map(falsey, _.constant(true)); var actual = _.map(falsey, function(position) { return _.endsWith(string, position === undefined ? 'c' : '', position); }); deepEqual(actual, expected); }); test('should treat a negative `position` as `0`', 6, function() { _.each([-1, -3, -Infinity], function(position) { ok(_.every(string, function(chr) { return _.endsWith(string, chr, position) === false; })); strictEqual(_.endsWith(string, '', position), true); }); }); test('should always return `true` when `target` is an empty string regardless of `position`', 1, function() { ok(_.every([-Infinity, NaN, -3, -1, 0, 1, 2, 3, 5, maxSafeInteger, Infinity], function(position) { return _.endsWith(string, '', position, true); })); }); }());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.escape'); (function() { var escaped = '&amp;&lt;&gt;&quot;&#39;&#96;\/', unescaped = '&<>"\'`\/'; test('should escape values', 1, function() { strictEqual(_.escape(unescaped), escaped); }); test('should not escape the "/" character', 1, function() { strictEqual(_.escape('/'), '/'); }); test('should handle strings with nothing to escape', 1, function() { strictEqual(_.escape('abc'), 'abc'); }); test('should escape the same characters unescaped by `_.unescape`', 1, function() { strictEqual(_.escape(_.unescape(escaped)), escaped); }); }());
/*--------------------------------------------------------------------------*/
// `lodash.escapeRegExp` module: RegExp special-character escaping.
QUnit.module('lodash.escapeRegExp'); (function() { test('should escape values', 1, function() { var escaped = '\\.\\*\\+\\?\\^\\$\\{\\}\\(\\)\\|\\[\\]\\/\\\\', unescaped = '.*+?^${}()|[\]\/\\'; strictEqual(_.escapeRegExp(unescaped), escaped); });
// `lodash.escapeRegExp` tail, then `lodash.every` (truthiness semantics,
// short-circuiting, `_.all` alias) and the shared "source property checks"
// suite for assign/defaults/merge.
test('should handle strings with nothing to escape', 1, function() { strictEqual(_.escapeRegExp('abc'), 'abc'); }); }());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.every'); (function() { test('should return `true` for empty or falsey collections', 1, function() { var expected = _.map(empties, _.constant(true)); var actual = _.map(empties, function(value) { try { return _.every(value, _.identity); } catch(e) { } }); deepEqual(actual, expected); }); test('should return `true` if the callback returns truthy for all elements in the collection', 1, function() { strictEqual(_.every([true, 1, 'x'], _.identity), true); }); test('should return `false` as soon as the callback result is falsey', 1, function() { strictEqual(_.every([true, null, true], _.identity), false); }); test('should work with collections of `undefined` values (test in IE < 9)', 1, function() { strictEqual(_.every([undefined, undefined, undefined], _.identity), false); }); test('should use `_.identity` when no callback is provided', 2, function() { strictEqual(_.every([0]), false); strictEqual(_.every([1]), true); }); test('should be aliased', 1, function() { strictEqual(_.all, _.every); }); }());
/*--------------------------------------------------------------------------*/
QUnit.module('source property checks'); _.each(['assign', 'defaults', 'merge'], function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` should not assign inherited `source` properties', 1, function() { function Foo() {} Foo.prototype = { 'a': 1 }; deepEqual(func({}, new Foo), {}); }); test('should work when used as a callback for `_.reduce`', 1, function() { var array = [{ 'a': 1 }, { 'b': 2 }, { 'c': 3 }], actual = _.reduce(array, _.merge); deepEqual(actual, { 'a': 1, 'b': 2, 'c': 3 }); }); if (methodName == 'merge') { test('`_.'
// "source property checks" tail (merge-only sparse-array test; expression
// continues from the previous line), then "strict mode checks" (frozen-object
// writes must not throw; `freeze` is a suite-local reference to Object.freeze),
// the `lodash.filter` module, and the head of the shared find-methods loop.
+ methodName + '` should treat sparse arrays as dense', 2, function() { var array = Array(3); array[0] = 1; array[2] = 3; var actual = func([], array), expected = array.slice(); expected[1] = undefined; ok('1' in actual); deepEqual(actual, expected); }); } });
/*--------------------------------------------------------------------------*/
QUnit.module('strict mode checks'); _.each(['assign', 'bindAll', 'defaults'], function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` should not throw strict mode errors', 1, function() { var object = { 'a': null, 'b': function(){} }, pass = true; if (freeze) { freeze(object); try { if (methodName == 'bindAll') { func(object); } else { func(object, { 'a': 1 }); } } catch(e) { pass = false; } ok(pass); } else { skipTest(); } }); });
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.filter'); (function() { test('should return elements the `callback` returns truthy for', 1, function() { var actual = _.filter([1, 2, 3], function(num) { return num % 2; }); deepEqual(actual, [1, 3]); }); test('should not modify wrapped values', 2, function() { if (!isNpm) { var wrapped = _([1, 2, 3, 4]); var actual = wrapped.filter(function(num) { return num < 3; }); deepEqual(actual.value(), [1, 2]); actual = wrapped.filter(function(num) { return num > 2; }); deepEqual(actual.value(), [3, 4]); } else { skipTest(2); } }); test('should be aliased', 1, function() { strictEqual(_.select, _.filter); }); }());
/*--------------------------------------------------------------------------*/
_.each(['find', 'findLast', 'findIndex', 'findLastIndex', 'findKey', 'findLastKey'], function(methodName) { QUnit.module('lodash.'
// Shared tests for the six find methods (module name continues from the
// previous line): per-method expected results are looked up from an object
// literal keyed by `methodName` — [found value, not-found value, object
// callback result, string callback result].
+ methodName); var func = _[methodName]; (function() { var objects = [ { 'a': 0, 'b': 0 }, { 'a': 1, 'b': 1 }, { 'a': 2, 'b': 2 } ]; var expected = ({ 'find': [objects[1], undefined, objects[2], objects[1]], 'findLast': [objects[2], undefined, objects[2], objects[2]], 'findIndex': [1, -1, 2, 1], 'findLastIndex': [2, -1, 2, 2], 'findKey': ['1', undefined, '2', '1'], 'findLastKey': ['2', undefined, '2', '2'] })[methodName]; test('should return the correct value', 1, function() { strictEqual(func(objects, function(object) { return object.a; }), expected[0]); }); test('should work with a `thisArg`', 1, function() { strictEqual(func(objects, function(object, index) { return this[index].a; }, objects), expected[0]); }); test('should return `' + expected[1] + '` if value is not found', 1, function() { strictEqual(func(objects, function(object) { return object.a === 3; }), expected[1]); }); test('should work with an object for `callback`', 1, function() { strictEqual(func(objects, { 'b': 2 }), expected[2]); }); test('should work with a string for `callback`', 1, function() { strictEqual(func(objects, 'b'), expected[3]); }); test('should return `' + expected[1] + '` for empty or falsey collections', 1, function() { var actual = [], emptyValues = /Index/.test(methodName) ?
// (continuation) empty/falsey collections, object and string collections,
// `_.detect` alias; then the `lodash.findWhere` module (`source`-property
// matching, function sources, empty-source matches-everything behavior).
_.reject(empties, _.isPlainObject) : empties, expecting = _.map(emptyValues, function() { return expected[1]; }); _.each(emptyValues, function(value) { try { actual.push(func(value, { 'a': 3 })); } catch(e) { } }); deepEqual(actual, expecting); }); }()); (function() { var expected = ({ 'find': 1, 'findLast': 2, 'findKey': 'a', 'findLastKey': 'b' })[methodName]; if (expected != null) { test('should work with an object for `collection`', 1, function() { var actual = func({ 'a': 1, 'b': 2, 'c': 3 }, function(num) { return num < 3; }); strictEqual(actual, expected); }); } }()); (function() { var expected = ({ 'find': 'a', 'findLast': 'b', 'findIndex': 0, 'findLastIndex': 1 })[methodName]; if (expected != null) { test('should work with a string for `collection`', 1, function() { var actual = func('abc', function(chr, index) { return index < 2; }); strictEqual(actual, expected); }); } if (methodName == 'find') { test('should be aliased', 1, function() { strictEqual(_.detect, func); }); } }()); });
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.findWhere'); (function() { var objects = [ { 'a': 1 }, { 'a': 1 }, { 'a': 1, 'b': 2 }, { 'a': 2, 'b': 2 }, { 'a': 3 } ]; test('should filter by `source` properties', 6, function() { strictEqual(_.findWhere(objects, { 'a': 1 }), objects[0]); strictEqual(_.findWhere(objects, { 'a': 2 }), objects[3]); strictEqual(_.findWhere(objects, { 'a': 3 }), objects[4]); strictEqual(_.findWhere(objects, { 'b': 1 }), undefined); strictEqual(_.findWhere(objects, { 'b': 2 }), objects[2]); strictEqual(_.findWhere(objects, { 'a': 1, 'b': 2 }), objects[2]); }); test('should work with a function for `source`', 1, function() { function source() {} source.a = 2; strictEqual(_.findWhere(objects, source), objects[3]); }); test('should match all elements when provided an empty `source`', 1, function() { var expected = _.map(empties, _.constant(true)); var actual = _.map(empties, function(value) { return
// `lodash.findWhere` tail (expression continues from the previous line), then
// the `lodash.first` module: single element, `n` handling (falsey, < 1,
// >= length), callback/object/string shorthands, `thisArg`, and use as a
// `_.map` callback.
_.findWhere(objects, value) === objects[0]; }); deepEqual(actual, expected); }); }());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.first'); (function() { var array = [1, 2, 3]; var objects = [ { 'a': 2, 'b': 2 }, { 'a': 1, 'b': 1 }, { 'a': 0, 'b': 0 } ]; test('should return the first element', 1, function() { strictEqual(_.first(array), 1); }); test('should return the first two elements', 1, function() { deepEqual(_.first(array, 2), [1, 2]); }); test('should treat falsey `n` values, except nullish, as `0`', 1, function() { var expected = _.map(falsey, function(value) { return value == null ? 1 : []; }); var actual = _.map(falsey, function(n) { return _.first(array, n); }); deepEqual(actual, expected); }); test('should return an empty array when `n` < `1`', 3, function() { _.each([0, -1, -Infinity], function(n) { deepEqual(_.first(array, n), []); }); }); test('should return all elements when `n` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(n) { deepEqual(_.first(array, n), array); }); }); test('should return `undefined` when querying empty arrays', 1, function() { strictEqual(_.first([]), undefined); }); test('should work when used as a callback for `_.map`', 1, function() { var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]], actual = _.map(array, _.first); deepEqual(actual, [1, 4, 7]); }); test('should work with a callback', 1, function() { var actual = _.first(array, function(num) { return num < 3; }); deepEqual(actual, [1, 2]); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.first(array, function() { args = slice.call(arguments); }); deepEqual(args, [1, 0, array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.first(array, function(num, index) { return this[index] < 3; }, array); deepEqual(actual, [1, 2]); }); test('should work with an object for `callback`', 1, function() { deepEqual(_.first(objects,
// (continuation) `lodash.first` chaining behavior and `head`/`take` aliases;
// then the `lodash.flatten` module — its IIFE is invoked with (1, 2, 3) later
// so `args` holds an `arguments` object.
{ 'b': 2 }), objects.slice(0, 1)); }); test('should work with a string for `callback`', 1, function() { deepEqual(_.first(objects, 'b'), objects.slice(0, 2)); }); test('should chain when passing `n`, `callback`, or `thisArg`', 3, function() { if (!isNpm) { var actual = _(array).first(2); ok(actual instanceof _); actual = _(array).first(function(num) { return num < 3; }); ok(actual instanceof _); actual = _(array).first(function(num, index) { return this[index] < 3; }, array); ok(actual instanceof _); } else { skipTest(3); } }); test('should not chain when arguments are not provided', 1, function() { if (!isNpm) { var actual = _(array).first(); strictEqual(actual, 1); } else { skipTest(); } }); test('should be aliased', 2, function() { strictEqual(_.head, _.first); strictEqual(_.take, _.first); }); }());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.flatten'); (function() { var args = arguments, array = [{ 'a': [1, [2]] }, { 'a': [3] }]; test('should flatten `arguments` objects', 1, function() { var actual = _.flatten([args, args]); deepEqual(actual, [1, 2, 3, 1, 2, 3]); }); test('should work with a callback', 1, function() { var actual = _.flatten(array, function(object) { return object.a; }); deepEqual(actual, [1, 2, 3]); }); test('should work with `isShallow` and `callback`', 1, function() { var actual = _.flatten(array, true, function(object) { return object.a; }); deepEqual(actual, [1, [2], 3]); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.flatten(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [array[0], 0, array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.flatten(array, function(object, index) { return this[index].a; }, array); deepEqual(actual, [1, 2, 3]); }); test('should work with a string for `callback`', 1, function() { deepEqual(_.flatten(array, 'a'), [1, 2, 3]); }); test('should perform a
deep flatten when used as a callback for `_.map`', 1, function() { var array = [[[['a']]], [[['b']]]], actual = _.map(array, _.flatten); deepEqual(actual, [['a'], ['b']]); }); test('should treat sparse arrays as dense', 4, function() { var array = [[1, 2, 3], Array(3)], expected = [1, 2, 3], actual1 = _.flatten(array), actual2 = _.flatten(array, true); expected.push(undefined, undefined, undefined); deepEqual(actual1, expected); ok('4' in actual1); deepEqual(actual2, expected); ok('4' in actual2); }); test('should work with extremely large arrays', 1, function() { // test in modern browsers if (freeze) { try { var expected = Array(5e5), actual = _.flatten([expected]); deepEqual(actual, expected) } catch(e) { ok(false); } } else { skipTest(); } }); test('should work with empty arrays', 1, function() { var actual = _.flatten([[], [[]], [[], [[[]]]]]); deepEqual(actual, []); }); test('should flatten nested arrays', 1, function() { var array = [1, [2], [3, [[4]]]], expected = [1, 2, 3, 4]; deepEqual(_.flatten(array), expected); }); test('should support shallow flattening nested arrays', 1, function() { var array = [1, [2], [3, [4]]], expected = [1, 2, 3, [4]]; deepEqual(_.flatten(array, true), expected); }); test('should support shallow flattening arrays of other arrays', 1, function() { var array = [[1], [2], [3], [[4]]], expected = [1, 2, 3, [4]]; deepEqual(_.flatten(array, true), expected); }); test('should return an empty array for non array-like objects', 1, function() { var actual = _.flatten({ 'a': 1 }, _.identity); deepEqual(actual, []); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('forEach methods'); _.each(['forEach', 'forEachRight'], function(methodName) { var func = _[methodName], isForEach = methodName == 'forEach'; _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('`_.' 
// forEach/forEachRight string-collection tests (expression continues from the
// previous line) and `each`/`eachRight` aliases; then "forIn methods"
// (inherited-property iteration), "forOwn methods" (`length` is treated as a
// plain key on non-arrays), and the fixture arrays opening the shared
// "iteration methods" suite (array continues on the next source line).
+ methodName + '` should work with a string ' + key + ' for `collection` (test in IE < 9)', 2, function() { var args, values = []; func(collection, function(value) { args || (args = slice.call(arguments)); values.push(value); }); if (isForEach) { deepEqual(args, ['a', 0, collection]); deepEqual(values, ['a', 'b', 'c']); } else { deepEqual(args, ['c', 2, collection]); deepEqual(values, ['c', 'b', 'a']); } }); }); test('`_.' + methodName + '` should be aliased', 1, function() { if (isForEach) { strictEqual(_.each, _.forEach); } else { strictEqual(_.eachRight, _.forEachRight); } }); });
/*--------------------------------------------------------------------------*/
QUnit.module('forIn methods'); _.each(['forIn', 'forInRight'], function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` iterates over inherited properties', 1, function() { function Foo() { this.a = 1; } Foo.prototype.b = 2; var keys = []; func(new Foo, function(value, key) { keys.push(key); }); deepEqual(keys.sort(), ['a', 'b']); }); });
/*--------------------------------------------------------------------------*/
QUnit.module('forOwn methods'); _.each(['forOwn', 'forOwnRight'], function(methodName) { var func = _[methodName]; test('iterates over the `length` property', 1, function() { var object = { '0': 'zero', '1': 'one', 'length': 2 }, props = []; func(object, function(value, prop) { props.push(prop); }); deepEqual(props.sort(), ['0', '1', 'length']); }); });
/*--------------------------------------------------------------------------*/
QUnit.module('iteration methods'); (function() { var methods = [ 'countBy', 'every', 'filter', 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight', 'groupBy', 'indexBy', 'map', 'max', 'min', 'partition', 'reject', 'some' ]; var boolMethods = [ 'every', 'some' ]; var collectionMethods = [ 'countBy', 'every', 'filter', 'find', 'findLast', 'forEach', 'forEachRight', 'groupBy', 'indexBy', 'map', 'max', 'min', 'partition', 'reduce',
'reduceRight', 'reject', 'some' ]; var forInMethods = [ 'forIn', 'forInRight' ]; var iterationMethods = [ 'forEach', 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight' ] var objectMethods = [ 'forIn', 'forInRight', 'forOwn', 'forOwnRight' ]; var rightMethods = [ 'forEachRight', 'forInRight', 'forOwnRight' ]; _.each(methods, function(methodName) { var array = [1, 2, 3], func = _[methodName]; test('`_.' + methodName + '` should pass the correct `callback` arguments', 1, function() { var args, expected = [1, 0, array]; func(array, function() { args || (args = slice.call(arguments)); }); if (_.contains(rightMethods, methodName)) { expected[0] = 3; expected[1] = 2; } if (_.contains(objectMethods, methodName)) { expected[1] += ''; } deepEqual(args, expected); }); test('`_.' + methodName + '` should support the `thisArg` argument', 2, function() { var actual; function callback(num, index) { actual = this[index]; } func([1], callback, [2]); strictEqual(actual, 2); func({ 'a': 1 }, callback, { 'a': 2 }); strictEqual(actual, 2); }); }); _.each(_.difference(methods, boolMethods), function(methodName) { var array = [1, 2, 3], func = _[methodName]; test('`_.' + methodName + '` should return a wrapped value when chaining', 1, function() { if (!isNpm) { var actual = _(array)[methodName](_.noop); ok(actual instanceof _); } else { skipTest(); } }); }); _.each(_.difference(methods, forInMethods), function(methodName) { var array = [1, 2, 3], func = _[methodName]; test('`_.' + methodName + '` iterates over own properties of objects', 1, function() { function Foo() { this.a = 1; } Foo.prototype.b = 2; var keys = []; func(new Foo, function(value, key) { keys.push(key); }); deepEqual(keys, ['a']); }); }); _.each(iterationMethods, function(methodName) { var array = [1, 2, 3], func = _[methodName]; test('`_.' + methodName + '` should return the collection', 1, function() { strictEqual(func(array, Boolean), array); }); test('`_.' 
+ methodName + '` should return the existing wrapper when chaining', 1, function() { if (!isNpm) { var wrapper = _(array); strictEqual(wrapper[methodName](_.noop), wrapper); } else { skipTest(); } }); }); _.each(collectionMethods, function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` should treat objects with lengths of `0` as array-like', 1, function() { var pass = true; func({ 'length': 0 }, function() { pass = false; }, 0); ok(pass); }); test('`_.' + methodName + '` should not treat objects with negative lengths as array-like', 1, function() { var pass = false; func({ 'length': -1 }, function() { pass = true; }, 0); ok(pass); }); test('`_.' + methodName + '` should not treat objects with non-number lengths as array-like', 1, function() { var pass = false; func({ 'length': '0' }, function() { pass = true; }, 0); ok(pass); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('collection iteration bugs'); _.each(['forEach', 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight'], function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` fixes the JScript `[[DontEnum]]` bug (test in IE < 9)', 1, function() { var props = []; func(shadowedObject, function(value, prop) { props.push(prop); }); deepEqual(props.sort(), shadowedProps); }); test('`_.' 
+ methodName + '` does not iterate over non-enumerable properties (test in IE < 9)', 10, function() { _.forOwn({ 'Array': Array.prototype, 'Boolean': Boolean.prototype, 'Date': Date.prototype, 'Error': Error.prototype, 'Function': Function.prototype, 'Object': Object.prototype, 'Number': Number.prototype, 'TypeError': TypeError.prototype, 'RegExp': RegExp.prototype, 'String': String.prototype }, function(proto, key) { var message = 'non-enumerable properties on ' + key + '.prototype', props = []; func(proto, function(value, prop) { props.push(prop); }); if (/Error/.test(key)) { ok(_.every(['constructor', 'toString'], function(prop) { return !_.contains(props, prop); }), message); } else { deepEqual(props, [], message); } }); }); test('`_.' + methodName + '` skips the prototype property of functions (test in Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1)', 2, function() { function Foo() {} Foo.prototype.a = 1; var props = []; function callback(value, prop) { props.push(prop); } func(Foo, callback); deepEqual(props, []); props.length = 0; Foo.prototype = { 'a': 1 }; func(Foo, callback); deepEqual(props, []); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('object assignments'); _.each(['assign', 'defaults', 'merge'], function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` should return `undefined` when no destination object is provided', 1, function() { strictEqual(func(), undefined); }); test('`_.' + methodName + '` should assign problem JScript properties (test in IE < 9)', 1, function() { var object = { 'constructor': '0', 'hasOwnProperty': '1', 'isPrototypeOf': '2', 'propertyIsEnumerable': undefined, 'toLocaleString': undefined, 'toString': undefined, 'valueOf': undefined }; var source = { 'propertyIsEnumerable': '3', 'toLocaleString': '4', 'toString': '5', 'valueOf': '6' }; deepEqual(func(object, source), shadowedObject); }); test('`_.' 
+ methodName + '` skips the prototype property of functions (test in Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1)', 2, function() { function Foo() {} Foo.a = 1; Foo.b = 2; Foo.prototype.c = 3; var expected = { 'a': 1, 'b': 2 }; deepEqual(func({}, Foo), expected); Foo.prototype = { 'c': 3 }; deepEqual(func({}, Foo), expected); }); test('`_.' + methodName + '` should work with `_.reduce`', 1, function() { var array = [{ 'b': 2 }, { 'c': 3 }]; deepEqual(_.reduce(array, func, { 'a': 1}), { 'a': 1, 'b': 2, 'c': 3 }); }); test('`_.' + methodName + '` should not error on nullish sources (test in IE < 9)', 1, function() { try { deepEqual(func({ 'a': 1 }, undefined, { 'b': 2 }, null), { 'a': 1, 'b': 2 }); } catch(e) { ok(false); } }); test('`_.' + methodName + '` should not error when `object` is nullish and source objects are provided', 1, function() { var expected = _.times(2, _.constant(true)); var actual = _.map([null, undefined], function(value) { try { return _.isEqual(func(value, { 'a': 1 }), value); } catch(e) { return false; } }); deepEqual(actual, expected); }); test('`_.' + methodName + '` should return the existing wrapper when chaining', 1, function() { if (!isNpm) { var wrapper = _({ 'a': 1 }); strictEqual(wrapper[methodName]({ 'b': 2 }), wrapper); } else { skipTest(); } }); }); _.each(['assign', 'merge'], function(methodName) { var func = _[methodName], isMerge = methodName == 'merge'; test('`_.' 
+ methodName + '` should pass the correct `callback` arguments', 3, function() { var args, object = { 'a': 1 }, source = { 'a': 2 }; func(object, source, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 2, 'a', object, source], 'primitive property values'); args = null; object = { 'a': 1 }; source = { 'b': 2 }; func(object, source, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [undefined, 2, 'b', object, source], 'missing destination property'); var argsList = [], objectValue = [1, 2], sourceValue = { 'b': 2 }; object = { 'a': objectValue }; source = { 'a': sourceValue }; func(object, source, function() { argsList.push(slice.call(arguments)); }); var expected = [[objectValue, sourceValue, 'a', object, source]]; if (isMerge) { expected.push([undefined, 2, 'b', sourceValue, sourceValue]); } deepEqual(argsList, expected, 'non-primitive property values'); }); test('`_.' + methodName + '`should support the `thisArg` argument', 1, function() { var actual = func({}, { 'a': 0 }, function(a, b) { return this[b]; }, [2]); deepEqual(actual, { 'a': 2 }); }); test('`_.' + methodName + '` should not treat the second argument as a callback', 2, function() { function callback() {} callback.b = 2; var actual = func({ 'a': 1 }, callback); deepEqual(actual, { 'a': 1, 'b': 2 }); actual = func({ 'a': 1 }, callback, { 'c': 3 }); deepEqual(actual, { 'a': 1, 'b': 2, 'c': 3 }); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('exit early'); _.each(['_baseEach', 'forEach', 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight'], function(methodName) { var func = _[methodName]; if (!func) { return; } test('`_.' + methodName + '` can exit early when iterating arrays', 1, function() { var array = [1, 2, 3], values = []; func(array, function(value) { values.push(value); return false; }); deepEqual(values, [/Right/.test(methodName) ? 3 : 1]); }); test('`_.' 
+ methodName + '` can exit early when iterating objects', 1, function() { var object = { 'a': 1, 'b': 2, 'c': 3 }, values = []; func(object, function(value) { values.push(value); return false; }); strictEqual(values.length, 1); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('`__proto__` property bugs'); (function() { test('internal data objects should work with the `__proto__` key', 4, function() { var stringLiteral = '__proto__', stringObject = Object(stringLiteral), expected = [stringLiteral, stringObject]; var largeArray = _.times(largeArraySize, function(count) { return count % 2 ? stringObject : stringLiteral; }); deepEqual(_.difference(largeArray, largeArray), []); deepEqual(_.intersection(largeArray, largeArray), expected); deepEqual(_.uniq(largeArray), expected); deepEqual(_.without.apply(_, [largeArray].concat(largeArray)), []); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.functions'); (function() { test('should return the function names of an object', 1, function() { var object = { 'a': 'a', 'b': _.identity, 'c': /x/, 'd': _.each }; deepEqual(_.functions(object), ['b', 'd']); }); test('should include inherited functions', 1, function() { function Foo() { this.a = _.identity; this.b = 'b' } Foo.prototype.c = _.noop; deepEqual(_.functions(new Foo), ['a', 'c']); }); test('should be aliased', 1, function() { strictEqual(_.methods, _.functions); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.groupBy'); (function() { var array = [4.2, 6.1, 6.4]; test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.groupBy([4, 6, 6]); deepEqual(actual, { '4': [4], '6': [6, 6] }); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.groupBy(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [4.2, 0, 
array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.groupBy(array, function(num) { return this.floor(num); }, Math); deepEqual(actual, { '4': [4.2], '6': [6.1, 6.4] }); }); test('should only add values to own, not inherited, properties', 2, function() { var actual = _.groupBy([4.2, 6.1, 6.4], function(num) { return Math.floor(num) > 4 ? 'hasOwnProperty' : 'constructor'; }); deepEqual(actual.constructor, [4.2]); deepEqual(actual.hasOwnProperty, [6.1, 6.4]); }); test('should work with an object for `collection`', 1, function() { var actual = _.groupBy({ 'a': 4.2, 'b': 6.1, 'c': 6.4 }, function(num) { return Math.floor(num); }); deepEqual(actual, { '4': [4.2], '6': [6.1, 6.4] }); }); test('should work with a number for `callback`', 2, function() { var array = [ [1, 'a'], [2, 'a'], [2, 'b'] ]; deepEqual(_.groupBy(array, 0), { '1': [[1 , 'a']], '2': [[2, 'a'], [2, 'b']] }); deepEqual(_.groupBy(array, 1), { 'a': [[1 , 'a'], [2, 'a']], 'b': [[2, 'b']] }); }); test('should work with a string for `callback`', 1, function() { var actual = _.groupBy(['one', 'two', 'three'], 'length'); deepEqual(actual, { '3': ['one', 'two'], '5': ['three'] }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.has'); (function() { test('should check for own properties', 2, function() { var object = { 'a': 1 }; strictEqual(_.has(object, 'a'), true); strictEqual(_.has(object, 'b'), false); }); test('should not use the `hasOwnProperty` method of the object', 1, function() { var object = { 'hasOwnProperty': null, 'a': 1 }; strictEqual(_.has(object, 'a'), true); }); test('should not check for inherited properties', 1, function() { function Foo() {} Foo.prototype.a = 1; strictEqual(_.has(new Foo, 'a'), false); }); test('should work with functions', 1, function() { function Foo() {} strictEqual(_.has(Foo, 'prototype'), true); }); test('should return `false` for primitives', 1, function() { var values = 
falsey.concat(1, 'a'), expected = _.map(values, _.constant(false)); var actual = _.map(values, function(value) { return _.has(value, 'valueOf'); }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.identity'); (function() { test('should return the first argument provided', 1, function() { var object = { 'name': 'fred' }; strictEqual(_.identity(object), object); }); }()) /*--------------------------------------------------------------------------*/ QUnit.module('lodash.indexBy'); (function() { test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.indexBy([4, 6, 6]); deepEqual(actual, { '4': 4, '6': 6 }); }); test('should support the `thisArg` argument', 1, function() { var actual = _.indexBy([4.2, 6.1, 6.4], function(num) { return this.floor(num); }, Math); deepEqual(actual, { '4': 4.2, '6': 6.4 }); }); test('should only add values to own, not inherited, properties', 2, function() { var actual = _.indexBy([4.2, 6.1, 6.4], function(num) { return Math.floor(num) > 4 ? 
'hasOwnProperty' : 'constructor'; }); deepEqual(actual.constructor, 4.2); deepEqual(actual.hasOwnProperty, 6.4); }); test('should work with an object for `collection`', 1, function() { var actual = _.indexBy({ 'a': 4.2, 'b': 6.1, 'c': 6.4 }, function(num) { return Math.floor(num); }); deepEqual(actual, { '4': 4.2, '6': 6.4 }); }); test('should work with a number for `callback`', 2, function() { var array = [ [1, 'a'], [2, 'a'], [2, 'b'] ]; deepEqual(_.indexBy(array, 0), { '1': [1 , 'a'], '2': [2, 'b'] }); deepEqual(_.indexBy(array, 1), { 'a': [2, 'a'], 'b': [2, 'b'] }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.indexOf'); (function() { var array = [1, 2, 3, 1, 2, 3]; test('should return the index of the first matched value', 1, function() { strictEqual(_.indexOf(array, 3), 2); }); test('should return `-1` for an unmatched value', 4, function() { strictEqual(_.indexOf(array, 4), -1); strictEqual(_.indexOf(array, 4, true), -1); var empty = []; strictEqual(_.indexOf(empty, undefined), -1); strictEqual(_.indexOf(empty, undefined, true), -1); }); test('should work with a positive `fromIndex`', 1, function() { strictEqual(_.indexOf(array, 1, 2), 3); }); test('should work with `fromIndex` >= `array.length`', 12, function() { _.each([6, 8, Math.pow(2, 32), Infinity], function(fromIndex) { strictEqual(_.indexOf(array, 1, fromIndex), -1); strictEqual(_.indexOf(array, undefined, fromIndex), -1); strictEqual(_.indexOf(array, '', fromIndex), -1); }); }); test('should treat falsey `fromIndex` values as `0`', 1, function() { var expected = _.map(falsey, _.constant(0)); var actual = _.map(falsey, function(fromIndex) { return _.indexOf(array, 1, fromIndex); }); deepEqual(actual, expected); }); test('should treat non-number `fromIndex` values as `0`', 1, function() { strictEqual(_.indexOf([1, 2, 3], 1, '1'), 0); }); test('should work with a negative `fromIndex`', 1, function() { strictEqual(_.indexOf(array, 2, -3), 
4); }); test('should work with a negative `fromIndex` <= `-array.length`', 3, function() { _.each([-6, -8, -Infinity], function(fromIndex) { strictEqual(_.indexOf(array, 1, fromIndex), 0); }); }); test('should work with `isSorted`', 1, function() { strictEqual(_.indexOf([1, 2, 3], 1, true), 0); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('custom `_.indexOf` methods'); (function() { function Foo() {} function custom(array, value, fromIndex) { var index = (fromIndex || 0) - 1, length = array.length; while (++index < length) { var other = array[index]; if (other === value || (value instanceof Foo && other instanceof Foo)) { return index; } } return -1; } var array = [1, new Foo, 3, new Foo], indexOf = _.indexOf; var largeArray = _.times(largeArraySize, function() { return new Foo; }); test('`_.contains` should work with a custom `_.indexOf` method', 2, function() { if (!isModularize) { _.indexOf = custom; ok(_.contains(array, new Foo)); ok(_.contains({ 'a': 1, 'b': new Foo, 'c': 3 }, new Foo)); _.indexOf = indexOf; } else { skipTest(2); } }); test('`_.difference` should work with a custom `_.indexOf` method', 2, function() { if (!isModularize) { _.indexOf = custom; deepEqual(_.difference(array, [new Foo]), [1, 3]); deepEqual(_.difference(array, largeArray), [1, 3]); _.indexOf = indexOf; } else { skipTest(2); } }); test('`_.intersection` should work with a custom `_.indexOf` method', 2, function() { if (!isModularize) { _.indexOf = custom; deepEqual(_.intersection(array, [new Foo]), [array[1]]); deepEqual(_.intersection(largeArray, [new Foo]), [array[1]]); _.indexOf = indexOf; } else { skipTest(2); } }); test('`_.uniq` should work with a custom `_.indexOf` method', 2, function() { if (!isModularize) { _.indexOf = custom; deepEqual(_.uniq(array), array.slice(0, 3)); deepEqual(_.uniq(largeArray), [largeArray[0]]); _.indexOf = indexOf; } else { skipTest(2); } }); }()); 
/*--------------------------------------------------------------------------*/

// Tests for `_.initial`: returns all but the last element (or last `n`
// elements, or trailing elements matching a callback) of an array.
QUnit.module('lodash.initial');

(function() {
  // Shared fixtures; individual tests may shadow `array` locally.
  var array = [1, 2, 3];

  var objects = [
    { 'a': 0, 'b': 0 },
    { 'a': 1, 'b': 1 },
    { 'a': 2, 'b': 2 }
  ];

  test('should accept a falsey `array` argument', 1, function() {
    var expected = _.map(falsey, _.constant([]));

    // The ternary also exercises the zero-argument call (`index` is `0` on
    // the first iteration). The empty `catch` is deliberate: a throw leaves
    // `undefined` in `actual`, which then fails the `deepEqual` below.
    var actual = _.map(falsey, function(value, index) {
      try {
        return index ? _.initial(value) : _.initial();
      } catch(e) { }
    });

    deepEqual(actual, expected);
  });

  test('should exclude last element', 1, function() {
    deepEqual(_.initial(array), [1, 2]);
  });

  test('should exclude the last two elements', 1, function() {
    deepEqual(_.initial(array, 2), [1]);
  });

  test('should return an empty when querying empty arrays', 1, function() {
    deepEqual(_.initial([]), []);
  });

  test('should treat falsey `n` values, except nullish, as `0`', 1, function() {
    // Nullish `n` means "drop one element"; other falsey values (0, '',
    // NaN, false) are treated as `0`, so all elements are returned.
    var expected = _.map(falsey, function(value) {
      return value == null ? [1, 2] : array;
    });

    var actual = _.map(falsey, function(n) {
      return _.initial(array, n);
    });

    deepEqual(actual, expected);
  });

  test('should return all elements when `n` < `1`', 3, function() {
    _.each([0, -1, -Infinity], function(n) {
      deepEqual(_.initial(array, n), array);
    });
  });

  test('should return an empty array when `n` >= `array.length`', 4, function() {
    _.each([3, 4, Math.pow(2, 32), Infinity], function(n) {
      deepEqual(_.initial(array, n), []);
    });
  });

  test('should work when used as a callback for `_.map`', 1, function() {
    // Shadows the outer `array`; verifies `_.map`'s extra (index, collection)
    // callback arguments are not misinterpreted as `n`/`callback`.
    var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
        actual = _.map(array, _.initial);

    deepEqual(actual, [[1, 2], [4, 5], [7, 8]]);
  });

  test('should work with a callback', 1, function() {
    // Trailing elements are dropped while the callback returns truthy.
    var actual = _.initial(array, function(num) {
      return num > 1;
    });

    deepEqual(actual, [1]);
  });

  test('should pass the correct `callback` arguments', 1, function() {
    var args;

    _.initial(array, function() {
      args = slice.call(arguments);
    });

    // Iteration starts from the last element: (value, index, array).
    deepEqual(args, [3, 2, array]);
  });

  test('should support the `thisArg` argument', 1, function() {
    var actual = _.initial(array, function(num, index) {
      return this[index] > 1;
    }, array);

    deepEqual(actual, [1]);
  });

  test('should work with an object for `callback`', 1, function() {
    // "_.where"-style shorthand: drop trailing elements matching { 'b': 2 }.
    deepEqual(_.initial(objects, { 'b': 2 }), objects.slice(0, 2));
  });

  test('should work with a string for `callback`', 1, function() {
    // "_.pluck"-style shorthand: drop trailing elements with a truthy `b`.
    deepEqual(_.initial(objects, 'b'), objects.slice(0, 1));
  });
}());

/*--------------------------------------------------------------------------*/

// Tests for `_.intersection`: unique values present in every given array.
QUnit.module('lodash.intersection');

(function() {
  // `arguments` of this IIFE, invoked below with (1, 2, 3); used to check
  // that `arguments` objects are accepted where arrays are.
  var args = arguments;

  test('should return the intersection of the given arrays', 1, function() {
    var actual = _.intersection([1, 3, 2], [5, 2, 1, 4], [2, 1]);
    deepEqual(actual, [1, 2]);
  });

  test('should return an array of unique values', 2, function() {
    var array = [1, 1, 3, 2, 2];
    deepEqual(_.intersection(array, [5, 2, 2, 1, 4], [2, 1, 1]), [1, 2]);
    deepEqual(_.intersection(array), [1, 3, 2]);
  });

  // NOTE(review): this test title is duplicated by the next registration,
  // whose assertions are a superset of this one's — looks like an editing
  // leftover; confirm against upstream before removing either.
  test('should work with large arrays of objects', 1, function() {
    // A `largeArraySize`-length array pushes `_.intersection` onto its
    // large-array code path.
    var object = {},
        largeArray = _.times(largeArraySize, _.constant(object));

    deepEqual(_.intersection([object], largeArray), [object]);
  });

  test('should work with large arrays of objects', 2, function() {
    var object = {},
        largeArray = _.times(largeArraySize, _.constant(object));

    deepEqual(_.intersection([object], largeArray), [object]);
    deepEqual(_.intersection(_.range(largeArraySize), null, [1]), [1]);
  });

  test('should ignore values that are not arrays or `arguments` objects', 3, function() {
    var array = [0, 1, null, 3];
    deepEqual(_.intersection(array, 3, null, { '0': 1 }), array);
    deepEqual(_.intersection(null, array, null, [2, 1]), [1]);
    deepEqual(_.intersection(null, array, null, args), [1, 3]);
  });

  test('should return a wrapped value when chaining', 2, function() {
    if (!isNpm) {
      var actual = _([1, 3, 2]).intersection([5, 2, 1, 4]);
      ok(actual instanceof _);
      deepEqual(actual.value(), [1, 2]);
    } else {
      // The npm (modularized) build doesn't support chaining; count the
      // would-be assertions as skipped.
      skipTest(2);
    }
  });
}(1, 2, 3));
/*--------------------------------------------------------------------------*/ QUnit.module('lodash.invert'); (function() { test('should invert an object', 2, function() { var object = { 'a': 1, 'b': 2 }, actual = _.invert(object); deepEqual(actual, { '1': 'a', '2': 'b' }); deepEqual(_.invert(actual), { 'a': '1', 'b': '2' }); }); test('should work with an object that has a `length` property', 1, function() { var object = { '0': 'a', '1': 'b', 'length': 2 }; deepEqual(_.invert(object), { 'a': '0', 'b': '1', '2': 'length' }); }); test('should accept a `multiValue` flag', 1, function() { var object = { 'a': 1, 'b': 2, 'c': 1 }; deepEqual(_.invert(object, true), { '1': ['a', 'c'], '2': ['b'] }); }); test('should only add multiple values to own, not inherited, properties', 2, function() { var object = { 'a': 'hasOwnProperty', 'b': 'constructor' }; deepEqual(_.invert(object), { 'hasOwnProperty': 'a', 'constructor': 'b' }); ok(_.isEqual(_.invert(object, true), { 'hasOwnProperty': ['a'], 'constructor': ['b'] })); }); test('should return a wrapped value when chaining', 2, function() { if (!isNpm) { var object = { 'a': 1, 'b': 2 }, actual = _(object).invert(); ok(actual instanceof _); deepEqual(actual.value(), { '1': 'a', '2': 'b' }); } else { skipTest(2); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.invoke'); (function() { test('should invoke a methods on each element of a collection', 1, function() { var array = ['a', 'b', 'c']; deepEqual( _.invoke(array, 'toUpperCase'), ['A', 'B', 'C']); }); test('should work with a function `methodName` argument', 1, function() { var actual = _.invoke(['a', 'b', 'c'], function() { return this.toUpperCase(); }); deepEqual(actual, ['A', 'B', 'C']); }); test('should work with an object for `collection`', 1, function() { var object = { 'a': 1, 'b': 2, 'c': 3 }; deepEqual(_.invoke(object, 'toFixed', 1), ['1.0', '2.0', '3.0']); }); test('should treat number values for 
`collection` as empty', 1, function() { deepEqual(_.invoke(1), []); }); test('should work with nullish elements', 1, function() { var array = ['a', null, undefined, 'd']; deepEqual(_.invoke(array, 'toUpperCase'), ['A', undefined, undefined, 'D']); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isArguments'); (function() { var args = arguments; test('should return `true` for `arguments` objects', 1, function() { strictEqual(_.isArguments(args), true); }); test('should return `false` for non `arguments` objects', 10, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? _.isArguments(value) : _.isArguments(); }); strictEqual(_.isArguments([1, 2, 3]), false); strictEqual(_.isArguments(true), false); strictEqual(_.isArguments(new Date), false); strictEqual(_.isArguments(new Error), false); strictEqual(_.isArguments(_), false); strictEqual(_.isArguments({ '0': 1, 'callee': _.noop, 'length': 1 }), false); strictEqual(_.isArguments(1), false); strictEqual(_.isArguments(/x/), false); strictEqual(_.isArguments('a'), false); deepEqual(actual, expected); }); test('should work with `arguments` objects from another realm', 1, function() { if (_._object) { strictEqual(_.isArguments(_._arguments), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isArray'); (function() { var args = arguments; test('should return `true` for arrays', 1, function() { strictEqual(_.isArray([1, 2, 3]), true); }); test('should return `false` for non arrays', 10, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? 
_.isArray(value) : _.isArray(); }); strictEqual(_.isArray(args), false); strictEqual(_.isArray(true), false); strictEqual(_.isArray(new Date), false); strictEqual(_.isArray(new Error), false); strictEqual(_.isArray(_), false); strictEqual(_.isArray({ '0': 1, 'length': 1 }), false); strictEqual(_.isArray(1), false); strictEqual(_.isArray(/x/), false); strictEqual(_.isArray('a'), false); deepEqual(actual, expected); }); test('should work with arrays from another realm', 1, function() { if (_._object) { strictEqual(_.isArray(_._array), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isBoolean'); (function() { var args = arguments; test('should return `true` for booleans', 4, function() { strictEqual(_.isBoolean(true), true); strictEqual(_.isBoolean(false), true); strictEqual(_.isBoolean(new Boolean(true)), true); strictEqual(_.isBoolean(new Boolean(false)), true); }); test('should return `false` for non booleans', 10, function() { var expected = _.map(falsey, function(value) { return value === false; }); var actual = _.map(falsey, function(value, index) { return index ? 
_.isBoolean(value) : _.isBoolean(); }); strictEqual(_.isBoolean(args), false); strictEqual(_.isBoolean([1, 2, 3]), false); strictEqual(_.isBoolean(new Date), false); strictEqual(_.isBoolean(new Error), false); strictEqual(_.isBoolean(_), false); strictEqual(_.isBoolean({ 'a': 1 }), false); strictEqual(_.isBoolean(1), false); strictEqual(_.isBoolean(/x/), false); strictEqual(_.isBoolean('a'), false); deepEqual(actual, expected); }); test('should work with booleans from another realm', 1, function() { if (_._object) { strictEqual(_.isBoolean(_._boolean), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isDate'); (function() { var args = arguments; test('should return `true` for dates', 1, function() { strictEqual(_.isDate(new Date), true); }); test('should return `false` for non dates', 10, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? 
_.isDate(value) : _.isDate(); }); strictEqual(_.isDate(args), false); strictEqual(_.isDate([1, 2, 3]), false); strictEqual(_.isDate(true), false); strictEqual(_.isDate(new Error), false); strictEqual(_.isDate(_), false); strictEqual(_.isDate({ 'a': 1 }), false); strictEqual(_.isDate(1), false); strictEqual(_.isDate(/x/), false); strictEqual(_.isDate('a'), false); deepEqual(actual, expected); }); test('should work with dates from another realm', 1, function() { if (_._object) { strictEqual(_.isDate(_._date), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isElement'); (function() { var args = arguments; function Element() { this.nodeType = 1; } test('should use robust check', 7, function() { var element = body || new Element; strictEqual(_.isElement(element), true); strictEqual(_.isElement({ 'nodeType': 1 }), false); strictEqual(_.isElement({ 'nodeType': new Number(1) }), false); strictEqual(_.isElement({ 'nodeType': true }), false); strictEqual(_.isElement({ 'nodeType': [1] }), false); strictEqual(_.isElement({ 'nodeType': '1' }), false); strictEqual(_.isElement({ 'nodeType': '001' }), false); }); test('should use a stronger check in browsers', 2, function() { var expected = !body; strictEqual(_.isElement(new Element), expected); if (lodashBizarro) { strictEqual(lodashBizarro.isElement(new Element), !expected); } else { skipTest(); } }); test('should return `false` for non DOM elements', 11, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? 
_.isElement(value) : _.isElement(); }); strictEqual(_.isElement(args), false); strictEqual(_.isElement([1, 2, 3]), false); strictEqual(_.isElement(true), false); strictEqual(_.isElement(new Date), false); strictEqual(_.isElement(new Error), false); strictEqual(_.isElement(_), false); strictEqual(_.isElement({ 'a': 1 }), false); strictEqual(_.isElement(1), false); strictEqual(_.isElement(/x/), false); strictEqual(_.isElement('a'), false); deepEqual(actual, expected); }); test('should work with DOM elements from another realm', 1, function() { if (_._element) { strictEqual(_.isElement(_._element), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isEmpty'); (function() { var args = arguments; test('should return `true` for empty or falsey values', 3, function() { var expected = _.map(empties, _.constant(true)); var actual = _.map(empties, function(value) { return _.isEmpty(value); }); strictEqual(_.isEmpty(), true); strictEqual(_.isEmpty(/x/), true); deepEqual(actual, expected); }); test('should return `false` for non-empty values', 3, function() { strictEqual(_.isEmpty([0]), false); strictEqual(_.isEmpty({ 'a': 0 }), false); strictEqual(_.isEmpty('a'), false); }); test('should work with an object that has a `length` property', 1, function() { strictEqual(_.isEmpty({ 'length': 0 }), false); }); test('should work with `arguments` objects (test in IE < 9)', 1, function() { strictEqual(_.isEmpty(args), false); }); test('should work with jQuery/MooTools DOM query collections', 1, function() { function Foo(elements) { push.apply(this, elements); } Foo.prototype = { 'length': 0, 'splice': Array.prototype.splice }; strictEqual(_.isEmpty(new Foo([])), true); }); test('should not treat objects with negative lengths as array-like', 1, function() { function Foo() {} Foo.prototype.length = -1; strictEqual(_.isEmpty(new Foo), true); }); test('should not treat objects with lengths 
larger than `maxSafeInteger` as array-like', 1, function() { function Foo() {} Foo.prototype.length = maxSafeInteger + 1; strictEqual(_.isEmpty(new Foo), true); }); test('should not treat objects with non-number lengths as array-like', 1, function() { strictEqual(_.isEmpty({ 'length': '0' }), false); }); test('fixes the JScript `[[DontEnum]]` bug (test in IE < 9)', 1, function() { strictEqual(_.isEmpty(shadowedObject), false); }); test('skips the prototype property of functions (test in Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1)', 2, function() { function Foo() {} Foo.prototype.a = 1; strictEqual(_.isEmpty(Foo), true); Foo.prototype = { 'a': 1 }; strictEqual(_.isEmpty(Foo), true); }); test('should return an unwrapped value when intuitively chaining', 1, function() { if (!isNpm) { strictEqual(_({}).isEmpty(), true); } else { skipTest(); } }); test('should return a wrapped value when explicitly chaining', 1, function() { if (!isNpm) { ok(_({}).chain().isEmpty() instanceof _); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isEqual'); (function() { test('should perform comparisons between primitive values', 1, function() { var pairs = [ [1, 1, true], [1, new Number(1), true], [1, '1', false], [1, 2, false], [-0, -0, true], [0, 0, true], [0, new Number(0), true], [new Number(0), new Number(0), true], [-0, 0, false], [0, '0', false], [0, null, false], [NaN, NaN, true], [NaN, new Number(NaN), true], [new Number(NaN), new Number(NaN), true], [NaN, 'a', false], [NaN, Infinity, false], ['a', 'a', true], ['a', new String('a'), true], [new String('a'), new String('a'), true], ['a', 'b', false], ['a', ['a'], false], [true, true, true], [true, new Boolean(true), true], [new Boolean(true), new Boolean(true), true], [true, 1, false], [true, 'a', false], [false, false, true], [false, new Boolean(false), true], [new Boolean(false), new Boolean(false), true], [false, 0, 
false], [false, '', false], [null, null, true], [null, undefined, false], [null, {}, false], [null, '', false], [undefined, undefined, true], [undefined, null, false], [undefined, '', false] ]; var expected = _.map(pairs, function(pair) { return pair[2]; }); var actual = _.map(pairs, function(pair) { return _.isEqual(pair[0], pair[1]); }) deepEqual(actual, expected); }); test('should return `false` for objects with custom `toString` methods', 1, function() { var primitive, object = { 'toString': function() { return primitive; } }, values = [true, null, 1, 'a', undefined], expected = _.map(values, _.constant(false)); var actual = _.map(values, function(value) { primitive = value; return _.isEqual(object, value); }); deepEqual(actual, expected); }); test('should perform comparisons between arrays', 6, function() { var array1 = [true, null, 1, 'a', undefined], array2 = [true, null, 1, 'a', undefined]; strictEqual(_.isEqual(array1, array2), true); array1 = [[1, 2, 3], new Date(2012, 4, 23), /x/, { 'e': 1 }]; array2 = [[1, 2, 3], new Date(2012, 4, 23), /x/, { 'e': 1 }]; strictEqual(_.isEqual(array1, array2), true); array1 = [1]; array1[2] = 3; array2 = [1]; array2[1] = undefined; array2[2] = 3; strictEqual(_.isEqual(array1, array2), true); array1 = [new Number(1), false, new String('a'), /x/, new Date(2012, 4, 23), ['a', 'b', [new String('c')]], { 'a': 1 }]; array2 = [1, new Boolean(false), 'a', /x/, new Date(2012, 4, 23), ['a', new String('b'), ['c']], { 'a': 1 }]; strictEqual(_.isEqual(array1, array2), true); array1 = [1, 2, 3]; array2 = [3, 2, 1]; strictEqual(_.isEqual(array1, array2), false); array1 = [1, 2]; array2 = [1, 2, 3]; strictEqual(_.isEqual(array1, array2), false); }); test('should treat arrays with identical values but different non-numeric properties as equal', 3, function() { var array1 = [1, 2, 3], array2 = [1, 2, 3]; array1.every = array1.filter = array1.forEach = array1.indexOf = array1.lastIndexOf = array1.map = array1.some = array1.reduce = 
array1.reduceRight = null; array2.concat = array2.join = array2.pop = array2.reverse = array2.shift = array2.slice = array2.sort = array2.splice = array2.unshift = null; strictEqual(_.isEqual(array1, array2), true); array1 = [1, 2, 3]; array1.a = 1; array2 = [1, 2, 3]; array2.b = 1; strictEqual(_.isEqual(array1, array2), true); array1 = /x/.exec('vwxyz'); array2 = ['x']; strictEqual(_.isEqual(array1, array2), true); }); test('should perform comparisons between date objects', 4, function() { strictEqual(_.isEqual(new Date(2012, 4, 23), new Date(2012, 4, 23)), true); strictEqual(_.isEqual(new Date(2012, 4, 23), new Date(2013, 3, 25)), false); strictEqual(_.isEqual(new Date(2012, 4, 23), { 'getTime': function() { return 1337756400000; } }), false); strictEqual(_.isEqual(new Date('a'), new Date('a')), false); }); test('should perform comparisons between functions', 2, function() { function a() { return 1 + 2; } function b() { return 1 + 2; } strictEqual(_.isEqual(a, a), true); strictEqual(_.isEqual(a, b), false); }); test('should perform comparisons between plain objects', 5, function() { var object1 = { 'a': true, 'b': null, 'c': 1, 'd': 'a', 'e': undefined }, object2 = { 'a': true, 'b': null, 'c': 1, 'd': 'a', 'e': undefined }; strictEqual(_.isEqual(object1, object2), true); object1 = { 'a': [1, 2, 3], 'b': new Date(2012, 4, 23), 'c': /x/, 'd': { 'e': 1 } }; object2 = { 'a': [1, 2, 3], 'b': new Date(2012, 4, 23), 'c': /x/, 'd': { 'e': 1 } }; strictEqual(_.isEqual(object1, object2), true); object1 = { 'a': 1, 'b': 2, 'c': 3 }; object2 = { 'a': 3, 'b': 2, 'c': 1 }; strictEqual(_.isEqual(object1, object2), false); object1 = { 'a': 1, 'b': 2, 'c': 3 }; object2 = { 'd': 1, 'e': 2, 'f': 3 }; strictEqual(_.isEqual(object1, object2), false); object1 = { 'a': 1, 'b': 2 }; object2 = { 'a': 1, 'b': 2, 'c': 3 }; strictEqual(_.isEqual(object1, object2), false); }); test('should perform comparisons of nested objects', 1, function() { var object1 = { 'a': [1, 2, 3], 'b': true, 'c': 
new Number(1), 'd': 'a', 'e': { 'f': ['a', new String('b'), 'c'], 'g': new Boolean(false), 'h': new Date(2012, 4, 23), 'i': _.noop, 'j': 'a' } }; var object2 = { 'a': [1, new Number(2), 3], 'b': new Boolean(true), 'c': 1, 'd': new String('a'), 'e': { 'f': ['a', 'b', 'c'], 'g': false, 'h': new Date(2012, 4, 23), 'i': _.noop, 'j': 'a' } }; strictEqual(_.isEqual(object1, object2), true); }); test('should perform comparisons between object instances', 4, function() { function Foo() { this.value = 1; } Foo.prototype.value = 1; function Bar() { this.value = 1; } Bar.prototype.value = 2; strictEqual(_.isEqual(new Foo, new Foo), true); strictEqual(_.isEqual(new Foo, new Bar), false); strictEqual(_.isEqual({ 'value': 1 }, new Foo), false); strictEqual(_.isEqual({ 'value': 2 }, new Bar), false); }); test('should perform comparisons between regexes', 5, function() { strictEqual(_.isEqual(/x/gim, /x/gim), true); strictEqual(_.isEqual(/x/gim, /x/mgi), true); strictEqual(_.isEqual(/x/gi, /x/g), false); strictEqual(_.isEqual(/x/, /y/), false); strictEqual(_.isEqual(/x/g, { 'global': true, 'ignoreCase': false, 'multiline': false, 'source': 'x' }), false); }); test('should avoid common type coercions', 9, function() { strictEqual(_.isEqual(true, new Boolean(false)), false); strictEqual(_.isEqual(new Boolean(false), new Number(0)), false); strictEqual(_.isEqual(false, new String('')), false); strictEqual(_.isEqual(new Number(36), new String(36)), false); strictEqual(_.isEqual(0, ''), false); strictEqual(_.isEqual(1, true), false); strictEqual(_.isEqual(1337756400000, new Date(2012, 4, 23)), false); strictEqual(_.isEqual('36', 36), false); strictEqual(_.isEqual(36, '36'), false); }); test('should work with sparse arrays', 2, function() { strictEqual(_.isEqual(Array(3), Array(3)), true); strictEqual(_.isEqual(Array(3), Array(6)), false); }); test('should work with `arguments` objects (test in IE < 9)', 2, function() { var args1 = (function() { return arguments; }(1, 2, 3)), args2 = 
(function() { return arguments; }(1, 2, 3)), args3 = (function() { return arguments; }(1, 2)); strictEqual(_.isEqual(args1, args2), true); if (!isPhantom) { strictEqual(_.isEqual(args1, args3), false); } else { skipTest(); } }); test('should treat `arguments` objects like `Object` objects', 2, function() { var args = (function() { return arguments; }(1, 2, 3)), object = { '0': 1, '1': 2, '2': 3, 'length': 3 }; function Foo() {} Foo.prototype = object; strictEqual(_.isEqual(args, object), true); if (!isPhantom) { strictEqual(_.isEqual(args, new Foo), false); } else { skipTest(); } }); test('fixes the JScript `[[DontEnum]]` bug (test in IE < 9)', 1, function() { strictEqual(_.isEqual(shadowedObject, {}), false); }); test('should perform comparisons between objects with constructor properties', 5, function() { strictEqual(_.isEqual({ 'constructor': 1 }, { 'constructor': 1 }), true); strictEqual(_.isEqual({ 'constructor': 1 }, { 'constructor': '1' }), false); strictEqual(_.isEqual({ 'constructor': [1] }, { 'constructor': [1] }), true); strictEqual(_.isEqual({ 'constructor': [1] }, { 'constructor': ['1'] }), false); strictEqual(_.isEqual({ 'constructor': Object }, {}), false); }); test('should perform comparisons between arrays with circular references', 4, function() { var array1 = [], array2 = []; array1.push(array1); array2.push(array2); strictEqual(_.isEqual(array1, array2), true); array1.push('a'); array2.push('a'); strictEqual(_.isEqual(array1, array2), true); array1.push('b'); array2.push('c'); strictEqual(_.isEqual(array1, array2), false); array1 = ['a', 'b', 'c']; array1[1] = array1; array2 = ['a', ['a', 'b', 'c'], 'c']; strictEqual(_.isEqual(array1, array2), false); }); test('should perform comparisons between objects with circular references', 4, function() { var object1 = {}, object2 = {}; object1.a = object1; object2.a = object2; strictEqual(_.isEqual(object1, object2), true); object1.b = 0; object2.b = new Number(0); strictEqual(_.isEqual(object1, 
object2), true); object1.c = new Number(1); object2.c = new Number(2); strictEqual(_.isEqual(object1, object2), false); object1 = { 'a': 1, 'b': 2, 'c': 3 }; object1.b = object1; object2 = { 'a': 1, 'b': { 'a': 1, 'b': 2, 'c': 3 }, 'c': 3 }; strictEqual(_.isEqual(object1, object2), false); }); test('should perform comparisons between objects with multiple circular references', 3, function() { var array1 = [{}], array2 = [{}]; (array1[0].a = array1).push(array1); (array2[0].a = array2).push(array2); strictEqual(_.isEqual(array1, array2), true); array1[0].b = 0; array2[0].b = new Number(0); strictEqual(_.isEqual(array1, array2), true); array1[0].c = new Number(1); array2[0].c = new Number(2); strictEqual(_.isEqual(array1, array2), false); }); test('should perform comparisons between objects with complex circular references', 1, function() { var object1 = { 'foo': { 'b': { 'foo': { 'c': { } } } }, 'bar': { 'a': 2 } }; var object2 = { 'foo': { 'b': { 'foo': { 'c': { } } } }, 'bar': { 'a': 2 } }; object1.foo.b.foo.c = object1; object1.bar.b = object1.foo.b; object2.foo.b.foo.c = object2; object2.bar.b = object2.foo.b; strictEqual(_.isEqual(object1, object2), true); }); test('should perform comparisons between objects with shared property values', 1, function() { var object1 = { 'a': [1, 2] }; var object2 = { 'a': [1, 2], 'b': [1, 2] }; object1.b = object1.a; strictEqual(_.isEqual(object1, object2), true); }); test('should pass the correct `callback` arguments', 1, function() { var argsList = []; var object1 = { 'a': [1, 2], 'b': null }; var object2 = { 'a': [1, 2], 'b': null }; object1.b = object2; object2.b = object1; var expected = [ [object1, object2], [object1.a, object2.a, 'a'], [object1.a[0], object2.a[0], 0], [object1.a[1], object2.a[1], 1], [object1.b, object2.b, 'b'], [object1.b.a, object2.b.a, 'a'], [object1.b.a[0], object2.b.a[0], 0], [object1.b.a[1], object2.b.a[1], 1], [object1.b.b, object2.b.b, 'b'] ]; _.isEqual(object1, object2, function() { 
argsList.push(slice.call(arguments)); }); deepEqual(argsList, expected); }); test('should correctly set the `this` binding', 1, function() { var actual = _.isEqual('a', 'b', function(a, b) { return this[a] == this[b]; }, { 'a': 1, 'b': 1 }); strictEqual(actual, true); }); test('should handle comparisons if `callback` returns `undefined`', 1, function() { var actual = _.isEqual('a', 'a', function() {}); strictEqual(actual, true); }); test('should return a boolean value even if `callback` does not', 2, function() { var actual = _.isEqual('a', 'a', function() { return 'a'; }); strictEqual(actual, true); var expected = _.map(falsey, _.constant(false)); actual = []; _.each(falsey, function(value) { actual.push(_.isEqual('a', 'b', _.constant(value))); }); deepEqual(actual, expected); }); test('should ensure `callback` is a function', 1, function() { var array = [1, 2, 3], eq = _.partial(_.isEqual, array), actual = _.every([array, [1, 0, 3]], eq); strictEqual(actual, false); }); test('should work when used as a callback for `_.every`', 1, function() { var actual = _.every([1, 1, 1], _.partial(_.isEqual, 1)); ok(actual); }); test('should treat objects created by `Object.create(null)` like any other plain object', 2, function() { function Foo() { this.a = 1; } Foo.prototype.constructor = null; var otherObject = { 'a': 1 }; strictEqual(_.isEqual(new Foo, otherObject), false); if (create) { var object = create(null); object.a = 1; strictEqual(_.isEqual(object, otherObject), true); } else { skipTest(); } }); test('should perform comparisons between typed arrays', 1, function() { var pairs = _.map(typedArrays, function(type) { var Ctor = root[type] || Array, buffer = Ctor == Array ? 
4 : new ArrayBuffer(4); return [new Ctor(buffer), new Ctor(buffer)]; }); var expected = _.times(pairs.length, _.constant(true)); var actual = _.map(pairs, function(pair) { return _.isEqual(pair[0], pair[1]); }); deepEqual(actual, expected); }); test('should perform comparisons between wrapped values', 4, function() { if (!isNpm) { var object1 = _({ 'a': 1, 'b': 2 }), object2 = _({ 'a': 1, 'b': 2 }), actual = object1.isEqual(object2); strictEqual(actual, true); strictEqual(_.isEqual(_(actual), _(true)), true); object1 = _({ 'a': 1, 'b': 2 }); object2 = _({ 'a': 1, 'b': 1 }); actual = object1.isEqual(object2); strictEqual(actual, false); strictEqual(_.isEqual(_(actual), _(false)), true); } else { skipTest(4); } }); test('should perform comparisons between wrapped and non-wrapped values', 4, function() { if (!isNpm) { var object1 = _({ 'a': 1, 'b': 2 }), object2 = { 'a': 1, 'b': 2 }; strictEqual(object1.isEqual(object2), true); strictEqual(_.isEqual(object1, object2), true); object1 = _({ 'a': 1, 'b': 2 }); object2 = { 'a': 1, 'b': 1 }; strictEqual(object1.isEqual(object2), false); strictEqual(_.isEqual(object1, object2), false); } else { skipTest(4); } }); test('should return `true` for like-objects from different documents', 1, function() { // ensure `_._object` is assigned (unassigned in Opera 10.00) if (_._object) { var object = { 'a': 1, 'b': 2, 'c': 3 }; strictEqual(_.isEqual(object, _._object), true); } else { skipTest(); } }); test('should not error on DOM elements', 1, function() { if (document) { var element1 = document.createElement('div'), element2 = element1.cloneNode(true); try { strictEqual(_.isEqual(element1, element2), false); } catch(e) { ok(false); } } else { skipTest(); } }); test('should return an unwrapped value when intuitively chaining', 1, function() { if (!isNpm) { strictEqual(_('a').isEqual('a'), true); } else { skipTest(); } }); test('should return a wrapped value when explicitly chaining', 1, function() { if (!isNpm) { 
ok(_('a').chain().isEqual('a') instanceof _); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isError'); (function() { var args = arguments; test('should return `true` for error objects', 1, function() { var errors = [new Error, new EvalError, new RangeError, new ReferenceError, new SyntaxError, new TypeError, new URIError], expected = _.map(errors, _.constant(true)); var actual = _.map(errors, function(error) { return _.isError(error) === true; }); deepEqual(actual, expected); }); test('should return `false` for non-error objects', 10, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? _.isError(value) : _.isError(); }); strictEqual(_.isError(args), false); strictEqual(_.isError([1, 2, 3]), false); strictEqual(_.isError(true), false); strictEqual(_.isError(new Date), false); strictEqual(_.isError(_), false); strictEqual(_.isError({ 'a': 1 }), false); strictEqual(_.isError(1), false); strictEqual(_.isError(/x/), false); strictEqual(_.isError('a'), false); deepEqual(actual, expected); }); test('should work with an error object from another realm', 1, function() { if (_._object) { var expected = _.map(_._errors, _.constant(true)); var actual = _.map(_._errors, function(error) { return _.isError(error) === true; }); deepEqual(actual, expected); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isFinite'); (function() { test('should return `true` for finite values', 5, function() { strictEqual(_.isFinite(0), true); strictEqual(_.isFinite(1), true); strictEqual(_.isFinite(3.14), true); strictEqual(_.isFinite(-1), true); strictEqual(_.isFinite(new Number(0)), true); }); test('should return `false` for non-finite values', 3, function() { strictEqual(_.isFinite(NaN), false); strictEqual(_.isFinite(Infinity), false); 
strictEqual(_.isFinite(-Infinity), false); }); test('should return `false` for non-numeric values', 9, function() { strictEqual(_.isFinite(null), false); strictEqual(_.isFinite(undefined), false); strictEqual(_.isFinite([]), false); strictEqual(_.isFinite(true), false); strictEqual(_.isFinite(new Date), false); strictEqual(_.isFinite(new Error), false); strictEqual(_.isFinite(''), false); strictEqual(_.isFinite(' '), false); strictEqual(_.isFinite('2px'), false); }); test('should return `true` for numeric string values', 3, function() { strictEqual(_.isFinite('2'), true); strictEqual(_.isFinite('0'), true); strictEqual(_.isFinite('08'), true); }); test('should work with numbers from another realm', 1, function() { if (_._object) { strictEqual(_.isFinite(_._number), true); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isFunction'); (function() { var args = arguments; test('should return `true` for functions', 1, function() { strictEqual(_.isFunction(_), true); }); test('should return `false` for non functions', 10, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? 
_.isFunction(value) : _.isFunction(); }); strictEqual(_.isFunction(args), false); strictEqual(_.isFunction([1, 2, 3]), false); strictEqual(_.isFunction(true), false); strictEqual(_.isFunction(new Date), false); strictEqual(_.isFunction(new Error), false); strictEqual(_.isFunction({ 'a': 1 }), false); strictEqual(_.isFunction(1), false); strictEqual(_.isFunction(/x/), false); strictEqual(_.isFunction('a'), false); deepEqual(actual, expected); }); test('should work with host objects in non-edge document modes (test in IE 11)', 1, function() { if (xml) { // trigger Chakra bug // https://github.com/jashkenas/underscore/issues/1621 _.times(100, _.isFunction); strictEqual(_.isFunction(xml), false); } else { skipTest(); } }); test('should work with functions from another realm', 1, function() { if (_._object) { strictEqual(_.isFunction(_._function), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isNaN'); (function() { var args = arguments; test('should return `true` for NaNs', 2, function() { strictEqual(_.isNaN(NaN), true); strictEqual(_.isNaN(new Number(NaN)), true); }); test('should return `false` for non NaNs', 11, function() { var expected = _.map(falsey, function(value) { return value !== value; }); var actual = _.map(falsey, function(value, index) { return index ? 
_.isNaN(value) : _.isNaN(); }); strictEqual(_.isNaN(args), false); strictEqual(_.isNaN([1, 2, 3]), false); strictEqual(_.isNaN(true), false); strictEqual(_.isNaN(new Date), false); strictEqual(_.isNaN(new Error), false); strictEqual(_.isNaN(_), false); strictEqual(_.isNaN({ 'a': 1 }), false); strictEqual(_.isNaN(1), false); strictEqual(_.isNaN(/x/), false); strictEqual(_.isNaN('a'), false); deepEqual(actual, expected); }); test('should work with NaNs from another realm', 1, function() { if (_._object) { strictEqual(_.isNaN(_._nan), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isNull'); (function() { var args = arguments; test('should return `true` for nulls', 1, function() { strictEqual(_.isNull(null), true); }); test('should return `false` for non nulls', 11, function() { var expected = _.map(falsey, function(value) { return value === null; }); var actual = _.map(falsey, function(value, index) { return index ? 
_.isNull(value) : _.isNull(); }); strictEqual(_.isNull(args), false); strictEqual(_.isNull([1, 2, 3]), false); strictEqual(_.isNull(true), false); strictEqual(_.isNull(new Date), false); strictEqual(_.isNull(new Error), false); strictEqual(_.isNull(_), false); strictEqual(_.isNull({ 'a': 1 }), false); strictEqual(_.isNull(1), false); strictEqual(_.isNull(/x/), false); strictEqual(_.isNull('a'), false); deepEqual(actual, expected); }); test('should work with nulls from another realm', 1, function() { if (_._object) { strictEqual(_.isNull(_._null), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isNumber'); (function() { var args = arguments; test('should return `true` for numbers', 2, function() { strictEqual(_.isNumber(0), true); strictEqual(_.isNumber(new Number(0)), true); }); test('should return `false` for non numbers', 10, function() { var expected = _.map(falsey, function(value) { return typeof value == 'number'; }); var actual = _.map(falsey, function(value, index) { return index ? 
_.isNumber(value) : _.isNumber(); }); strictEqual(_.isNumber(args), false); strictEqual(_.isNumber([1, 2, 3]), false); strictEqual(_.isNumber(true), false); strictEqual(_.isNumber(new Date), false); strictEqual(_.isNumber(new Error), false); strictEqual(_.isNumber(_), false); strictEqual(_.isNumber({ 'a': 1 }), false); strictEqual(_.isNumber(/x/), false); strictEqual(_.isNumber('a'), false); deepEqual(actual, expected); }); test('should work with numbers from another realm', 1, function() { if (_._object) { strictEqual(_.isNumber(_._number), true); } else { skipTest(); } }); test('should avoid `[xpconnect wrapped native prototype]` in Firefox', 1, function() { strictEqual(_.isNumber(+"2"), true); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isObject'); (function() { var args = arguments; test('should return `true` for objects', 11, function() { strictEqual(_.isObject(args), true); strictEqual(_.isObject([1, 2, 3]), true); strictEqual(_.isObject(new Boolean(false)), true); strictEqual(_.isObject(new Date), true); strictEqual(_.isObject(new Error), true); strictEqual(_.isObject(_), true); strictEqual(_.isObject({ 'a': 1 }), true); strictEqual(_.isObject(new Number(0)), true); strictEqual(_.isObject(/x/), true); strictEqual(_.isObject(new String('a')), true); if (document) { strictEqual(_.isObject(body), true); } else { skipTest(); } }); test('should return `false` for non objects', 1, function() { var values = falsey.concat('a', true), expected = _.map(values, _.constant(false)); var actual = _.map(values, function(value, index) { return index ? 
_.isObject(value) : _.isObject(); }); deepEqual(actual, expected); }); test('should work with objects from another realm', 8, function() { if (_._element) { strictEqual(_.isObject(_._element), true); } else { skipTest(); } if (_._object) { strictEqual(_.isObject(_._object), true); strictEqual(_.isObject(_._boolean), true); strictEqual(_.isObject(_._date), true); strictEqual(_.isObject(_._function), true); strictEqual(_.isObject(_._number), true); strictEqual(_.isObject(_._regexp), true); strictEqual(_.isObject(_._string), true); } else { skipTest(7); } }); test('should avoid V8 bug #2291 (test in Chrome 19-20)', 1, function() { // trigger V8 bug // http://code.google.com/p/v8/issues/detail?id=2291 var object = {}; // 1: Useless comparison statement, this is half the trigger object == object; // 2: Initial check with object, this is the other half of the trigger _.isObject(object); strictEqual(_.isObject('x'), false); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isPlainObject'); (function() { var element = document && document.createElement('div'); test('should detect plain objects', 5, function() { function Foo(a) { this.a = 1; } strictEqual(_.isPlainObject({}), true); strictEqual(_.isPlainObject({ 'a': 1 }), true); strictEqual(_.isPlainObject({ 'constructor': Foo }), true); strictEqual(_.isPlainObject([1, 2, 3]), false); strictEqual(_.isPlainObject(new Foo(1)), false); }); test('should return `true` for objects with a `[[Prototype]]` of `null`', 1, function() { if (create) { strictEqual(_.isPlainObject(create(null)), true); } else { skipTest(); } }); test('should return `true` for plain objects with a custom `valueOf` property', 2, function() { strictEqual(_.isPlainObject({ 'valueOf': 0 }), true); if (element) { var valueOf = element.valueOf; element.valueOf = 0; strictEqual(_.isPlainObject(element), false); element.valueOf = valueOf; } else { skipTest(); } }); test('should return `false` for 
DOM elements', 1, function() { if (element) { strictEqual(_.isPlainObject(element), false); } else { skipTest(); } }); test('should return `false` for Object objects without a `[[Class]]` of "Object"', 3, function() { strictEqual(_.isPlainObject(arguments), false); strictEqual(_.isPlainObject(Error), false); strictEqual(_.isPlainObject(Math), false); }); test('should return `false` for non objects', 3, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? _.isPlainObject(value) : _.isPlainObject(); }); strictEqual(_.isPlainObject(true), false); strictEqual(_.isPlainObject('a'), false); deepEqual(actual, expected); }); test('should work with objects from another realm', 1, function() { if (_._object) { strictEqual(_.isPlainObject(_._object), true); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isRegExp'); (function() { var args = arguments; test('should return `true` for regexes', 2, function() { strictEqual(_.isRegExp(/x/), true); strictEqual(_.isRegExp(RegExp('x')), true); }); test('should return `false` for non regexes', 10, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? 
_.isRegExp(value) : _.isRegExp(); }); strictEqual(_.isRegExp(args), false); strictEqual(_.isRegExp([1, 2, 3]), false); strictEqual(_.isRegExp(true), false); strictEqual(_.isRegExp(new Date), false); strictEqual(_.isRegExp(new Error), false); strictEqual(_.isRegExp(_), false); strictEqual(_.isRegExp({ 'a': 1 }), false); strictEqual(_.isRegExp(1), false); strictEqual(_.isRegExp('a'), false); deepEqual(actual, expected); }); test('should work with regexes from another realm', 1, function() { if (_._object) { strictEqual(_.isRegExp(_._regexp), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isString'); (function() { var args = arguments; test('should return `true` for strings', 2, function() { strictEqual(_.isString('a'), true); strictEqual(_.isString(new String('a')), true); }); test('should return `false` for non strings', 10, function() { var expected = _.map(falsey, function(value) { return value === ''; }); var actual = _.map(falsey, function(value, index) { return index ? 
_.isString(value) : _.isString(); }); strictEqual(_.isString(args), false); strictEqual(_.isString([1, 2, 3]), false); strictEqual(_.isString(true), false); strictEqual(_.isString(new Date), false); strictEqual(_.isString(new Error), false); strictEqual(_.isString(_), false); strictEqual(_.isString({ '0': 1, 'length': 1 }), false); strictEqual(_.isString(1), false); strictEqual(_.isString(/x/), false); deepEqual(actual, expected); }); test('should work with strings from another realm', 1, function() { if (_._object) { strictEqual(_.isString(_._string), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isUndefined'); (function() { var args = arguments; test('should return `true` for `undefined` values', 2, function() { strictEqual(_.isUndefined(), true); strictEqual(_.isUndefined(undefined), true); }); test('should return `false` for non `undefined` values', 11, function() { var expected = _.map(falsey, function(value) { return value === undefined; }); var actual = _.map(falsey, function(value, index) { return index ? 
_.isUndefined(value) : _.isUndefined(); }); strictEqual(_.isUndefined(args), false); strictEqual(_.isUndefined([1, 2, 3]), false); strictEqual(_.isUndefined(true), false); strictEqual(_.isUndefined(new Date), false); strictEqual(_.isUndefined(new Error), false); strictEqual(_.isUndefined(_), false); strictEqual(_.isUndefined({ 'a': 1 }), false); strictEqual(_.isUndefined(1), false); strictEqual(_.isUndefined(/x/), false); strictEqual(_.isUndefined('a'), false); deepEqual(actual, expected); }); test('should work with `undefined` from another realm', 1, function() { if (_._object) { strictEqual(_.isUndefined(_._undefined), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('isType checks'); (function() { test('should return `false` for subclassed values', 8, function() { var funcs = [ 'isArray', 'isBoolean', 'isDate', 'isError', 'isFunction', 'isNumber', 'isRegExp', 'isString' ]; _.each(funcs, function(methodName) { function Foo() {} Foo.prototype = root[methodName.slice(2)].prototype; var object = new Foo; if (toString.call(object) == '[object Object]') { strictEqual(_[methodName](object), false, '`_.' + methodName + '` returns `false`'); } else { skipTest(); } }); }); test('should not error on host objects (test in IE)', 12, function() { if (xml) { var funcs = [ 'isArray', 'isArguments', 'isBoolean', 'isDate', 'isElement', 'isFunction', 'isObject', 'isNull', 'isNumber', 'isRegExp', 'isString', 'isUndefined' ]; _.each(funcs, function(methodName) { var pass = true; try { _[methodName](xml); } catch(e) { pass = false; } ok(pass, '`_.' + methodName + '` should not error'); }); } else { skipTest(12) } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('keys methods'); _.each(['keys', 'keysIn'], function(methodName) { var args = arguments, func = _[methodName], isKeys = methodName == 'keys'; test('`_.' 
+ methodName + '` should return the keys of an object', 1, function() { var object = { 'a': 1, 'b': 1 }, actual = func(object); deepEqual(actual.sort(), ['a', 'b']); }); test('`_.' + methodName + '` should treat sparse arrays as dense', 1, function() { var array = [1]; array[2] = 3; var actual = func(array); deepEqual(actual.sort(), ['0', '1', '2']); }); test('`_.' + methodName + '` should custom properties on arrays', 1, function() { var array = [1]; array.a = 1; var actual = func(array); deepEqual(actual.sort(), ['0', 'a']); }); test('`_.' + methodName + '` should ' + (isKeys ? 'not' : '') + ' include inherited properties of arrays', 1, function() { Array.prototype.a = 1; var expected = isKeys ? ['0'] : ['0', 'a'], actual = func([1]); deepEqual(actual.sort(), expected); delete Array.prototype.a; }); test('`_.' + methodName + '` should work with `arguments` objects (test in IE < 9)', 1, function() { if (!isPhantom) { var actual = func(args); deepEqual(actual.sort(), ['0', '1', '2']); } else { skipTest(); } }); test('`_.' + methodName + '` should custom properties on `arguments` objects', 1, function() { if (!isPhantom) { args.a = 1; var actual = func(args); deepEqual(actual.sort(), ['0', '1', '2', 'a']); delete args.a; } else { skipTest(); } }); test('`_.' + methodName + '` should ' + (isKeys ? 'not' : '') + ' include inherited properties of `arguments` objects', 1, function() { if (!isPhantom) { Object.prototype.a = 1; var expected = isKeys ? ['0', '1', '2'] : ['0', '1', '2', 'a'], actual = func(args); deepEqual(actual.sort(), expected); delete Object.prototype.a; } else { skipTest(); } }); test('`_.' + methodName + '` should work with string objects (test in IE < 9)', 1, function() { var actual = func(Object('abc')); deepEqual(actual.sort(), ['0', '1', '2']); }); test('`_.' 
+ methodName + '` should custom properties on string objects', 1, function() { var object = Object('a'); object.a = 1; var actual = func(object); deepEqual(actual.sort(), ['0', 'a']); }); test('`_.' + methodName + '` should ' + (isKeys ? 'not' : '') + ' include inherited properties of string objects', 1, function() { String.prototype.a = 1; var expected = isKeys ? ['0'] : ['0', 'a'], actual = func(Object('a')); deepEqual(actual.sort(), expected); delete String.prototype.a; }); test('`_.' + methodName + '` fixes the JScript `[[DontEnum]]` bug (test in IE < 9)', 1, function() { var actual = func(shadowedObject); deepEqual(actual.sort(), shadowedProps); }); test('`_.' + methodName + '` skips the prototype property of functions (test in Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1)', 2, function() { function Foo() {} Foo.a = 1; Foo.b = 2; Foo.prototype.c = 3; var expected = ['a', 'b'], actual = func(Foo); deepEqual(actual.sort(), expected); Foo.prototype = { 'c': 3 }; actual = func(Foo); deepEqual(actual.sort(), expected); }); test('`_.' + methodName + '` skips the `constructor` property on prototype objects', 2, function() { function Foo() {} Foo.prototype.a = 1; var expected = ['a']; deepEqual(func(Foo.prototype), ['a']); Foo.prototype = { 'constructor': Foo, 'a': 1 }; deepEqual(func(Foo.prototype), ['a']); }); test('`_.' + methodName + '` should ' + (isKeys ? 'not' : '') + ' include inherited properties', 1, function() { function Foo() { this.a = 1; this.b = 2; } Foo.prototype.c = 3; var expected = isKeys ? 
// --- Lodash QUnit test suite (chunk): closes a test begun earlier (comparing func(new Foo) against expected key lists),
// --- then registers modules for _.last, _.lastIndexOf, shared indexOf methods, _.map, _.mapValues, _.matches, _.max,
// --- _.memoize, _.merge, _.min, shared max/min, _.mixin, _.noop, _.now, _.omit, _.once, the pad family, _.pairs,
// --- _.parseInt and shared partial/partialRight tests. Relies on harness globals (test, deepEqual, strictEqual, ok,
// --- raises, skipTest, falsey, empties, slice, isNpm, whitespace, shadowedProps) defined elsewhere in this file.
['a', 'b'] : ['a', 'b', 'c'], actual = func(new Foo); deepEqual(actual.sort(), expected); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.last'); (function() { var array = [1, 2, 3]; var objects = [ { 'a': 0, 'b': 0 }, { 'a': 1, 'b': 1 }, { 'a': 2, 'b': 2 } ]; test('should return the last element', 1, function() { strictEqual(_.last(array), 3); }); test('should return the last two elements', 1, function() { deepEqual(_.last(array, 2), [2, 3]); }); test('should treat falsey `n` values, except nullish, as `0`', 1, function() { var expected = _.map(falsey, function(value) { return value == null ? 3 : []; }); var actual = _.map(falsey, function(n) { return _.last(array, n); }); deepEqual(actual, expected); }); test('should return an empty array when `n` < `1`', 3, function() { _.each([0, -1, -Infinity], function(n) { deepEqual(_.last(array, n), []); }); }); test('should return all elements when `n` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(n) { deepEqual(_.last(array, n), array); }); }); test('should return `undefined` when querying empty arrays', 1, function() { strictEqual(_.last([]), undefined); }); test('should work when used as a callback for `_.map`', 1, function() { var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]], actual = _.map(array, _.last); deepEqual(actual, [3, 6, 9]); }); test('should work with a callback', 1, function() { var actual = _.last(array, function(num) { return num > 1; }); deepEqual(actual, [2, 3]); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.last(array, function() { args = slice.call(arguments); }); deepEqual(args, [3, 2, array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.last(array, function(num, index) { return this[index] > 1; }, array); deepEqual(actual, [2, 3]); }); test('should work with an object for `callback`', 1, function() { deepEqual(_.last(objects, {
// Continuation: remainder of the object-`callback` test for _.last; then string-callback and chaining tests,
// followed by the lodash.lastIndexOf module (index lookup, fromIndex handling).
'b': 2 }), objects.slice(-1)); }); test('should work with a string for `callback`', 1, function() { deepEqual(_.last(objects, 'b'), objects.slice(-2)); }); test('should chain when passing `n`, `callback`, or `thisArg`', 3, function() { if (!isNpm) { var actual = _(array).last(2); ok(actual instanceof _); actual = _(array).last(function(num) { return num > 1; }); ok(actual instanceof _); actual = _(array).last(function(num, index) { return this[index] > 1; }, array); ok(actual instanceof _); } else { skipTest(3); } }); test('should not chain when arguments are not provided', 1, function() { if (!isNpm) { var actual = _(array).last(); strictEqual(actual, 3); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.lastIndexOf'); (function() { var array = [1, 2, 3, 1, 2, 3]; test('should return the index of the last matched value', 1, function() { strictEqual(_.lastIndexOf(array, 3), 5); }); test('should return `-1` for an unmatched value', 1, function() { strictEqual(_.lastIndexOf(array, 4), -1); }); test('should work with a positive `fromIndex`', 1, function() { strictEqual(_.lastIndexOf(array, 1, 2), 0); }); test('should work with `fromIndex` >= `array.length`', 12, function() { _.each([6, 8, Math.pow(2, 32), Infinity], function(fromIndex) { strictEqual(_.lastIndexOf(array, undefined, fromIndex), -1); strictEqual(_.lastIndexOf(array, 1, fromIndex), 3); strictEqual(_.lastIndexOf(array, '', fromIndex), -1); }); }); test('should treat falsey `fromIndex` values, except `0` and `NaN`, as `array.length`', 1, function() { var expected = _.map(falsey, function(value) { return typeof value == 'number' ?
// Ternary continues (numeric falsey fromIndex -> -1, other falsey -> array.length, i.e. index 5);
// then negative-fromIndex tests and the shared "indexOf methods" falsey-argument checks.
-1 : 5; }); var actual = _.map(falsey, function(fromIndex) { return _.lastIndexOf(array, 3, fromIndex); }); deepEqual(actual, expected); }); test('should treat non-number `fromIndex` values as `array.length`', 2, function() { strictEqual(_.lastIndexOf(array, 3, '1'), 5); strictEqual(_.lastIndexOf(array, 3, true), 5); }); test('should work with a negative `fromIndex`', 1, function() { strictEqual(_.lastIndexOf(array, 2, -3), 1); }); test('should work with a negative `fromIndex` <= `-array.length`', 3, function() { _.each([-6, -8, -Infinity], function(fromIndex) { strictEqual(_.lastIndexOf(array, 1, fromIndex), 0); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('indexOf methods'); (function() { _.each(['indexOf', 'lastIndexOf'], function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` should accept a falsey `array` argument', 1, function() { var expected = _.map(falsey, _.constant(-1)); var actual = _.map(falsey, function(value, index) { try { return index ?
// Ternary continues: call func(value)/func() inside try/catch; then the lodash.map module
// (callback arguments, thisArg, own-property iteration, non-numeric lengths, nodelists, falsey collections).
func(value) : func(); } catch(e) { } }); deepEqual(actual, expected); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.map'); (function() { var array = [1, 2, 3]; test('should pass the correct `callback` arguments', 1, function() { var args; _.map(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 0, array]); }); test('should support the `thisArg` argument', 2, function() { function callback(num, index) { return this[index] + num; } var actual = _.map([1], callback, [2]); deepEqual(actual, [3]); actual = _.map({ 'a': 1 }, callback, { 'a': 2 }); deepEqual(actual, [3]); }); test('should iterate over own properties of objects', 1, function() { function Foo() { this.a = 1; } Foo.prototype.b = 2; var actual = _.map(new Foo, function(value, key) { return key; }); deepEqual(actual, ['a']); }); test('should work on an object with no `callback`', 1, function() { var actual = _.map({ 'a': 1, 'b': 2, 'c': 3 }); deepEqual(actual, array); }); test('should handle object arguments with non-numeric length properties', 1, function() { if (defineProperty) { var object = {}; defineProperty(object, 'length', { 'value': 'x' }); deepEqual(_.map(object, _.identity), []); } else { skipTest(); } }); test('should treat a nodelist as an array-like object', 1, function() { if (document) { var actual = _.map(document.getElementsByTagName('body'), function(element) { return element.nodeName.toLowerCase(); }); deepEqual(actual, ['body']); } else { skipTest(); } }); test('should accept a falsey `collection` argument', 1, function() { var expected = _.map(falsey, _.constant([])); var actual = _.map(falsey, function(value, index) { try { return index ?
// Falsey-collection ternary continues; remaining _.map tests (number collections, chaining, _.collect alias),
// then the lodash.mapValues module (same callback/thisArg/own-property contract, object-shaped results).
_.map(value) : _.map(); } catch(e) { } }); deepEqual(actual, expected); }); test('should treat number values for `collection` as empty', 1, function() { deepEqual(_.map(1), []); }); test('should return a wrapped value when chaining', 1, function() { if (!isNpm) { ok(_(array).map(_.noop) instanceof _); } else { skipTest(); } }); test('should be aliased', 1, function() { strictEqual(_.collect, _.map); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.mapValues'); (function() { var object = { 'a': 1, 'b': 2, 'c': 3 }; test('should pass the correct `callback` arguments', 1, function() { var args; _.mapValues(object, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 'a', object]); }); test('should support the `thisArg` argument', 2, function() { function callback(num, key) { return this[key] + num; } var actual = _.mapValues({ 'a': 1 }, callback, { 'a': 2 }); deepEqual(actual, { 'a': 3 }); actual = _.mapValues([1], callback, [2]); deepEqual(actual, { '0': 3 }); }); test('should iterate over own properties of objects', 1, function() { function Foo() { this.a = 1; } Foo.prototype.b = 2; var actual = _.mapValues(new Foo, function(value, key) { return key; }); deepEqual(actual, { 'a': 'a' }); }); test('should work on an object with no `callback`', 1, function() { var actual = _.mapValues({ 'a': 1, 'b': 2, 'c': 3 }); deepEqual(actual, object); }); test('should accept a falsey `object` argument', 1, function() { var expected = _.map(falsey, _.constant({})); var actual = _.map(falsey, function(value, index) { try { return index ?
// Falsey-object ternary continues; then the lodash.matches module (deep-comparison predicate factories,
// empty and falsey sources). NOTE(review): a test title below has a duplicated word ("error error").
_.mapValues(value) : _.mapValues(); } catch(e) { } }); deepEqual(actual, expected); }); test('should return a wrapped value when chaining', 1, function() { if (!isNpm) { ok(_(object).mapValues(_.noop) instanceof _); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.matches'); (function() { var object = { 'a': 1, 'b': 2, 'c': 3 }, sources = [{ 'a': 1 }, { 'a': 1, 'c': 3 }]; test('should create a function that performs a deep comparison between a given object and the `source` object', 6, function() { _.each(sources, function(source, index) { var matches = _.matches(source); strictEqual(matches.length, 1); strictEqual(matches(object), true); matches = _.matches(index ? { 'c': 3, 'd': 4 } : { 'b': 1 }); strictEqual(matches(object), false); }); }); test('should return `true` when comparing an empty `source`', 1, function() { var expected = _.map(empties, _.constant(true)); var actual = _.map(empties, function(value) { var matches = _.matches(value); return matches(object) === true; }); deepEqual(actual, expected); }); test('should not error error for falsey `object` values', 2, function() { var expected = _.map(falsey, _.constant(true)); _.each(sources, function(source) { var matches = _.matches(source); var actual = _.map(falsey, function(value, index) { try { var result = index ? matches(value) : matches(); return result === false; } catch(e) { } }); deepEqual(actual, expected); }); }); test('should return `true` when comparing an empty `source` to a falsey `object`', 1, function() { var expected = _.map(falsey, _.constant(true)), matches = _.matches({}); var actual = _.map(falsey, function(value, index) { try { var result = index ?
// Ternary continues (matches(value) vs matches()); then the lodash.max module (-Infinity fallbacks)
// and the start of lodash.memoize (first-argument caching, resolver support, TypeError checks).
matches(value) : matches(); return result === true; } catch(e) { } }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.max'); (function() { test('should return the largest value from a collection', 1, function() { strictEqual(3, _.max([1, 2, 3])); }); test('should return `-Infinity` for empty collections', 1, function() { var expected = _.map(empties, function() { return -Infinity; }); var actual = _.map(empties, function(value) { try { return _.max(value); } catch(e) { } }); deepEqual(actual, expected); }); test('should return `-Infinity` for non-numeric collection values', 1, function() { var collections = [['a', 'b'], { 'a': 'a', 'b': 'b' }], expected = _.map(collections, function() { return -Infinity; }); var actual = _.map(collections, function(value) { try { return _.max(value); } catch(e) { } }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.memoize'); (function() { test('should memoize results based on the first argument provided', 2, function() { var memoized = _.memoize(function(a, b, c) { return a + b + c; }); strictEqual(memoized(1, 2, 3), 6); strictEqual(memoized(1, 3, 5), 6); }); test('should support a `resolver` argument', 2, function() { var fn = function(a, b, c) { return a + b + c; }, memoized = _.memoize(fn, fn); strictEqual(memoized(1, 2, 3), 6); strictEqual(memoized(1, 3, 5), 9); }); test('should not set a `this` binding', 2, function() { var memoized = _.memoize(function(a, b, c) { return a + this.b + this.c; }); var object = { 'b': 2, 'c': 3, 'memoized': memoized }; strictEqual(object.memoized(1), 6); strictEqual(object.memoized(2), 7); }); test('should throw a TypeError if `resolve` is truthy and not a function', function() { raises(function() { _.memoize(_.noop, {}); }, TypeError); }); test('should not throw a TypeError if `resolve` is falsey', function() {
// Body of the falsey-`resolve` memoize test continues; then own-property cache checks, the exposed
// `cache` object, __proto__-key skipping, and the start of lodash.merge (deep source merging).
var expected = _.map(falsey, _.constant(true)); var actual = _.map(falsey, function(value, index) { try { return _.isFunction(index ? _.memoize(_.noop, value) : _.memoize(_.noop)); } catch(e) { } }); deepEqual(actual, expected); }); test('should check cache for own properties', 1, function() { var actual = [], memoized = _.memoize(_.identity); _.each(shadowedProps, function(value) { actual.push(memoized(value)); }); deepEqual(actual, shadowedProps); }); test('should expose a `cache` object on the `memoized` function', 4, function() { _.times(2, function(index) { var resolver = index && _.identity, memoized = _.memoize(_.identity, resolver); memoized('a'); strictEqual(memoized.cache.a, 'a'); memoized.cache.a = 'b'; strictEqual(memoized('a'), 'b'); }); }); test('should skip the `__proto__` key', 4, function() { _.times(2, function(index) { var count = 0, resolver = index && _.identity; var memoized = _.memoize(function() { count++; return []; }, resolver); memoized('__proto__'); memoized('__proto__'); strictEqual(count, 2); ok(!(memoized.cache instanceof Array)); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.merge'); (function() { var args = arguments; test('should merge `source` into the destination object', 1, function() { var names = { 'characters': [ { 'name': 'barney' }, { 'name': 'fred' } ] }; var ages = { 'characters': [ { 'age': 36 }, { 'age': 40 } ] }; var heights = { 'characters': [ { 'height': '5\'4"' }, { 'height': '5\'5"' } ] }; var expected = { 'characters': [ { 'name': 'barney', 'age': 36, 'height': '5\'4"' }, { 'name': 'fred', 'age': 40, 'height': '5\'5"' } ] }; deepEqual(_.merge(names, ages, heights), expected); }); test('should merge sources containing circular references', 1, function() { var object = { 'foo': { 'a': 1 }, 'bar': { 'a': 2 } }; var source = { 'foo': { 'b': { 'foo': { 'c': { } } } }, 'bar': { } }; source.foo.b.foo.c = source; source.bar.b = source.foo.b; var actual =
// Circular-reference merge assertion continues; then arguments-object handling, four-argument merge,
// null/undefined assignment rules, and callback-driven merging.
_.merge(object, source); ok(actual.bar.b === actual.foo.b && actual.foo.b.foo.c === actual.foo.b.foo.c.foo.b.foo.c); }); test('should not treat `arguments` objects as plain objects', 1, function() { var object = { 'args': args }; var source = { 'args': { '3': 4 } }; var actual = _.merge(object, source); strictEqual(_.isArguments(actual.args), false); }); test('should work with four arguments', 1, function() { var expected = { 'a': 4 }; deepEqual(_.merge({ 'a': 1 }, { 'a': 2 }, { 'a': 3 }, expected), expected); }); test('should assign `null` values', 1, function() { var actual = _.merge({ 'a': 1 }, { 'a': null }); strictEqual(actual.a, null); }); test('should not assign `undefined` values', 1, function() { var actual = _.merge({ 'a': 1 }, { 'a': undefined }); strictEqual(actual.a, 1); }); test('should handle merging if `callback` returns `undefined`', 1, function() { var actual = _.merge({ 'a': { 'b': [1, 1] } }, { 'a': { 'b': [0] } }, function() {}); deepEqual(actual, { 'a': { 'b': [0, 1] } }); }); test('should defer to `callback` when it returns a value other than `undefined`', 1, function() { var actual = _.merge({ 'a': { 'b': [0, 1] } }, { 'a': { 'b': [2] } }, function(a, b) { return _.isArray(a) ?
// Merge-callback ternary continues (concat arrays, otherwise undefined); then the lodash.min module
// (Infinity fallbacks) and the shared max/min tests (Dates, callbacks, callback arguments).
a.concat(b) : undefined; }); deepEqual(actual, { 'a': { 'b': [0, 1, 2] } }); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.min'); (function() { test('should return the smallest value from a collection', 1, function() { strictEqual(1, _.min([1, 2, 3])); }); test('should return `Infinity` for empty collections', 1, function() { var expected = _.map(empties, function() { return Infinity; }); var actual = _.map(empties, function(value) { try { return _.min(value); } catch(e) { } }); deepEqual(actual, expected); }); test('should return `Infinity` for non-numeric collection values', 1, function() { var collections = [['a', 'b'], { 'a': 'a', 'b': 'b' }], expected = _.map(collections, function() { return Infinity; }); var actual = _.map(collections, function(value) { try { return _.min(value); } catch(e) { } }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.max and lodash.min'); _.each(['max', 'min'], function(methodName) { var array = [1, 2, 3], func = _[methodName], isMax = methodName == 'max'; test('`_.' + methodName + '` should work with Date objects', 1, function() { var now = new Date, past = new Date(0); strictEqual(func([now, past]), isMax ? now : past); }); test('`_.' + methodName + '` should work with a callback argument', 1, function() { var actual = func(array, function(num) { return -num; }); strictEqual(actual, isMax ? 1 : 3); }); test('`_.' + methodName + '` should pass the correct `callback` arguments when iterating an array', 1, function() { var args; func(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 0, array]); }); test('`_.' + methodName + '` should pass the correct `callback` arguments when iterating an object', 1, function() { var args, object = { 'a': 1, 'b': 2 }, firstKey = _.first(_.keys(object)); var expected = firstKey == 'a' ?
// Expected callback-args ternary continues; remaining max/min tests (thisArg, use as _.map callback,
// object/string iteration, +/-Infinity callbacks, very large arrays).
[1, 'a', object] : [2, 'b', object]; func(object, function() { args || (args = slice.call(arguments)); }, 0); deepEqual(args, expected); }); test('`_.' + methodName + '` should support the `thisArg` argument', 1, function() { var actual = func(array, function(num, index) { return -this[index]; }, array); strictEqual(actual, isMax ? 1 : 3); }); test('`_.' + methodName + '` should work when used as a callback for `_.map`', 1, function() { var array = [[2, 3, 1], [5, 6, 4], [8, 9, 7]], actual = _.map(array, func); deepEqual(actual, isMax ? [3, 6, 9] : [1, 4, 7]); }); test('`_.' + methodName + '` should iterate an object', 1, function() { var actual = func({ 'a': 1, 'b': 2, 'c': 3 }); strictEqual(actual, isMax ? 3 : 1); }); test('`_.' + methodName + '` should iterate a string', 2, function() { _.each(['abc', Object('abc')], function(value) { var actual = func(value); strictEqual(actual, isMax ? 'c' : 'a'); }); }); test('`_.' + methodName + '` should work when `callback` returns +/-Infinity', 1, function() { var object = { 'a': (isMax ? -Infinity : Infinity) }; var actual = func([object, { 'a': object.a }], function(object) { return object.a; }); strictEqual(actual, object); }); test('`_.' + methodName + '` should work with extremely large arrays', 1, function() { var array = _.range(0, 5e5); strictEqual(func(array), isMax ? 499999 : 0); }); test('`_.'
// Test-title string continues below; single-value chaining test, then the lodash.mixin module
// (mixing `source` methods into lodash itself, a custom `wrapper` function, and arbitrary objects).
+ methodName + '` should work when chaining on an array with only one value', 1, function() { if (!isNpm) { var actual = _([40])[methodName]().value(); strictEqual(actual, 40); } else { skipTest(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.mixin'); (function() { function wrapper(value) { if (!(this instanceof wrapper)) { return new wrapper(value); } this.__wrapped__ = value; } var value = ['a'], source = { 'a': function(array) { return array[0]; }, 'b': 'B' }; test('should mixin `source` methods into lodash', 4, function() { _.mixin(source); strictEqual(_.a(value), 'a'); strictEqual(_(value).a().__wrapped__, 'a'); delete _.a; delete _.prototype.a; ok(!('b' in _)); ok(!('b' in _.prototype)); delete _.b; delete _.prototype.b; }); test('should use `this` as the default `object` value', 3, function() { var object = _.create(_); object.mixin(source); strictEqual(object.a(value), 'a'); ok(!('a' in _)); ok(!('a' in _.prototype)); delete wrapper.a; delete wrapper.prototype.a; delete wrapper.b; delete wrapper.prototype.b; }); test('should accept an `object` argument', 1, function() { var object = {}; _.mixin(object, source); strictEqual(object.a(value), 'a'); }); test('should return `object`', 2, function() { var object = {}; strictEqual(_.mixin(object, source), object); strictEqual(_.mixin(), _); }); test('should work with a function for `object`', 2, function() { _.mixin(wrapper, source); var wrapped = wrapper(value), actual = wrapped.a(); strictEqual(actual.__wrapped__, 'a'); ok(actual instanceof wrapper); delete wrapper.a; delete wrapper.prototype.a; delete wrapper.b; delete wrapper.prototype.b; }); test('should not assign inherited `source` properties', 1, function() { function Foo() {} Foo.prototype = { 'a': _.noop }; deepEqual(_.mixin({}, new Foo, {}), {}); }); test('should accept an `options` argument', 16, function() { function message(func, chain) { return (func === _ ?
// message() helper ternary continues; `options`/chain handling, non-object options tolerance and
// chaining tests for _.mixin; then the lodash.noop module.
'lodash' : 'provided') + ' function should ' + (chain ? '' : 'not ') + 'chain'; } _.each([_, wrapper], function(func) { _.each([false, true, { 'chain': false }, { 'chain': true }], function(options) { if (func === _) { _.mixin(source, options); } else { _.mixin(func, source, options); } var wrapped = func(value), actual = wrapped.a(); if (options === true || (options && options.chain)) { strictEqual(actual.__wrapped__, 'a', message(func, true)); ok(actual instanceof func, message(func, true)); } else { strictEqual(actual, 'a', message(func, false)); ok(!(actual instanceof func), message(func, false)); } delete func.a; delete func.prototype.a; delete func.b; delete func.prototype.b; }); }); }); test('should not error for non-object `options` values', 2, function() { var pass = true; try { _.mixin({}, source, 1); } catch(e) { pass = false; } ok(pass); pass = true; try { _.mixin(source, 1); } catch(e) { pass = false; } delete _.a; delete _.prototype.a; delete _.b; delete _.prototype.b; ok(pass); }); test('should return the existing wrapper when chaining', 2, function() { if (!isNpm) { _.each([_, wrapper], function(func) { if (func === _) { var wrapper = _(source), actual = wrapper.mixin(); strictEqual(actual.value(), _); } else { wrapper = _(func); actual = wrapper.mixin(source); strictEqual(actual, wrapper); } delete func.a; delete func.prototype.a; delete func.b; delete func.prototype.b; }); } else { skipTest(2); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.noop'); (function() { test('should always return `undefined`', 1, function() { var values = falsey.concat([], true, new Date, _, {}, /x/, 'a'), expected = _.map(values, _.constant()); var actual = _.map(values, function(value, index) { return index ?
// Falsey-value ternary continues; then the async lodash.now timing test and the lodash.omit module
// (omitted properties, arrays of keys, inherited properties, callbacks).
_.noop(value) : _.noop(); }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.now'); (function() { asyncTest('should return the number of milliseconds that have elapsed since the Unix epoch', 2, function() { var stamp = +new Date, actual = _.now(); ok(actual >= stamp); if (!(isRhino && isModularize)) { setTimeout(function() { ok(_.now() > actual); QUnit.start(); }, 32); } else { skipTest(); QUnit.start(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.omit'); (function() { var args = arguments, object = { 'a': 1, 'b': 2, 'c': 3 }, expected = { 'b': 2 }; test('should create an object with omitted properties', 2, function() { deepEqual(_.omit(object, 'a'), { 'b': 2, 'c': 3 }); deepEqual(_.omit(object, 'a', 'c'), expected); }); test('should support picking an array of properties', 1, function() { deepEqual(_.omit(object, ['a', 'c']), expected); }); test('should support picking an array of properties and individual properties', 1, function() { deepEqual(_.omit(object, ['a'], 'c'), expected); }); test('should iterate over inherited properties', 1, function() { function Foo() {} Foo.prototype = object; deepEqual(_.omit(new Foo, 'a', 'c'), expected); }); test('should work with `arguments` objects as secondary arguments', 1, function() { deepEqual(_.omit(object, args), expected); }); test('should work with an array `object` argument', 1, function() { deepEqual(_.omit([1, 2, 3], '0', '2'), { '1': 2 }); }); test('should work with a callback argument', 1, function() { var actual = _.omit(object, function(num) { return num != 2; }); deepEqual(actual, expected); }); test('should pass the correct `callback` arguments', 1, function() { var args, object = { 'a': 1, 'b': 2 }, lastKey = _.keys(object).pop(); var expected = lastKey == 'b' ?
// Expected callback-args ternary continues; _.omit this-binding and key-coercion tests, the lodash.once
// module (single execution, recursion, rethrow behaviour), and the start of lodash.pad.
[1, 'a', object] : [2, 'b', object]; _.omit(object, function() { args || (args = slice.call(arguments)); }); deepEqual(args, expected); }); test('should correctly set the `this` binding', 1, function() { var actual = _.omit(object, function(num) { return num != this.b; }, { 'b': 2 }); deepEqual(actual, expected); }); test('should coerce property names to strings', 1, function() { deepEqual(_.omit({ '0': 'a' }, 0), {}); }); }('a', 'c')); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.once'); (function() { test('should execute `func` once', 1, function() { var count = 0, once = _.once(function() { count++; }); once(); once(); strictEqual(count, 1); }); test('should not set a `this` binding', 1, function() { var once = _.once(function() { this.count++; }), object = { 'count': 0, 'once': once }; object.once(); object.once(); strictEqual(object.count, 1); }); test('should ignore recursive calls', 1, function() { var count = 0; var once = _.once(function() { count++; once(); }); once(); strictEqual(count, 1); }); test('should not throw more than once', 2, function() { var once = _.once(function() { throw new Error; }); raises(function() { once(); }, Error); var pass = true; try { once(); } catch(e) { pass = false; } ok(pass); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.pad'); (function() { test('should pad a string to a given length', 1, function() { strictEqual(_.pad('abc', 9), '   abc   '); }); test('should truncate pad characters to fit the pad length', 2, function() { strictEqual(_.pad('abc', 8), '  abc   '); strictEqual(_.pad('abc', 8, '_-'), '_-abc_-_'); }); test('should coerce `string` to a string', 2, function() { strictEqual(_.pad(Object('abc'), 4), 'abc '); strictEqual(_.pad({ 'toString': _.constant('abc') }, 5), ' abc '); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.padLeft');
// lodash.padLeft and lodash.padRight modules, then shared tests for all three pad methods
// (no-op when string >= length, negative lengths treated as 0, length coercion).
(function() { test('should pad a string to a given length', 1, function() { strictEqual(_.padLeft('abc', 6), '   abc'); }); test('should truncate pad characters to fit the pad length', 1, function() { strictEqual(_.padLeft('abc', 6, '_-'), '_-_abc'); }); test('should coerce `string` to a string', 2, function() { strictEqual(_.padLeft(Object('abc'), 4), ' abc'); strictEqual(_.padLeft({ 'toString': _.constant('abc') }, 5), '  abc'); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.padRight'); (function() { test('should pad a string to a given length', 1, function() { strictEqual(_.padRight('abc', 6), 'abc   '); }); test('should truncate pad characters to fit the pad length', 1, function() { strictEqual(_.padRight('abc', 6, '_-'), 'abc_-_'); }); test('should coerce `string` to a string', 2, function() { strictEqual(_.padRight(Object('abc'), 4), 'abc '); strictEqual(_.padRight({ 'toString': _.constant('abc') }, 5), 'abc  '); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('pad methods'); _.each(['pad', 'padLeft', 'padRight'], function(methodName, index) { var func = _[methodName]; test('`_.' + methodName + '` should not pad is string is >= `length`', 2, function() { strictEqual(func('abc', 2), 'abc'); strictEqual(func('abc', 3), 'abc'); }); test('`_.' + methodName + '` should treat negative `length` as `0`', 2, function() { _.each([0, -2], function(length) { strictEqual(func('abc', length), 'abc'); }); }); test('`_.' + methodName + '` should coerce `length` to a number', 2, function() { _.each(['', '4'], function(length) { var actual = length ? (index == 1 ? ' abc' : 'abc ') : 'abc'; strictEqual(func('abc', length), actual); }); }); test('`_.'
// Shared pad tests continue (null/undefined/empty-string inputs and `chars`); then lodash.pairs
// (key-value pair arrays) and lodash.parseInt (radix handling, hexadecimal detection).
+ methodName + '` should return an empty string when provided `null`, `undefined`, or empty string and `chars`', 6, function() { _.each([null, '_-'], function(chars) { strictEqual(func(null, 0, chars), ''); strictEqual(func(undefined, 0, chars), ''); strictEqual(func('', 0, chars), ''); }); }); test('`_.' + methodName + '` should work with `null`, `undefined`, or empty string for `chars`', 3, function() { notStrictEqual(func('abc', 6, null), 'abc'); notStrictEqual(func('abc', 6, undefined), 'abc'); strictEqual(func('abc', 6, ''), 'abc'); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.pairs'); (function() { test('should create a two dimensional array of an object\'s key-value pairs', 1, function() { var object = { 'a': 1, 'b': 2 }; deepEqual(_.pairs(object), [['a', 1], ['b', 2]]); }); test('should work with an object that has a `length` property', 1, function() { var object = { '0': 'a', '1': 'b', 'length': 2 }; deepEqual(_.pairs(object), [['0', 'a'], ['1', 'b'], ['length', 2]]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.parseInt'); (function() { test('should accept a `radix` argument', 1, function() { var expected = _.range(2, 37); var actual = _.map(expected, function(radix) { return _.parseInt('10', radix); }); deepEqual(actual, expected); }); test('should use a radix of `10`, for non-hexadecimals, if `radix` is `undefined` or `0`', 4, function() { strictEqual(_.parseInt('10'), 10); strictEqual(_.parseInt('10', 0), 10); strictEqual(_.parseInt('10', 10), 10); strictEqual(_.parseInt('10', undefined), 10); }); test('should use a radix of `16`, for hexadecimals, if `radix` is `undefined` or `0`', 8, function() { _.each(['0x20', '0X20'], function(string) { strictEqual(_.parseInt(string), 32); strictEqual(_.parseInt(string, 0), 32); strictEqual(_.parseInt(string, 16), 32); strictEqual(_.parseInt(string, undefined), 32); }); }); test('should
use a radix of `10` for string with leading zeros', 2, function() { strictEqual(_.parseInt('08'), 8); strictEqual(_.parseInt('08', 10), 8); }); test('should parse strings with leading whitespace (test in Chrome, Firefox, and Opera)', 8, function() { strictEqual(_.parseInt(whitespace + '10'), 10); strictEqual(_.parseInt(whitespace + '10', 10), 10); strictEqual(_.parseInt(whitespace + '08'), 8); strictEqual(_.parseInt(whitespace + '08', 10), 8); _.each(['0x20', '0X20'], function(string) { strictEqual(_.parseInt(whitespace + string), 32); strictEqual(_.parseInt(whitespace + string, 16), 32); }); }); test('should coerce `radix` to a number', 2, function() { var object = { 'valueOf': function() { return 0; } }; strictEqual(_.parseInt('08', object), 8); strictEqual(_.parseInt('0x20', object), 32); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('partial methods'); _.each(['partial', 'partialRight'], function(methodName) { var func = _[methodName], isPartial = methodName == 'partial'; test('`_.' + methodName + '` partially applies arguments', 1, function() { var par = func(_.identity, 'a'); strictEqual(par(), 'a'); }); test('`_.' + methodName + '` creates a function that can be invoked with additional arguments', 1, function() { var fn = function(a, b) { return [a, b]; }, expected = ['a', 'b'], par = func(fn, 'a'); deepEqual(par('b'), isPartial ? expected : expected.reverse()); }); test('`_.' + methodName + '` works when there are no partially applied arguments and the created function is invoked without additional arguments', 1, function() { var fn = function() { return arguments.length; }, par = func(fn); strictEqual(par(), 0); }); test('`_.' + methodName + '` works when there are no partially applied arguments and the created function is invoked with additional arguments', 1, function() { var par = func(_.identity); strictEqual(par('a'), 'a'); }); test('`_.'
// Partial-method tests continue: placeholder support, this binding, `length` of 0, use with `new`,
// and metadata cloning. NOTE(review): the strictEqual(par1('fred'), ...) assertion below is missing
// its trailing semicolon.
+ methodName + '` should support placeholders', 4, function() { if (!isModularize) { var fn = function() { return slice.call(arguments); }, par = func(fn, _, 'b', _); deepEqual(par('a', 'c'), ['a', 'b', 'c']); deepEqual(par('a'), ['a', 'b', undefined]); deepEqual(par(), [undefined, 'b', undefined]); if (isPartial) { deepEqual(par('a', 'c', 'd'), ['a', 'b', 'c', 'd']); } else { par = func(fn, _, 'c', _); deepEqual(par('a', 'b', 'd'), ['a', 'b', 'c', 'd']); } } else { skipTest(4); } }); test('`_.' + methodName + '` should not alter the `this` binding', 3, function() { var fn = function() { return this.a; }, object = { 'a': 1 }; var par = func(_.bind(fn, object)); strictEqual(par(), object.a); par = _.bind(func(fn), object); strictEqual(par(), object.a); object.par = func(fn); strictEqual(object.par(), object.a); }); test('`_.' + methodName + '` creates a function with a `length` of `0`', 1, function() { var fn = function(a, b, c) {}, par = func(fn, 'a'); strictEqual(par.length, 0); }); test('`_.' + methodName + '` ensure `new partialed` is an instance of `func`', 2, function() { function Foo(value) { return value && object; } var object = {}, par = func(Foo); ok(new par instanceof Foo); strictEqual(new par(true), object); }); test('`_.' + methodName + '` should clone metadata for created functions', 3, function() { var greet = function(greeting, name) { return greeting + ' ' + name; }; var par1 = func(greet, 'hi'), par2 = func(par1, 'barney'), par3 = func(par1, 'pebbles'); strictEqual(par1('fred'), isPartial ? 'hi fred' : 'fred hi') strictEqual(par2(), isPartial ? 'hi barney' : 'barney hi'); strictEqual(par3(), isPartial ? 'hi pebbles' : 'pebbles hi'); }); test('`_.'
+ methodName + '` should work with curried methods', 2, function() { var fn = function(a, b, c) { return a + b + c; }, curried = _.curry(func(fn, 1), 2); strictEqual(curried(2, 3), 6); strictEqual(curried(2)(3), 6); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.partialRight'); (function() { test('should work as a deep `_.defaults`', 1, function() { var object = { 'a': { 'b': 1 } }, source = { 'a': { 'b': 2, 'c': 3 } }, expected = { 'a': { 'b': 1, 'c': 3 } }; var defaultsDeep = _.partialRight(_.merge, function deep(value, other) { return _.merge(value, other, deep); }); deepEqual(defaultsDeep(object, source), expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('methods using `createWrapper`'); (function() { test('combinations of partial functions should work', 1, function() { function fn() { return slice.call(arguments); } var a = _.partial(fn), b = _.partialRight(a, 3), c = _.partial(b, 1); deepEqual(c(2), [1, 2, 3]); }); test('combinations of bound and partial functions should work', 3, function() { function fn() { var result = [this.a]; push.apply(result, arguments); return result; } var expected = [1, 2, 3, 4], object = { 'a': 1, 'fn': fn }; var a = _.bindKey(object, 'fn'), b = _.partialRight(a, 4), c = _.partial(b, 2); deepEqual(c(3), expected); a = _.bind(fn, object); b = _.partialRight(a, 4); c = _.partial(b, 2); deepEqual(c(3), expected); a = _.partial(fn, 2); b = _.bind(a, object); c = _.partialRight(b, 4); deepEqual(c(3), expected); }); test('recursively bound functions should work', 1, function() { function fn() { return this.a; } var a = _.bind(fn, { 'a': 1 }), b = _.bind(a, { 'a': 2 }), c = _.bind(b, { 'a': 3 }); strictEqual(c(), 1); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.partition'); (function() { var array = [1, 0, 1]; test('should always return two groups of 
elements', 3, function() { deepEqual(_.partition([], _.identity), [[], []]); deepEqual(_.partition(array, _.constant(true)), [array, []]); deepEqual(_.partition(array, _.constant(false)), [[], array]); }); test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.partition(array); deepEqual(actual, [[1, 1], [0]]); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.partition(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 0, array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.partition([1.1, 0.2, 1.3], function(num) { return this.floor(num); }, Math); deepEqual(actual, [[1.1, 1.3], [0.2]]); }); test('should work with an object for `collection`', 1, function() { var actual = _.partition({ 'a': 1.1, 'b': 0.2, 'c': 1.3 }, function(num) { return Math.floor(num); }); deepEqual(actual, [[1.1, 1.3], [0.2]]); }); test('should work with a number for `callback`', 2, function() { var array = [ [1, 0], [0, 1], [1, 0] ]; deepEqual(_.partition(array, 0), [[array[0], array[2]], [array[1]]]); deepEqual(_.partition(array, 1), [[array[1]], [array[0], array[2]]]); }); test('should work with a string for `callback`', 1, function() { var objects = [{ 'a': 1 }, { 'a': 1 }, { 'b': 2 }], actual = _.partition(objects, 'a'); deepEqual(actual, [objects.slice(0, 2), objects.slice(2)]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.pick'); (function() { var args = arguments, object = { 'a': 1, 'b': 2, 'c': 3 }, expected = { 'a': 1, 'c': 3 }; test('should create an object of picked properties', 2, function() { deepEqual(_.pick(object, 'a'), { 'a': 1 }); deepEqual(_.pick(object, 'a', 'c'), expected); }); test('should support picking an array of properties', 1, function() { deepEqual(_.pick(object, ['a', 'c']), expected); }); test('should support picking an array of properties and individual 
properties', 1, function() { deepEqual(_.pick(object, ['a'], 'c'), expected); }); test('should iterate over inherited properties', 1, function() { function Foo() {} Foo.prototype = object; deepEqual(_.pick(new Foo, 'a', 'c'), expected); }); test('should work with `arguments` objects as secondary arguments', 1, function() { deepEqual(_.pick(object, args), expected); }); test('should work with an array `object` argument', 1, function() { deepEqual(_.pick([1, 2, 3], '1'), { '1': 2 }); }); test('should work with a callback argument', 1, function() { var actual = _.pick(object, function(num) { return num != 2; }); deepEqual(actual, expected); }); test('should pass the correct `callback` arguments', 1, function() { var args, object = { 'a': 1, 'b': 2 }, lastKey = _.keys(object).pop(); var expected = lastKey == 'b' ? [1, 'a', object] : [2, 'b', object]; _.pick(object, function() { args || (args = slice.call(arguments)); }); deepEqual(args, expected); }); test('should correctly set the `this` binding', 1, function() { var actual = _.pick(object, function(num) { return num != this.b; }, { 'b': 2 }); deepEqual(actual, expected); }); test('should coerce property names to strings', 1, function() { deepEqual(_.pick({ '0': 'a', '1': 'b' }, 0), { '0': 'a' }); }); }('a', 'c')); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.pluck'); (function() { test('should return an array of property values from each element of a collection', 1, function() { var objects = [{ 'name': 'barney', 'age': 36 }, { 'name': 'fred', 'age': 40 }], actual = _.pluck(objects, 'name'); deepEqual(actual, ['barney', 'fred']); }); test('should work with an object for `collection`', 1, function() { var object = { 'a': [1], 'b': [1, 2], 'c': [1, 2, 3] }; deepEqual(_.pluck(object, 'length'), [1, 2, 3]); }); test('should work with nullish elements', 1, function() { var objects = [{ 'a': 1 }, null, undefined, { 'a': 4 }]; deepEqual(_.pluck(objects, 'a'), [1, 
undefined, undefined, 4]); }); test('should coerce `key` to a string', 1, function() { function fn() {} fn.toString = _.constant('fn'); var objects = [{ 'null': 1 }, { 'undefined': 2 }, { 'fn': 3 }, { '[object Object]': 4 }], values = [null, undefined, fn, {}] var actual = _.map(objects, function(object, index) { return _.pluck([object], values[index]); }); deepEqual(actual, [[1], [2], [3], [4]]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.property'); (function() { test('should create a function that plucks a property value of a given object', 3, function() { var object = { 'a': 1, 'b': 2 }, property = _.property('a'); strictEqual(property.length, 1); strictEqual(property(object), 1); property = _.property('b'); strictEqual(property(object), 2); }); test('should work with non-string `prop` arguments', 1, function() { var array = [1, 2, 3], property = _.property(1); strictEqual(property(array), 2); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.pull'); (function() { test('should modify and return the array', 2, function() { var array = [1, 2, 3], actual = _.pull(array, 1, 3); deepEqual(array, [2]); ok(actual === array); }); test('should preserve holes in arrays', 2, function() { var array = [1, 2, 3, 4]; delete array[1]; delete array[3]; _.pull(array, 1); ok(!('0' in array)); ok(!('2' in array)); }); test('should treat holes as `undefined`', 1, function() { var array = [1, 2, 3]; delete array[1]; _.pull(array, undefined); deepEqual(array, [1, 3]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.pullAt'); (function() { test('should modify the array and return removed elements', 2, function() { var array = [1, 2, 3], actual = _.pullAt(array, [0, 1]); deepEqual(array, [3]); deepEqual(actual, [1, 2]); }); test('should work with unsorted indexes', 2, function() { var array = [1, 2, 
3, 4], actual = _.pullAt(array, [1, 3, 0]); deepEqual(array, [3]); deepEqual(actual, [2, 4, 1]); }); test('should work with repeated indexes', 2, function() { var array = [1, 2, 3, 4], actual = _.pullAt(array, [0, 2, 0, 1, 0, 2]); deepEqual(array, [4]); deepEqual(actual, [1, 3, 1, 2, 1, 3]); }); test('should return `undefined` for nonexistent keys', 2, function() { var array = ['a', 'b', 'c'], actual = _.pullAt(array, [2, 4, 0]); deepEqual(array, ['b']); deepEqual(actual, ['c', undefined, 'a']); }); test('should return an empty array when no keys are provided', 2, function() { var array = ['a', 'b', 'c'], actual = _.pullAt(array); deepEqual(array, ['a', 'b', 'c']); deepEqual(actual, []); }); test('should accept multiple index arguments', 2, function() { var array = ['a', 'b', 'c', 'd'], actual = _.pullAt(array, 3, 0, 2); deepEqual(array, ['b']); deepEqual(actual, ['d', 'a', 'c']); }); test('should ignore non-index values', 2, function() { var array = ['a', 'b', 'c'], clone = array.slice(); var values = _.reject(empties, function(value) { return value === 0 || _.isArray(value); }).concat(-1, 1.1); var expected = _.map(values, _.constant(undefined)), actual = _.pullAt.apply(_, [array].concat(values)); deepEqual(actual, expected); deepEqual(array, clone); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.random'); (function() { var array = Array(1000); test('should return `0` or `1` when arguments are not provided', 1, function() { var actual = _.map(array, function() { return _.random(); }); deepEqual(_.uniq(actual).sort(), [0, 1]); }); test('supports not passing a `max` argument', 1, function() { ok(_.some(array, function() { return _.random(5) !== 5; })); }); test('supports large integer values', 2, function() { var min = Math.pow(2, 31), max = Math.pow(2, 62); ok(_.every(array, function() { return _.random(min, max) >= min; })); ok(_.some(array, function() { return _.random(Number.MAX_VALUE) > 0; })); }); 
test('should coerce arguments to numbers', 1, function() { strictEqual(_.random('1', '1'), 1); }); test('should support floats', 2, function() { var min = 1.5, max = 1.6, actual = _.random(min, max); ok(actual % 1); ok(actual >= min && actual <= max); }); test('supports passing a `floating` argument', 3, function() { var actual = _.random(true); ok(actual % 1 && actual >= 0 && actual <= 1); actual = _.random(2, true); ok(actual % 1 && actual >= 0 && actual <= 2); actual = _.random(2, 4, true); ok(actual % 1 && actual >= 2 && actual <= 4); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.range'); (function() { test('should work when passing a single `end` argument', 1, function() { deepEqual(_.range(4), [0, 1, 2, 3]); }); test('should work when passing `start` and `end` arguments', 1, function() { deepEqual(_.range(1, 5), [1, 2, 3, 4]); }); test('should work when passing `start`, `end`, and `step` arguments', 1, function() { deepEqual(_.range(0, 20, 5), [0, 5, 10, 15]); }); test('should support a `step` of `0`', 1, function() { deepEqual(_.range(1, 4, 0), [1, 1, 1]); }); test('should work when passing `step` larger than `end`', 1, function() { deepEqual(_.range(1, 5, 20), [1]); }); test('should work when passing a negative `step` argument', 2, function() { deepEqual(_.range(0, -4, -1), [0, -1, -2, -3]); deepEqual(_.range(21, 10, -3), [21, 18, 15, 12]); }); test('should treat falsey `start` arguments as `0`', 13, function() { _.each(falsey, function(value, index) { if (index) { deepEqual(_.range(value), []); deepEqual(_.range(value, 1), [0]); } else { deepEqual(_.range(), []); } }); }); test('should coerce arguments to finite numbers', 1, function() { var actual = [_.range('0', 1), _.range('1'), _.range(0, 1, '1'), _.range(NaN), _.range(NaN, NaN)]; deepEqual(actual, [[0], [0], [0], [], []]); }); }()); /*--------------------------------------------------------------------------*/ 
QUnit.module('lodash.reduce'); (function() { var array = [1, 2, 3]; test('should use the first element of a collection as the default `accumulator`', 1, function() { strictEqual(_.reduce(array), 1); }); test('should pass the correct `callback` arguments when iterating an array', 2, function() { var args; _.reduce(array, function() { args || (args = slice.call(arguments)); }, 0); deepEqual(args, [0, 1, 0, array]); args = null; _.reduce(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 2, 1, array]); }); test('should pass the correct `callback` arguments when iterating an object', 2, function() { var args, object = { 'a': 1, 'b': 2 }, firstKey = _.first(_.keys(object)); var expected = firstKey == 'a' ? [0, 1, 'a', object] : [0, 2, 'b', object]; _.reduce(object, function() { args || (args = slice.call(arguments)); }, 0); deepEqual(args, expected); args = null; expected = firstKey == 'a' ? [1, 2, 'b', object] : [2, 1, 'a', object]; _.reduce(object, function() { args || (args = slice.call(arguments)); }); deepEqual(args, expected); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection` (test in IE < 9)', 2, function() { var args; var actual = _.reduce(collection, function(accumulator, value) { args || (args = slice.call(arguments)); return accumulator + value; }); deepEqual(args, ['a', 'b', 1, collection]); strictEqual(actual, 'abc'); }); }); test('should be aliased', 2, function() { strictEqual(_.foldl, _.reduce); strictEqual(_.inject, _.reduce); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.reduceRight'); (function() { var array = [1, 2, 3]; test('should use the last element of a collection as the default `accumulator`', 1, function() { strictEqual(_.reduceRight(array), 3); }); test('should pass the correct `callback` arguments when iterating an array', 2, function() { var args; 
_.reduceRight(array, function() { args || (args = slice.call(arguments)); }, 0); deepEqual(args, [0, 3, 2, array]); args = null; _.reduceRight(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [3, 2, 1, array]); }); test('should pass the correct `callback` arguments when iterating an object', 2, function() { var args, object = { 'a': 1, 'b': 2 }, lastKey = _.last(_.keys(object)); var expected = lastKey == 'b' ? [0, 2, 'b', object] : [0, 1, 'a', object]; _.reduceRight(object, function() { args || (args = slice.call(arguments)); }, 0); deepEqual(args, expected); args = null; expected = lastKey == 'b' ? [2, 1, 'a', object] : [1, 2, 'b', object]; _.reduceRight(object, function() { args || (args = slice.call(arguments)); }); deepEqual(args, expected); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection` (test in IE < 9)', 2, function() { var args; var actual = _.reduceRight(collection, function(accumulator, value) { args || (args = slice.call(arguments)); return accumulator + value; }); deepEqual(args, ['c', 'b', 1, collection]); strictEqual(actual, 'cba'); }); }); test('should be aliased', 1, function() { strictEqual(_.foldr, _.reduceRight); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('reduce methods'); _.each(['reduce', 'reduceRight'], function(methodName) { var array = [1, 2, 3], func = _[methodName]; test('`_.' + methodName + '` should reduce a collection to a single value', 1, function() { var actual = func(['a', 'b', 'c'], function(accumulator, value) { return accumulator + value; }, ''); strictEqual(actual, methodName == 'reduce' ? 'abc' : 'cba'); }); test('`_.' + methodName + '` should support the `thisArg` argument', 1, function() { var actual = func(array, function(sum, num, index) { return sum + this[index]; }, 0, array); deepEqual(actual, 6); }); test('`_.' 
+ methodName + '` should support empty or falsey collections without an initial `accumulator` value', 1, function() { var actual = [], expected = _.map(empties, _.constant()); _.each(empties, function(value) { try { actual.push(func(value, _.noop)); } catch(e) { } }); deepEqual(actual, expected); }); test('`_.' + methodName + '` should support empty or falsey collections with an initial `accumulator` value', 1, function() { var expected = _.map(empties, _.constant('x')); var actual = _.map(empties, function(value) { try { return func(value, _.noop, 'x'); } catch(e) { } }); deepEqual(actual, expected); }); test('`_.' + methodName + '` should handle an initial `accumulator` value of `undefined`', 1, function() { var actual = func([], _.noop, undefined); strictEqual(actual, undefined); }); test('`_.' + methodName + '` should return `undefined` for empty collections when no `accumulator` is provided (test in IE > 9 and modern browsers)', 2, function() { var array = [], object = { '0': 1, 'length': 0 }; if ('__proto__' in array) { array.__proto__ = object; strictEqual(_.reduce(array, _.noop), undefined); } else { skipTest(); } strictEqual(_.reduce(object, _.noop), undefined); }); test('`_.' + methodName + '` should return an unwrapped value when chaining', 1, function() { if (!isNpm) { var actual = _(array)[methodName](function(sum, num) { return sum + num; }); strictEqual(actual, 6); } else { skipTest(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.reject'); (function() { test('should return elements the `callback` returns falsey for', 1, function() { var actual = _.reject([1, 2, 3], function(num) { return num % 2; }); deepEqual(actual, [2]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('filter methods'); _.each(['filter', 'reject'], function(methodNames) { var func = _[methodNames]; test('`_.' 
+ methodNames + '` should not modify the resulting value from within `callback`', 1, function() { var actual = func([0], function(num, index, array) { array[index] = 1; return methodNames == 'filter'; }); deepEqual(actual, [0]); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.remove'); (function() { test('should modify the array and return removed elements', 2, function() { var array = [1, 2, 3]; var actual = _.remove(array, function(num) { return num < 3; }); deepEqual(array, [3]); deepEqual(actual, [1, 2]); }); test('should pass the correct `callback` arguments', 1, function() { var args, array = [1, 2, 3]; _.remove(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 0, array]); }); test('should support the `thisArg` argument', 1, function() { var array = [1, 2, 3]; var actual = _.remove(array, function(num, index) { return this[index] < 3; }, array); deepEqual(actual, [1, 2]); }); test('should preserve holes in arrays', 2, function() { var array = [1, 2, 3, 4]; delete array[1]; delete array[3]; _.remove(array, function(num) { return num === 1; }); ok(!('0' in array)); ok(!('2' in array)); }); test('should treat holes as `undefined`', 1, function() { var array = [1, 2, 3]; delete array[1]; _.remove(array, function(num) { return num == null; }); deepEqual(array, [1, 3]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.repeat'); (function() { test('should repeat a string `n` times', 2, function() { strictEqual(_.repeat('*', 3), '***'); strictEqual(_.repeat('abc', 2), 'abcabc'); }); test('should return an empty string for negative `n` or `n` of `0`', 2, function() { strictEqual(_.repeat('abc', 0), ''); strictEqual(_.repeat('abc', -2), ''); }); test('should coerce `n` to a number', 3, function() { strictEqual(_.repeat('abc'), ''); strictEqual(_.repeat('abc', '2'), 'abcabc'); strictEqual(_.repeat('*', { 'valueOf': 
_.constant(3) }), '***'); }); test('should coerce `string` to a string', 2, function() { strictEqual(_.repeat(Object('abc'), 2), 'abcabc'); strictEqual(_.repeat({ 'toString': _.constant('*') }, 3), '***'); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.result'); (function() { var object = { 'a': 1, 'b': null, 'c': function() { return this.a; } }; test('should resolve property values', 4, function() { strictEqual(_.result(object, 'a'), 1); strictEqual(_.result(object, 'b'), null); strictEqual(_.result(object, 'c'), 1); strictEqual(_.result(object, 'd'), undefined); }); test('should return `undefined` when `object` is nullish', 2, function() { strictEqual(_.result(null, 'a'), undefined); strictEqual(_.result(undefined, 'a'), undefined); }); test('should return the specified default value for undefined properties', 1, function() { var values = falsey.concat(1, _.constant(1)); var expected = _.transform(values, function(result, value) { result.push(value, value); }); var actual = _.transform(values, function(result, value) { result.push( _.result(object, 'd', value), _.result(null, 'd', value) ); }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.rest'); (function() { var array = [1, 2, 3]; var objects = [ { 'a': 2, 'b': 2 }, { 'a': 1, 'b': 1 }, { 'a': 0, 'b': 0 } ]; test('should accept a falsey `array` argument', 1, function() { var expected = _.map(falsey, _.constant([])); var actual = _.map(falsey, function(value, index) { try { return index ? 
_.rest(value) : _.rest(); } catch(e) { } }); deepEqual(actual, expected); }); test('should exclude the first element', 1, function() { deepEqual(_.rest(array), [2, 3]); }); test('should exclude the first two elements', 1, function() { deepEqual(_.rest(array, 2), [3]); }); test('should treat falsey `n` values, except nullish, as `0`', 1, function() { var expected = _.map(falsey, function(value) { return value == null ? [2, 3] : array; }); var actual = _.map(falsey, function(n) { return _.rest(array, n); }); deepEqual(actual, expected); }); test('should return all elements when `n` < `1`', 3, function() { _.each([0, -1, -Infinity], function(n) { deepEqual(_.rest(array, n), array); }); }); test('should return an empty array when `n` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(n) { deepEqual(_.rest(array, n), []); }); }); test('should return an empty when querying empty arrays', 1, function() { deepEqual(_.rest([]), []); }); test('should work when used as a callback for `_.map`', 1, function() { var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]], actual = _.map(array, _.rest); deepEqual(actual, [[2, 3], [5, 6], [8, 9]]); }); test('should work with a callback', 1, function() { var actual = _.rest(array, function(num) { return num < 3; }); deepEqual(actual, [3]); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.rest(array, function() { args = slice.call(arguments); }); deepEqual(args, [1, 0, array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.rest(array, function(num, index) { return this[index] < 3; }, array); deepEqual(actual, [3]); }); test('should work with an object for `callback`', 1, function() { deepEqual(_.rest(objects, { 'b': 2 }), objects.slice(-2)); }); test('should work with a string for `callback`', 1, function() { deepEqual(_.rest(objects, 'b'), objects.slice(-1)); }); test('should be aliased', 2, function() { strictEqual(_.drop, _.rest); 
strictEqual(_.tail, _.rest); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.runInContext'); (function() { test('should not require a fully populated `context` object', 1, function() { if (!isModularize) { var lodash = _.runInContext({ 'setTimeout': function(callback) { callback(); } }); var pass = false; lodash.delay(function() { pass = true; }, 32); ok(pass); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.sample'); (function() { var array = [1, 2, 3]; test('should return a random element', 1, function() { var actual = _.sample(array); ok(_.contains(array, actual)); }); test('should return two random elements', 1, function() { var actual = _.sample(array, 2); ok(actual[0] !== actual[1] && _.contains(array, actual[0]) && _.contains(array, actual[1])); }); test('should contain elements of the collection', 1, function() { var actual = _.sample(array, array.length); deepEqual(actual.sort(), array); }); test('should treat falsey `n` values, except nullish, as `0`', 1, function() { var expected = _.map(falsey, function(value) { return value == null ? 
1 : []; }); var actual = _.map(falsey, function(n) { return _.sample([1], n); }); deepEqual(actual, expected); }); test('should return an empty array when `n` < `1` or `NaN`', 3, function() { _.each([0, -1, -Infinity], function(n) { deepEqual(_.sample(array, n), []); }); }); test('should return all elements when `n` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(n) { deepEqual(_.sample(array, n).sort(), array); }); }); test('should return `undefined` when sampling an empty array', 1, function() { strictEqual(_.sample([]), undefined); }); test('should return an empty array for empty or falsey collections', 1, function() { var actual = []; var expected = _.transform(empties, function(result) { result.push([], []); }); _.each(empties, function(value) { try { actual.push(_.shuffle(value), _.shuffle(value, 1)); } catch(e) { } }); deepEqual(actual, expected); }); test('should sample an object', 2, function() { var object = { 'a': 1, 'b': 2, 'c': 3 }, actual = _.sample(object); ok(_.contains(array, actual)); actual = _.sample(object, 2); ok(actual[0] !== actual[1] && _.contains(array, actual[0]) && _.contains(array, actual[1])); }); test('should work when used as a callback for `_.map`', 1, function() { var a = [1, 2, 3], b = [4, 5, 6], c = [7, 8, 9], actual = _.map([a, b, c], _.sample); ok(_.contains(a, actual[0]) && _.contains(b, actual[1]) && _.contains(c, actual[2])); }); test('should chain when passing `n`', 1, function() { if (!isNpm) { var actual = _(array).sample(2); ok(actual instanceof _); } else { skipTest(); } }); test('should not chain when arguments are not provided', 1, function() { if (!isNpm) { var actual = _(array).sample(); ok(_.contains(array, actual)); } else { skipTest(); } }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection`', 2, function() { var actual = _.sample(collection); ok(_.contains(collection, actual)); 
actual = _.sample(collection, 2); ok(actual[0] !== actual[1] && _.contains(collection, actual[0]) && _.contains(collection, actual[1])); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.shuffle'); (function() { var array = [1, 2, 3], object = { 'a': 1, 'b': 2, 'c': 3 }; test('should return a new array', 1, function() { notStrictEqual(_.shuffle(array), array); }); test('should contain the same elements after a collection is shuffled', 2, function() { deepEqual(_.shuffle(array).sort(), array); deepEqual(_.shuffle(object).sort(), array); }); test('should shuffle an object', 1, function() { var actual = _.shuffle(object); deepEqual(actual.sort(), array); }); test('should treat number values for `collection` as empty', 1, function() { deepEqual(_.shuffle(1), []); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection`', 1, function() { var actual = _.shuffle(collection); deepEqual(actual.sort(), ['a','b', 'c']); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.size'); (function() { var args = arguments, array = [1, 2, 3]; test('should return the number of own enumerable properties of an object', 1, function() { strictEqual(_.size({ 'one': 1, 'two': 2, 'three': 3 }), 3); }); test('should return the length of an array', 1, function() { strictEqual(_.size(array), 3); }); test('should accept a falsey `object` argument', 1, function() { var expected = _.map(falsey, _.constant(0)); var actual = _.map(falsey, function(value, index) { try { return index ? 
_.size(value) : _.size(); } catch(e) { } }); deepEqual(actual, expected); }); test('should work with `arguments` objects (test in IE < 9)', 1, function() { strictEqual(_.size(args), 3); }); test('should work with jQuery/MooTools DOM query collections', 1, function() { function Foo(elements) { push.apply(this, elements); } Foo.prototype = { 'length': 0, 'splice': Array.prototype.splice }; strictEqual(_.size(new Foo(array)), 3); }); test('should not treat objects with negative lengths as array-like', 1, function() { strictEqual(_.size({ 'length': -1 }), 1); }); test('should not treat objects with lengths larger than `maxSafeInteger` as array-like', 1, function() { strictEqual(_.size({ 'length': maxSafeInteger + 1 }), 1); }); test('should not treat objects with non-number lengths as array-like', 1, function() { strictEqual(_.size({ 'length': '0' }), 1); }); test('fixes the JScript `[[DontEnum]]` bug (test in IE < 9)', 1, function() { strictEqual(_.size(shadowedObject), 7); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection`', 1, function() { deepEqual(_.size(collection), 3); }); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.slice'); (function() { var array = [1, 2, 3]; test('should work with a positive `start`', 1, function() { deepEqual(_.slice(array, 1), [2, 3]); }); test('should work with a `start` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(start) { deepEqual(_.slice(array, start), []); }); }); test('should treat falsey `start` values as `0`', 1, function() { var expected = _.map(falsey, _.constant(array)); var actual = _.map(falsey, function(start) { return _.slice(array, start); }); deepEqual(actual, expected); }); test('should work with a negative `start`', 1, function() { deepEqual(_.slice(array, -1), [3]); }); test('should work with a negative `start` <= 
negative `array.length`', 3, function() { _.each([-3, -4, -Infinity], function(start) { deepEqual(_.slice(array, start), [1, 2, 3]); }); }); /* `end` handling mirrors Array#slice: values >= array.length are clamped to the array length */ test('should work with a positive `end`', 1, function() { deepEqual(_.slice(array, 0, 1), [1]); }); test('should work with a `end` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(end) { deepEqual(_.slice(array, 0, end), [1, 2, 3]); }); }); /* unlike `start`, an `undefined` `end` means "slice to the end", so it is excluded from the falsey-as-0 expectation below */ test('should treat falsey `end` values, except `undefined`, as `0`', 1, function() { var expected = _.map(falsey, function(value) { return value === undefined ? array : []; }); var actual = _.map(falsey, function(end) { return _.slice(array, 0, end); }); deepEqual(actual, expected); }); test('should work with a negative `end`', 1, function() { deepEqual(_.slice(array, 0, -1), [1, 2]); }); test('should work with a negative `end` <= negative `array.length`', 3, function() { _.each([-3, -4, -Infinity], function(end) { deepEqual(_.slice(array, 0, end), []); }); }); /* string and NaN bounds are coerced to finite numbers before slicing */ test('should coerce `start` and `end` to finite numbers', 1, function() { var actual = [_.slice(array, '0', 1), _.slice(array, 0, '1'), _.slice(array, '1'), _.slice(array, NaN, 1), _.slice(array, 1, NaN)]; deepEqual(actual, [[1], [1], [2, 3], [1], []]); }); }()); /*--------------------------------------------------------------------------*/ /* _.some: truthy-for-any predicate tests; the `empties` and `falsey` fixtures are defined earlier in this file — TODO confirm their exact contents against the file header */ QUnit.module('lodash.some'); (function() { test('should return `false` for empty or falsey collections', 1, function() { var expected = _.map(empties, _.constant(false)); var actual = _.map(empties, function(value) { try { return _.some(value, _.identity); } catch(e) { } }); deepEqual(actual, expected); }); test('should return `true` if the callback returns truthy for any element in the collection', 2, function() { strictEqual(_.some([false, 1, ''], _.identity), true); strictEqual(_.some([null, 'x', 0], _.identity), true); }); test('should return `false` if the callback returns falsey for all elements in the collection', 2, function() {
strictEqual(_.some([false, false, false], _.identity), false); strictEqual(_.some([null, 0, ''], _.identity), false); }); test('should return `true` as soon as the `callback` result is truthy', 1, function() { strictEqual(_.some([null, true, null], _.identity), true); }); test('should use `_.identity` when no callback is provided', 2, function() { strictEqual(_.some([0, 1]), true); strictEqual(_.some([0, 0]), false); }); test('should be aliased', 1, function() { strictEqual(_.any, _.some); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.sortBy'); (function() { function Pair(a, b, c) { this.a = a; this.b = b; this.c = c; } var objects = [ { 'a': 'x', 'b': 3 }, { 'a': 'y', 'b': 4 }, { 'a': 'x', 'b': 1 }, { 'a': 'y', 'b': 2 } ]; var stableOrder = [ new Pair(1, 1, 1), new Pair(1, 2, 1), new Pair(1, 1, 1), new Pair(1, 2, 1), new Pair(1, 3, 1), new Pair(1, 4, 1), new Pair(1, 5, 1), new Pair(1, 6, 1), new Pair(2, 1, 2), new Pair(2, 2, 2), new Pair(2, 3, 2), new Pair(2, 4, 2), new Pair(2, 5, 2), new Pair(2, 6, 2), new Pair(undefined, 1, 1), new Pair(undefined, 2, 1), new Pair(undefined, 3, 1), new Pair(undefined, 4, 1), new Pair(undefined, 5, 1), new Pair(undefined, 6, 1) ]; test('should sort in ascending order', 1, function() { var actual = _.pluck(_.sortBy(objects, function(object) { return object.b; }), 'b'); deepEqual(actual, [1, 2, 3, 4]); }); test('should perform a stable sort (test in IE > 8, Opera, and V8)', 1, function() { var actual = _.sortBy(stableOrder, function(pair) { return pair.a; }); deepEqual(actual, stableOrder); }); test('should work with `undefined` values', 1, function() { var array = [undefined, 4, 1, undefined, 3, 2]; deepEqual(_.sortBy(array, _.identity), [1, 2, 3, 4, undefined, undefined]); }); test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.sortBy([3, 2, 1]); deepEqual(actual, [1, 2, 3]); }); test('should pass the correct `callback` 
arguments', 1, function() { var args; _.sortBy(objects, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [objects[0], 0, objects]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.sortBy([1, 2, 3], function(num) { return this.sin(num); }, Math); deepEqual(actual, [3, 1, 2]); }); test('should work with a string for `callback`', 1, function() { var actual = _.pluck(_.sortBy(objects, 'b'), 'b'); deepEqual(actual, [1, 2, 3, 4]); }); test('should work with an object for `collection`', 1, function() { var actual = _.sortBy({ 'a': 1, 'b': 2, 'c': 3 }, function(num) { return Math.sin(num); }); deepEqual(actual, [3, 1, 2]); }); test('should treat number values for `collection` as empty', 1, function() { deepEqual(_.sortBy(1), []); }); test('should support sorting by an array of properties', 1, function() { var actual = _.sortBy(objects, ['a', 'b']); deepEqual(actual, [objects[2], objects[0], objects[3], objects[1]]); }); test('should perform a stable sort when sorting by multiple properties (test in IE > 8, Opera, and V8)', 1, function() { var actual = _.sortBy(stableOrder, ['a', 'c']); deepEqual(actual, stableOrder); }); test('should coerce arrays returned from a callback', 1, function() { var actual = _.sortBy(objects, function(object) { var result = [object.a, object.b]; result.toString = function() { return String(this[0]); }; return result; }); deepEqual(actual, [objects[0], objects[2], objects[1], objects[3]]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.sortedIndex'); (function() { var array = [20, 30, 50], objects = [{ 'x': 20 }, { 'x': 30 }, { 'x': 50 }]; test('should return the insert index of a given value', 2, function() { strictEqual(_.sortedIndex(array, 40), 2); strictEqual(_.sortedIndex(array, 30), 1); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.sortedIndex(array, 40, function() { args || (args = 
slice.call(arguments)); }); deepEqual(args, [40]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.sortedIndex(array, 40, function(num) { return this[num]; }, { '20': 20, '30': 30, '40': 40 }); strictEqual(actual, 2); }); test('should work with a string for `callback`', 1, function() { var actual = _.sortedIndex(objects, { 'x': 40 }, 'x'); strictEqual(actual, 2); }); test('supports arrays with lengths larger than `Math.pow(2, 31) - 1`', 1, function() { var length = Math.pow(2, 32) - 1, index = length - 1, array = Array(length), steps = 0; if (array.length == length) { array[index] = index; _.sortedIndex(array, index, function() { steps++; }); strictEqual(steps, 33); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.support'); (function() { test('should contain properties with boolean values', 1, function() { ok(_.every(_.values(_.support), function(value) { return value === true || value === false; })); }); test('should not contain minified properties (test production builds)', 1, function() { var props = [ 'argsClass', 'argsObject', 'dom', 'enumErrorProps', 'enumPrototypes', 'fastBind', 'funcDecomp', 'funcNames', 'nodeClass', 'nonEnumArgs', 'nonEnumShadows', 'nonEnumStrings', 'ownLast', 'spliceObjects', 'unindexedChars' ]; ok(_.isEmpty(_.difference(_.keys(_.support), props))); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.startsWith'); (function() { var string = 'abc'; test('should return `true` if a string starts with `target`', 1, function() { strictEqual(_.startsWith(string, 'a'), true); }); test('should return `false` if a string does not start with `target`', 1, function() { strictEqual(_.startsWith(string, 'b'), false); }); test('should work with a `position` argument', 1, function() { strictEqual(_.startsWith(string, 'b', 1), true); }); test('should work with `position` >= 
`string.length`', 4, function() { _.each([3, 5, maxSafeInteger, Infinity], function(position) { strictEqual(_.startsWith(string, 'a', position), false); }); }); /* falsey `position` values (0, '', null, undefined, NaN, false) are treated as index 0 */ test('should treat falsey `position` values as `0`', 1, function() { var expected = _.map(falsey, _.constant(true)); var actual = _.map(falsey, function(position) { return _.startsWith(string, 'a', position); }); deepEqual(actual, expected); }); test('should treat a negative `position` as `0`', 6, function() { _.each([-1, -3, -Infinity], function(position) { strictEqual(_.startsWith(string, 'a', position), true); strictEqual(_.startsWith(string, 'b', position), false); }); }); /* the empty string is a prefix of every string at every position */ test('should always return `true` when `target` is an empty string regardless of `position`', 1, function() { ok(_.every([-Infinity, NaN, -3, -1, 0, 1, 2, 3, 5, maxSafeInteger, Infinity], function(position) { return _.startsWith(string, '', position, true); })); }); }()); /*--------------------------------------------------------------------------*/ /* shared coercion tests for both affix methods; `chr` is the character each method should find ('a' for startsWith, 'c' for endsWith) and `string`/`target`/`position` arguments are wrapped in objects to exercise ToString/ToNumber coercion */ QUnit.module('lodash.startsWith and lodash.endsWith'); _.each(['startsWith', 'endsWith'], function(methodName) { var func = _[methodName], isEndsWith = methodName == 'endsWith', chr = isEndsWith ? 'c' : 'a', string = 'abc'; test('`_.' + methodName + '` should coerce `string` to a string', 2, function() { strictEqual(func(Object(string), chr), true); strictEqual(func({ 'toString': _.constant(string) }, chr), true); }); test('`_.' + methodName + '` should coerce `target` to a string', 2, function() { strictEqual(func(string, Object(chr)), true); strictEqual(func(string, { 'toString': _.constant(chr) }), true); }); test('`_.' + methodName + '` should coerce `position` to a number', 2, function() { var position = isEndsWith ?
2 : 1; strictEqual(func(string, 'b', Object(position)), true); strictEqual(func(string, 'b', { 'toString': _.constant(String(position)) }), true); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.tap'); (function() { test('should intercept and return the given value', 2, function() { if (!isNpm) { var intercepted, array = [1, 2, 3]; var actual = _.tap(array, function(value) { intercepted = value; }); strictEqual(actual, array); strictEqual(intercepted, array); } else { skipTest(2); } }); test('should return intercept unwrapped values and return wrapped values when chaining', 2, function() { if (!isNpm) { var intercepted, array = [1, 2, 3]; var actual = _(array).tap(function(value) { intercepted = value; value.pop(); }); ok(actual instanceof _); strictEqual(intercepted, array); } else { skipTest(2); } }); test('should support the `thisArg` argument', 1, function() { if (!isNpm) { var array = [1, 2]; var actual = _(array.slice()).tap(function(value) { value.push(this[0]); }, array); deepEqual(actual.value(), [1, 2, 1]); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.template'); (function() { test('should escape values in "escape" delimiters', 1, function() { var escaped = '<p>&amp;&lt;&gt;&quot;&#39;\/</p>', unescaped = '&<>"\'\/'; var compiled = _.template('<p><%- value %></p>'); strictEqual(compiled({ 'value': unescaped }), escaped); }); test('should evaluate JavaScript in "evaluate" delimiters', 1, function() { var compiled = _.template( '<ul><%\ for (var key in collection) {\ %><li><%= collection[key] %></li><%\ } %></ul>' ); var actual = compiled({ 'collection': { 'a': 'A', 'b': 'B' } }); strictEqual(actual, '<ul><li>A</li><li>B</li></ul>'); }); test('should interpolate data object properties', 1, function() { var compiled = _.template('<%= a %>BC'); strictEqual(compiled({ 'a': 'A' }), 'ABC'); }); test('should support 
escaped values in "interpolation" delimiters', 1, function() { var compiled = _.template('<%= a ? "a=\\"A\\"" : "" %>'); strictEqual(compiled({ 'a': true }), 'a="A"'); }); test('should work with "interpolate" delimiters containing ternary operators', 1, function() { var compiled = _.template('<%= value ? value : "b" %>'), data = { 'value': 'a' }; strictEqual(compiled(data), 'a'); }); test('should work with "interpolate" delimiters containing global values', 1, function() { var compiled = _.template('<%= typeof Math.abs %>'); try { var actual = compiled(); } catch(e) { } strictEqual(actual, 'function'); }); test('should work with complex "interpolate" delimiters', 22, function() { _.each({ '<%= a + b %>': '3', '<%= b - a %>': '1', '<%= a = b %>': '2', '<%= !a %>': 'false', '<%= ~a %>': '-2', '<%= a * b %>': '2', '<%= a / b %>': '0.5', '<%= a % b %>': '1', '<%= a >> b %>': '0', '<%= a << b %>': '4', '<%= a & b %>': '0', '<%= a ^ b %>': '3', '<%= a | b %>': '3', '<%= {}.toString.call(0) %>': '[object Number]', '<%= a.toFixed(2) %>': '1.00', '<%= obj["a"] %>': '1', '<%= delete a %>': 'true', '<%= "a" in obj %>': 'true', '<%= obj instanceof Object %>': 'true', '<%= new Boolean %>': 'false', '<%= typeof a %>': 'number', '<%= void a %>': '' }, function(value, key) { var compiled = _.template(key), data = { 'a': 1, 'b': 2 }; strictEqual(compiled(data), value, key); }); }); test('should parse ES6 template delimiters', 2, function() { var data = { 'value': 2 }; strictEqual(_.template('1${value}3', data), '123'); strictEqual(_.template('${"{" + value + "\\}"}', data), '{2}'); }); test('should not reference `_.escape` when "escape" delimiters are not used', 1, function() { var compiled = _.template('<%= typeof __e %>'); strictEqual(compiled({}), 'undefined'); }); test('should allow referencing variables declared in "evaluate" delimiters from other delimiters', 1, function() { var compiled = _.template('<% var b = a; %><%= b.value %>'), data = { 'a': { 'value': 1 } }; 
strictEqual(compiled(data), '1'); }); test('should support single line comments in "evaluate" delimiters (test production builds)', 1, function() { var compiled = _.template('<% // comment %><% if (value) { %>yap<% } else { %>nope<% } %>'); strictEqual(compiled({ 'value': true }), 'yap'); }); test('should work with custom `_.templateSettings` delimiters', 1, function() { var settings = _.clone(_.templateSettings); _.assign(_.templateSettings, { 'escape': /\{\{-([\s\S]+?)\}\}/g, 'evaluate': /\{\{([\s\S]+?)\}\}/g, 'interpolate': /\{\{=([\s\S]+?)\}\}/g }); var compiled = _.template('<ul>{{ _.each(collection, function(value, index) { }}<li>{{= index }}: {{- value }}</li>{{ }); }}</ul>'), expected = '<ul><li>0: a &amp; A</li><li>1: b &amp; B</li></ul>'; strictEqual(compiled({ 'collection': ['a & A', 'b & B'] }), expected); _.assign(_.templateSettings, settings); }); test('should work with `_.templateSettings` delimiters containing special characters', 1, function() { var settings = _.clone(_.templateSettings); _.assign(_.templateSettings, { 'escape': /<\?-([\s\S]+?)\?>/g, 'evaluate': /<\?([\s\S]+?)\?>/g, 'interpolate': /<\?=([\s\S]+?)\?>/g }); var compiled = _.template('<ul><? _.each(collection, function(value, index) { ?><li><?= index ?>: <?- value ?></li><? 
}); ?></ul>'), expected = '<ul><li>0: a &amp; A</li><li>1: b &amp; B</li></ul>'; strictEqual(compiled({ 'collection': ['a & A', 'b & B'] }), expected); _.assign(_.templateSettings, settings); }); test('should work with no delimiters', 1, function() { var expected = 'abc'; strictEqual(_.template(expected, {}), expected); }); test('should support the "imports" option', 1, function() { var options = { 'imports': { 'a': 1 } }, compiled = _.template('<%= a %>', null, options); strictEqual(compiled({}), '1'); }); test('should support the "variable" options', 1, function() { var compiled = _.template( '<% _.each( data.a, function( value ) { %>' + '<%= value.valueOf() %>' + '<% }) %>', null, { 'variable': 'data' } ); try { var data = { 'a': [1, 2, 3] }; strictEqual(compiled(data), '123'); } catch(e) { ok(false); } }); test('should use a `with` statement by default', 1, function() { var compiled = _.template('<%= index %><%= collection[index] %><% _.each(collection, function(value, index) { %><%= index %><% }); %>'), actual = compiled({ 'index': 1, 'collection': ['a', 'b', 'c'] }); strictEqual(actual, '1b012'); }); test('should work correctly with `this` references', 2, function() { var compiled = _.template('a<%= this.String("b") %>c'); strictEqual(compiled(), 'abc'); var object = { 'b': 'B' }; object.compiled = _.template('A<%= this.b %>C', null, { 'variable': 'obj' }); strictEqual(object.compiled(), 'ABC'); }); test('should work with backslashes', 1, function() { var compiled = _.template('<%= a %> \\b'); strictEqual(compiled({ 'a': 'A' }), 'A \\b'); }); test('should work with escaped characters in string literals', 2, function() { var compiled = _.template('<% print("\'\\n\\r\\t\\u2028\\u2029\\\\") %>'); strictEqual(compiled(), "'\n\r\t\u2028\u2029\\"); compiled = _.template('\'\n\r\t<%= a %>\u2028\u2029\\"'); strictEqual(compiled({ 'a': 'A' }), '\'\n\r\tA\u2028\u2029\\"'); }); test('should handle \\u2028 & \\u2029 characters', 1, function() { var compiled = 
_.template('\u2028<%= "\\u2028\\u2029" %>\u2029'); strictEqual(compiled(), '\u2028\u2028\u2029\u2029'); }); test('should work with statements containing quotes', 1, function() { var compiled = _.template("<%\ if (a == 'A' || a == \"a\") {\ %>'a',\"A\"<%\ } %>" ); strictEqual(compiled({ 'a': 'A' }), "'a',\"A\""); }); test('should work with templates containing newlines and comments', 1, function() { var compiled = _.template('<%\n\ // comment\n\ if (value) { value += 3; }\n\ %><p><%= value %></p>' ); strictEqual(compiled({ 'value': 3 }), '<p>6</p>'); }); test('should not error with IE conditional comments enabled (test with development build)', 1, function() { var compiled = _.template(''), pass = true; /*@cc_on @*/ try { compiled(); } catch(e) { pass = false; } ok(pass); }); test('should tokenize delimiters', 1, function() { var compiled = _.template('<span class="icon-<%= type %>2"></span>'), data = { 'type': 1 }; strictEqual(compiled(data), '<span class="icon-12"></span>'); }); test('should evaluate delimiters once', 1, function() { var actual = [], compiled = _.template('<%= func("a") %><%- func("b") %><% func("c") %>'); compiled({ 'func': function(value) { actual.push(value); } }); deepEqual(actual, ['a', 'b', 'c']); }); test('should match delimiters before escaping text', 1, function() { var compiled = _.template('<<\n a \n>>', null, { 'evaluate': /<<(.*?)>>/g }); strictEqual(compiled(), '<<\n a \n>>'); }); test('should resolve `null` and `undefined` values to an empty string', 4, function() { var compiled = _.template('<%= a %><%- a %>'); strictEqual(compiled({ 'a': null }), ''); strictEqual(compiled({ 'a': undefined }), ''); compiled = _.template('<%= a.b %><%- a.b %>'); strictEqual(compiled({ 'a': {} }), ''); strictEqual(compiled({ 'a': {} }), ''); }); test('should parse delimiters with newlines', 1, function() { var expected = '<<\nprint("<p>" + (value ? 
"yes" : "no") + "</p>")\n>>', compiled = _.template(expected, null, { 'evaluate': /<<(.+?)>>/g }), data = { 'value': true }; strictEqual(compiled(data), expected); }); test('should support recursive calls', 1, function() { var compiled = _.template('<%= a %><% a = _.template(c, obj) %><%= a %>'), data = { 'a': 'A', 'b': 'B', 'c': '<%= b %>' }; strictEqual(compiled(data), 'AB'); }); test('should coerce `text` argument to a string', 1, function() { var data = { 'a': 1 }, object = { 'toString': function() { return '<%= a %>'; } }; strictEqual(_.template(object, data), '1'); }); test('should not augment the `options` object', 1, function() { var options = {}; _.template('', {}, options); deepEqual(options, {}); }); test('should not modify `_.templateSettings` when `options` are provided', 2, function() { ok(!('a' in _.templateSettings)); _.template('', {}, { 'a': 1 }); ok(!('a' in _.templateSettings)); delete _.templateSettings.a; }); test('should not error for non-object `data` and `options` values', 2, function() { var pass = true; try { _.template('', 1); } catch(e) { pass = false; } ok(pass); pass = true; try { _.template('', 1, 1); } catch(e) { pass = false; } ok(pass); }); test('should provide the template source when a SyntaxError occurs', 1, function() { try { _.template('<% if x %>'); } catch(e) { var source = e.source; } ok(/__p/.test(source)); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.truncate'); (function() { var string = 'hi-diddly-ho there, neighborino'; test('should truncate to a length of `30` by default', 1, function() { strictEqual(_.truncate(string), 'hi-diddly-ho there, neighbo...'); }); test('should not truncate if `string` is <= `length`', 2, function() { strictEqual(_.truncate(string, string.length), string); strictEqual(_.truncate(string, string.length + 2), string); }); test('should truncate string the given length', 1, function() { strictEqual(_.truncate(string, 24), 
'hi-diddly-ho there, n...'); }); /* options-object forms of _.truncate: `omission`, `length`, and `separator` (string or RegExp) */ test('should support a `omission` option', 1, function() { strictEqual(_.truncate(string, { 'omission': ' [...]' }), 'hi-diddly-ho there, neig [...]'); }); test('should support a `length` option', 1, function() { strictEqual(_.truncate(string, { 'length': 4 }), 'h...'); }); test('should support a `separator` option', 2, function() { strictEqual(_.truncate(string, { 'length': 24, 'separator': ' ' }), 'hi-diddly-ho there,...'); strictEqual(_.truncate(string, { 'length': 24, 'separator': /,? +/ }), 'hi-diddly-ho there...'); }); /* lengths <= 0 leave only the omission text */ test('should treat negative `length` as `0`', 4, function() { _.each([0, -2], function(length) { strictEqual(_.truncate(string, length), '...'); strictEqual(_.truncate(string, { 'length': length }), '...'); }); }); test('should coerce `length` to a number', 4, function() { _.each(['', '4'], function(length, index) { var actual = index ? 'h...' : '...'; strictEqual(_.truncate(string, length), actual); strictEqual(_.truncate(string, { 'length': { 'valueOf': _.constant(length) } }), actual); }); }); test('should coerce `string` to a string', 2, function() { strictEqual(_.truncate(Object(string), 4), 'h...'); strictEqual(_.truncate({ 'toString': _.constant(string) }, 5), 'hi...'); }); }()); /*--------------------------------------------------------------------------*/ /* _.throttle: timer-based async tests; NOTE(review): each test is skipped when isRhino && isModularize — presumably timers are unreliable in that build, confirm against the file header */ QUnit.module('lodash.throttle'); (function() { asyncTest('should throttle a function', 2, function() { if (!(isRhino && isModularize)) { var count = 0; var throttled = _.throttle(function() { count++; }, 32); throttled(); throttled(); throttled(); var lastCount = count; ok(count > 0); setTimeout(function() { ok(count > lastCount); QUnit.start(); }, 64); } else { skipTest(2); QUnit.start(); } }); asyncTest('subsequent calls should return the result of the first call', 5, function() { if (!(isRhino && isModularize)) { var throttled = _.throttle(_.identity, 32), result = [throttled('a'), throttled('b')]; deepEqual(result, ['a', 'a']);
setTimeout(function() { var result = [throttled('x'), throttled('y')]; notEqual(result[0], 'a'); notStrictEqual(result[0], undefined); notEqual(result[1], 'y'); notStrictEqual(result[1], undefined); QUnit.start(); }, 64); } else { skipTest(5); QUnit.start(); } }); asyncTest('should clear timeout when `func` is called', 1, function() { if (!isModularize) { var callCount = 0, dateCount = 0; var getTime = function() { return ++dateCount < 3 ? +new Date : Infinity; }; var lodash = _.runInContext(_.assign({}, root, { 'Date': function() { return { 'getTime': getTime, 'valueOf': getTime }; } })); var throttled = lodash.throttle(function() { callCount++; }, 32); throttled(); throttled(); throttled(); setTimeout(function() { strictEqual(callCount, 2); QUnit.start(); }, 64); } else { skipTest(); QUnit.start(); } }); asyncTest('should not trigger a trailing call when invoked once', 2, function() { if (!(isRhino && isModularize)) { var count = 0, throttled = _.throttle(function() { count++; }, 32); throttled(); strictEqual(count, 1); setTimeout(function() { strictEqual(count, 1); QUnit.start(); }, 64); } else { skipTest(2); QUnit.start(); } }); _.times(2, function(index) { test('should trigger a call when invoked repeatedly' + (index ? ' and `leading` is `false`' : ''), 1, function() { if (!(isRhino && isModularize)) { var count = 0, limit = 256, options = index ? 
{ 'leading': false } : {}; var throttled = _.throttle(function() { count++; }, 32, options); var start = +new Date; while ((new Date - start) < limit) { throttled(); } ok(count > 1); } else { skipTest(); } }); }); asyncTest('should apply default options correctly', 3, function() { if (!(isRhino && isModularize)) { var count = 0; var throttled = _.throttle(function(value) { count++; return value; }, 32, {}); strictEqual(throttled('a'), 'a'); strictEqual(throttled('b'), 'a'); setTimeout(function() { strictEqual(count, 2); QUnit.start(); }, 256); } else { skipTest(3); QUnit.start(); } }); test('should support a `leading` option', 4, function() { if (!(isRhino && isModularize)) { _.each([true, { 'leading': true }], function(options) { var withLeading = _.throttle(_.identity, 32, options); strictEqual(withLeading('a'), 'a'); }); _.each([false, { 'leading': false }], function(options) { var withoutLeading = _.throttle(_.identity, 32, options); strictEqual(withoutLeading('a'), undefined); }); } else { skipTest(4); } }); asyncTest('should support a `trailing` option', 6, function() { if (!(isRhino && isModularize)) { var withCount = 0, withoutCount = 0; var withTrailing = _.throttle(function(value) { withCount++; return value; }, 64, { 'trailing': true }); var withoutTrailing = _.throttle(function(value) { withoutCount++; return value; }, 64, { 'trailing': false }); strictEqual(withTrailing('a'), 'a'); strictEqual(withTrailing('b'), 'a'); strictEqual(withoutTrailing('a'), 'a'); strictEqual(withoutTrailing('b'), 'a'); setTimeout(function() { strictEqual(withCount, 2); strictEqual(withoutCount, 1); QUnit.start(); }, 256); } else { skipTest(6); QUnit.start(); } }); asyncTest('should not update `lastCalled`, at the end of the timeout, when `trailing` is `false`', 1, function() { if (!(isRhino && isModularize)) { var count = 0; var throttled = _.throttle(function() { count++; }, 64, { 'trailing': false }); throttled(); throttled(); setTimeout(function() { throttled(); 
throttled(); }, 96); setTimeout(function() { ok(count > 1); QUnit.start(); }, 192); } else { skipTest(); QUnit.start(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.debounce and lodash.throttle'); _.each(['debounce', 'throttle'], function(methodName) { var func = _[methodName], isThrottle = methodName == 'throttle'; test('_.' + methodName + ' should not error for non-object `options` values', 1, function() { var pass = true; try { func(_.noop, 32, 1); } catch(e) { pass = false; } ok(pass); }); asyncTest('_.' + methodName + ' should call `func` with the correct `this` binding', 1, function() { if (!(isRhino && isModularize)) { var object = { 'funced': func(function() { actual.push(this); }, 32) }; var actual = [], expected = _.times(isThrottle ? 2 : 1, _.constant(object)); object.funced(); if (isThrottle) { object.funced(); } setTimeout(function() { deepEqual(actual, expected); QUnit.start(); }, 64); } else { skipTest(); QUnit.start(); } }); asyncTest('_.' + methodName + ' supports recursive calls', 2, function() { if (!(isRhino && isModularize)) { var actual = [], args = _.map(['a', 'b', 'c'], function(chr) { return [{}, chr]; }), length = isThrottle ? 2 : 1, expected = args.slice(0, length), queue = args.slice(); var funced = func(function() { var current = [this]; push.apply(current, arguments); actual.push(current); var next = queue.shift(); if (next) { funced.call(next[0], next[1]); } }, 32); var next = queue.shift(); funced.call(next[0], next[1]); deepEqual(actual, expected.slice(0, length - 1)); setTimeout(function() { deepEqual(actual, expected); QUnit.start(); }, 32); } else { skipTest(2); QUnit.start(); } }); asyncTest('_.' + methodName + ' should work if the system time is set backwards', 1, function() { if (!isModularize) { var callCount = 0, dateCount = 0; var getTime = function() { return ++dateCount < 2 ? 
+new Date : +new Date(2012, 3, 23, 23, 27, 18); }; var lodash = _.runInContext(_.assign({}, root, { 'Date': function() { return { 'getTime': getTime, 'valueOf': getTime }; } })); var funced = lodash[methodName](function() { callCount++; }, 32); funced(); setTimeout(function() { funced(); strictEqual(callCount, isThrottle ? 2 : 1); QUnit.start(); }, 64); } else { skipTest(); QUnit.start(); } }); asyncTest('_.' + methodName + ' should support cancelling delayed calls', 1, function() { if (!(isRhino && isModularize)) { var callCount = 0; var funced = func(function() { callCount++; }, 32, { 'leading': false }); funced(); funced.cancel(); setTimeout(function() { strictEqual(callCount, 0); QUnit.start(); }, 64); } else { skipTest(); QUnit.start(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.toArray'); (function() { test('should return the values of objects', 1, function() { var array = [1, 2, 3], object = { 'a': 1, 'b': 2, 'c': 3 }; deepEqual(_.toArray(object), array); }); test('should work with a string for `collection` (test in Opera < 10.52)', 2, function() { deepEqual(_.toArray('abc'), ['a', 'b', 'c']); deepEqual(_.toArray(Object('abc')), ['a', 'b', 'c']); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.slice and lodash.toArray'); _.each(['slice', 'toArray'], function(methodName) { var args = (function() { return arguments; }(1, 2, 3)), array = [1, 2, 3], func = _[methodName]; test('should return a dense array', 3, function() { var sparse = Array(3); sparse[1] = 2; var actual = func(sparse); ok('0' in actual); ok('2' in actual); deepEqual(actual, sparse); }); test('should treat array-like objects like arrays', 2, function() { var object = { '0': 'a', '1': 'b', '2': 'c', 'length': 3 }; deepEqual(func(object), ['a', 'b', 'c']); deepEqual(func(args), array); }); test('should return a shallow clone of arrays', 2, function() { var actual = 
func(array); notStrictEqual(actual, array); deepEqual(func(array), array); }); test('should work with a node list for `collection` (test in IE < 9)', 1, function() { if (document) { try { var nodeList = document.getElementsByTagName('body'), actual = func(nodeList); } catch(e) { } deepEqual(actual, [body]); } else { skipTest(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.times'); (function() { test('should rollover large `n` values', 1, function() { var actual = _.times(Math.pow(2, 32) + 1); deepEqual(actual, [0]); }); test('should coerce non-finite `n` values to `0`', 3, function() { _.each([-Infinity, NaN, Infinity], function(n) { deepEqual(_.times(n), []); }); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.times(1, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [0]); }); test('should support the `thisArg` argument', 1, function() { var expect = [1, 2, 3]; var actual = _.times(3, function(num) { return this[num]; }, expect); deepEqual(actual, expect); }); test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.times(3); deepEqual(actual, [0, 1, 2]); }); test('should return an array of the results of each `callback` execution', 1, function() { deepEqual(_.times(3, function(n) { return n * 2; }), [0, 2, 4]); }); test('should return an empty array for falsey and negative `n` arguments', 1, function() { var values = falsey.concat(-1, -Infinity), expected = _.map(values, _.constant([])); var actual = _.map(values, function(value, index) { return index ? 
_.times(value) : _.times(); }); deepEqual(actual, expected); }); test('should return a wrapped value when chaining', 2, function() { if (!isNpm) { var actual = _(3).times(); ok(actual instanceof _); deepEqual(actual.value(), [0, 1, 2]); } else { skipTest(2); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.transform'); (function() { test('should produce an that is an instance of the given object\'s constructor', 2, function() { function Foo() { this.a = 1; this.b = 2; this.c = 3; } var actual = _.transform(new Foo, function(result, value, key) { result[key] = value * value; }); ok(actual instanceof Foo); deepEqual(_.clone(actual), { 'a': 1, 'b': 4, 'c': 9 }); }); test('should treat sparse arrays as dense', 1, function() { var actual = _.transform(Array(1), function(result, value, index) { result[index] = String(value); }); deepEqual(actual, ['undefined']); }); test('should work without a callback argument', 1, function() { function Foo() {} ok(_.transform(new Foo) instanceof Foo); }); test('should check that `object` is an object before using it as the `accumulator` `[[Prototype]]', 1, function() { ok(!(_.transform(1) instanceof Number)); }); _.each({ 'array': [1, 2, 3], 'object': { 'a': 1, 'b': 2, 'c': 3 } }, function(object, key) { test('should pass the correct `callback` arguments when transforming an ' + key, 2, function() { var args; _.transform(object, function() { args || (args = slice.call(arguments)); }); var first = args[0]; if (key == 'array') { ok(first !== object && _.isArray(first)); deepEqual(args, [first, 1, 0, object]); } else { ok(first !== object && _.isPlainObject(first)); deepEqual(args, [first, 1, 'a', object]); } }); test('should support the `thisArg` argument when transforming an ' + key, 2, function() { var actual = _.transform(object, function(result, value, key) { result[key] = this[key]; }, null, object); notStrictEqual(actual, object); deepEqual(actual, object); }); }); 
}()); /*--------------------------------------------------------------------------*/ QUnit.module('trim methods'); _.each(['trim', 'trimLeft', 'trimRight'], function(methodName, index) { var func = _[methodName]; var parts = []; if (index != 2) { parts.push('leading'); } if (index != 1) { parts.push('trailing'); } parts = parts.join(' and '); test('`_.' + methodName + '` should remove ' + parts + ' whitespace', 1, function() { var string = whitespace + 'a b c' + whitespace, expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : ''); strictEqual(func(string), expected); }); test('`_.' + methodName + '` should not remove non-whitespace characters', 1, function() { var problemChars = '\x85\u200b\ufffe', string = problemChars + 'a b c' + problemChars; strictEqual(func(string), string); }); test('`_.' + methodName + '` should coerce `string` to a string', 1, function() { var object = { 'toString': function() { return whitespace + 'a b c' + whitespace; } }, expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : ''); strictEqual(func(object), expected); }); test('`_.' + methodName + '` should remove ' + parts + ' `chars`', 1, function() { var string = '-_-a-b-c-_-', expected = (index == 2 ? '-_-' : '') + 'a-b-c' + (index == 1 ? '-_-' : ''); strictEqual(func(string, '_-'), expected); }); test('`_.' + methodName + '` should coerce `chars` to a string', 1, function() { var object = { 'toString': function() { return '_-'; } }, string = '-_-a-b-c-_-', expected = (index == 2 ? '-_-' : '') + 'a-b-c' + (index == 1 ? '-_-' : ''); strictEqual(func(string, object), expected); }); test('`_.' + methodName + '` should return an empty string when provided `null`, `undefined`, or empty string and `chars`', 6, function() { _.each([null, '_-'], function(chars) { strictEqual(func(null, chars), ''); strictEqual(func(undefined, chars), ''); strictEqual(func('', chars), ''); }); }); test('`_.' 
+ methodName + '` should work with `null`, `undefined`, or empty string for `chars`', 3, function() { var string = whitespace + 'a b c' + whitespace, expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : ''); strictEqual(func(string, null), expected); strictEqual(func(string, undefined), expected); strictEqual(func(string, ''), string); }); test('`_.' + methodName + '` should return an unwrapped value when chaining', 1, function() { if (!isNpm) { var string = whitespace + 'a b c' + whitespace, expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : ''), actual = _(string)[methodName](); strictEqual(actual, expected); } else { skipTest(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.unescape'); (function() { var escaped = '&amp;&lt;&gt;&quot;&#39;\/', unescaped = '&<>"\'\/'; test('should unescape entities in the correct order', 1, function() { strictEqual(_.unescape('&amp;lt;'), '&lt;'); }); test('should unescape the proper entities', 1, function() { strictEqual(_.unescape(escaped), unescaped); }); test('should not unescape the "&#x2F;" entity', 1, function() { strictEqual(_.unescape('&#x2F;'), '&#x2F;'); }); test('should handle strings with nothing to unescape', 1, function() { strictEqual(_.unescape('abc'), 'abc'); }); test('should unescape the same characters escaped by `_.escape`', 1, function() { strictEqual(_.unescape(_.escape(unescaped)), unescaped); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.union'); (function() { var args = arguments; test('should return the union of the given arrays', 1, function() { var actual = _.union([1, 3, 2], [5, 2, 1, 4], [2, 1]); deepEqual(actual, [1, 3, 2, 5, 4]); }); test('should not flatten nested arrays', 1, function() { var actual = _.union([1, 3, 2], [1, [5]], [2, [4]]); deepEqual(actual, [1, 3, 2, [5], [4]]); }); test('should ignore values that 
are not arrays or `arguments` objects', 3, function() { var array = [0]; deepEqual(_.union(array, 3, null, { '0': 1 }), array); deepEqual(_.union(null, array, null, [2, 1]), [0, 2, 1]); deepEqual(_.union(null, array, null, args), [0, 1, 2, 3]); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.uniq'); (function() { var objects = [{ 'a': 2 }, { 'a': 3 }, { 'a': 1 }, { 'a': 2 }, { 'a': 3 }, { 'a': 1 }]; test('should return unique values of an unsorted array', 1, function() { var array = [2, 3, 1, 2, 3, 1]; deepEqual(_.uniq(array), [2, 3, 1]); }); test('should return unique values of a sorted array', 1, function() { var array = [1, 1, 2, 2, 3]; deepEqual(_.uniq(array), [1, 2, 3]); }); test('should work with `isSorted`', 1, function() { var array = [1, 1, 2, 2, 3]; deepEqual(_.uniq([1, 1, 2, 2, 3], true), [1, 2, 3]); }); test('should work with a callback', 1, function() { var actual = _.uniq(objects, false, function(object) { return object.a; }); deepEqual(actual, objects.slice(0, 3)); }); test('should work with a callback without specifying `isSorted`', 1, function() { var actual = _.uniq(objects, function(object) { return object.a; }); deepEqual(actual, objects.slice(0, 3)); }); test('should support the `thisArg` argument', 1, function() { var actual = _.uniq([1, 2, 1.5, 3, 2.5], function(num) { return this.floor(num); }, Math); deepEqual(actual, [1, 2, 3]); }); test('should perform an unsorted uniq operation when used as a callback for `_.map`', 1, function() { var array = [[2, 1, 2], [1, 2, 1]], actual = _.map(array, _.uniq); deepEqual(actual, [[2, 1], [1, 2]]); }); test('should work with large arrays', 1, function() { var object = {}; var largeArray = _.times(largeArraySize, function(index) { switch (index % 3) { case 0: return 0; case 1: return 'a'; case 2: return object; } }); deepEqual(_.uniq(largeArray), [0, 'a', object]); }); test('should work with large arrays of boolean, `null`, and 
`undefined` values', 1, function() { var array = [], expected = [true, false, null, undefined], count = Math.ceil(largeArraySize / expected.length); _.times(count, function() { push.apply(array, expected); }); deepEqual(_.uniq(array), expected); }); test('should distinguish between numbers and numeric strings', 1, function() { var array = [], expected = ['2', 2, Object('2'), Object(2)], count = Math.ceil(largeArraySize / expected.length); _.times(count, function() { push.apply(array, expected); }); deepEqual(_.uniq(array), expected); }); _.each({ 'an object': ['a'], 'a number': 0, 'a string': '0' }, function(callback, key) { test('should work with ' + key + ' for `callback`', 1, function() { var actual = _.uniq([['a'], ['b'], ['a']], callback); deepEqual(actual, [['a'], ['b']]); }); }); test('should be aliased', 1, function() { strictEqual(_.unique, _.uniq); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.uniqueId'); (function() { test('should generate unique ids', 1, function() { var actual = []; _.times(1000, function() { actual.push(_.uniqueId()); }); strictEqual(_.uniq(actual).length, actual.length); }); test('should return a string value when not passing a prefix argument', 1, function() { strictEqual(typeof _.uniqueId(), 'string'); }); test('should coerce the prefix argument to a string', 1, function() { var actual = [_.uniqueId(3), _.uniqueId(2), _.uniqueId(1)]; ok(/3\d+,2\d+,1\d+/.test(actual)); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.values'); (function() { test('should get the values of an object', 1, function() { var object = { 'a': 1, 'b': 2 }; deepEqual(_.values(object), [1, 2]); }); test('should work with an object that has a `length` property', 1, function() { var object = { '0': 'a', '1': 'b', 'length': 2 }; deepEqual(_.values(object), ['a', 'b', 2]); }); }()); 
/*--------------------------------------------------------------------------*/ QUnit.module('lodash.where'); (function() { var objects = [ { 'a': 1 }, { 'a': 1 }, { 'a': 1, 'b': 2 }, { 'a': 2, 'b': 2 }, { 'a': 3 } ]; test('should filter by `source` properties', 6, function() { deepEqual(_.where(objects, { 'a': 1 }), [{ 'a': 1 }, { 'a': 1 }, { 'a': 1, 'b': 2 }]); deepEqual(_.where(objects, { 'a': 2 }), [{ 'a': 2, 'b': 2 }]); deepEqual(_.where(objects, { 'a': 3 }), [{ 'a': 3 }]); deepEqual(_.where(objects, { 'b': 1 }), []); deepEqual(_.where(objects, { 'b': 2 }), [{ 'a': 1, 'b': 2 }, { 'a': 2, 'b': 2 }]); deepEqual(_.where(objects, { 'a': 1, 'b': 2 }), [{ 'a': 1, 'b': 2 }]); }); test('should not filter by inherited `source` properties', 2, function() { function Foo() {} Foo.prototype = { 'a': 2 }; var source = new Foo; source.b = 2; var expected = [objects[2], objects[3]], actual = _.where(objects, source); deepEqual(actual, expected); ok(_.isEmpty(_.difference(actual, objects))); }); test('should filter by problem JScript properties (test in IE < 9)', 1, function() { var collection = [shadowedObject]; deepEqual(_.where(collection, shadowedObject), [shadowedObject]); }); test('should work with an object for `collection`', 2, function() { var collection = { 'x': { 'a': 1 }, 'y': { 'a': 3 }, 'z': { 'a': 1, 'b': 2 } }; var expected = [collection.x, collection.z], actual = _.where(collection, { 'a': 1 }); deepEqual(actual, expected); ok(_.isEmpty(_.difference(actual, _.values(collection)))); }); test('should work with a function for `source`', 1, function() { function source() {} source.a = 2; deepEqual(_.where(objects, source), [{ 'a': 2, 'b': 2 }]); }); test('should match all elements when provided an empty `source`', 1, function() { var expected = _.map(empties, _.constant(objects)); var actual = _.map(empties, function(value) { var result = _.where(objects, value); return result !== objects && result; }); deepEqual(actual, expected); }); test('should perform a deep 
partial comparison of `source`', 2, function() { var collection = [{ 'a': { 'b': { 'c': 1, 'd': 2 }, 'e': 3 }, 'f': 4 }], expected = collection.slice(), actual = _.where(collection, { 'a': { 'b': { 'c': 1 } } }); deepEqual(actual, expected); ok(_.isEmpty(_.difference(actual, collection))); }); test('should search of arrays for values', 2, function() { var collection = [{ 'a': [1, 2] }], expected = collection.slice(); deepEqual(_.where(collection, { 'a': [] }), []); deepEqual(_.where(collection, { 'a': [2] }), expected); }); test('should perform a partial comparison of *all* objects within arrays of `source`', 2, function() { var collection = [ { 'a': [{ 'b': 1, 'c': 2, 'd': 3 }, { 'b': 4, 'c': 5, 'd': 6 }] }, { 'a': [{ 'b': 1, 'c': 2, 'd': 3 }, { 'b': 4, 'c': 6, 'd': 7 }] } ]; var actual = _.where(collection, { 'a': [{ 'b': 1, 'c': 2 }, { 'b': 4, 'c': 5 }] }); deepEqual(actual, [collection[0]]); ok(_.isEmpty(_.difference(actual, collection))); }); test('should handle a `source` with `undefined` values', 4, function() { var source = { 'b': undefined }, actual = _.where([{ 'a': 1 }, { 'a': 1, 'b': 1 }], source); deepEqual(actual, []); var object = { 'a': 1, 'b': undefined }; actual = _.where([object], source); deepEqual(actual, [object]); source = { 'a': { 'c': undefined } }; actual = _.where([{ 'a': { 'b': 1 } }, { 'a':{ 'b':1 , 'c': 1 } }], source); deepEqual(actual, []); object = { 'a': { 'b': 1, 'c': undefined } }; actual = _.where([object], source); deepEqual(actual, [object]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.without'); (function() { test('should use strict equality to determine the values to reject', 2, function() { var object1 = { 'a': 1 }, object2 = { 'b': 2 }, array = [object1, object2]; deepEqual(_.without(array, { 'a': 1 }), array); deepEqual(_.without(array, object1), [object2]); }); test('should remove all occurrences of each value from an array', 1, function() { var array = 
[1, 2, 3, 1, 2, 3]; deepEqual(_.without(array, 1, 2), [3, 3]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.wrap'); (function() { test('should create a wrapped function', 1, function() { var p = _.wrap(_.escape, function(func, text) { return '<p>' + func(text) + '</p>'; }); strictEqual(p('fred, barney, & pebbles'), '<p>fred, barney, &amp; pebbles</p>'); }); test('should pass the correct `wrapper` arguments', 1, function() { var args; var wrapped = _.wrap(_.noop, function() { args || (args = slice.call(arguments)); }); wrapped(1, 2, 3); deepEqual(args, [_.noop, 1, 2, 3]); }); test('should not set a `this` binding', 1, function() { var p = _.wrap(_.escape, function(func) { return '<p>' + func(this.text) + '</p>'; }); var object = { 'p': p, 'text': 'fred, barney, & pebbles' }; strictEqual(object.p(), '<p>fred, barney, &amp; pebbles</p>'); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.xor'); (function() { var args = arguments; test('should return the symmetric difference of the given arrays', 1, function() { var actual = _.xor([1, 2, 5], [2, 3, 5], [3, 4, 5]); deepEqual(actual, [1, 4, 5]); }); test('should return an array of unique values', 2, function() { var actual = _.xor([1, 1, 2, 5], [2, 2, 3, 5], [3, 4, 5, 5]); deepEqual(actual, [1, 4, 5]); actual = _.xor([1, 1]); deepEqual(actual, [1]); }); test('should return a new array when a single array is provided', 1, function() { var array = [1]; notStrictEqual(_.xor(array), array); }); test('should ignore individual secondary arguments', 1, function() { var array = [0]; deepEqual(_.xor(array, 3, null, { '0': 1 }), array); }); test('should ignore values that are not arrays or `arguments` objects', 3, function() { var array = [1, 2]; deepEqual(_.xor(array, 3, null, { '0': 1 }), array); deepEqual(_.xor(null, array, null, [2, 3]), [1, 3]); deepEqual(_.xor(null, array, null, args), [3]); }); 
test('should return a wrapped value when chaining', 2, function() { if (!isNpm) { var actual = _([1, 2, 3]).xor([5, 2, 1, 4]); ok(actual instanceof _); deepEqual(actual.value(), [3, 5, 4]); } else { skipTest(2); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.zip'); (function() { var object = { 'an empty array': [ [], [] ], '0-tuples': [ [[], []], [] ], '2-tuples': [ [['barney', 'fred'], [36, 40]], [['barney', 36], ['fred', 40]] ], '3-tuples': [ [['barney', 'fred'], [36, 40], [true, false]], [['barney', 36, true], ['fred', 40, false]] ] }; _.forOwn(object, function(pair, key) { test('should work with ' + key, 2, function() { var actual = _.zip.apply(_, pair[0]); deepEqual(actual, pair[1]); deepEqual(_.zip.apply(_, actual), actual.length ? pair[0] : []); }); }); test('should work with tuples of different lengths', 4, function() { var pair = [ [['barney', 36], ['fred', 40, false]], [['barney', 'fred'], [36, 40], [undefined, false]] ]; var actual = _.zip(pair[0]); ok('0' in actual[2]); deepEqual(actual, pair[1]); actual = _.zip.apply(_, actual); ok('2' in actual[0]); deepEqual(actual, [['barney', 36, undefined], ['fred', 40, false]]); }); test('should support consuming its return value', 1, function() { var expected = [['barney', 'fred'], [36, 40]]; deepEqual(_.zip(_.zip(_.zip(_.zip(expected)))), expected); }); test('should be aliased', 1, function() { strictEqual(_.unzip, _.zip); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.zipObject'); (function() { var object = { 'barney': 36, 'fred': 40 }, array = [['barney', 36], ['fred', 40]]; test('should skip falsey elements in a given two dimensional array', 1, function() { var actual = _.zipObject(array.concat(falsey)); deepEqual(actual, object); }); test('should zip together key/value arrays into an object', 1, function() { var actual = _.zipObject(['barney', 'fred'], [36, 40]); 
deepEqual(actual, object); }); test('should ignore extra `values`', 1, function() { deepEqual(_.zipObject(['a'], [1, 2]), { 'a': 1 }); }); test('should accept a two dimensional array', 1, function() { var actual = _.zipObject(array); deepEqual(actual, object); }); test('should not assume `keys` is two dimensional if `values` is not provided', 1, function() { var actual = _.zipObject(['barney', 'fred']); deepEqual(actual, { 'barney': undefined, 'fred': undefined }); }); test('should accept a falsey `array` argument', 1, function() { var expected = _.map(falsey, _.constant({})); var actual = _.map(falsey, function(value, index) { try { return index ? _.zipObject(value) : _.zipObject(); } catch(e) { } }); deepEqual(actual, expected); }); test('should support consuming the return value of `_.pairs`', 1, function() { deepEqual(_.zipObject(_.pairs(object)), object); }); test('should be aliased', 1, function() { strictEqual(_.object, _.zipObject); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...).shift'); (function() { test('should remove the value at index `0` when length is `0` (test in IE 8 compatibility mode)', 2, function() { if (!isNpm) { var wrapped = _({ '0': 1, 'length': 1 }); wrapped.shift(); deepEqual(wrapped.keys().value(), ['length']); strictEqual(wrapped.first(), undefined); } else { skipTest(2); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...).splice'); (function() { test('should remove the value at index `0` when length is `0` (test in IE < 9, and in compatibility mode for IE 9)', 2, function() { if (!isNpm) { var wrapped = _({ '0': 1, 'length': 1 }); wrapped.splice(0, 1); deepEqual(wrapped.keys().value(), ['length']); strictEqual(wrapped.first(), undefined); } else { skipTest(2); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...).toString'); (function() { 
test('should return the `toString` result of the wrapped value', 1, function() { if (!isNpm) { var wrapped = _([1, 2, 3]); strictEqual(String(wrapped), '1,2,3'); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...).valueOf'); (function() { test('should return the `valueOf` result of the wrapped value', 1, function() { if (!isNpm) { var wrapped = _(123); strictEqual(Number(wrapped), 123); } else { skipTest(); } }); test('should stringify the wrapped value when passed to `JSON.stringify`', 1, function() { if (!isNpm && JSON) { var wrapped = _([1, 2, 3]); strictEqual(JSON.stringify(wrapped), '[1,2,3]'); } else { skipTest(); } }); test('should be aliased', 2, function() { if (!isNpm) { var expected = _.prototype.valueOf; strictEqual(_.prototype.toJSON, expected); strictEqual(_.prototype.value, expected); } else { skipTest(2); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...) methods that return existing wrapped values'); (function() { var array = [1, 2, 3], wrapped = _(array); var funcs = [ 'push', 'reverse', 'sort', 'unshift' ]; _.each(funcs, function(methodName) { test('`_(...).' + methodName + '` should return the existing wrapped value', 1, function() { if (!isNpm) { strictEqual(wrapped[methodName](), wrapped); } else { skipTest(); } }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...) methods that return new wrapped values'); (function() { var array = [1, 2, 3], wrapped = _(array); var funcs = [ 'concat', 'slice', 'splice' ]; _.each(funcs, function(methodName) { test('`_(...).' + methodName + '` should return a new wrapped value', 1, function() { if (!isNpm) { ok(wrapped[methodName]() instanceof _); } else { skipTest(); } }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...) 
methods that return unwrapped values'); (function() { var array = [1, 2, 3], wrapped = _(array); var funcs = [ 'clone', 'contains', 'every', 'find', 'first', 'has', 'isArguments', 'isArray', 'isBoolean', 'isDate', 'isElement', 'isEmpty', 'isEqual', 'isFinite', 'isFunction', 'isNaN', 'isNull', 'isNumber', 'isObject', 'isPlainObject', 'isRegExp', 'isString', 'isUndefined', 'join', 'last', 'pop', 'shift', 'reduce', 'reduceRight', 'some' ]; _.each(funcs, function(methodName) { test('`_(...).' + methodName + '` should return an unwrapped value', 1, function() { if (!isNpm) { var actual = methodName == 'reduceRight' ? wrapped[methodName](_.identity) : wrapped[methodName](); ok(!(actual instanceof _)); } else { skipTest(); } }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...) methods capable of returning wrapped and unwrapped values'); (function() { var array = [1, 2, 3], wrapped = _(array); var funcs = [ 'first', 'last', 'sample' ]; _.each(funcs, function(methodName) { test('`_(...).' + methodName + '` called without an `n` argument should return an unwrapped value', 1, function() { if (!isNpm) { strictEqual(typeof wrapped[methodName](), 'number'); } else { skipTest(); } }); test('`_(...).' + methodName + '` called with an `n` argument should return a wrapped value', 1, function() { if (!isNpm) { ok(wrapped[methodName](1) instanceof _); } else { skipTest(); } }); test('`_.' + methodName + '` should return `undefined` when querying falsey arguments without an `n` argument', 1, function() { if (!isNpm) { var actual = [], expected = _.map(falsey, _.constant()), func = _[methodName]; _.each(falsey, function(value, index) { try { actual.push(index ? func(value) : func()); } catch(e) { } }); deepEqual(actual, expected); } else { skipTest(); } }); test('`_.' 
+ methodName + '` should return an empty array when querying falsey arguments with an `n` argument', 1, function() { if (!isNpm) { var expected = _.map(falsey, _.constant([])), func = _[methodName]; var actual = _.map(falsey, function(value, index) { try { return func(value, 2); } catch(e) { } }); deepEqual(actual, expected); } else { skipTest(); } }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('"Arrays" category methods'); (function() { var args = arguments, array = [1, 2, 3, 4, 5, 6]; test('should work with `arguments` objects', 23, function() { function message(methodName) { return '`_.' + methodName + '` should work with `arguments` objects'; } deepEqual(_.at(args, 0, 4), [1, 5], message('at')); deepEqual(_.at(array, args), [2, undefined, 4, undefined, 6], '_.at should work with `arguments` objects as secondary arguments'); deepEqual(_.difference(args, [null]), [1, [3], 5], message('difference')); deepEqual(_.difference(array, args), [2, 3, 4, 6], '_.difference should work with `arguments` objects as secondary arguments'); deepEqual(_.union(args, [null, 6]), [1, null, [3], 5, 6], message('union')); deepEqual(_.union(array, args), array.concat([null, [3]]), '_.union should work with `arguments` objects as secondary arguments'); deepEqual(_.compact(args), [1, [3], 5], message('compact')); deepEqual(_.findIndex(args, _.identity), 0, message('findIndex')); deepEqual(_.findLastIndex(args, _.identity), 4, message('findLastIndex')); deepEqual(_.first(args), 1, message('first')); deepEqual(_.flatten(args), [1, null, 3, null, 5], message('flatten')); deepEqual(_.indexOf(args, 5), 4, message('indexOf')); deepEqual(_.initial(args, 4), [1], message('initial')); deepEqual(_.intersection(args, [1]), [1], message('intersection')); deepEqual(_.last(args), 5, message('last')); deepEqual(_.lastIndexOf(args, 1), 0, message('lastIndexOf')); deepEqual(_.rest(args, 4), [5], message('rest')); deepEqual(_.sortedIndex(args, 6), 
5, message('sortedIndex')); deepEqual(_.uniq(args), [1, null, [3], 5], message('uniq')); deepEqual(_.without(args, null), [1, [3], 5], message('without')); deepEqual(_.zip(args, args), [[1, 1], [null, null], [[3], [3]], [null, null], [5, 5]], message('zip')); if (_.support.argsClass && _.support.argsObject && !_.support.nonEnumArgs) { _.pull(args, null); deepEqual([args[0], args[1], args[2]], [1, [3], 5], message('pull')); _.remove(args, function(value) { return typeof value == 'number'; }); ok(args.length === 1 && _.isEqual(args[0], [3]), message('remove')); } else { skipTest(2) } }); test('should accept falsey primary arguments', 4, function() { function message(methodName) { return '`_.' + methodName + '` should accept falsey primary arguments'; } deepEqual(_.difference(null, array), array, message('difference')); deepEqual(_.intersection(null, array), array, message('intersection')); deepEqual(_.union(null, array), array, message('union')); deepEqual(_.xor(null, array), array, message('xor')); }); test('should accept falsey secondary arguments', 3, function() { function message(methodName) { return '`_.' + methodName + '` should accept falsey secondary arguments'; } deepEqual(_.difference(array, null), array, message('difference')); deepEqual(_.intersection(array, null), array, message('intersection')); deepEqual(_.union(array, null), array, message('union')); }); }(1, null, [3], null, 5)); /*--------------------------------------------------------------------------*/ /*--------------------------------------------------------------------------*/ QUnit.module('"Strings" category methods'); (function() { var stringMethods = [ 'camelCase', 'capitalize', 'escape', 'escapeRegExp', 'kebabCase', 'pad', 'padLeft', 'padRight', 'repeat', 'snakeCase', 'trim', 'trimLeft', 'trimRight', 'truncate', 'unescape' ]; _.each(stringMethods, function(methodName) { var func = _[methodName]; test('`_.' 
+ methodName + '` should return an empty string when provided `null`, `undefined`, or empty string', 3, function() { strictEqual(func(null), ''); strictEqual(func(undefined), ''); strictEqual(func(''), ''); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash methods'); (function() { var allMethods = _.reject(_.functions(_), function(methodName) { return /^_/.test(methodName); }); var returnArrays = [ 'at', 'compact', 'difference', 'filter', 'first', 'flatten', 'functions', 'initial', 'intersection', 'invoke', 'last', 'keys', 'map', 'pairs', 'pluck', 'pull', 'pullAt', 'range', 'reject', 'remove', 'rest', 'sample', 'shuffle', 'sortBy', 'times', 'toArray', 'union', 'uniq', 'values', 'where', 'without', 'xor', 'zip' ]; var rejectFalsey = [ 'after', 'bind', 'compose', 'curry', 'debounce', 'defer', 'delay', 'memoize', 'negate', 'once', 'partial', 'partialRight', 'tap', 'throttle', 'wrap' ]; var acceptFalsey = _.difference(allMethods, rejectFalsey); test('should accept falsey arguments', 187, function() { var emptyArrays = _.map(falsey, _.constant([])), isExposed = '_' in root, oldDash = root._; _.each(acceptFalsey, function(methodName) { var expected = emptyArrays, func = _[methodName], pass = true; var actual = _.map(falsey, function(value, index) { try { return index ? func(value) : func(); } catch(e) { pass = false; } }); if (methodName == 'noConflict') { if (isExposed) { root._ = oldDash; } else { delete root._; } } else if (methodName == 'pull') { expected = falsey; } if (_.contains(returnArrays, methodName) && !_.contains(['first', 'last', 'sample'], methodName)) { deepEqual(actual, expected, '_.' + methodName + ' returns an array'); } ok(pass, '`_.' 
+ methodName + '` accepts falsey arguments'); }); // skip tests for missing methods of modularized builds _.each(['noConflict', 'runInContext', 'tap'], function(methodName) { if (!_[methodName]) { skipTest(); } }); }); test('should return an array', 66, function() { var array = [1, 2, 3]; _.each(returnArrays, function(methodName) { var actual, func = _[methodName]; switch (methodName) { case 'invoke': actual = func(array, 'toFixed'); break; case 'first': case 'last': case 'sample': actual = func(array, 1); break; default: actual = func(array); } ok(_.isArray(actual), '_.' + methodName + ' returns an array'); var isPull = methodName == 'pull'; strictEqual(actual === array, isPull, '_.' + methodName + ' should ' + (isPull ? '' : 'not ') + 'return the provided array'); }); }); test('should throw a TypeError for falsey arguments', 15, function() { _.each(rejectFalsey, function(methodName) { var expected = _.map(falsey, _.constant(true)), func = _[methodName]; var actual = _.map(falsey, function(value, index) { var pass = !index && methodName == 'compose'; try { index ? func(value) : func(); } catch(e) { pass = !pass; } return pass; }); deepEqual(actual, expected, '`_.' + methodName + '` rejects falsey arguments'); }); }); test('should handle `null` `thisArg` arguments', 44, function() { var expected = (function() { return this; }).call(null); var funcs = [ 'assign', 'clone', 'cloneDeep', 'countBy', 'dropWhile', 'dropRightWhile', 'every', 'flatten', 'filter', 'find', 'findIndex', 'findKey', 'findLast', 'findLastIndex', 'findLastKey', 'forEach', 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight', 'groupBy', 'isEqual', 'map', 'mapValues', 'max', 'merge', 'min', 'omit', 'partition', 'pick', 'reduce', 'reduceRight', 'reject', 'remove', 'some', 'sortBy', 'sortedIndex', 'takeWhile', 'takeRightWhile', 'tap', 'times', 'transform', 'uniq' ]; _.each(funcs, function(methodName) { var actual, array = ['a'], func = _[methodName], message = '`_.' 
+ methodName + '` handles `null` `thisArg` arguments'; function callback() { actual = this; } if (func) { if (/^reduce/.test(methodName) || methodName == 'transform') { func(array, callback, 0, null); } else if (_.contains(['assign', 'merge'], methodName)) { func(array, array, callback, null); } else if (_.contains(['isEqual', 'sortedIndex'], methodName)) { func(array, 'a', callback, null); } else if (methodName == 'times') { func(1, callback, null); } else { func(array, callback, null); } strictEqual(actual, expected, message); } else { skipTest(); } }); }); test('should not contain minified method names (test production builds)', 1, function() { ok(_.every(_.functions(_), function(methodName) { return methodName.length > 2 || methodName === 'at'; })); }); }()); /*--------------------------------------------------------------------------*/ QUnit.config.asyncRetries = 10; QUnit.config.hidepassed = true; if (!document) { QUnit.config.noglobals = true; QUnit.start(); } }.call(this));
test/test.js
;(function() { /** Used as a safe reference for `undefined` in pre ES5 environments */ var undefined; /** Used as the size to cover large array optimizations */ var largeArraySize = 200; /** Used as the maximum length an array-like object */ var maxSafeInteger = Math.pow(2, 53) - 1; /** Used as a reference to the global object */ var root = (typeof global == 'object' && global) || this; /** Used to store Lo-Dash to test for bad extensions/shims */ var lodashBizarro = root.lodashBizarro; /** Method and object shortcuts */ var phantom = root.phantom, amd = root.define && define.amd, argv = root.process && process.argv, document = !phantom && root.document, body = root.document && root.document.body, create = Object.create, freeze = Object.freeze, JSON = root.JSON, noop = function() {}, params = root.arguments, push = Array.prototype.push, slice = Array.prototype.slice, system = root.system, toString = Object.prototype.toString; /** The file path of the Lo-Dash file to test */ var filePath = (function() { var min = 0, result = []; if (phantom) { result = params = phantom.args; } else if (system) { min = 1; result = params = system.args; } else if (argv) { min = 2; result = params = argv; } else if (params) { result = params; } var last = result[result.length - 1]; result = (result.length > min && !/test(?:\.js)?$/.test(last)) ? 
last : '../lodash.js'; if (!amd) { try { result = require('fs').realpathSync(result); } catch(e) { } try { result = require.resolve(result); } catch(e) { } } return result; }()); /** The `ui` object */ var ui = root.ui || (root.ui = { 'buildPath': filePath, 'loaderPath': '', 'isModularize': /\b(?:commonjs|(index|main)\.js|lodash-(?:amd|node)|modularize|npm)\b/.test(filePath), 'urlParams': {} }); /** The basename of the Lo-Dash file to test */ var basename = /[\w.-]+$/.exec(filePath)[0]; /** Detect if in a Java environment */ var isJava = !document && !!root.java; /** Used to indicate testing a modularized build */ var isModularize = ui.isModularize; /** Detect if testing `npm` modules */ var isNpm = isModularize && /\bnpm\b/.test([ui.buildPath, ui.urlParams.build]); /** Detects if running in PhantomJS */ var isPhantom = phantom || typeof callPhantom == 'function'; /** Detect if running in Rhino */ var isRhino = isJava && typeof global == 'function' && global().Array === root.Array; /** Used to test Web Workers */ var Worker = !(ui.isForeign || isModularize) && document && root.Worker; /** Used to test host objects in IE */ try { var xml = new ActiveXObject('Microsoft.XMLDOM'); } catch(e) { } /** Use a single "load" function */ var load = (typeof require == 'function' && !amd) ? 
require : (isJava && root.load) || noop; /** The unit testing framework */ var QUnit = (function() { return root.QUnit || ( root.addEventListener || (root.addEventListener = noop), root.setTimeout || (root.setTimeout = noop), root.QUnit = load('../vendor/qunit/qunit/qunit.js') || root.QUnit, addEventListener === noop && delete root.addEventListener, root.QUnit ); }()); /** Load and install QUnit Extras */ var qa = load('../vendor/qunit-extras/qunit-extras.js'); if (qa) { qa.runInContext(root); } /*--------------------------------------------------------------------------*/ // log params provided to `test.js` if (params) { console.log('test.js invoked with arguments: ' + JSON.stringify(slice.call(params))); } // exit early if going to run tests in a PhantomJS web page if (phantom && isModularize) { var page = require('webpage').create(); page.open(filePath, function(status) { if (status != 'success') { console.log('PhantomJS failed to load page: ' + filePath); phantom.exit(1); } }); page.onCallback = function(details) { var coverage = details.coverage; if (coverage) { var fs = require('fs'), cwd = fs.workingDirectory, sep = fs.separator; fs.write([cwd, 'coverage', 'coverage.json'].join(sep), JSON.stringify(coverage)); } phantom.exit(details.failed ? 1 : 0); }; page.onConsoleMessage = function(message) { console.log(message); }; page.onInitialized = function() { page.evaluate(function() { document.addEventListener('DOMContentLoaded', function() { QUnit.done(function(details) { details.coverage = window.__coverage__; callPhantom(details); }); }); }); }; return; } /*--------------------------------------------------------------------------*/ /** The `lodash` function to test */ var _ = root._ || (root._ = ( _ = load(filePath) || root._, _ = _._ || _, (_.runInContext ? 
_.runInContext(root) : _) )); /** Used to pass falsey values to methods */ var falsey = [, '', 0, false, NaN, null, undefined]; /** Used to pass empty values to methods */ var empties = [[], {}].concat(falsey.slice(1)); /** Used as the property name for wrapper metadata */ var expando = '__lodash@' + _.VERSION + '__'; /** Used to set property descriptors */ var defineProperty = (function() { try { var o = {}, func = Object.defineProperty, result = func(o, o, o) && func; } catch(e) { } return result; }()); /** Used to check problem JScript properties (a.k.a. the `[[DontEnum]]` bug) */ var shadowedProps = [ 'constructor', 'hasOwnProperty', 'isPrototypeOf', 'propertyIsEnumerable', 'toLocaleString', 'toString', 'valueOf' ]; /** Used to check problem JScript properties too */ var shadowedObject = _.invert(shadowedProps); /** Used to check whether methods support typed arrays */ var typedArrays = [ 'Float32Array', 'Int8Array', 'Int16Array', 'Int32Array', 'Uint8Array', 'Uint8ClampedArray', 'Uint16Array', 'Uint32Array' ]; /** Used to check for problems removing whitespace */ var whitespace = ' \t\x0B\f\xA0\ufeff\n\r\u2028\u2029\u1680\u180E\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000'; /** * Removes all own enumerable properties from a given object. * * @private * @param {Object} object The object to empty. */ function emptyObject(object) { _.forOwn(object, function(value, key, object) { delete object[key]; }); } /** * Sets a non-enumerable property value on `object`. * * Note: This function is used to avoid a bug in older versions of V8 where * overwriting non-enumerable built-ins makes them enumerable. * See https://code.google.com/p/v8/issues/detail?id=1623 * * @private * @param {Object} object The object augment. * @param {string} key The name of the property to set. * @param {*} value The property value. 
*/ function setProperty(object, key, value) { try { defineProperty(object, key, { 'configurable': true, 'enumerable': false, 'writable': true, 'value': value }); } catch(e) { object[key] = value; } } /** * Skips a given number of tests with a passing result. * * @private * @param {number} [count=1] The number of tests to skip. */ function skipTest(count) { count || (count = 1); while (count--) { ok(true, 'test skipped'); } } /*--------------------------------------------------------------------------*/ // setup values for Node.js (function() { if (amd) { return; } try { // add values from a different realm _.extend(_, require('vm').runInNewContext([ '({', "'_arguments': (function() { return arguments; }(1, 2, 3)),", "'_array': [1, 2, 3],", "'_boolean': new Boolean(false),", "'_date': new Date,", "'_errors': [new Error, new EvalError, new RangeError, new ReferenceError, new SyntaxError, new TypeError, new URIError],", "'_function': function() {},", "'_nan': NaN,", "'_null': null,", "'_number': new Number(0),", "'_object': { 'a': 1, 'b': 2, 'c': 3 },", "'_regexp': /x/,", "'_string': new String('a'),", "'_undefined': undefined", '})' ].join('\n'))); } catch(e) { return; } // load ES6 Set shim require('./asset/set'); // expose `baseEach` for better code coverage if (isModularize && !isNpm) { var path = require('path'), baseEach = require(path.join(path.dirname(filePath), 'internals', 'baseEach.js')); _._baseEach = baseEach.baseEach || baseEach; } // allow bypassing native checks var _fnToString = Function.prototype.toString; setProperty(Function.prototype, 'toString', function wrapper() { setProperty(Function.prototype, 'toString', _fnToString); var result = this === Set ? 
this.toString() : _fnToString.call(this); setProperty(Function.prototype, 'toString', wrapper); return result; }); // fake DOM setProperty(global, 'window', {}); setProperty(global.window, 'document', {}); setProperty(global.window.document, 'createDocumentFragment', function() { return { 'nodeType': 11 }; }); // fake `WinRTError` setProperty(global, 'WinRTError', Error); // add extensions Function.prototype._method = function() {}; // set bad shims var _isArray = Array.isArray; setProperty(Array, 'isArray', function() {}); var _now = Date.now; setProperty(Date, 'now', function() {}); var _create = create; setProperty(Object, 'create', function() {}); var _defineProperty = Object.defineProperty; setProperty(Object, 'defineProperty', function() {}); var _getPrototypeOf = Object.getPrototypeOf; setProperty(Object, 'getPrototypeOf', function() {}); var _keys = Object.keys; setProperty(Object, 'keys', function() {}); var _hasOwnProperty = Object.prototype.hasOwnProperty; setProperty(Object.prototype, 'hasOwnProperty', function(key) { if (key == '1' && _.isArguments(this) && _.isEqual(_.values(this), [0, 0])) { throw new Error; } return _hasOwnProperty.call(this, key); }); var _contains = String.prototype.contains; setProperty(String.prototype, 'contains', _contains ? 
function() {} : Boolean); // clear cache so Lo-Dash can be reloaded emptyObject(require.cache); // load Lo-Dash and expose it to the bad extensions/shims lodashBizarro = (lodashBizarro = require(filePath))._ || lodashBizarro; // restore native methods setProperty(Array, 'isArray', _isArray); setProperty(Date, 'now', _now); setProperty(Object, 'create', _create); setProperty(Object, 'defineProperty', _defineProperty); setProperty(Object, 'getPrototypeOf', _getPrototypeOf); setProperty(Object, 'keys', _keys); setProperty(Object.prototype, 'hasOwnProperty', _hasOwnProperty); setProperty(Function.prototype, 'toString', _fnToString); if (_contains) { setProperty(String.prototype, 'contains', _contains); } else { delete String.prototype.contains; } delete global.window; delete global.WinRTError; delete Function.prototype._method; }()); // add values from an iframe (function() { if (_._object || !document) { return; } var iframe = document.createElement('iframe'); iframe.frameBorder = iframe.height = iframe.width = 0; body.appendChild(iframe); var idoc = (idoc = iframe.contentDocument || iframe.contentWindow).document || idoc; idoc.write([ '<script>', 'parent._._arguments = (function() { return arguments; }(1, 2, 3));', 'parent._._array = [1, 2, 3];', 'parent._._boolean = new Boolean(false);', 'parent._._date = new Date;', "parent._._element = document.createElement('div');", 'parent._._errors = [new Error, new EvalError, new RangeError, new ReferenceError, new SyntaxError, new TypeError, new URIError];', 'parent._._function = function() {};', 'parent._._nan = NaN;', 'parent._._null = null;', 'parent._._number = new Number(0);', "parent._._object = { 'a': 1, 'b': 2, 'c': 3 };", 'parent._._regexp = /x/;', "parent._._string = new String('a');", 'parent._._undefined = undefined;', '<\/script>' ].join('\n')); idoc.close(); }()); // add web worker (function() { if (!Worker) { return; } var worker = new Worker('./asset/worker.js?t=' + (+new Date)); 
worker.addEventListener('message', function(e) { _._VERSION = e.data || ''; }, false); worker.postMessage(ui.buildPath); }()); /*--------------------------------------------------------------------------*/ // explicitly call `QUnit.module()` instead of `module()` // in case we are in a CLI environment QUnit.module(basename); (function() { test('supports loading ' + basename + ' as the "lodash" module', 1, function() { if (amd) { strictEqual((lodashModule || {}).moduleName, 'lodash'); } else { skipTest(); } }); test('supports loading ' + basename + ' with the Require.js "shim" configuration option', 1, function() { if (amd && /requirejs/.test(ui.loaderPath)) { strictEqual((shimmedModule || {}).moduleName, 'shimmed'); } else { skipTest(); } }); test('supports loading ' + basename + ' as the "underscore" module', 1, function() { if (amd) { strictEqual((underscoreModule || {}).moduleName, 'underscore'); } else { skipTest(); } }); asyncTest('supports loading ' + basename + ' in a web worker', 1, function() { if (Worker) { var limit = 15000, start = +new Date; var attempt = function() { var actual = _._VERSION; if ((new Date - start) < limit && typeof actual != 'string') { setTimeout(attempt, 16); return; } strictEqual(actual, _.VERSION); QUnit.start(); }; attempt(); } else { skipTest(); QUnit.start(); } }); test('should not add `Function.prototype` extensions to lodash', 1, function() { if (lodashBizarro) { ok(!('_method' in lodashBizarro)); } else { skipTest(); } }); test('should avoid overwritten native methods', 9, function() { function Foo() {} function message(methodName) { return '`_.' 
+ methodName + '` should avoid overwritten native methods'; } var object = { 'a': 1 }, otherObject = { 'b': 2 }, largeArray = _.times(largeArraySize, _.constant(object)); if (lodashBizarro) { try { var actual = [lodashBizarro.isArray([]), lodashBizarro.isArray({ 'length': 0 })]; } catch(e) { actual = null; } deepEqual(actual, [true, false], message('Array.isArray')); try { actual = lodashBizarro.now(); } catch(e) { actual = null; } ok(typeof actual == 'number', message('Date.now')); try { actual = [lodashBizarro.create(Foo.prototype, object), lodashBizarro.create()]; } catch(e) { actual = null; } ok(actual[0] instanceof Foo, message('Object.create')); deepEqual(actual[1], {}, message('Object.create')); try { actual = lodashBizarro.bind(function() { return this.a; }, object); } catch(e) { actual = null; } ok(!(expando in actual), message('Object.defineProperty')); try { actual = [lodashBizarro.isPlainObject({}), lodashBizarro.isPlainObject([])]; } catch(e) { actual = null; } deepEqual(actual, [true, false], message('Object.getPrototypeOf')); try { actual = [lodashBizarro.keys(object), lodashBizarro.keys()]; } catch(e) { actual = null; } deepEqual(actual, [['a'], []], message('Object.keys')); try { actual = [ lodashBizarro.difference([object, otherObject], largeArray), lodashBizarro.intersection(largeArray, [object]), lodashBizarro.uniq(largeArray) ]; } catch(e) { actual = null; } deepEqual(actual, [[otherObject], [object], [object]], message('Set')); try { actual = lodashBizarro.contains('abc', 'c'); } catch(e) { actual = null; } strictEqual(actual, true, message('String#contains')); } else { skipTest(9); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash constructor'); (function() { test('creates a new instance when called without the `new` operator', 1, function() { ok(_() instanceof _); }); test('should return provided `lodash` instances', 1,function() { var wrapped = _(false); strictEqual(_(wrapped), 
wrapped); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.after'); (function() { function after(n, times) { var count = 0; _.times(times, _.after(n, function() { count++; })); return count; } test('should create a function that executes `func` after `n` calls', 4, function() { strictEqual(after(5, 5), 1, 'after(n) should execute `func` after being called `n` times'); strictEqual(after(5, 4), 0, 'after(n) should not execute `func` unless called `n` times'); strictEqual(after(0, 0), 0, 'after(0) should not execute `func` immediately'); strictEqual(after(0, 1), 1, 'after(0) should execute `func` when called once'); }); test('should coerce non-finite `n` values to `0`', 3, function() { _.each([-Infinity, NaN, Infinity], function(n) { strictEqual(after(n, 1), 1); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.assign'); (function() { test('should assign properties of a source object to the destination object', 1, function() { deepEqual(_.assign({ 'a': 1 }, { 'b': 2 }), { 'a': 1, 'b': 2 }); }); test('should assign own source properties', 1, function() { function Foo() { this.a = 1; this.c = 3; } Foo.prototype.b = 2; deepEqual(_.assign({}, new Foo), { 'a': 1, 'c': 3 }); }); test('should accept multiple source objects', 2, function() { var expected = { 'a': 1, 'b': 2, 'c': 3 }; deepEqual(_.assign({ 'a': 1 }, { 'b': 2 }, { 'c': 3 }), expected); deepEqual(_.assign({ 'a': 1 }, { 'b': 2, 'c': 2 }, { 'c': 3 }), expected); }); test('should overwrite source properties', 1, function() { var expected = { 'a': 3, 'b': 2, 'c': 1 }; deepEqual(_.assign({ 'a': 1, 'b': 2 }, expected), expected); }); test('should assign source properties with `null` and `undefined` values', 1, function() { var expected = { 'a': null, 'b': undefined, 'c': null }; deepEqual(_.assign({ 'a': 1, 'b': 2 }, expected), expected); }); test('should work with a callback', 1, function() { 
var actual = _.assign({ 'a': 1, 'b': 2 }, { 'a': 3, 'c': 3 }, function(a, b) { return typeof a == 'undefined' ? b : a; }); deepEqual(actual, { 'a': 1, 'b': 2, 'c': 3 }); }); test('should be aliased', 1, function() { strictEqual(_.extend, _.assign); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.at'); (function() { var args = arguments; test('should return `undefined` for nonexistent keys', 1, function() { var actual = _.at(['a', 'b', 'c'], [2, 4, 0]); deepEqual(actual, ['c', undefined, 'a']); }); test('should return an empty array when no keys are provided', 1, function() { deepEqual(_.at(['a', 'b', 'c']), []); }); test('should accept multiple key arguments', 1, function() { var actual = _.at(['a', 'b', 'c', 'd'], 3, 0, 2); deepEqual(actual, ['d', 'a', 'c']); }); test('should work with an `arguments` object for `collection`', 1, function() { var actual = _.at(args, [2, 0]); deepEqual(actual, ['c', 'a']); }); test('should work with an object for `collection`', 1, function() { var actual = _.at({ 'a': 1, 'b': 2, 'c': 3 }, ['c', 'a']); deepEqual(actual, [3, 1]); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection`', 1, function() { deepEqual(_.at(collection, [2, 0]), ['c', 'a']); }); }); }('a', 'b', 'c')); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.bind'); (function() { function fn() { var args = [this]; push.apply(args, arguments); return args; } test('should bind a function to an object', 1, function() { var object = {}, bound = _.bind(fn, object); deepEqual(bound('a'), [object, 'a']); }); test('should accept a falsey `thisArg` argument', 1, function() { var values = _.reject(falsey.slice(1), function(value) { return value == null; }), expected = _.map(values, function(value) { return [value]; }); var actual = _.map(values, function(value) { try { var 
bound = _.bind(fn, value); return bound(); } catch(e) { } }); ok(_.every(actual, function(value, index) { return _.isEqual(value, expected[index]); })); }); test('should bind a function to `null` or `undefined`', 6, function() { var bound = _.bind(fn, null), actual = bound('a'); ok(actual[0] === null || actual[0] && actual[0].Array); strictEqual(actual[1], 'a'); _.times(2, function(index) { bound = index ? _.bind(fn, undefined) : _.bind(fn); actual = bound('b'); ok(actual[0] === undefined || actual[0] && actual[0].Array); strictEqual(actual[1], 'b'); }); }); test('should partially apply arguments ', 4, function() { var object = {}, bound = _.bind(fn, object, 'a'); deepEqual(bound(), [object, 'a']); bound = _.bind(fn, object, 'a'); deepEqual(bound('b'), [object, 'a', 'b']); bound = _.bind(fn, object, 'a', 'b'); deepEqual(bound(), [object, 'a', 'b']); deepEqual(bound('c', 'd'), [object, 'a', 'b', 'c', 'd']); }); test('should support placeholders', 4, function() { if (!isModularize) { var object = {}, bound = _.bind(fn, object, _, 'b', _); deepEqual(bound('a', 'c'), [object, 'a', 'b', 'c']); deepEqual(bound('a'), [object, 'a', 'b', undefined]); deepEqual(bound('a', 'c', 'd'), [object, 'a', 'b', 'c', 'd']); deepEqual(bound(), [object, undefined, 'b', undefined]); } else { skipTest(4); } }); test('should create a function with a `length` of `0`', 2, function() { var fn = function(a, b, c) {}, bound = _.bind(fn, {}); strictEqual(bound.length, 0); bound = _.bind(fn, {}, 1); strictEqual(bound.length, 0); }); test('should ignore binding when called with the `new` operator', 3, function() { function Foo() { return this; } var bound = _.bind(Foo, { 'a': 1 }), newBound = new bound; strictEqual(newBound.a, undefined); strictEqual(bound().a, 1); ok(newBound instanceof Foo); }); test('ensure `new bound` is an instance of `func`', 2, function() { function Foo(value) { return value && object; } var bound = _.bind(Foo), object = {}; ok(new bound instanceof Foo); strictEqual(new 
bound(true), object); }); test('should append array arguments to partially applied arguments (test in IE < 9)', 1, function() { var object = {}, bound = _.bind(fn, object, 'a'); deepEqual(bound(['b'], 'c'), [object, 'a', ['b'], 'c']); }); test('should return a wrapped value when chaining', 2, function() { if (!isNpm) { var object = {}, bound = _(fn).bind({}, 'a', 'b'); ok(bound instanceof _); var actual = bound.value()('c'); deepEqual(actual, [object, 'a', 'b', 'c']); } else { skipTest(2); } }); test('should rebind functions correctly', 3, function() { var object1 = {}, object2 = {}, object3 = {}; var bound1 = _.bind(fn, object1), bound2 = _.bind(bound1, object2, 'a'), bound3 = _.bind(bound1, object3, 'b'); deepEqual(bound1(), [object1]); deepEqual(bound2(), [object1, 'a']); deepEqual(bound3(), [object1, 'b']); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.bindAll'); (function() { var args = arguments; test('should bind all methods of `object`', 1, function() { function Foo() { this._a = 1; this._b = 2; this.a = function() { return this._a; }; } Foo.prototype.b = function() { return this._b; }; var object = new Foo; _.bindAll(object); var actual = _.map(_.functions(object), function(methodName) { return object[methodName].call({}); }); deepEqual(actual, [1, 2]); }); test('should accept individual method names', 1, function() { var object = { '_a': 1, '_b': 2, '_c': 3, 'a': function() { return this._a; }, 'b': function() { return this._b; }, 'c': function() { return this._c; } }; _.bindAll(object, 'a', 'b'); var actual = _.map(_.functions(object), function(methodName) { return object[methodName].call({}); }); deepEqual(actual, [1, 2, undefined]); }); test('should accept arrays of method names', 1, function() { var object = { '_a': 1, '_b': 2, '_c': 3, '_d': 4, 'a': function() { return this._a; }, 'b': function() { return this._b; }, 'c': function() { return this._c; }, 'd': function() { return this._d; 
} }; _.bindAll(object, ['a', 'b'], ['c']); var actual = _.map(_.functions(object), function(methodName) { return object[methodName].call({}); }); deepEqual(actual, [1, 2, 3, undefined]); }); test('should work with an array `object` argument', 1, function() { var array = ['push', 'pop']; _.bindAll(array); strictEqual(array.pop, Array.prototype.pop); }); test('should work with `arguments` objects as secondary arguments', 1, function() { var object = { '_a': 1, 'a': function() { return this._a; } }; _.bindAll(object, args); var actual = _.map(_.functions(object), function(methodName) { return object[methodName].call({}); }); deepEqual(actual, [1]); }); }('a')); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.bindKey'); (function() { test('should work when the target function is overwritten', 2, function() { var object = { 'name': 'fred', 'greet': function(greeting) { return this.name + ' says: ' + greeting; } }; var bound = _.bindKey(object, 'greet', 'hi'); strictEqual(bound(), 'fred says: hi'); object.greet = function(greeting) { return this.name + ' says: ' + greeting + '!'; }; strictEqual(bound(), 'fred says: hi!'); }); test('should support placeholders', 4, function() { var object = { 'fn': function fn(a, b, c, d) { return slice.call(arguments); } }; if (!isModularize) { var bound = _.bindKey(object, 'fn', _, 'b', _); deepEqual(bound('a', 'c'), ['a', 'b', 'c']); deepEqual(bound('a'), ['a', 'b', undefined]); deepEqual(bound('a', 'c', 'd'), ['a', 'b', 'c', 'd']); deepEqual(bound(), [undefined, 'b', undefined]); } else { skipTest(4); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('case methods'); _.each(['camel', 'kebab', 'snake'], function(caseName) { var methodName = caseName + 'Case', func = _[methodName]; var expected = (function() { switch (caseName) { case 'camel': return 'helloWorld'; case 'kebab': return 'hello-world'; case 'snake': return 
'hello_world'; } }()); var burredLetters = [ '\xC0', '\xC1', '\xC2', '\xC3', '\xC4', '\xC5', '\xC6', '\xC7', '\xC8', '\xC9', '\xCA', '\xCB', '\xCC', '\xCD', '\xCE', '\xCF', '\xD0', '\xD1', '\xD2', '\xD3', '\xD4', '\xD5', '\xD6', '\xD7', '\xD8', '\xD9', '\xDA', '\xDB', '\xDC', '\xDD', '\xDE', '\xDF', '\xE0', '\xE1', '\xE2', '\xE3', '\xE4', '\xE5', '\xE6', '\xE7', '\xE8', '\xE9', '\xEA', '\xEB', '\xEC', '\xED', '\xEE', '\xEF', '\xF0', '\xF1', '\xF2', '\xF3', '\xF4', '\xF5', '\xF6', '\xF7', '\xF8', '\xF9', '\xFA', '\xFB', '\xFC', '\xFD', '\xFE', '\xFF' ]; var deburredLetters = [ 'A', 'A', 'A', 'A', 'A', 'A', 'AE', 'C', 'E', 'E', 'E', 'E', 'I', 'I', 'I', 'I', 'D', 'N', 'O', 'O', 'O', 'O', 'O', '', 'O', 'U', 'U', 'U', 'U', 'Y', 'Th', 'ss', 'a', 'a', 'a', 'a', 'a', 'a', 'ae', 'c', 'e', 'e', 'e', 'e', 'i', 'i', 'i', 'i', 'd', 'n', 'o', 'o', 'o', 'o', 'o', '', 'o', 'u', 'u', 'u', 'u', 'y', 'th', 'y' ]; test('`_.' + methodName + '` should convert `string` to ' + caseName + ' case', 4, function() { _.each(['Hello world', 'helloWorld', '--hello-world', '__hello_world__'], function(string) { strictEqual(func(string), expected); }); }); test('`_.' + methodName + '` should handle double-converting strings', 4, function() { _.each(['Hello world', 'helloWorld', '--hello-world', '__hello_world__'], function(string) { strictEqual(func(func(string)), expected); }); }); test('`_.' + methodName + '` should deburr letters', 1, function() { var actual = _.map(burredLetters, function(burred, index) { var isCamel = caseName == 'camel', deburrLetter = deburredLetters[index]; var string = isCamel ? func('z' + burred) : func(burred); var deburredString = isCamel ? 'z' + deburrLetter : deburrLetter.toLowerCase(); return string == deburredString; }); ok(_.every(actual, _.identity)); }); test('`_.' 
+ methodName + '` should coerce `string` to a string', 2, function() { var string = 'Hello world'; strictEqual(func(Object(string)), expected); strictEqual(func({ 'toString': _.constant(string) }), expected); }); test('`_.' + methodName + '` should return an unwrapped value when chaining', 1, function() { if (!isNpm) { var actual = _('hello world')[methodName](); strictEqual(actual, expected); } else { skipTest(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.camelCase'); (function() { test('should work with numbers', 3, function() { strictEqual(_.camelCase('too legit 2 quit'), 'tooLegit2Quit'); strictEqual(_.camelCase('walk 500 miles'), 'walk500Miles'); strictEqual(_.camelCase('xhr2 request'), 'xhr2Request'); }); test('should handle acronyms', 3, function() { strictEqual(_.camelCase('safe HTML'), 'safeHTML'); strictEqual(_.camelCase('escape HTML entities'), 'escapeHTMLEntities'); strictEqual(_.camelCase('XMLHttpRequest'), 'xmlHttpRequest'); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.capitalize'); (function() { test('should capitalize the first character of a string', 3, function() { strictEqual(_.capitalize('fred'), 'Fred'); strictEqual(_.capitalize('Fred'), 'Fred'); strictEqual(_.capitalize(' fred'), ' fred'); }); test('should return an unwrapped value when chaining', 1, function() { if (!isNpm) { var actual = _('fred').capitalize(); strictEqual(actual, 'Fred'); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.chain'); (function() { test('should return a wrapped value', 1, function() { if (!isNpm) { var actual = _.chain({ 'a': 0 }); ok(actual instanceof _); } else { skipTest(); } }); test('should return the existing wrapper when chaining', 1, function() { if (!isNpm) { var wrapper = _({ 'a': 0 }); strictEqual(wrapper.chain(), wrapper); } else { 
skipTest(); } }); test('should enable chaining of methods that return unwrapped values by default', 6, function() { if (!isNpm) { var array = ['c', 'b', 'a']; ok(_.chain(array).first() instanceof _); ok(_(array).chain().first() instanceof _); ok(_.chain(array).isArray() instanceof _); ok(_(array).chain().isArray() instanceof _); ok(_.chain(array).sortBy().first() instanceof _); ok(_(array).chain().sortBy().first() instanceof _); } else { skipTest(6); } }); test('should chain multiple methods', 6, function() { if (!isNpm) { _.times(2, function(index) { var array = ['one two three four', 'five six seven eight', 'nine ten eleven twelve'], expected = { ' ': 9, 'e': 14, 'f': 2, 'g': 1, 'h': 2, 'i': 4, 'l': 2, 'n': 6, 'o': 3, 'r': 2, 's': 2, 't': 5, 'u': 1, 'v': 4, 'w': 2, 'x': 1 }, wrapper = index ? _(array).chain() : _.chain(array); var actual = wrapper .chain() .map(function(value) { return value.split(''); }) .flatten() .reduce(function(object, chr) { object[chr] || (object[chr] = 0); object[chr]++; return object; }, {}) .value(); deepEqual(actual, expected); array = [1, 2, 3, 4, 5, 6]; wrapper = index ? _(array).chain() : _.chain(array); actual = wrapper .chain() .filter(function(n) { return n % 2; }) .reject(function(n) { return n % 3 == 0; }) .sortBy(function(n) { return -n; }) .value(); deepEqual(actual, [5, 1]); array = [3, 4]; wrapper = index ? 
_(array).chain() : _.chain(array); actual = wrapper .reverse() .concat([2, 1]) .unshift(5) .tap(function(value) { value.pop(); }) .map(function(n) { return n * n; }) .value(); deepEqual(actual,[25, 16, 9, 4]); }); } else { skipTest(6); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('cloning'); (function() { function Klass() { this.a = 1; } Klass.prototype = { 'b': 1 }; var nonCloneable = { 'a DOM element': body, 'a function': Klass }; var objects = { 'an `arguments` object': arguments, 'an array': ['a', 'b', 'c', ''], 'an array-like-object': { '0': 'a', '1': 'b', '2': 'c', '3': '', 'length': 5 }, 'boolean': false, 'boolean object': Object(false), 'an Error object': new Error('text'), 'an EvalError object': new EvalError('text'), 'a RangeError object': new RangeError('text'), 'a ReferenceError object': new ReferenceError('text'), 'a SyntaxError object': new SyntaxError('text'), 'a TypeError object': new TypeError('text'), 'a URIError object': new URIError('text'), 'a Klass instance': new Klass, 'an object': { 'a': 0, 'b': 1, 'c': 3 }, 'an object with object values': { 'a': /a/, 'b': ['B'], 'c': { 'C': 1 } }, 'an object from another document': _._object || {}, 'null': null, 'a number': 3, 'a number object': Object(3), 'a regexp': /a/gim, 'a string': 'a', 'a string object': Object('a'), 'undefined': undefined }; objects['an array'].length = 5; test('`_.clone` should shallow clone by default', 2, function() { var expected = [{ 'a': 0 }, { 'b': 1 }], actual = _.clone(expected); deepEqual(actual, expected); ok(actual !== expected && actual[0] === expected[0]); }); test('`_.cloneDeep` should deep clone objects with circular references', 1, function() { var object = { 'foo': { 'b': { 'foo': { 'c': { } } } }, 'bar': { } }; object.foo.b.foo.c = object; object.bar.b = object.foo.b; var clone = _.cloneDeep(object); ok(clone.bar.b === clone.foo.b && clone === clone.foo.b.foo.c && clone !== object); }); _.each(['clone', 
// NOTE(review): continuation of the 'cloning' module — a `_.each` over
// ['clone', 'cloneDeep'] that runs the shared parametrized tests (clones each
// fixture in `objects`, refuses `nonCloneable` values, typed arrays, JScript
// shadowed properties, callback arguments/`thisArg`, regexp `index`/`input`/
// `lastIndex` properties, DOM elements, and chaining) — followed by the
// complete 'lodash.compact', 'lodash.compose', and 'lodash.constant' modules
// and the head of 'lodash.contains'. Line layout is a chunking artifact and
// is preserved byte-for-byte; only this comment block is added.
'cloneDeep'], function(methodName) { var func = _[methodName], isDeep = methodName == 'cloneDeep', klass = new Klass; _.forOwn(objects, function(object, key) { test('`_.' + methodName + '` should clone ' + key, 2, function() { var clone = func(object); ok(_.isEqual(object, clone)); if (_.isObject(object)) { notStrictEqual(clone, object); } else { strictEqual(clone, object); } }); }); _.forOwn(nonCloneable, function(object, key) { test('`_.' + methodName + '` should not clone ' + key, 1, function() { strictEqual(func(object), object); }); }); _.each(typedArrays, function(type) { test('`_.' + methodName + '` should clone ' + type + ' arrays', 2, function() { var Ctor = root[type] || Array, buffer = Ctor == Array ? 4 : new ArrayBuffer(4), array = new Ctor(buffer), actual = func(array); deepEqual(actual, array); notStrictEqual(actual, array); }); }); test('`_.' + methodName + '` should clone problem JScript properties (test in IE < 9)', 2, function() { var actual = func(shadowedObject); deepEqual(actual, shadowedObject); notStrictEqual(actual, shadowedObject); }); test('`_.' + methodName + '` should perform a ' + (isDeep ? 'deep' : 'shallow') + ' clone when used as a callback for `_.map`', 2, function() { var expected = [{ 'a': [0] }, { 'b': [1] }], actual = _.map(expected, func); deepEqual(actual, expected); if (isDeep) { ok(actual[0] !== expected[0] && actual[0].a !== expected[0].a && actual[1].b !== expected[1].b); } else { ok(actual[0] !== expected[0] && actual[0].a === expected[0].a && actual[1].b === expected[1].b); } }); test('`_.' + methodName + '` should pass the correct `callback` arguments', 1, function() { var argsList = []; func(klass, function() { argsList.push(slice.call(arguments)); }); deepEqual(argsList, isDeep ? [[klass], [1, 'a']] : [[klass]]); }); test('`_.' + methodName + '` should support the `thisArg` argument', 1, function() { var actual = func('a', function(value) { return this[value]; }, { 'a': 'A' }); strictEqual(actual, 'A'); }); test('`_.' 
+ methodName + '` should handle cloning if `callback` returns `undefined`', 1, function() { var actual = func({ 'a': { 'b': 'c' } }, _.noop); deepEqual(actual, { 'a': { 'b': 'c' } }); }); test('`_.' + methodName + '` should clone `index` and `input` array properties', 2, function() { var array = /x/.exec('vwxyz'), actual = func(array); strictEqual(actual.index, 2); strictEqual(actual.input, 'vwxyz'); }); test('`_.' + methodName + '` should clone `lastIndex` regexp property', 1, function() { // avoid a regexp literal for older Opera and use `exec` for older Safari
 var regexp = RegExp('x', 'g'); regexp.exec('vwxyz'); var actual = func(regexp); strictEqual(actual.lastIndex, 3); }); test('`_.' + methodName + '` should not error on DOM elements', 1, function() { if (document) { var element = document.createElement('div'); try { strictEqual(func(element), element); } catch(e) { ok(false); } } else { skipTest(); } }); test('`_.' + methodName + '` should return a unwrapped value when chaining', 2, function() { if (!isNpm) { var object = objects['an object'], actual = _(object)[methodName](); deepEqual(actual, object); notStrictEqual(actual, object); } else { skipTest(2); } }); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.compact'); (function() { test('should filter falsey values', 1, function() { var array = ['0', '1', '2']; deepEqual(_.compact(falsey.concat(array)), array); }); test('should return a wrapped value when chaining', 1, function() { if (!isNpm) { var actual = _(falsey).compact(); ok(actual instanceof _); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.compose'); (function() { test('should create a function that is the composition of the provided functions', 1, function() { var realNameMap = { 'pebbles': 'penelope' }; var format = function(name) { name = realNameMap[name.toLowerCase()] || name; return 
name.charAt(0).toUpperCase() + name.slice(1).toLowerCase(); }; var greet = function(formatted) { return 'Hiya ' + formatted + '!'; }; var welcome = _.compose(greet, format); strictEqual(welcome('pebbles'), 'Hiya Penelope!'); }); test('should return a new function', 1, function() { notStrictEqual(_.compose(_.noop), _.noop); }); test('should return a wrapped value when chaining', 1, function() { if (!isNpm) { var actual = _(_.noop).compose(); ok(actual instanceof _); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.constant'); (function() { test('should create a function that always returns `value`', 1, function() { var object = { 'a': 1 }, values = falsey.concat(null, null, 1, 'a'), constant = _.constant(object), expected = _.map(values, function() { return true; }); var actual = _.map(values, function(value, index) { if (index == 0) { var result = constant(); } else if (index == 1) { result = constant.call({}); } else { result = constant(value); } return result === object; }); deepEqual(actual, expected); }); test('should work with falsey values', 1, function() { var expected = _.map(falsey, function() { return true; }); var actual = _.map(falsey, function(value, index) { var constant = index ? 
_.constant(value) : _.constant(); return constant() === value || _.isNaN(value); }); deepEqual(actual, expected); }); test('should return a wrapped value when chaining', 1, function() { if (!isNpm) { var actual = _(true).constant(); ok(actual instanceof _); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.contains'); (function() { _.each({ 'an `arguments` object': arguments, 'an array': [1, 2, 3, 4], 'an object': { 'a': 1, 'b': 2, 'c': 3, 'd': 4 }, 'a string': '1234' }, function(collection, key) { var values = _.toArray(collection); test('should work with ' + key + ' and return `true` for matched values', 1, function() { strictEqual(_.contains(collection, 3), true); }); test('should work with ' + key + ' and return `false` for unmatched values', 1, function() { strictEqual(_.contains(collection, 5), false); }); test('should work with ' + key + ' and a positive `fromIndex`', 2, function() { strictEqual(_.contains(collection, values[2], 2), true); strictEqual(_.contains(collection, values[1], 2), false); }); test('should work with ' + key + ' and a `fromIndex` >= `collection.length`', 12, function() { _.each([6, 8, Math.pow(2, 32), Infinity], function(fromIndex) { strictEqual(_.contains(collection, 1, fromIndex), false); strictEqual(_.contains(collection, undefined, fromIndex), false); strictEqual(_.contains(collection, '', fromIndex), false); }); }); test('should work with ' + key + ' and treat falsey `fromIndex` values as `0`', 1, function() { var expected = _.map(falsey, _.constant(true)); var actual = _.map(falsey, function(fromIndex) { return _.contains(collection, values[0], fromIndex); }); deepEqual(actual, expected); }); test('should work with ' + key + ' and treat non-number `fromIndex` values as `0`', 1, function() { strictEqual(_.contains(collection, values[0], '1'), true); }); test('should work with ' + key + ' and a negative `fromIndex`', 2, function() { 
// NOTE(review): tail of the 'lodash.contains' module (negative `fromIndex`
// handling, string collections, the `_.include` alias, and the
// no-binary-search guarantee) plus the complete 'lodash.countBy' module
// (callback/`thisArg`/string/number callbacks, own-vs-inherited keys, object
// collections) and the head of 'lodash.create'. Run-on line layout is a
// chunking artifact and is preserved byte-for-byte; only comments are added.
strictEqual(_.contains(collection, values[2], -2), true); strictEqual(_.contains(collection, values[1], -2), false); }); test('should work with ' + key + ' and a negative `fromIndex` <= negative `collection.length`', 3, function() { _.each([-4, -6, -Infinity], function(fromIndex) { strictEqual(_.contains(collection, values[0], fromIndex), true); }); }); test('should work with ' + key + ' and return an unwrapped value when chaining', 1, function() { if (!isNpm) { strictEqual(_(collection).contains(3), true); } else { skipTest(); } }); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection`', 2, function() { strictEqual(_.contains(collection, 'bc'), true); strictEqual(_.contains(collection, 'd'), false); }); }); test('should not be possible to perform a binary search', 1, function() { strictEqual(_.contains([3, 2, 1], 3, true), true); }); test('should be aliased', 1, function() { strictEqual(_.include, _.contains); }); }(1, 2, 3, 4)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.countBy'); (function() { var array = [4.2, 6.1, 6.4]; test('should work with a callback', 1, function() { var actual = _.countBy(array, function(num) { return Math.floor(num); }, Math); deepEqual(actual, { '4': 1, '6': 2 }); }); test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.countBy([4, 6, 6]); deepEqual(actual, { '4': 1, '6': 2 }); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.countBy(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [4.2, 0, array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.countBy(array, function(num) { return this.floor(num); }, Math); deepEqual(actual, { '4': 1, '6': 2 }); }); test('should only add values to own, not inherited, properties', 2, function() { var actual = 
_.countBy([4.2, 6.1, 6.4], function(num) { return Math.floor(num) > 4 ? 'hasOwnProperty' : 'constructor'; }); deepEqual(actual.constructor, 1); deepEqual(actual.hasOwnProperty, 2); }); test('should work with a string for `callback`', 1, function() { var actual = _.countBy(['one', 'two', 'three'], 'length'); deepEqual(actual, { '3': 2, '5': 1 }); }); test('should work with an object for `collection`', 1, function() { var actual = _.countBy({ 'a': 4.2, 'b': 6.1, 'c': 6.4 }, function(num) { return Math.floor(num); }); deepEqual(actual, { '4': 1, '6': 2 }); }); test('should work with a number for `callback`', 2, function() { var array = [ [1, 'a'], [2, 'a'], [2, 'b'] ]; deepEqual(_.countBy(array, 0), { '1': 1, '2': 2 }); deepEqual(_.countBy(array, 1), { 'a': 2, 'b': 1 }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.create'); (function() { test('should create an object that inherits from the given `prototype` object', 3, function() { function Shape() { this.x = 0; this.y = 0; } function Circle() { Shape.call(this); } Circle.prototype = _.create(Shape.prototype); Circle.prototype.constructor = Circle; var actual = new Circle; ok(actual instanceof Circle); ok(actual instanceof Shape); notStrictEqual(Circle.prototype, Shape.prototype); }); test('should assign `properties` to the created object', 3, function() { function Shape() { this.x = 0; this.y = 0; } function Circle() { Shape.call(this); } var expected = { 'constructor': Circle, 'radius': 0 }; Circle.prototype = _.create(Shape.prototype, expected); var actual = new Circle; ok(actual instanceof Circle); ok(actual instanceof Shape); deepEqual(Circle.prototype, expected); }); test('should accept a falsey `prototype` argument', 1, function() { var expected = _.map(falsey, function() { return {}; }); var actual = _.map(falsey, function(value, index) { return index ? 
// NOTE(review): tail of 'lodash.create' (falsey/primitive `prototype`
// arguments), the complete 'lodash.callback' module (falsey `thisArg`,
// nullish `func`, `_.matches`/`_.property` dispatch, partialed functions,
// `Function#bind` detection, `funcDecomp`/`funcNames` metadata writing), the
// 'lodash.curry' module (argument accumulation, placeholders, `length` 0,
// `new curried` instances, `this` binding matrix), and the async
// 'lodash.debounce' module (leading/trailing/maxWait options, cancellation,
// trailing-call arguments). Some physical line breaks below fall inside
// string literals — they are chunking artifacts; content is preserved
// byte-for-byte with only this comment block added.
_.create(value) : _.create(); }); deepEqual(actual, expected); }); test('should ignore primitive `prototype` arguments and use an empty object instead', 1, function() { var primitives = [true, null, 1, 'a', undefined], expected = _.map(primitives, _.constant(true)); var actual = _.map(primitives, function(value, index) { return _.isPlainObject(index ? _.create(value) : _.create()); }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.callback'); (function() { test('should create a callback with a falsey `thisArg`', 1, function() { var values = _.map(falsey, function(value) { return Object(value == null ? root : value); }); var actual = _.map(values, function(value) { var callback = _.callback(function() { return this; }, value); return callback(); }); deepEqual(actual, values); }); test('should return `_.identity` when `func` is nullish', 2, function() { var object = {}; _.each([null, undefined], function(value) { var callback = _.callback(value); strictEqual(callback(object), object); }); }); test('should not error when `func` is nullish and a `thisArg` is provided', 2, function() { var object = {}; _.each([null, undefined], function(value) { try { var callback = _.callback(value, {}); strictEqual(callback(object), object); } catch(e) { ok(false); } }); }); test('should return a callback created by `_.matches` when `func` is an object', 2, function() { var callback = _.callback({ 'a': 1 }); strictEqual(callback({ 'a': 1, 'b': 2 }), true); strictEqual(callback({}), false); }); test('should return a callback created by `_.property` when `func` is a number or string', 2, function() { var array = ['a'], callback = _.callback(0); strictEqual(callback(array), 'a'); callback = _.callback('0'); strictEqual(callback(array), 'a'); }); test('should work without an `argCount`', 1, function() { var args, expected = ['a', 'b', 'c', 'd', 'e']; var callback = _.callback(function() { args = 
slice.call(arguments); }); callback.apply(null, expected); deepEqual(args, expected); }); test('should work with functions created by `_.partial` and `_.partialRight`', 2, function() { function fn() { var result = [this.a]; push.apply(result, arguments); return result; } var expected = [1, 2, 3], object = { 'a': 1 }, callback = _.callback(_.partial(fn, 2), object); deepEqual(callback(3), expected); callback = _.callback(_.partialRight(fn, 3), object); deepEqual(callback(2), expected); }); test('should return the function provided if already bound with `Function#bind`', 1, function() { function a() {} var object = {}, bound = a.bind && a.bind(object); if (bound && !('prototype' in bound)) { var bound = a.bind(object); strictEqual(_.callback(bound, object), bound); } else { skipTest(); } }); test('should return the function provided when there is no `this` reference', 2, function() { function a() {} function b() { return this.b; } var object = {}; if (_.support.funcDecomp) { strictEqual(_.callback(a, object), a); notStrictEqual(_.callback(b, object), b); } else { skipTest(2); } }); test('should only write metadata to named functions', 3, function() { function a() {}; var b = function() {}; function c() {}; var object = {}; if (defineProperty && _.support.funcDecomp) { _.callback(a, object); ok(expando in a); _.callback(b, object); ok(!(expando in b)); if (_.support.funcNames) { _.support.funcNames = false; _.callback(c, object); ok(expando in c); _.support.funcNames = true; } else { skipTest(); } } else { skipTest(3); } }); test('should not write metadata when `_.support.funcDecomp` is `false`', 1, function() { function a() {}; if (defineProperty && lodashBizarro) { lodashBizarro.callback(a, {}); ok(!(expando in a)); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.curry'); (function() { function fn(a, b, c, d) { return slice.call(arguments); } test('should curry based on the number 
of arguments provided', 3, function() { var curried = _.curry(fn), expected = [1, 2, 3, 4]; deepEqual(curried(1)(2)(3)(4), expected); deepEqual(curried(1, 2)(3, 4), expected); deepEqual(curried(1, 2, 3, 4), expected); }); test('should work with partialed methods', 2, function() { var curried = _.curry(fn), expected = [1, 2, 3, 4]; var a = _.partial(curried, 1), b = _.bind(a, null, 2), c = _.partialRight(b, 4), d = _.partialRight(b(3), 4); deepEqual(c(3), expected); deepEqual(d(), expected); }); test('should support placeholders', 4, function() { if (!isModularize) { var curried = _.curry(fn); deepEqual(curried(1)(_, 3)(_, 4)(2), [1, 2, 3, 4]); deepEqual(curried(_, 2)(1)(_, 4)(3), [1, 2, 3, 4]); deepEqual(curried(_, _, 3)(_, 2)(_, 4)(1), [1, 2, 3, 4]); deepEqual(curried(_, _, _, 4)(_, _, 3)(_, 2)(1), [1, 2, 3, 4]); } else { skipTest(4); } }); test('should return a function with a `length` of `0`', 6, function() { _.times(2, function(index) { var curried = index ? _.curry(fn, 4) : _.curry(fn); strictEqual(curried.length, 0); strictEqual(curried(1).length, 0); strictEqual(curried(1, 2).length, 0); }); }); test('ensure `new curried` is an instance of `func`', 2, function() { function Foo(value) { return value && object; } var curried = _.curry(Foo), object = {}; ok(new curried(false) instanceof Foo); strictEqual(new curried(true), object); }); test('should not alter the `this` binding', 9, function() { function fn(a, b, c) { var value = this || {}; return [value[a], value[b], value[c]]; } var object = { 'a': 1, 'b': 2, 'c': 3 }, expected = [1, 2, 3]; deepEqual(_.curry(_.bind(fn, object), 3)('a')('b')('c'), expected); deepEqual(_.curry(_.bind(fn, object), 3)('a', 'b')('c'), expected); deepEqual(_.curry(_.bind(fn, object), 3)('a', 'b', 'c'), expected); deepEqual(_.bind(_.curry(fn), object)('a')('b')('c'), Array(3)); deepEqual(_.bind(_.curry(fn), object)('a', 'b')('c'), Array(3)); deepEqual(_.bind(_.curry(fn), object)('a', 'b', 'c'), expected); object.curried = 
_.curry(fn); deepEqual(object.curried('a')('b')('c'), Array(3)); deepEqual(object.curried('a', 'b')('c'), Array(3)); deepEqual(object.curried('a', 'b', 'c'), expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.debounce'); (function() { asyncTest('should debounce a function', 2, function() { if (!(isRhino && isModularize)) { var count = 0, debounced = _.debounce(function() { count++; }, 32); debounced(); debounced(); debounced(); strictEqual(count, 0); setTimeout(function() { strictEqual(count, 1); QUnit.start(); }, 96); } else { skipTest(2); QUnit.start(); } }); asyncTest('subsequent debounced calls return the last `func` result', 2, function() { if (!(isRhino && isModularize)) { var debounced = _.debounce(_.identity, 32); debounced('x'); setTimeout(function() { notEqual(debounced('y'), 'y'); }, 64); setTimeout(function() { notEqual(debounced('z'), 'z'); QUnit.start(); }, 128); } else { skipTest(2); QUnit.start(); } }); asyncTest('subsequent "immediate" debounced calls return the last `func` result', 2, function() { if (!(isRhino && isModularize)) { var debounced = _.debounce(_.identity, 32, true), result = [debounced('x'), debounced('y')]; deepEqual(result, ['x', 'x']); setTimeout(function() { var result = [debounced('a'), debounced('b')]; deepEqual(result, ['a', 'a']); QUnit.start(); }, 64); } else { skipTest(2); QUnit.start(); } }); asyncTest('should apply default options correctly', 2, function() { if (!(isRhino && isModularize)) { var count = 0; var debounced = _.debounce(function(value) { count++; return value; }, 32, {}); strictEqual(debounced('x'), undefined); setTimeout(function() { strictEqual(count, 1); QUnit.start(); }, 64); } else { skipTest(2); QUnit.start(); } }); asyncTest('should support a `leading` option', 7, function() { if (!(isRhino && isModularize)) { var withLeading, counts = [0, 0, 0]; _.each([true, { 'leading': true }], function(options, index) { var debounced = 
_.debounce(function(value) { counts[index]++; return value; }, 32, options); if (index == 1) { withLeading = debounced; } strictEqual(debounced('x'), 'x'); }); _.each([false, { 'leading': false }], function(options) { var withoutLeading = _.debounce(_.identity, 32, options); strictEqual(withoutLeading('x'), undefined); }); var withLeadingAndTrailing = _.debounce(function() { counts[2]++; }, 32, { 'leading': true }); withLeadingAndTrailing(); withLeadingAndTrailing(); strictEqual(counts[2], 1); setTimeout(function() { deepEqual(counts, [1, 1, 2]); withLeading('x'); strictEqual(counts[1], 2); QUnit.start(); }, 64); } else { skipTest(7); QUnit.start(); } }); asyncTest('should support a `trailing` option', 4, function() { if (!(isRhino && isModularize)) { var withCount = 0, withoutCount = 0; var withTrailing = _.debounce(function(value) { withCount++; return value; }, 32, { 'trailing': true }); var withoutTrailing = _.debounce(function(value) { withoutCount++; return value; }, 32, { 'trailing': false }); strictEqual(withTrailing('x'), undefined); strictEqual(withoutTrailing('x'), undefined); setTimeout(function() { strictEqual(withCount, 1); strictEqual(withoutCount, 0); QUnit.start(); }, 64); } else { skipTest(4); QUnit.start(); } }); test('should support a `maxWait` option', 2, function() { if (!(isRhino && isModularize)) { var limit = (argv || isPhantom) ? 
1000 : 320, withCount = 0, withoutCount = 0; var withMaxWait = _.debounce(function() { withCount++; }, 64, { 'maxWait': 128 }); var withoutMaxWait = _.debounce(function() { withoutCount++; }, 96); var start = +new Date; while ((new Date - start) < limit) { withMaxWait(); withoutMaxWait(); } ok(withCount > 0); ok(!withoutCount); } else { skipTest(2); } }); asyncTest('should cancel `maxDelayed` when `delayed` is executed', 1, function() { if (!(isRhino && isModularize)) { var count = 0; var debounced = _.debounce(function() { count++; }, 32, { 'maxWait': 64 }); debounced(); setTimeout(function() { strictEqual(count, 1); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); asyncTest('should execute the `trailing` call with the correct arguments and `this` binding', 2, function() { if (!(isRhino && isModularize)) { var args, count = 0, object = {}; var debounced = _.debounce(function(value) { args = [this]; push.apply(args, arguments); return ++count != 2; }, 32, { 'leading': true, 'maxWait': 64 }); while (true) { if (!debounced.call(object, 'a')) { break; } } setTimeout(function() { strictEqual(count, 2); deepEqual(args, [object, 'a']); QUnit.start(); }, 64); } else { skipTest(2); QUnit.start(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.defaults'); (function() { test('should assign properties of a source object if missing on the destination object', 1, function() { deepEqual(_.defaults({ 'a': 1 }, { 'a': 2, 'b': 2 }), { 'a': 1, 'b': 2 }); }); test('should assign own source properties', 1, function() { function Foo() { this.a = 1; this.c = 3; } Foo.prototype.b = 2; deepEqual(_.defaults({ 'c': 2 }, new Foo), { 'a': 1, 'c': 2 }); }); test('should accept multiple source objects', 2, function() { var expected = { 'a': 1, 'b': 2, 'c': 3 }; deepEqual(_.defaults({ 'a': 1, 'b': 2 }, { 'b': 3 }, { 'c': 3 }), expected); deepEqual(_.defaults({ 'a': 1, 'b': 2 }, { 'b': 3, 'c': 3 }, { 'c': 2 }), 
// NOTE(review): tail of 'lodash.defaults' (null preserved, undefined
// overwritten), the async 'lodash.defer' and 'lodash.delay' modules
// (deferred execution, extra arguments, clearTimeout cancellation),
// 'lodash.difference' (multiple exclusion arrays, large-array fast path,
// non-array arguments ignored), 'lodash.endsWith' (`position` clamping and
// falsey handling), 'lodash.escape'/'lodash.escapeRegExp', the head of
// 'lodash.every', and the start of the shared "source property checks"
// suite. Run-on line layout is a chunking artifact preserved byte-for-byte;
// only comments are added.
expected); }); test('should not overwrite `null` values', 1, function() { var actual = _.defaults({ 'a': null }, { 'a': 1 }); strictEqual(actual.a, null); }); test('should overwrite `undefined` values', 1, function() { var actual = _.defaults({ 'a': undefined }, { 'a': 1 }); strictEqual(actual.a, 1); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.defer'); (function() { asyncTest('should defer `func` execution', 1, function() { if (!(isRhino && isModularize)) { var pass = false; _.defer(function(){ pass = true; }); setTimeout(function() { ok(pass); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); asyncTest('should accept additional arguments', 1, function() { if (!(isRhino && isModularize)) { var args; _.defer(function() { args = slice.call(arguments); }, 1, 2, 3); setTimeout(function() { deepEqual(args, [1, 2, 3]); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); asyncTest('should be cancelable', 1, function() { if (!(isRhino && isModularize)) { var pass = true; var timerId = _.defer(function() { pass = false; }); clearTimeout(timerId); setTimeout(function() { ok(pass); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.delay'); (function() { asyncTest('should delay `func` execution', 2, function() { if (!(isRhino && isModularize)) { var pass = false; _.delay(function(){ pass = true; }, 96); setTimeout(function() { ok(!pass); }, 32); setTimeout(function() { ok(pass); QUnit.start(); }, 160); } else { skipTest(2); QUnit.start(); } }); asyncTest('should accept additional arguments', 1, function() { if (!(isRhino && isModularize)) { var args; _.delay(function() { args = slice.call(arguments); }, 32, 1, 2, 3); setTimeout(function() { deepEqual(args, [1, 2, 3]); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); asyncTest('should be 
cancelable', 1, function() { if (!(isRhino && isModularize)) { var pass = true; var timerId = _.delay(function() { pass = false; }, 32); clearTimeout(timerId); setTimeout(function() { ok(pass); QUnit.start(); }, 128); } else { skipTest(); QUnit.start(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.difference'); (function() { var args = arguments; test('should return the difference of the given arrays', 2, function() { var actual = _.difference([1, 2, 3, 4, 5], [5, 2, 10]); deepEqual(actual, [1, 3, 4]); actual = _.difference([1, 2, 3, 4, 5], [5, 2, 10], [8, 4]); deepEqual(actual, [1, 3]); }); test('should work with large arrays', 1, function() { var array1 = _.range(largeArraySize + 1), array2 = _.range(largeArraySize), a = {}, b = {}, c = {}; array1.push(a, b, c); array2.push(b, c, a); deepEqual(_.difference(array1, array2), [largeArraySize]); }); test('should work with large arrays of objects', 1, function() { var object1 = {}, object2 = {}, largeArray = _.times(largeArraySize, _.constant(object1)); deepEqual(_.difference([object1, object2], largeArray), [object2]); }); test('should ignore values that are not arrays or `arguments` objects', 3, function() { var array = [0, 1, null, 3]; deepEqual(_.difference(array, 3, null, { '0': 1 }), array); deepEqual(_.difference(null, array, null, [2, 1]), [0, null, 3]); deepEqual(_.difference(null, array, null, args), [0, null]); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.endsWith'); (function() { var string = 'abc'; test('should return `true` if a string ends with `target`', 1, function() { strictEqual(_.endsWith(string, 'c'), true); }); test('should return `false` if a string does not end with `target`', 1, function() { strictEqual(_.endsWith(string, 'b'), false); }); test('should work with a `position` argument', 1, function() { strictEqual(_.endsWith(string, 'b', 2), true); }); 
test('should work with `position` >= `string.length`', 4, function() { _.each([3, 5, maxSafeInteger, Infinity], function(position) { strictEqual(_.endsWith(string, 'c', position), true); }); }); test('should treat falsey `position` values, except `undefined`, as `0`', 1, function() { var expected = _.map(falsey, _.constant(true)); var actual = _.map(falsey, function(position) { return _.endsWith(string, position === undefined ? 'c' : '', position); }); deepEqual(actual, expected); }); test('should treat a negative `position` as `0`', 6, function() { _.each([-1, -3, -Infinity], function(position) { ok(_.every(string, function(chr) { return _.endsWith(string, chr, position) === false; })); strictEqual(_.endsWith(string, '', position), true); }); }); test('should always return `true` when `target` is an empty string regardless of `position`', 1, function() { ok(_.every([-Infinity, NaN, -3, -1, 0, 1, 2, 3, 5, maxSafeInteger, Infinity], function(position) { return _.endsWith(string, '', position, true); })); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.escape'); (function() { var escaped = '&amp;&lt;&gt;&quot;&#39;&#96;\/', unescaped = '&<>"\'`\/'; test('should escape values', 1, function() { strictEqual(_.escape(unescaped), escaped); }); test('should not escape the "/" character', 1, function() { strictEqual(_.escape('/'), '/'); }); test('should handle strings with nothing to escape', 1, function() { strictEqual(_.escape('abc'), 'abc'); }); test('should escape the same characters unescaped by `_.unescape`', 1, function() { strictEqual(_.escape(_.unescape(escaped)), escaped); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.escapeRegExp'); (function() { test('should escape values', 1, function() { var escaped = '\\.\\*\\+\\?\\^\\$\\{\\}\\(\\)\\|\\[\\]\\/\\\\', unescaped = '.*+?^${}()|[\]\/\\'; strictEqual(_.escapeRegExp(unescaped), escaped); }); 
test('should handle strings with nothing to escape', 1, function() { strictEqual(_.escapeRegExp('abc'), 'abc'); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.every'); (function() { test('should return `true` for empty or falsey collections', 1, function() { var expected = _.map(empties, _.constant(true)); var actual = _.map(empties, function(value) { try { return _.every(value, _.identity); } catch(e) { } }); deepEqual(actual, expected); }); test('should return `true` if the callback returns truthy for all elements in the collection', 1, function() { strictEqual(_.every([true, 1, 'x'], _.identity), true); }); test('should return `false` as soon as the callback result is falsey', 1, function() { strictEqual(_.every([true, null, true], _.identity), false); }); test('should work with collections of `undefined` values (test in IE < 9)', 1, function() { strictEqual(_.every([undefined, undefined, undefined], _.identity), false); }); test('should use `_.identity` when no callback is provided', 2, function() { strictEqual(_.every([0]), false); strictEqual(_.every([1]), true); }); test('should be aliased', 1, function() { strictEqual(_.all, _.every); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('source property checks'); _.each(['assign', 'defaults', 'merge'], function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` should not assign inherited `source` properties', 1, function() { function Foo() {} Foo.prototype = { 'a': 1 }; deepEqual(func({}, new Foo), {}); }); test('should work when used as a callback for `_.reduce`', 1, function() { var array = [{ 'a': 1 }, { 'b': 2 }, { 'c': 3 }], actual = _.reduce(array, _.merge); deepEqual(actual, { 'a': 1, 'b': 2, 'c': 3 }); }); if (methodName == 'merge') { test('`_.' 
+ methodName + '` should treat sparse arrays as dense', 2, function() { var array = Array(3); array[0] = 1; array[2] = 3; var actual = func([], array), expected = array.slice(); expected[1] = undefined; ok('1' in actual); deepEqual(actual, expected); }); } }); /*--------------------------------------------------------------------------*/ QUnit.module('strict mode checks'); _.each(['assign', 'bindAll', 'defaults'], function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` should not throw strict mode errors', 1, function() { var object = { 'a': null, 'b': function(){} }, pass = true; if (freeze) { freeze(object); try { if (methodName == 'bindAll') { func(object); } else { func(object, { 'a': 1 }); } } catch(e) { pass = false; } ok(pass); } else { skipTest(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.filter'); (function() { test('should return elements the `callback` returns truthy for', 1, function() { var actual = _.filter([1, 2, 3], function(num) { return num % 2; }); deepEqual(actual, [1, 3]); }); test('should not modify wrapped values', 2, function() { if (!isNpm) { var wrapped = _([1, 2, 3, 4]); var actual = wrapped.filter(function(num) { return num < 3; }); deepEqual(actual.value(), [1, 2]); actual = wrapped.filter(function(num) { return num > 2; }); deepEqual(actual.value(), [3, 4]); } else { skipTest(2); } }); test('should be aliased', 1, function() { strictEqual(_.select, _.filter); }); }()); /*--------------------------------------------------------------------------*/ _.each(['find', 'findLast', 'findIndex', 'findLastIndex', 'findKey', 'findLastKey'], function(methodName) { QUnit.module('lodash.' 
+ methodName); var func = _[methodName]; (function() { var objects = [ { 'a': 0, 'b': 0 }, { 'a': 1, 'b': 1 }, { 'a': 2, 'b': 2 } ]; var expected = ({ 'find': [objects[1], undefined, objects[2], objects[1]], 'findLast': [objects[2], undefined, objects[2], objects[2]], 'findIndex': [1, -1, 2, 1], 'findLastIndex': [2, -1, 2, 2], 'findKey': ['1', undefined, '2', '1'], 'findLastKey': ['2', undefined, '2', '2'] })[methodName]; test('should return the correct value', 1, function() { strictEqual(func(objects, function(object) { return object.a; }), expected[0]); }); test('should work with a `thisArg`', 1, function() { strictEqual(func(objects, function(object, index) { return this[index].a; }, objects), expected[0]); }); test('should return `' + expected[1] + '` if value is not found', 1, function() { strictEqual(func(objects, function(object) { return object.a === 3; }), expected[1]); }); test('should work with an object for `callback`', 1, function() { strictEqual(func(objects, { 'b': 2 }), expected[2]); }); test('should work with a string for `callback`', 1, function() { strictEqual(func(objects, 'b'), expected[3]); }); test('should return `' + expected[1] + '` for empty or falsey collections', 1, function() { var actual = [], emptyValues = /Index/.test(methodName) ? 
_.reject(empties, _.isPlainObject) : empties, expecting = _.map(emptyValues, function() { return expected[1]; }); _.each(emptyValues, function(value) { try { actual.push(func(value, { 'a': 3 })); } catch(e) { } }); deepEqual(actual, expecting); }); }()); (function() { var expected = ({ 'find': 1, 'findLast': 2, 'findKey': 'a', 'findLastKey': 'b' })[methodName]; if (expected != null) { test('should work with an object for `collection`', 1, function() { var actual = func({ 'a': 1, 'b': 2, 'c': 3 }, function(num) { return num < 3; }); strictEqual(actual, expected); }); } }()); (function() { var expected = ({ 'find': 'a', 'findLast': 'b', 'findIndex': 0, 'findLastIndex': 1 })[methodName]; if (expected != null) { test('should work with a string for `collection`', 1, function() { var actual = func('abc', function(chr, index) { return index < 2; }); strictEqual(actual, expected); }); } if (methodName == 'find') { test('should be aliased', 1, function() { strictEqual(_.detect, func); }); } }()); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.findWhere'); (function() { var objects = [ { 'a': 1 }, { 'a': 1 }, { 'a': 1, 'b': 2 }, { 'a': 2, 'b': 2 }, { 'a': 3 } ]; test('should filter by `source` properties', 6, function() { strictEqual(_.findWhere(objects, { 'a': 1 }), objects[0]); strictEqual(_.findWhere(objects, { 'a': 2 }), objects[3]); strictEqual(_.findWhere(objects, { 'a': 3 }), objects[4]); strictEqual(_.findWhere(objects, { 'b': 1 }), undefined); strictEqual(_.findWhere(objects, { 'b': 2 }), objects[2]); strictEqual(_.findWhere(objects, { 'a': 1, 'b': 2 }), objects[2]); }); test('should work with a function for `source`', 1, function() { function source() {} source.a = 2; strictEqual(_.findWhere(objects, source), objects[3]); }); test('should match all elements when provided an empty `source`', 1, function() { var expected = _.map(empties, _.constant(true)); var actual = _.map(empties, function(value) { return 
_.findWhere(objects, value) === objects[0]; }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.first'); (function() { var array = [1, 2, 3]; var objects = [ { 'a': 2, 'b': 2 }, { 'a': 1, 'b': 1 }, { 'a': 0, 'b': 0 } ]; test('should return the first element', 1, function() { strictEqual(_.first(array), 1); }); test('should return the first two elements', 1, function() { deepEqual(_.first(array, 2), [1, 2]); }); test('should treat falsey `n` values, except nullish, as `0`', 1, function() { var expected = _.map(falsey, function(value) { return value == null ? 1 : []; }); var actual = _.map(falsey, function(n) { return _.first(array, n); }); deepEqual(actual, expected); }); test('should return an empty array when `n` < `1`', 3, function() { _.each([0, -1, -Infinity], function(n) { deepEqual(_.first(array, n), []); }); }); test('should return all elements when `n` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(n) { deepEqual(_.first(array, n), array); }); }); test('should return `undefined` when querying empty arrays', 1, function() { strictEqual(_.first([]), undefined); }); test('should work when used as a callback for `_.map`', 1, function() { var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]], actual = _.map(array, _.first); deepEqual(actual, [1, 4, 7]); }); test('should work with a callback', 1, function() { var actual = _.first(array, function(num) { return num < 3; }); deepEqual(actual, [1, 2]); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.first(array, function() { args = slice.call(arguments); }); deepEqual(args, [1, 0, array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.first(array, function(num, index) { return this[index] < 3; }, array); deepEqual(actual, [1, 2]); }); test('should work with an object for `callback`', 1, function() { deepEqual(_.first(objects, 
// NOTE(review): span covers the tail of the find* shared suite, _.findWhere, _.first, and _.flatten tests. Code byte-identical; comment sits inside an unclosed deepEqual(...) argument list, so the inserted break cannot trigger ASI. Chunk boundaries here fall mid-string (e.g. 'should perform a deep flatten...'), so no other insertions are safe in this span.
{ 'b': 2 }), objects.slice(0, 1)); }); test('should work with a string for `callback`', 1, function() { deepEqual(_.first(objects, 'b'), objects.slice(0, 2)); }); test('should chain when passing `n`, `callback`, or `thisArg`', 3, function() { if (!isNpm) { var actual = _(array).first(2); ok(actual instanceof _); actual = _(array).first(function(num) { return num < 3; }); ok(actual instanceof _); actual = _(array).first(function(num, index) { return this[index] < 3; }, array); ok(actual instanceof _); } else { skipTest(3); } }); test('should not chain when arguments are not provided', 1, function() { if (!isNpm) { var actual = _(array).first(); strictEqual(actual, 1); } else { skipTest(); } }); test('should be aliased', 2, function() { strictEqual(_.head, _.first); strictEqual(_.take, _.first); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.flatten'); (function() { var args = arguments, array = [{ 'a': [1, [2]] }, { 'a': [3] }]; test('should flatten `arguments` objects', 1, function() { var actual = _.flatten([args, args]); deepEqual(actual, [1, 2, 3, 1, 2, 3]); }); test('should work with a callback', 1, function() { var actual = _.flatten(array, function(object) { return object.a; }); deepEqual(actual, [1, 2, 3]); }); test('should work with `isShallow` and `callback`', 1, function() { var actual = _.flatten(array, true, function(object) { return object.a; }); deepEqual(actual, [1, [2], 3]); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.flatten(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [array[0], 0, array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.flatten(array, function(object, index) { return this[index].a; }, array); deepEqual(actual, [1, 2, 3]); }); test('should work with a string for `callback`', 1, function() { deepEqual(_.flatten(array, 'a'), [1, 2, 3]); }); test('should perform a 
deep flatten when used as a callback for `_.map`', 1, function() { var array = [[[['a']]], [[['b']]]], actual = _.map(array, _.flatten); deepEqual(actual, [['a'], ['b']]); }); test('should treat sparse arrays as dense', 4, function() { var array = [[1, 2, 3], Array(3)], expected = [1, 2, 3], actual1 = _.flatten(array), actual2 = _.flatten(array, true); expected.push(undefined, undefined, undefined); deepEqual(actual1, expected); ok('4' in actual1); deepEqual(actual2, expected); ok('4' in actual2); }); test('should work with extremely large arrays', 1, function() { // test in modern browsers if (freeze) { try { var expected = Array(5e5), actual = _.flatten([expected]); deepEqual(actual, expected) } catch(e) { ok(false); } } else { skipTest(); } }); test('should work with empty arrays', 1, function() { var actual = _.flatten([[], [[]], [[], [[[]]]]]); deepEqual(actual, []); }); test('should flatten nested arrays', 1, function() { var array = [1, [2], [3, [[4]]]], expected = [1, 2, 3, 4]; deepEqual(_.flatten(array), expected); }); test('should support shallow flattening nested arrays', 1, function() { var array = [1, [2], [3, [4]]], expected = [1, 2, 3, [4]]; deepEqual(_.flatten(array, true), expected); }); test('should support shallow flattening arrays of other arrays', 1, function() { var array = [[1], [2], [3], [[4]]], expected = [1, 2, 3, [4]]; deepEqual(_.flatten(array, true), expected); }); test('should return an empty array for non array-like objects', 1, function() { var actual = _.flatten({ 'a': 1 }, _.identity); deepEqual(actual, []); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('forEach methods'); _.each(['forEach', 'forEachRight'], function(methodName) { var func = _[methodName], isForEach = methodName == 'forEach'; _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('`_.' 
// NOTE(review): span covers forEach/forEachRight string-collection + alias tests, forIn*/forOwn* property-visibility tests, and the shared "iteration methods" battery (callback args, thisArg, chaining, own-vs-inherited keys, array-like length handling). Code byte-identical; comment placed inside an unclosed test(...) call, so the break is token-safe.
+ methodName + '` should work with a string ' + key + ' for `collection` (test in IE < 9)', 2, function() { var args, values = []; func(collection, function(value) { args || (args = slice.call(arguments)); values.push(value); }); if (isForEach) { deepEqual(args, ['a', 0, collection]); deepEqual(values, ['a', 'b', 'c']); } else { deepEqual(args, ['c', 2, collection]); deepEqual(values, ['c', 'b', 'a']); } }); }); test('`_.' + methodName + '` should be aliased', 1, function() { if (isForEach) { strictEqual(_.each, _.forEach); } else { strictEqual(_.eachRight, _.forEachRight); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('forIn methods'); _.each(['forIn', 'forInRight'], function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` iterates over inherited properties', 1, function() { function Foo() { this.a = 1; } Foo.prototype.b = 2; var keys = []; func(new Foo, function(value, key) { keys.push(key); }); deepEqual(keys.sort(), ['a', 'b']); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('forOwn methods'); _.each(['forOwn', 'forOwnRight'], function(methodName) { var func = _[methodName]; test('iterates over the `length` property', 1, function() { var object = { '0': 'zero', '1': 'one', 'length': 2 }, props = []; func(object, function(value, prop) { props.push(prop); }); deepEqual(props.sort(), ['0', '1', 'length']); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('iteration methods'); (function() { var methods = [ 'countBy', 'every', 'filter', 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight', 'groupBy', 'indexBy', 'map', 'max', 'min', 'partition', 'reject', 'some' ]; var boolMethods = [ 'every', 'some' ]; var collectionMethods = [ 'countBy', 'every', 'filter', 'find', 'findLast', 'forEach', 'forEachRight', 'groupBy', 'indexBy', 'map', 'max', 'min', 'partition', 'reduce', 
'reduceRight', 'reject', 'some' ]; var forInMethods = [ 'forIn', 'forInRight' ]; var iterationMethods = [ 'forEach', 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight' ] var objectMethods = [ 'forIn', 'forInRight', 'forOwn', 'forOwnRight' ]; var rightMethods = [ 'forEachRight', 'forInRight', 'forOwnRight' ]; _.each(methods, function(methodName) { var array = [1, 2, 3], func = _[methodName]; test('`_.' + methodName + '` should pass the correct `callback` arguments', 1, function() { var args, expected = [1, 0, array]; func(array, function() { args || (args = slice.call(arguments)); }); if (_.contains(rightMethods, methodName)) { expected[0] = 3; expected[1] = 2; } if (_.contains(objectMethods, methodName)) { expected[1] += ''; } deepEqual(args, expected); }); test('`_.' + methodName + '` should support the `thisArg` argument', 2, function() { var actual; function callback(num, index) { actual = this[index]; } func([1], callback, [2]); strictEqual(actual, 2); func({ 'a': 1 }, callback, { 'a': 2 }); strictEqual(actual, 2); }); }); _.each(_.difference(methods, boolMethods), function(methodName) { var array = [1, 2, 3], func = _[methodName]; test('`_.' + methodName + '` should return a wrapped value when chaining', 1, function() { if (!isNpm) { var actual = _(array)[methodName](_.noop); ok(actual instanceof _); } else { skipTest(); } }); }); _.each(_.difference(methods, forInMethods), function(methodName) { var array = [1, 2, 3], func = _[methodName]; test('`_.' + methodName + '` iterates over own properties of objects', 1, function() { function Foo() { this.a = 1; } Foo.prototype.b = 2; var keys = []; func(new Foo, function(value, key) { keys.push(key); }); deepEqual(keys, ['a']); }); }); _.each(iterationMethods, function(methodName) { var array = [1, 2, 3], func = _[methodName]; test('`_.' + methodName + '` should return the collection', 1, function() { strictEqual(func(array, Boolean), array); }); test('`_.' 
+ methodName + '` should return the existing wrapper when chaining', 1, function() { if (!isNpm) { var wrapper = _(array); strictEqual(wrapper[methodName](_.noop), wrapper); } else { skipTest(); } }); }); _.each(collectionMethods, function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` should treat objects with lengths of `0` as array-like', 1, function() { var pass = true; func({ 'length': 0 }, function() { pass = false; }, 0); ok(pass); }); test('`_.' + methodName + '` should not treat objects with negative lengths as array-like', 1, function() { var pass = false; func({ 'length': -1 }, function() { pass = true; }, 0); ok(pass); }); test('`_.' + methodName + '` should not treat objects with non-number lengths as array-like', 1, function() { var pass = false; func({ 'length': '0' }, function() { pass = true; }, 0); ok(pass); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('collection iteration bugs'); _.each(['forEach', 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight'], function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` fixes the JScript `[[DontEnum]]` bug (test in IE < 9)', 1, function() { var props = []; func(shadowedObject, function(value, prop) { props.push(prop); }); deepEqual(props.sort(), shadowedProps); }); test('`_.' 
// NOTE(review): span covers legacy-engine iteration-bug tests (non-enumerable props, function prototype skipping) and the shared assign/defaults/merge "object assignments" battery (JScript shadowed props, nullish sources, callback args, thisArg). Code byte-identical; comment placed inside an unclosed test(...) call, so the break is token-safe.
+ methodName + '` does not iterate over non-enumerable properties (test in IE < 9)', 10, function() { _.forOwn({ 'Array': Array.prototype, 'Boolean': Boolean.prototype, 'Date': Date.prototype, 'Error': Error.prototype, 'Function': Function.prototype, 'Object': Object.prototype, 'Number': Number.prototype, 'TypeError': TypeError.prototype, 'RegExp': RegExp.prototype, 'String': String.prototype }, function(proto, key) { var message = 'non-enumerable properties on ' + key + '.prototype', props = []; func(proto, function(value, prop) { props.push(prop); }); if (/Error/.test(key)) { ok(_.every(['constructor', 'toString'], function(prop) { return !_.contains(props, prop); }), message); } else { deepEqual(props, [], message); } }); }); test('`_.' + methodName + '` skips the prototype property of functions (test in Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1)', 2, function() { function Foo() {} Foo.prototype.a = 1; var props = []; function callback(value, prop) { props.push(prop); } func(Foo, callback); deepEqual(props, []); props.length = 0; Foo.prototype = { 'a': 1 }; func(Foo, callback); deepEqual(props, []); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('object assignments'); _.each(['assign', 'defaults', 'merge'], function(methodName) { var func = _[methodName]; test('`_.' + methodName + '` should return `undefined` when no destination object is provided', 1, function() { strictEqual(func(), undefined); }); test('`_.' + methodName + '` should assign problem JScript properties (test in IE < 9)', 1, function() { var object = { 'constructor': '0', 'hasOwnProperty': '1', 'isPrototypeOf': '2', 'propertyIsEnumerable': undefined, 'toLocaleString': undefined, 'toString': undefined, 'valueOf': undefined }; var source = { 'propertyIsEnumerable': '3', 'toLocaleString': '4', 'toString': '5', 'valueOf': '6' }; deepEqual(func(object, source), shadowedObject); }); test('`_.' 
+ methodName + '` skips the prototype property of functions (test in Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1)', 2, function() { function Foo() {} Foo.a = 1; Foo.b = 2; Foo.prototype.c = 3; var expected = { 'a': 1, 'b': 2 }; deepEqual(func({}, Foo), expected); Foo.prototype = { 'c': 3 }; deepEqual(func({}, Foo), expected); }); test('`_.' + methodName + '` should work with `_.reduce`', 1, function() { var array = [{ 'b': 2 }, { 'c': 3 }]; deepEqual(_.reduce(array, func, { 'a': 1}), { 'a': 1, 'b': 2, 'c': 3 }); }); test('`_.' + methodName + '` should not error on nullish sources (test in IE < 9)', 1, function() { try { deepEqual(func({ 'a': 1 }, undefined, { 'b': 2 }, null), { 'a': 1, 'b': 2 }); } catch(e) { ok(false); } }); test('`_.' + methodName + '` should not error when `object` is nullish and source objects are provided', 1, function() { var expected = _.times(2, _.constant(true)); var actual = _.map([null, undefined], function(value) { try { return _.isEqual(func(value, { 'a': 1 }), value); } catch(e) { return false; } }); deepEqual(actual, expected); }); test('`_.' + methodName + '` should return the existing wrapper when chaining', 1, function() { if (!isNpm) { var wrapper = _({ 'a': 1 }); strictEqual(wrapper[methodName]({ 'b': 2 }), wrapper); } else { skipTest(); } }); }); _.each(['assign', 'merge'], function(methodName) { var func = _[methodName], isMerge = methodName == 'merge'; test('`_.' 
+ methodName + '` should pass the correct `callback` arguments', 3, function() { var args, object = { 'a': 1 }, source = { 'a': 2 }; func(object, source, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 2, 'a', object, source], 'primitive property values'); args = null; object = { 'a': 1 }; source = { 'b': 2 }; func(object, source, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [undefined, 2, 'b', object, source], 'missing destination property'); var argsList = [], objectValue = [1, 2], sourceValue = { 'b': 2 }; object = { 'a': objectValue }; source = { 'a': sourceValue }; func(object, source, function() { argsList.push(slice.call(arguments)); }); var expected = [[objectValue, sourceValue, 'a', object, source]]; if (isMerge) { expected.push([undefined, 2, 'b', sourceValue, sourceValue]); } deepEqual(argsList, expected, 'non-primitive property values'); }); test('`_.' + methodName + '`should support the `thisArg` argument', 1, function() { var actual = func({}, { 'a': 0 }, function(a, b) { return this[b]; }, [2]); deepEqual(actual, { 'a': 2 }); }); test('`_.' + methodName + '` should not treat the second argument as a callback', 2, function() { function callback() {} callback.b = 2; var actual = func({ 'a': 1 }, callback); deepEqual(actual, { 'a': 1, 'b': 2 }); actual = func({ 'a': 1 }, callback, { 'c': 3 }); deepEqual(actual, { 'a': 1, 'b': 2, 'c': 3 }); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('exit early'); _.each(['_baseEach', 'forEach', 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight'], function(methodName) { var func = _[methodName]; if (!func) { return; } test('`_.' + methodName + '` can exit early when iterating arrays', 1, function() { var array = [1, 2, 3], values = []; func(array, function(value) { values.push(value); return false; }); deepEqual(values, [/Right/.test(methodName) ? 3 : 1]); }); test('`_.' 
// NOTE(review): span covers early-exit (return false) iteration tests, `__proto__`-key handling in internal data objects, _.functions, _.groupBy, _.has, _.identity, and the start of _.indexBy. Code byte-identical; comment placed inside an unclosed test(...) call, so the break is token-safe.
+ methodName + '` can exit early when iterating objects', 1, function() { var object = { 'a': 1, 'b': 2, 'c': 3 }, values = []; func(object, function(value) { values.push(value); return false; }); strictEqual(values.length, 1); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('`__proto__` property bugs'); (function() { test('internal data objects should work with the `__proto__` key', 4, function() { var stringLiteral = '__proto__', stringObject = Object(stringLiteral), expected = [stringLiteral, stringObject]; var largeArray = _.times(largeArraySize, function(count) { return count % 2 ? stringObject : stringLiteral; }); deepEqual(_.difference(largeArray, largeArray), []); deepEqual(_.intersection(largeArray, largeArray), expected); deepEqual(_.uniq(largeArray), expected); deepEqual(_.without.apply(_, [largeArray].concat(largeArray)), []); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.functions'); (function() { test('should return the function names of an object', 1, function() { var object = { 'a': 'a', 'b': _.identity, 'c': /x/, 'd': _.each }; deepEqual(_.functions(object), ['b', 'd']); }); test('should include inherited functions', 1, function() { function Foo() { this.a = _.identity; this.b = 'b' } Foo.prototype.c = _.noop; deepEqual(_.functions(new Foo), ['a', 'c']); }); test('should be aliased', 1, function() { strictEqual(_.methods, _.functions); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.groupBy'); (function() { var array = [4.2, 6.1, 6.4]; test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.groupBy([4, 6, 6]); deepEqual(actual, { '4': [4], '6': [6, 6] }); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.groupBy(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [4.2, 0, 
array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.groupBy(array, function(num) { return this.floor(num); }, Math); deepEqual(actual, { '4': [4.2], '6': [6.1, 6.4] }); }); test('should only add values to own, not inherited, properties', 2, function() { var actual = _.groupBy([4.2, 6.1, 6.4], function(num) { return Math.floor(num) > 4 ? 'hasOwnProperty' : 'constructor'; }); deepEqual(actual.constructor, [4.2]); deepEqual(actual.hasOwnProperty, [6.1, 6.4]); }); test('should work with an object for `collection`', 1, function() { var actual = _.groupBy({ 'a': 4.2, 'b': 6.1, 'c': 6.4 }, function(num) { return Math.floor(num); }); deepEqual(actual, { '4': [4.2], '6': [6.1, 6.4] }); }); test('should work with a number for `callback`', 2, function() { var array = [ [1, 'a'], [2, 'a'], [2, 'b'] ]; deepEqual(_.groupBy(array, 0), { '1': [[1 , 'a']], '2': [[2, 'a'], [2, 'b']] }); deepEqual(_.groupBy(array, 1), { 'a': [[1 , 'a'], [2, 'a']], 'b': [[2, 'b']] }); }); test('should work with a string for `callback`', 1, function() { var actual = _.groupBy(['one', 'two', 'three'], 'length'); deepEqual(actual, { '3': ['one', 'two'], '5': ['three'] }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.has'); (function() { test('should check for own properties', 2, function() { var object = { 'a': 1 }; strictEqual(_.has(object, 'a'), true); strictEqual(_.has(object, 'b'), false); }); test('should not use the `hasOwnProperty` method of the object', 1, function() { var object = { 'hasOwnProperty': null, 'a': 1 }; strictEqual(_.has(object, 'a'), true); }); test('should not check for inherited properties', 1, function() { function Foo() {} Foo.prototype.a = 1; strictEqual(_.has(new Foo, 'a'), false); }); test('should work with functions', 1, function() { function Foo() {} strictEqual(_.has(Foo, 'prototype'), true); }); test('should return `false` for primitives', 1, function() { var values = 
falsey.concat(1, 'a'), expected = _.map(values, _.constant(false)); var actual = _.map(values, function(value) { return _.has(value, 'valueOf'); }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.identity'); (function() { test('should return the first argument provided', 1, function() { var object = { 'name': 'fred' }; strictEqual(_.identity(object), object); }); }()) /*--------------------------------------------------------------------------*/ QUnit.module('lodash.indexBy'); (function() { test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.indexBy([4, 6, 6]); deepEqual(actual, { '4': 4, '6': 6 }); }); test('should support the `thisArg` argument', 1, function() { var actual = _.indexBy([4.2, 6.1, 6.4], function(num) { return this.floor(num); }, Math); deepEqual(actual, { '4': 4.2, '6': 6.4 }); }); test('should only add values to own, not inherited, properties', 2, function() { var actual = _.indexBy([4.2, 6.1, 6.4], function(num) { return Math.floor(num) > 4 ? 
// NOTE(review): span covers the tail of _.indexBy, the _.indexOf suite (fromIndex edge cases, isSorted), and integration tests that temporarily swap in a custom _.indexOf. Code byte-identical; comment placed after a ternary `?` where the parser still expects an operand, so the break is token-safe.
'hasOwnProperty' : 'constructor'; }); deepEqual(actual.constructor, 4.2); deepEqual(actual.hasOwnProperty, 6.4); }); test('should work with an object for `collection`', 1, function() { var actual = _.indexBy({ 'a': 4.2, 'b': 6.1, 'c': 6.4 }, function(num) { return Math.floor(num); }); deepEqual(actual, { '4': 4.2, '6': 6.4 }); }); test('should work with a number for `callback`', 2, function() { var array = [ [1, 'a'], [2, 'a'], [2, 'b'] ]; deepEqual(_.indexBy(array, 0), { '1': [1 , 'a'], '2': [2, 'b'] }); deepEqual(_.indexBy(array, 1), { 'a': [2, 'a'], 'b': [2, 'b'] }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.indexOf'); (function() { var array = [1, 2, 3, 1, 2, 3]; test('should return the index of the first matched value', 1, function() { strictEqual(_.indexOf(array, 3), 2); }); test('should return `-1` for an unmatched value', 4, function() { strictEqual(_.indexOf(array, 4), -1); strictEqual(_.indexOf(array, 4, true), -1); var empty = []; strictEqual(_.indexOf(empty, undefined), -1); strictEqual(_.indexOf(empty, undefined, true), -1); }); test('should work with a positive `fromIndex`', 1, function() { strictEqual(_.indexOf(array, 1, 2), 3); }); test('should work with `fromIndex` >= `array.length`', 12, function() { _.each([6, 8, Math.pow(2, 32), Infinity], function(fromIndex) { strictEqual(_.indexOf(array, 1, fromIndex), -1); strictEqual(_.indexOf(array, undefined, fromIndex), -1); strictEqual(_.indexOf(array, '', fromIndex), -1); }); }); test('should treat falsey `fromIndex` values as `0`', 1, function() { var expected = _.map(falsey, _.constant(0)); var actual = _.map(falsey, function(fromIndex) { return _.indexOf(array, 1, fromIndex); }); deepEqual(actual, expected); }); test('should treat non-number `fromIndex` values as `0`', 1, function() { strictEqual(_.indexOf([1, 2, 3], 1, '1'), 0); }); test('should work with a negative `fromIndex`', 1, function() { strictEqual(_.indexOf(array, 2, -3), 
4); }); test('should work with a negative `fromIndex` <= `-array.length`', 3, function() { _.each([-6, -8, -Infinity], function(fromIndex) { strictEqual(_.indexOf(array, 1, fromIndex), 0); }); }); test('should work with `isSorted`', 1, function() { strictEqual(_.indexOf([1, 2, 3], 1, true), 0); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('custom `_.indexOf` methods'); (function() { function Foo() {} function custom(array, value, fromIndex) { var index = (fromIndex || 0) - 1, length = array.length; while (++index < length) { var other = array[index]; if (other === value || (value instanceof Foo && other instanceof Foo)) { return index; } } return -1; } var array = [1, new Foo, 3, new Foo], indexOf = _.indexOf; var largeArray = _.times(largeArraySize, function() { return new Foo; }); test('`_.contains` should work with a custom `_.indexOf` method', 2, function() { if (!isModularize) { _.indexOf = custom; ok(_.contains(array, new Foo)); ok(_.contains({ 'a': 1, 'b': new Foo, 'c': 3 }, new Foo)); _.indexOf = indexOf; } else { skipTest(2); } }); test('`_.difference` should work with a custom `_.indexOf` method', 2, function() { if (!isModularize) { _.indexOf = custom; deepEqual(_.difference(array, [new Foo]), [1, 3]); deepEqual(_.difference(array, largeArray), [1, 3]); _.indexOf = indexOf; } else { skipTest(2); } }); test('`_.intersection` should work with a custom `_.indexOf` method', 2, function() { if (!isModularize) { _.indexOf = custom; deepEqual(_.intersection(array, [new Foo]), [array[1]]); deepEqual(_.intersection(largeArray, [new Foo]), [array[1]]); _.indexOf = indexOf; } else { skipTest(2); } }); test('`_.uniq` should work with a custom `_.indexOf` method', 2, function() { if (!isModularize) { _.indexOf = custom; deepEqual(_.uniq(array), array.slice(0, 3)); deepEqual(_.uniq(largeArray), [largeArray[0]]); _.indexOf = indexOf; } else { skipTest(2); } }); }()); 
// NOTE(review): span covers the _.initial suite (falsey args, `n` edge cases, callback/thisArg/object/string shorthands) and the _.intersection suite (uniqueness, large arrays, non-array inputs, chaining). Code byte-identical; comment placed at a `}());` statement boundary, so the break is safe.
/*--------------------------------------------------------------------------*/ QUnit.module('lodash.initial'); (function() { var array = [1, 2, 3]; var objects = [ { 'a': 0, 'b': 0 }, { 'a': 1, 'b': 1 }, { 'a': 2, 'b': 2 } ]; test('should accept a falsey `array` argument', 1, function() { var expected = _.map(falsey, _.constant([])); var actual = _.map(falsey, function(value, index) { try { return index ? _.initial(value) : _.initial(); } catch(e) { } }); deepEqual(actual, expected); }); test('should exclude last element', 1, function() { deepEqual(_.initial(array), [1, 2]); }); test('should exclude the last two elements', 1, function() { deepEqual(_.initial(array, 2), [1]); }); test('should return an empty when querying empty arrays', 1, function() { deepEqual(_.initial([]), []); }); test('should treat falsey `n` values, except nullish, as `0`', 1, function() { var expected = _.map(falsey, function(value) { return value == null ? [1, 2] : array; }); var actual = _.map(falsey, function(n) { return _.initial(array, n); }); deepEqual(actual, expected); }); test('should return all elements when `n` < `1`', 3, function() { _.each([0, -1, -Infinity], function(n) { deepEqual(_.initial(array, n), array); }); }); test('should return an empty array when `n` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(n) { deepEqual(_.initial(array, n), []); }); }); test('should work when used as a callback for `_.map`', 1, function() { var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]], actual = _.map(array, _.initial); deepEqual(actual, [[1, 2], [4, 5], [7, 8]]); }); test('should work with a callback', 1, function() { var actual = _.initial(array, function(num) { return num > 1; }); deepEqual(actual, [1]); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.initial(array, function() { args = slice.call(arguments); }); deepEqual(args, [3, 2, array]); }); test('should support the `thisArg` argument', 1, function() { var 
actual = _.initial(array, function(num, index) { return this[index] > 1; }, array); deepEqual(actual, [1]); }); test('should work with an object for `callback`', 1, function() { deepEqual(_.initial(objects, { 'b': 2 }), objects.slice(0, 2)); }); test('should work with a string for `callback`', 1, function() { deepEqual(_.initial(objects, 'b'), objects.slice(0, 1)); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.intersection'); (function() { var args = arguments; test('should return the intersection of the given arrays', 1, function() { var actual = _.intersection([1, 3, 2], [5, 2, 1, 4], [2, 1]); deepEqual(actual, [1, 2]); }); test('should return an array of unique values', 2, function() { var array = [1, 1, 3, 2, 2]; deepEqual(_.intersection(array, [5, 2, 2, 1, 4], [2, 1, 1]), [1, 2]); deepEqual(_.intersection(array), [1, 3, 2]); }); test('should work with large arrays of objects', 1, function() { var object = {}, largeArray = _.times(largeArraySize, _.constant(object)); deepEqual(_.intersection([object], largeArray), [object]); }); test('should work with large arrays of objects', 2, function() { var object = {}, largeArray = _.times(largeArraySize, _.constant(object)); deepEqual(_.intersection([object], largeArray), [object]); deepEqual(_.intersection(_.range(largeArraySize), null, [1]), [1]); }); test('should ignore values that are not arrays or `arguments` objects', 3, function() { var array = [0, 1, null, 3]; deepEqual(_.intersection(array, 3, null, { '0': 1 }), array); deepEqual(_.intersection(null, array, null, [2, 1]), [1]); deepEqual(_.intersection(null, array, null, args), [1, 3]); }); test('should return a wrapped value when chaining', 2, function() { if (!isNpm) { var actual = _([1, 3, 2]).intersection([5, 2, 1, 4]); ok(actual instanceof _); deepEqual(actual.value(), [1, 2]); } else { skipTest(2); } }); }(1, 2, 3)); 
/*--------------------------------------------------------------------------*/ QUnit.module('lodash.invert'); (function() { test('should invert an object', 2, function() { var object = { 'a': 1, 'b': 2 }, actual = _.invert(object); deepEqual(actual, { '1': 'a', '2': 'b' }); deepEqual(_.invert(actual), { 'a': '1', 'b': '2' }); }); test('should work with an object that has a `length` property', 1, function() { var object = { '0': 'a', '1': 'b', 'length': 2 }; deepEqual(_.invert(object), { 'a': '0', 'b': '1', '2': 'length' }); }); test('should accept a `multiValue` flag', 1, function() { var object = { 'a': 1, 'b': 2, 'c': 1 }; deepEqual(_.invert(object, true), { '1': ['a', 'c'], '2': ['b'] }); }); test('should only add multiple values to own, not inherited, properties', 2, function() { var object = { 'a': 'hasOwnProperty', 'b': 'constructor' }; deepEqual(_.invert(object), { 'hasOwnProperty': 'a', 'constructor': 'b' }); ok(_.isEqual(_.invert(object, true), { 'hasOwnProperty': ['a'], 'constructor': ['b'] })); }); test('should return a wrapped value when chaining', 2, function() { if (!isNpm) { var object = { 'a': 1, 'b': 2 }, actual = _(object).invert(); ok(actual instanceof _); deepEqual(actual.value(), { '1': 'a', '2': 'b' }); } else { skipTest(2); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.invoke'); (function() { test('should invoke a methods on each element of a collection', 1, function() { var array = ['a', 'b', 'c']; deepEqual( _.invoke(array, 'toUpperCase'), ['A', 'B', 'C']); }); test('should work with a function `methodName` argument', 1, function() { var actual = _.invoke(['a', 'b', 'c'], function() { return this.toUpperCase(); }); deepEqual(actual, ['A', 'B', 'C']); }); test('should work with an object for `collection`', 1, function() { var object = { 'a': 1, 'b': 2, 'c': 3 }; deepEqual(_.invoke(object, 'toFixed', 1), ['1.0', '2.0', '3.0']); }); test('should treat number values for 
`collection` as empty', 1, function() { deepEqual(_.invoke(1), []); }); test('should work with nullish elements', 1, function() { var array = ['a', null, undefined, 'd']; deepEqual(_.invoke(array, 'toUpperCase'), ['A', undefined, undefined, 'D']); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isArguments'); (function() { var args = arguments; test('should return `true` for `arguments` objects', 1, function() { strictEqual(_.isArguments(args), true); }); test('should return `false` for non `arguments` objects', 10, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? _.isArguments(value) : _.isArguments(); }); strictEqual(_.isArguments([1, 2, 3]), false); strictEqual(_.isArguments(true), false); strictEqual(_.isArguments(new Date), false); strictEqual(_.isArguments(new Error), false); strictEqual(_.isArguments(_), false); strictEqual(_.isArguments({ '0': 1, 'callee': _.noop, 'length': 1 }), false); strictEqual(_.isArguments(1), false); strictEqual(_.isArguments(/x/), false); strictEqual(_.isArguments('a'), false); deepEqual(actual, expected); }); test('should work with `arguments` objects from another realm', 1, function() { if (_._object) { strictEqual(_.isArguments(_._arguments), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isArray'); (function() { var args = arguments; test('should return `true` for arrays', 1, function() { strictEqual(_.isArray([1, 2, 3]), true); }); test('should return `false` for non arrays', 10, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? 
_.isArray(value) : _.isArray(); }); strictEqual(_.isArray(args), false); strictEqual(_.isArray(true), false); strictEqual(_.isArray(new Date), false); strictEqual(_.isArray(new Error), false); strictEqual(_.isArray(_), false); strictEqual(_.isArray({ '0': 1, 'length': 1 }), false); strictEqual(_.isArray(1), false); strictEqual(_.isArray(/x/), false); strictEqual(_.isArray('a'), false); deepEqual(actual, expected); }); test('should work with arrays from another realm', 1, function() { if (_._object) { strictEqual(_.isArray(_._array), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isBoolean'); (function() { var args = arguments; test('should return `true` for booleans', 4, function() { strictEqual(_.isBoolean(true), true); strictEqual(_.isBoolean(false), true); strictEqual(_.isBoolean(new Boolean(true)), true); strictEqual(_.isBoolean(new Boolean(false)), true); }); test('should return `false` for non booleans', 10, function() { var expected = _.map(falsey, function(value) { return value === false; }); var actual = _.map(falsey, function(value, index) { return index ? 
_.isBoolean(value) : _.isBoolean(); }); strictEqual(_.isBoolean(args), false); strictEqual(_.isBoolean([1, 2, 3]), false); strictEqual(_.isBoolean(new Date), false); strictEqual(_.isBoolean(new Error), false); strictEqual(_.isBoolean(_), false); strictEqual(_.isBoolean({ 'a': 1 }), false); strictEqual(_.isBoolean(1), false); strictEqual(_.isBoolean(/x/), false); strictEqual(_.isBoolean('a'), false); deepEqual(actual, expected); }); test('should work with booleans from another realm', 1, function() { if (_._object) { strictEqual(_.isBoolean(_._boolean), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isDate'); (function() { var args = arguments; test('should return `true` for dates', 1, function() { strictEqual(_.isDate(new Date), true); }); test('should return `false` for non dates', 10, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? 
_.isDate(value) : _.isDate(); }); strictEqual(_.isDate(args), false); strictEqual(_.isDate([1, 2, 3]), false); strictEqual(_.isDate(true), false); strictEqual(_.isDate(new Error), false); strictEqual(_.isDate(_), false); strictEqual(_.isDate({ 'a': 1 }), false); strictEqual(_.isDate(1), false); strictEqual(_.isDate(/x/), false); strictEqual(_.isDate('a'), false); deepEqual(actual, expected); }); test('should work with dates from another realm', 1, function() { if (_._object) { strictEqual(_.isDate(_._date), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isElement'); (function() { var args = arguments; function Element() { this.nodeType = 1; } test('should use robust check', 7, function() { var element = body || new Element; strictEqual(_.isElement(element), true); strictEqual(_.isElement({ 'nodeType': 1 }), false); strictEqual(_.isElement({ 'nodeType': new Number(1) }), false); strictEqual(_.isElement({ 'nodeType': true }), false); strictEqual(_.isElement({ 'nodeType': [1] }), false); strictEqual(_.isElement({ 'nodeType': '1' }), false); strictEqual(_.isElement({ 'nodeType': '001' }), false); }); test('should use a stronger check in browsers', 2, function() { var expected = !body; strictEqual(_.isElement(new Element), expected); if (lodashBizarro) { strictEqual(lodashBizarro.isElement(new Element), !expected); } else { skipTest(); } }); test('should return `false` for non DOM elements', 11, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? 
_.isElement(value) : _.isElement(); }); strictEqual(_.isElement(args), false); strictEqual(_.isElement([1, 2, 3]), false); strictEqual(_.isElement(true), false); strictEqual(_.isElement(new Date), false); strictEqual(_.isElement(new Error), false); strictEqual(_.isElement(_), false); strictEqual(_.isElement({ 'a': 1 }), false); strictEqual(_.isElement(1), false); strictEqual(_.isElement(/x/), false); strictEqual(_.isElement('a'), false); deepEqual(actual, expected); }); test('should work with DOM elements from another realm', 1, function() { if (_._element) { strictEqual(_.isElement(_._element), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isEmpty'); (function() { var args = arguments; test('should return `true` for empty or falsey values', 3, function() { var expected = _.map(empties, _.constant(true)); var actual = _.map(empties, function(value) { return _.isEmpty(value); }); strictEqual(_.isEmpty(), true); strictEqual(_.isEmpty(/x/), true); deepEqual(actual, expected); }); test('should return `false` for non-empty values', 3, function() { strictEqual(_.isEmpty([0]), false); strictEqual(_.isEmpty({ 'a': 0 }), false); strictEqual(_.isEmpty('a'), false); }); test('should work with an object that has a `length` property', 1, function() { strictEqual(_.isEmpty({ 'length': 0 }), false); }); test('should work with `arguments` objects (test in IE < 9)', 1, function() { strictEqual(_.isEmpty(args), false); }); test('should work with jQuery/MooTools DOM query collections', 1, function() { function Foo(elements) { push.apply(this, elements); } Foo.prototype = { 'length': 0, 'splice': Array.prototype.splice }; strictEqual(_.isEmpty(new Foo([])), true); }); test('should not treat objects with negative lengths as array-like', 1, function() { function Foo() {} Foo.prototype.length = -1; strictEqual(_.isEmpty(new Foo), true); }); test('should not treat objects with lengths 
larger than `maxSafeInteger` as array-like', 1, function() { function Foo() {} Foo.prototype.length = maxSafeInteger + 1; strictEqual(_.isEmpty(new Foo), true); }); test('should not treat objects with non-number lengths as array-like', 1, function() { strictEqual(_.isEmpty({ 'length': '0' }), false); }); test('fixes the JScript `[[DontEnum]]` bug (test in IE < 9)', 1, function() { strictEqual(_.isEmpty(shadowedObject), false); }); test('skips the prototype property of functions (test in Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1)', 2, function() { function Foo() {} Foo.prototype.a = 1; strictEqual(_.isEmpty(Foo), true); Foo.prototype = { 'a': 1 }; strictEqual(_.isEmpty(Foo), true); }); test('should return an unwrapped value when intuitively chaining', 1, function() { if (!isNpm) { strictEqual(_({}).isEmpty(), true); } else { skipTest(); } }); test('should return a wrapped value when explicitly chaining', 1, function() { if (!isNpm) { ok(_({}).chain().isEmpty() instanceof _); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isEqual'); (function() { test('should perform comparisons between primitive values', 1, function() { var pairs = [ [1, 1, true], [1, new Number(1), true], [1, '1', false], [1, 2, false], [-0, -0, true], [0, 0, true], [0, new Number(0), true], [new Number(0), new Number(0), true], [-0, 0, false], [0, '0', false], [0, null, false], [NaN, NaN, true], [NaN, new Number(NaN), true], [new Number(NaN), new Number(NaN), true], [NaN, 'a', false], [NaN, Infinity, false], ['a', 'a', true], ['a', new String('a'), true], [new String('a'), new String('a'), true], ['a', 'b', false], ['a', ['a'], false], [true, true, true], [true, new Boolean(true), true], [new Boolean(true), new Boolean(true), true], [true, 1, false], [true, 'a', false], [false, false, true], [false, new Boolean(false), true], [new Boolean(false), new Boolean(false), true], [false, 0, 
false], [false, '', false], [null, null, true], [null, undefined, false], [null, {}, false], [null, '', false], [undefined, undefined, true], [undefined, null, false], [undefined, '', false] ]; var expected = _.map(pairs, function(pair) { return pair[2]; }); var actual = _.map(pairs, function(pair) { return _.isEqual(pair[0], pair[1]); }) deepEqual(actual, expected); }); test('should return `false` for objects with custom `toString` methods', 1, function() { var primitive, object = { 'toString': function() { return primitive; } }, values = [true, null, 1, 'a', undefined], expected = _.map(values, _.constant(false)); var actual = _.map(values, function(value) { primitive = value; return _.isEqual(object, value); }); deepEqual(actual, expected); }); test('should perform comparisons between arrays', 6, function() { var array1 = [true, null, 1, 'a', undefined], array2 = [true, null, 1, 'a', undefined]; strictEqual(_.isEqual(array1, array2), true); array1 = [[1, 2, 3], new Date(2012, 4, 23), /x/, { 'e': 1 }]; array2 = [[1, 2, 3], new Date(2012, 4, 23), /x/, { 'e': 1 }]; strictEqual(_.isEqual(array1, array2), true); array1 = [1]; array1[2] = 3; array2 = [1]; array2[1] = undefined; array2[2] = 3; strictEqual(_.isEqual(array1, array2), true); array1 = [new Number(1), false, new String('a'), /x/, new Date(2012, 4, 23), ['a', 'b', [new String('c')]], { 'a': 1 }]; array2 = [1, new Boolean(false), 'a', /x/, new Date(2012, 4, 23), ['a', new String('b'), ['c']], { 'a': 1 }]; strictEqual(_.isEqual(array1, array2), true); array1 = [1, 2, 3]; array2 = [3, 2, 1]; strictEqual(_.isEqual(array1, array2), false); array1 = [1, 2]; array2 = [1, 2, 3]; strictEqual(_.isEqual(array1, array2), false); }); test('should treat arrays with identical values but different non-numeric properties as equal', 3, function() { var array1 = [1, 2, 3], array2 = [1, 2, 3]; array1.every = array1.filter = array1.forEach = array1.indexOf = array1.lastIndexOf = array1.map = array1.some = array1.reduce = 
array1.reduceRight = null; array2.concat = array2.join = array2.pop = array2.reverse = array2.shift = array2.slice = array2.sort = array2.splice = array2.unshift = null; strictEqual(_.isEqual(array1, array2), true); array1 = [1, 2, 3]; array1.a = 1; array2 = [1, 2, 3]; array2.b = 1; strictEqual(_.isEqual(array1, array2), true); array1 = /x/.exec('vwxyz'); array2 = ['x']; strictEqual(_.isEqual(array1, array2), true); }); test('should perform comparisons between date objects', 4, function() { strictEqual(_.isEqual(new Date(2012, 4, 23), new Date(2012, 4, 23)), true); strictEqual(_.isEqual(new Date(2012, 4, 23), new Date(2013, 3, 25)), false); strictEqual(_.isEqual(new Date(2012, 4, 23), { 'getTime': function() { return 1337756400000; } }), false); strictEqual(_.isEqual(new Date('a'), new Date('a')), false); }); test('should perform comparisons between functions', 2, function() { function a() { return 1 + 2; } function b() { return 1 + 2; } strictEqual(_.isEqual(a, a), true); strictEqual(_.isEqual(a, b), false); }); test('should perform comparisons between plain objects', 5, function() { var object1 = { 'a': true, 'b': null, 'c': 1, 'd': 'a', 'e': undefined }, object2 = { 'a': true, 'b': null, 'c': 1, 'd': 'a', 'e': undefined }; strictEqual(_.isEqual(object1, object2), true); object1 = { 'a': [1, 2, 3], 'b': new Date(2012, 4, 23), 'c': /x/, 'd': { 'e': 1 } }; object2 = { 'a': [1, 2, 3], 'b': new Date(2012, 4, 23), 'c': /x/, 'd': { 'e': 1 } }; strictEqual(_.isEqual(object1, object2), true); object1 = { 'a': 1, 'b': 2, 'c': 3 }; object2 = { 'a': 3, 'b': 2, 'c': 1 }; strictEqual(_.isEqual(object1, object2), false); object1 = { 'a': 1, 'b': 2, 'c': 3 }; object2 = { 'd': 1, 'e': 2, 'f': 3 }; strictEqual(_.isEqual(object1, object2), false); object1 = { 'a': 1, 'b': 2 }; object2 = { 'a': 1, 'b': 2, 'c': 3 }; strictEqual(_.isEqual(object1, object2), false); }); test('should perform comparisons of nested objects', 1, function() { var object1 = { 'a': [1, 2, 3], 'b': true, 'c': 
new Number(1), 'd': 'a', 'e': { 'f': ['a', new String('b'), 'c'], 'g': new Boolean(false), 'h': new Date(2012, 4, 23), 'i': _.noop, 'j': 'a' } }; var object2 = { 'a': [1, new Number(2), 3], 'b': new Boolean(true), 'c': 1, 'd': new String('a'), 'e': { 'f': ['a', 'b', 'c'], 'g': false, 'h': new Date(2012, 4, 23), 'i': _.noop, 'j': 'a' } }; strictEqual(_.isEqual(object1, object2), true); }); test('should perform comparisons between object instances', 4, function() { function Foo() { this.value = 1; } Foo.prototype.value = 1; function Bar() { this.value = 1; } Bar.prototype.value = 2; strictEqual(_.isEqual(new Foo, new Foo), true); strictEqual(_.isEqual(new Foo, new Bar), false); strictEqual(_.isEqual({ 'value': 1 }, new Foo), false); strictEqual(_.isEqual({ 'value': 2 }, new Bar), false); }); test('should perform comparisons between regexes', 5, function() { strictEqual(_.isEqual(/x/gim, /x/gim), true); strictEqual(_.isEqual(/x/gim, /x/mgi), true); strictEqual(_.isEqual(/x/gi, /x/g), false); strictEqual(_.isEqual(/x/, /y/), false); strictEqual(_.isEqual(/x/g, { 'global': true, 'ignoreCase': false, 'multiline': false, 'source': 'x' }), false); }); test('should avoid common type coercions', 9, function() { strictEqual(_.isEqual(true, new Boolean(false)), false); strictEqual(_.isEqual(new Boolean(false), new Number(0)), false); strictEqual(_.isEqual(false, new String('')), false); strictEqual(_.isEqual(new Number(36), new String(36)), false); strictEqual(_.isEqual(0, ''), false); strictEqual(_.isEqual(1, true), false); strictEqual(_.isEqual(1337756400000, new Date(2012, 4, 23)), false); strictEqual(_.isEqual('36', 36), false); strictEqual(_.isEqual(36, '36'), false); }); test('should work with sparse arrays', 2, function() { strictEqual(_.isEqual(Array(3), Array(3)), true); strictEqual(_.isEqual(Array(3), Array(6)), false); }); test('should work with `arguments` objects (test in IE < 9)', 2, function() { var args1 = (function() { return arguments; }(1, 2, 3)), args2 = 
(function() { return arguments; }(1, 2, 3)), args3 = (function() { return arguments; }(1, 2)); strictEqual(_.isEqual(args1, args2), true); if (!isPhantom) { strictEqual(_.isEqual(args1, args3), false); } else { skipTest(); } }); test('should treat `arguments` objects like `Object` objects', 2, function() { var args = (function() { return arguments; }(1, 2, 3)), object = { '0': 1, '1': 2, '2': 3, 'length': 3 }; function Foo() {} Foo.prototype = object; strictEqual(_.isEqual(args, object), true); if (!isPhantom) { strictEqual(_.isEqual(args, new Foo), false); } else { skipTest(); } }); test('fixes the JScript `[[DontEnum]]` bug (test in IE < 9)', 1, function() { strictEqual(_.isEqual(shadowedObject, {}), false); }); test('should perform comparisons between objects with constructor properties', 5, function() { strictEqual(_.isEqual({ 'constructor': 1 }, { 'constructor': 1 }), true); strictEqual(_.isEqual({ 'constructor': 1 }, { 'constructor': '1' }), false); strictEqual(_.isEqual({ 'constructor': [1] }, { 'constructor': [1] }), true); strictEqual(_.isEqual({ 'constructor': [1] }, { 'constructor': ['1'] }), false); strictEqual(_.isEqual({ 'constructor': Object }, {}), false); }); test('should perform comparisons between arrays with circular references', 4, function() { var array1 = [], array2 = []; array1.push(array1); array2.push(array2); strictEqual(_.isEqual(array1, array2), true); array1.push('a'); array2.push('a'); strictEqual(_.isEqual(array1, array2), true); array1.push('b'); array2.push('c'); strictEqual(_.isEqual(array1, array2), false); array1 = ['a', 'b', 'c']; array1[1] = array1; array2 = ['a', ['a', 'b', 'c'], 'c']; strictEqual(_.isEqual(array1, array2), false); }); test('should perform comparisons between objects with circular references', 4, function() { var object1 = {}, object2 = {}; object1.a = object1; object2.a = object2; strictEqual(_.isEqual(object1, object2), true); object1.b = 0; object2.b = new Number(0); strictEqual(_.isEqual(object1, 
object2), true); object1.c = new Number(1); object2.c = new Number(2); strictEqual(_.isEqual(object1, object2), false); object1 = { 'a': 1, 'b': 2, 'c': 3 }; object1.b = object1; object2 = { 'a': 1, 'b': { 'a': 1, 'b': 2, 'c': 3 }, 'c': 3 }; strictEqual(_.isEqual(object1, object2), false); }); test('should perform comparisons between objects with multiple circular references', 3, function() { var array1 = [{}], array2 = [{}]; (array1[0].a = array1).push(array1); (array2[0].a = array2).push(array2); strictEqual(_.isEqual(array1, array2), true); array1[0].b = 0; array2[0].b = new Number(0); strictEqual(_.isEqual(array1, array2), true); array1[0].c = new Number(1); array2[0].c = new Number(2); strictEqual(_.isEqual(array1, array2), false); }); test('should perform comparisons between objects with complex circular references', 1, function() { var object1 = { 'foo': { 'b': { 'foo': { 'c': { } } } }, 'bar': { 'a': 2 } }; var object2 = { 'foo': { 'b': { 'foo': { 'c': { } } } }, 'bar': { 'a': 2 } }; object1.foo.b.foo.c = object1; object1.bar.b = object1.foo.b; object2.foo.b.foo.c = object2; object2.bar.b = object2.foo.b; strictEqual(_.isEqual(object1, object2), true); }); test('should perform comparisons between objects with shared property values', 1, function() { var object1 = { 'a': [1, 2] }; var object2 = { 'a': [1, 2], 'b': [1, 2] }; object1.b = object1.a; strictEqual(_.isEqual(object1, object2), true); }); test('should pass the correct `callback` arguments', 1, function() { var argsList = []; var object1 = { 'a': [1, 2], 'b': null }; var object2 = { 'a': [1, 2], 'b': null }; object1.b = object2; object2.b = object1; var expected = [ [object1, object2], [object1.a, object2.a, 'a'], [object1.a[0], object2.a[0], 0], [object1.a[1], object2.a[1], 1], [object1.b, object2.b, 'b'], [object1.b.a, object2.b.a, 'a'], [object1.b.a[0], object2.b.a[0], 0], [object1.b.a[1], object2.b.a[1], 1], [object1.b.b, object2.b.b, 'b'] ]; _.isEqual(object1, object2, function() { 
argsList.push(slice.call(arguments)); }); deepEqual(argsList, expected); }); test('should correctly set the `this` binding', 1, function() { var actual = _.isEqual('a', 'b', function(a, b) { return this[a] == this[b]; }, { 'a': 1, 'b': 1 }); strictEqual(actual, true); }); test('should handle comparisons if `callback` returns `undefined`', 1, function() { var actual = _.isEqual('a', 'a', function() {}); strictEqual(actual, true); }); test('should return a boolean value even if `callback` does not', 2, function() { var actual = _.isEqual('a', 'a', function() { return 'a'; }); strictEqual(actual, true); var expected = _.map(falsey, _.constant(false)); actual = []; _.each(falsey, function(value) { actual.push(_.isEqual('a', 'b', _.constant(value))); }); deepEqual(actual, expected); }); test('should ensure `callback` is a function', 1, function() { var array = [1, 2, 3], eq = _.partial(_.isEqual, array), actual = _.every([array, [1, 0, 3]], eq); strictEqual(actual, false); }); test('should work when used as a callback for `_.every`', 1, function() { var actual = _.every([1, 1, 1], _.partial(_.isEqual, 1)); ok(actual); }); test('should treat objects created by `Object.create(null)` like any other plain object', 2, function() { function Foo() { this.a = 1; } Foo.prototype.constructor = null; var otherObject = { 'a': 1 }; strictEqual(_.isEqual(new Foo, otherObject), false); if (create) { var object = create(null); object.a = 1; strictEqual(_.isEqual(object, otherObject), true); } else { skipTest(); } }); test('should perform comparisons between typed arrays', 1, function() { var pairs = _.map(typedArrays, function(type) { var Ctor = root[type] || Array, buffer = Ctor == Array ? 
4 : new ArrayBuffer(4); return [new Ctor(buffer), new Ctor(buffer)]; }); var expected = _.times(pairs.length, _.constant(true)); var actual = _.map(pairs, function(pair) { return _.isEqual(pair[0], pair[1]); }); deepEqual(actual, expected); }); test('should perform comparisons between wrapped values', 4, function() { if (!isNpm) { var object1 = _({ 'a': 1, 'b': 2 }), object2 = _({ 'a': 1, 'b': 2 }), actual = object1.isEqual(object2); strictEqual(actual, true); strictEqual(_.isEqual(_(actual), _(true)), true); object1 = _({ 'a': 1, 'b': 2 }); object2 = _({ 'a': 1, 'b': 1 }); actual = object1.isEqual(object2); strictEqual(actual, false); strictEqual(_.isEqual(_(actual), _(false)), true); } else { skipTest(4); } }); test('should perform comparisons between wrapped and non-wrapped values', 4, function() { if (!isNpm) { var object1 = _({ 'a': 1, 'b': 2 }), object2 = { 'a': 1, 'b': 2 }; strictEqual(object1.isEqual(object2), true); strictEqual(_.isEqual(object1, object2), true); object1 = _({ 'a': 1, 'b': 2 }); object2 = { 'a': 1, 'b': 1 }; strictEqual(object1.isEqual(object2), false); strictEqual(_.isEqual(object1, object2), false); } else { skipTest(4); } }); test('should return `true` for like-objects from different documents', 1, function() { // ensure `_._object` is assigned (unassigned in Opera 10.00) if (_._object) { var object = { 'a': 1, 'b': 2, 'c': 3 }; strictEqual(_.isEqual(object, _._object), true); } else { skipTest(); } }); test('should not error on DOM elements', 1, function() { if (document) { var element1 = document.createElement('div'), element2 = element1.cloneNode(true); try { strictEqual(_.isEqual(element1, element2), false); } catch(e) { ok(false); } } else { skipTest(); } }); test('should return an unwrapped value when intuitively chaining', 1, function() { if (!isNpm) { strictEqual(_('a').isEqual('a'), true); } else { skipTest(); } }); test('should return a wrapped value when explicitly chaining', 1, function() { if (!isNpm) { 
ok(_('a').chain().isEqual('a') instanceof _); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isError'); (function() { var args = arguments; test('should return `true` for error objects', 1, function() { var errors = [new Error, new EvalError, new RangeError, new ReferenceError, new SyntaxError, new TypeError, new URIError], expected = _.map(errors, _.constant(true)); var actual = _.map(errors, function(error) { return _.isError(error) === true; }); deepEqual(actual, expected); }); test('should return `false` for non-error objects', 10, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? _.isError(value) : _.isError(); }); strictEqual(_.isError(args), false); strictEqual(_.isError([1, 2, 3]), false); strictEqual(_.isError(true), false); strictEqual(_.isError(new Date), false); strictEqual(_.isError(_), false); strictEqual(_.isError({ 'a': 1 }), false); strictEqual(_.isError(1), false); strictEqual(_.isError(/x/), false); strictEqual(_.isError('a'), false); deepEqual(actual, expected); }); test('should work with an error object from another realm', 1, function() { if (_._object) { var expected = _.map(_._errors, _.constant(true)); var actual = _.map(_._errors, function(error) { return _.isError(error) === true; }); deepEqual(actual, expected); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isFinite'); (function() { test('should return `true` for finite values', 5, function() { strictEqual(_.isFinite(0), true); strictEqual(_.isFinite(1), true); strictEqual(_.isFinite(3.14), true); strictEqual(_.isFinite(-1), true); strictEqual(_.isFinite(new Number(0)), true); }); test('should return `false` for non-finite values', 3, function() { strictEqual(_.isFinite(NaN), false); strictEqual(_.isFinite(Infinity), false); 
strictEqual(_.isFinite(-Infinity), false); }); test('should return `false` for non-numeric values', 9, function() { strictEqual(_.isFinite(null), false); strictEqual(_.isFinite(undefined), false); strictEqual(_.isFinite([]), false); strictEqual(_.isFinite(true), false); strictEqual(_.isFinite(new Date), false); strictEqual(_.isFinite(new Error), false); strictEqual(_.isFinite(''), false); strictEqual(_.isFinite(' '), false); strictEqual(_.isFinite('2px'), false); }); test('should return `true` for numeric string values', 3, function() { strictEqual(_.isFinite('2'), true); strictEqual(_.isFinite('0'), true); strictEqual(_.isFinite('08'), true); }); test('should work with numbers from another realm', 1, function() { if (_._object) { strictEqual(_.isFinite(_._number), true); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isFunction'); (function() { var args = arguments; test('should return `true` for functions', 1, function() { strictEqual(_.isFunction(_), true); }); test('should return `false` for non functions', 10, function() { var expected = _.map(falsey, _.constant(false)); var actual = _.map(falsey, function(value, index) { return index ? 
_.isFunction(value) : _.isFunction(); }); strictEqual(_.isFunction(args), false); strictEqual(_.isFunction([1, 2, 3]), false); strictEqual(_.isFunction(true), false); strictEqual(_.isFunction(new Date), false); strictEqual(_.isFunction(new Error), false); strictEqual(_.isFunction({ 'a': 1 }), false); strictEqual(_.isFunction(1), false); strictEqual(_.isFunction(/x/), false); strictEqual(_.isFunction('a'), false); deepEqual(actual, expected); }); test('should work with host objects in non-edge document modes (test in IE 11)', 1, function() { if (xml) { // trigger Chakra bug // https://github.com/jashkenas/underscore/issues/1621 _.times(100, _.isFunction); strictEqual(_.isFunction(xml), false); } else { skipTest(); } }); test('should work with functions from another realm', 1, function() { if (_._object) { strictEqual(_.isFunction(_._function), true); } else { skipTest(); } }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.isNaN'); (function() { var args = arguments; test('should return `true` for NaNs', 2, function() { strictEqual(_.isNaN(NaN), true); strictEqual(_.isNaN(new Number(NaN)), true); }); test('should return `false` for non NaNs', 11, function() { var expected = _.map(falsey, function(value) { return value !== value; }); var actual = _.map(falsey, function(value, index) { return index ? 
// NOTE(review): continuation of the `lodash.isNaN` module opened above this chunk —
// asserts `_.isNaN` is `false` for every non-NaN value and `true` for a NaN produced
// in another realm (iframe/vm), where `value instanceof Number` checks would fail.
// The second argument to `test(...)` is QUnit's expected-assertion count.
_.isNaN(value) : _.isNaN(); });
strictEqual(_.isNaN(args), false);
strictEqual(_.isNaN([1, 2, 3]), false);
strictEqual(_.isNaN(true), false);
strictEqual(_.isNaN(new Date), false);
strictEqual(_.isNaN(new Error), false);
strictEqual(_.isNaN(_), false);
strictEqual(_.isNaN({ 'a': 1 }), false);
strictEqual(_.isNaN(1), false);
strictEqual(_.isNaN(/x/), false);
strictEqual(_.isNaN('a'), false);
deepEqual(actual, expected);
});

test('should work with NaNs from another realm', 1, function() {
if (_._object) {
strictEqual(_.isNaN(_._nan), true);
} else {
skipTest();
}
});
}(1, 2, 3));

/*--------------------------------------------------------------------------*/

// `_.isNull` must be `true` only for `null` (including a foreign-realm null)
// and `false` for every other value — falsey values included.
QUnit.module('lodash.isNull');

(function() {
var args = arguments;

test('should return `true` for nulls', 1, function() {
strictEqual(_.isNull(null), true);
});

test('should return `false` for non nulls', 11, function() {
// `index ? f(value) : f()` also exercises the zero-argument call path.
var expected = _.map(falsey, function(value) { return value === null; });
var actual = _.map(falsey, function(value, index) { return index ? _.isNull(value) : _.isNull(); });
strictEqual(_.isNull(args), false);
strictEqual(_.isNull([1, 2, 3]), false);
strictEqual(_.isNull(true), false);
strictEqual(_.isNull(new Date), false);
strictEqual(_.isNull(new Error), false);
strictEqual(_.isNull(_), false);
strictEqual(_.isNull({ 'a': 1 }), false);
strictEqual(_.isNull(1), false);
strictEqual(_.isNull(/x/), false);
strictEqual(_.isNull('a'), false);
deepEqual(actual, expected);
});

test('should work with nulls from another realm', 1, function() {
if (_._object) {
strictEqual(_.isNull(_._null), true);
} else {
skipTest();
}
});
}(1, 2, 3));

/*--------------------------------------------------------------------------*/

// `_.isNumber` accepts both number primitives and `Number` wrapper objects,
// rejects everything else, and must survive Firefox's
// `[xpconnect wrapped native prototype]` quirk for coerced numbers.
QUnit.module('lodash.isNumber');

(function() {
var args = arguments;

test('should return `true` for numbers', 2, function() {
strictEqual(_.isNumber(0), true);
strictEqual(_.isNumber(new Number(0)), true);
});

test('should return `false` for non numbers', 10, function() {
// NaN is falsey but typeof 'number', so the expected sweep uses typeof.
var expected = _.map(falsey, function(value) { return typeof value == 'number'; });
var actual = _.map(falsey, function(value, index) { return index ? _.isNumber(value) : _.isNumber(); });
strictEqual(_.isNumber(args), false);
strictEqual(_.isNumber([1, 2, 3]), false);
strictEqual(_.isNumber(true), false);
strictEqual(_.isNumber(new Date), false);
strictEqual(_.isNumber(new Error), false);
strictEqual(_.isNumber(_), false);
strictEqual(_.isNumber({ 'a': 1 }), false);
strictEqual(_.isNumber(/x/), false);
strictEqual(_.isNumber('a'), false);
deepEqual(actual, expected);
});

test('should work with numbers from another realm', 1, function() {
if (_._object) {
strictEqual(_.isNumber(_._number), true);
} else {
skipTest();
}
});

test('should avoid `[xpconnect wrapped native prototype]` in Firefox', 1, function() {
strictEqual(_.isNumber(+"2"), true);
});
}(1, 2, 3));

/*--------------------------------------------------------------------------*/

// `_.isObject` is `true` for anything object-like (arrays, wrapper objects,
// regexes, DOM nodes, functions) and `false` for primitives.
QUnit.module('lodash.isObject');

(function() {
var args = arguments;

test('should return `true` for objects', 11, function() {
strictEqual(_.isObject(args), true);
strictEqual(_.isObject([1, 2, 3]), true);
strictEqual(_.isObject(new Boolean(false)), true);
strictEqual(_.isObject(new Date), true);
strictEqual(_.isObject(new Error), true);
strictEqual(_.isObject(_), true);
strictEqual(_.isObject({ 'a': 1 }), true);
strictEqual(_.isObject(new Number(0)), true);
strictEqual(_.isObject(/x/), true);
strictEqual(_.isObject(new String('a')), true);
// `document`/`body` are harness globals; skipped outside a browser.
if (document) {
strictEqual(_.isObject(body), true);
} else {
skipTest();
}
});

test('should return `false` for non objects', 1, function() {
var values = falsey.concat('a', true),
    expected = _.map(values, _.constant(false));
var actual = _.map(values, function(value, index) { return index ?
_.isObject(value) : _.isObject(); });
deepEqual(actual, expected);
});

test('should work with objects from another realm', 8, function() {
if (_._element) {
strictEqual(_.isObject(_._element), true);
} else {
skipTest();
}
if (_._object) {
strictEqual(_.isObject(_._object), true);
strictEqual(_.isObject(_._boolean), true);
strictEqual(_.isObject(_._date), true);
strictEqual(_.isObject(_._function), true);
strictEqual(_.isObject(_._number), true);
strictEqual(_.isObject(_._regexp), true);
strictEqual(_.isObject(_._string), true);
} else {
skipTest(7);
}
});

test('should avoid V8 bug #2291 (test in Chrome 19-20)', 1, function() {
// trigger V8 bug
// http://code.google.com/p/v8/issues/detail?id=2291
var object = {};

// 1: Useless comparison statement, this is half the trigger
object == object;

// 2: Initial check with object, this is the other half of the trigger
_.isObject(object);

strictEqual(_.isObject('x'), false);
});
}(1, 2, 3));

/*--------------------------------------------------------------------------*/

// `_.isPlainObject` accepts object literals and `Object.create(null)` results;
// it rejects constructor instances, arrays, `arguments`, DOM elements, and
// objects whose `[[Class]]` is not "Object" (e.g. Error, Math).
QUnit.module('lodash.isPlainObject');

(function() {
var element = document && document.createElement('div');

test('should detect plain objects', 5, function() {
function Foo(a) {
this.a = 1;
}
strictEqual(_.isPlainObject({}), true);
strictEqual(_.isPlainObject({ 'a': 1 }), true);
// an own `constructor` property must not fool the check
strictEqual(_.isPlainObject({ 'constructor': Foo }), true);
strictEqual(_.isPlainObject([1, 2, 3]), false);
strictEqual(_.isPlainObject(new Foo(1)), false);
});

test('should return `true` for objects with a `[[Prototype]]` of `null`', 1, function() {
// `create` is the harness alias for `Object.create`; absent in old engines.
if (create) {
strictEqual(_.isPlainObject(create(null)), true);
} else {
skipTest();
}
});

test('should return `true` for plain objects with a custom `valueOf` property', 2, function() {
strictEqual(_.isPlainObject({ 'valueOf': 0 }), true);
if (element) {
var valueOf = element.valueOf;
element.valueOf = 0;
strictEqual(_.isPlainObject(element), false);
element.valueOf = valueOf;
} else {
skipTest();
}
});

test('should return `false` for DOM elements', 1, function() {
if (element) {
strictEqual(_.isPlainObject(element), false);
} else {
skipTest();
}
});

test('should return `false` for Object objects without a `[[Class]]` of "Object"', 3, function() {
strictEqual(_.isPlainObject(arguments), false);
strictEqual(_.isPlainObject(Error), false);
strictEqual(_.isPlainObject(Math), false);
});

test('should return `false` for non objects', 3, function() {
var expected = _.map(falsey, _.constant(false));
var actual = _.map(falsey, function(value, index) { return index ? _.isPlainObject(value) : _.isPlainObject(); });
strictEqual(_.isPlainObject(true), false);
strictEqual(_.isPlainObject('a'), false);
deepEqual(actual, expected);
});

test('should work with objects from another realm', 1, function() {
if (_._object) {
strictEqual(_.isPlainObject(_._object), true);
} else {
skipTest();
}
});
}());

/*--------------------------------------------------------------------------*/

// `_.isRegExp` detects both regex literals and `RegExp(...)` results,
// including regexes created in another realm.
QUnit.module('lodash.isRegExp');

(function() {
var args = arguments;

test('should return `true` for regexes', 2, function() {
strictEqual(_.isRegExp(/x/), true);
strictEqual(_.isRegExp(RegExp('x')), true);
});

test('should return `false` for non regexes', 10, function() {
var expected = _.map(falsey, _.constant(false));
var actual = _.map(falsey, function(value, index) { return index ?
_.isRegExp(value) : _.isRegExp(); });
strictEqual(_.isRegExp(args), false);
strictEqual(_.isRegExp([1, 2, 3]), false);
strictEqual(_.isRegExp(true), false);
strictEqual(_.isRegExp(new Date), false);
strictEqual(_.isRegExp(new Error), false);
strictEqual(_.isRegExp(_), false);
strictEqual(_.isRegExp({ 'a': 1 }), false);
strictEqual(_.isRegExp(1), false);
strictEqual(_.isRegExp('a'), false);
deepEqual(actual, expected);
});

test('should work with regexes from another realm', 1, function() {
if (_._object) {
strictEqual(_.isRegExp(_._regexp), true);
} else {
skipTest();
}
});
}(1, 2, 3));

/*--------------------------------------------------------------------------*/

// `_.isString` accepts string primitives and `String` wrapper objects.
// In the falsey sweep, `''` is the one falsey value that should yield `true`,
// hence the `value === ''` expected-mapping.
QUnit.module('lodash.isString');

(function() {
var args = arguments;

test('should return `true` for strings', 2, function() {
strictEqual(_.isString('a'), true);
strictEqual(_.isString(new String('a')), true);
});

test('should return `false` for non strings', 10, function() {
var expected = _.map(falsey, function(value) { return value === ''; });
var actual = _.map(falsey, function(value, index) { return index ? _.isString(value) : _.isString(); });
strictEqual(_.isString(args), false);
strictEqual(_.isString([1, 2, 3]), false);
strictEqual(_.isString(true), false);
strictEqual(_.isString(new Date), false);
strictEqual(_.isString(new Error), false);
strictEqual(_.isString(_), false);
// array-like impostor with numeric keys and a `length`
strictEqual(_.isString({ '0': 1, 'length': 1 }), false);
strictEqual(_.isString(1), false);
strictEqual(_.isString(/x/), false);
deepEqual(actual, expected);
});

test('should work with strings from another realm', 1, function() {
if (_._object) {
strictEqual(_.isString(_._string), true);
} else {
skipTest();
}
});
}(1, 2, 3));

/*--------------------------------------------------------------------------*/

// `_.isUndefined` is `true` for `undefined` — whether passed explicitly,
// produced by a missing argument, or created in another realm.
QUnit.module('lodash.isUndefined');

(function() {
var args = arguments;

test('should return `true` for `undefined` values', 2, function() {
strictEqual(_.isUndefined(), true);
strictEqual(_.isUndefined(undefined), true);
});

test('should return `false` for non `undefined` values', 11, function() {
var expected = _.map(falsey, function(value) { return value === undefined; });
var actual = _.map(falsey, function(value, index) { return index ? _.isUndefined(value) : _.isUndefined(); });
strictEqual(_.isUndefined(args), false);
strictEqual(_.isUndefined([1, 2, 3]), false);
strictEqual(_.isUndefined(true), false);
strictEqual(_.isUndefined(new Date), false);
strictEqual(_.isUndefined(new Error), false);
strictEqual(_.isUndefined(_), false);
strictEqual(_.isUndefined({ 'a': 1 }), false);
strictEqual(_.isUndefined(1), false);
strictEqual(_.isUndefined(/x/), false);
strictEqual(_.isUndefined('a'), false);
deepEqual(actual, expected);
});

test('should work with `undefined` from another realm', 1, function() {
if (_._object) {
strictEqual(_.isUndefined(_._undefined), true);
} else {
skipTest();
}
});
}(1, 2, 3));

/*--------------------------------------------------------------------------*/

// Cross-cutting checks shared by all `isType` predicates: subclassed natives
// must not be misclassified, and IE host objects must not throw.
QUnit.module('isType checks');

(function() {
test('should return `false` for subclassed values', 8, function() {
var funcs = [
'isArray', 'isBoolean', 'isDate', 'isError',
'isFunction', 'isNumber', 'isRegExp', 'isString'
];
_.each(funcs, function(methodName) {
function Foo() {}
// `root[methodName.slice(2)]` resolves e.g. 'isArray' -> global `Array`.
Foo.prototype = root[methodName.slice(2)].prototype;
var object = new Foo;
if (toString.call(object) == '[object Object]') {
strictEqual(_[methodName](object), false, '`_.' + methodName + '` returns `false`');
} else {
skipTest();
}
});
});

test('should not error on host objects (test in IE)', 12, function() {
// `xml` is a harness-provided host object; only meaningful in IE.
if (xml) {
var funcs = [
'isArray', 'isArguments', 'isBoolean', 'isDate',
'isElement', 'isFunction', 'isObject', 'isNull',
'isNumber', 'isRegExp', 'isString', 'isUndefined'
];
_.each(funcs, function(methodName) {
var pass = true;
try {
_[methodName](xml);
} catch(e) {
pass = false;
}
ok(pass, '`_.' + methodName + '` should not error');
});
} else {
skipTest(12)
}
});
}());

/*--------------------------------------------------------------------------*/

// Shared spec for `_.keys` and `_.keysIn`: identical for own properties,
// but `keysIn` also reports inherited enumerable properties (`isKeys` flag).
QUnit.module('keys methods');

_.each(['keys', 'keysIn'], function(methodName) {
var args = arguments,
    func = _[methodName],
    isKeys = methodName == 'keys';

test('`_.'
+ methodName + '` should return the keys of an object', 1, function() {
var object = { 'a': 1, 'b': 1 },
    actual = func(object);

deepEqual(actual.sort(), ['a', 'b']);
});

test('`_.' + methodName + '` should treat sparse arrays as dense', 1, function() {
// hole at index 1 must still be reported as a key
var array = [1];
array[2] = 3;

var actual = func(array);
deepEqual(actual.sort(), ['0', '1', '2']);
});

test('`_.' + methodName + '` should custom properties on arrays', 1, function() {
var array = [1];
array.a = 1;

var actual = func(array);
deepEqual(actual.sort(), ['0', 'a']);
});

test('`_.' + methodName + '` should ' + (isKeys ? 'not' : '') + ' include inherited properties of arrays', 1, function() {
Array.prototype.a = 1;

var expected = isKeys ? ['0'] : ['0', 'a'],
    actual = func([1]);

deepEqual(actual.sort(), expected);
delete Array.prototype.a;
});

test('`_.' + methodName + '` should work with `arguments` objects (test in IE < 9)', 1, function() {
if (!isPhantom) {
var actual = func(args);
deepEqual(actual.sort(), ['0', '1', '2']);
} else {
skipTest();
}
});

test('`_.' + methodName + '` should custom properties on `arguments` objects', 1, function() {
if (!isPhantom) {
args.a = 1;
var actual = func(args);
deepEqual(actual.sort(), ['0', '1', '2', 'a']);
delete args.a;
} else {
skipTest();
}
});

test('`_.' + methodName + '` should ' + (isKeys ? 'not' : '') + ' include inherited properties of `arguments` objects', 1, function() {
if (!isPhantom) {
Object.prototype.a = 1;

var expected = isKeys ? ['0', '1', '2'] : ['0', '1', '2', 'a'],
    actual = func(args);

deepEqual(actual.sort(), expected);
delete Object.prototype.a;
} else {
skipTest();
}
});

test('`_.' + methodName + '` should work with string objects (test in IE < 9)', 1, function() {
var actual = func(Object('abc'));
deepEqual(actual.sort(), ['0', '1', '2']);
});

test('`_.' + methodName + '` should custom properties on string objects', 1, function() {
var object = Object('a');
object.a = 1;

var actual = func(object);
deepEqual(actual.sort(), ['0', 'a']);
});

test('`_.' + methodName + '` should ' + (isKeys ? 'not' : '') + ' include inherited properties of string objects', 1, function() {
String.prototype.a = 1;

var expected = isKeys ? ['0'] : ['0', 'a'],
    actual = func(Object('a'));

deepEqual(actual.sort(), expected);
delete String.prototype.a;
});

test('`_.' + methodName + '` fixes the JScript `[[DontEnum]]` bug (test in IE < 9)', 1, function() {
// `shadowedObject`/`shadowedProps` are harness fixtures shadowing
// Object.prototype members like `toString` and `valueOf`.
var actual = func(shadowedObject);
deepEqual(actual.sort(), shadowedProps);
});

test('`_.' + methodName + '` skips the prototype property of functions (test in Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1)', 2, function() {
function Foo() {}
Foo.a = 1;
Foo.b = 2;
Foo.prototype.c = 3;

var expected = ['a', 'b'],
    actual = func(Foo);

deepEqual(actual.sort(), expected);

// the same must hold after replacing the prototype object
Foo.prototype = { 'c': 3 };
actual = func(Foo);
deepEqual(actual.sort(), expected);
});

test('`_.' + methodName + '` skips the `constructor` property on prototype objects', 2, function() {
function Foo() {}
Foo.prototype.a = 1;

var expected = ['a'];
deepEqual(func(Foo.prototype), ['a']);

Foo.prototype = { 'constructor': Foo, 'a': 1 };
deepEqual(func(Foo.prototype), ['a']);
});

test('`_.' + methodName + '` should ' + (isKeys ? 'not' : '') + ' include inherited properties', 1, function() {
function Foo() {
this.a = 1;
this.b = 2;
}
Foo.prototype.c = 3;

var expected = isKeys ? ['a', 'b'] : ['a', 'b', 'c'],
    actual = func(new Foo);

deepEqual(actual.sort(), expected);
});
});

/*--------------------------------------------------------------------------*/

// `_.last(array, [n|callback], [thisArg])`: last element, last `n` elements,
// or trailing elements matching a predicate / "_.pluck" / "_.where" shorthand.
QUnit.module('lodash.last');

(function() {
var array = [1, 2, 3];

var objects = [
{ 'a': 0, 'b': 0 },
{ 'a': 1, 'b': 1 },
{ 'a': 2, 'b': 2 }
];

test('should return the last element', 1, function() {
strictEqual(_.last(array), 3);
});

test('should return the last two elements', 1, function() {
deepEqual(_.last(array, 2), [2, 3]);
});

test('should treat falsey `n` values, except nullish, as `0`', 1, function() {
// `null`/`undefined` fall back to single-element form (returns `3`).
var expected = _.map(falsey, function(value) { return value == null ? 3 : []; });
var actual = _.map(falsey, function(n) { return _.last(array, n); });
deepEqual(actual, expected);
});

test('should return an empty array when `n` < `1`', 3, function() {
_.each([0, -1, -Infinity], function(n) {
deepEqual(_.last(array, n), []);
});
});

test('should return all elements when `n` >= `array.length`', 4, function() {
_.each([3, 4, Math.pow(2, 32), Infinity], function(n) {
deepEqual(_.last(array, n), array);
});
});

test('should return `undefined` when querying empty arrays', 1, function() {
strictEqual(_.last([]), undefined);
});

test('should work when used as a callback for `_.map`', 1, function() {
var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
    actual = _.map(array, _.last);

deepEqual(actual, [3, 6, 9]);
});

test('should work with a callback', 1, function() {
var actual = _.last(array, function(num) {
return num > 1;
});

deepEqual(actual, [2, 3]);
});

test('should pass the correct `callback` arguments', 1, function() {
var args;

_.last(array, function() {
args = slice.call(arguments);
});

// callback receives (value, index, array), starting from the last element
deepEqual(args, [3, 2, array]);
});

test('should support the `thisArg` argument', 1, function() {
var actual = _.last(array, function(num, index) {
return this[index] > 1;
}, array);

deepEqual(actual, [2, 3]);
});

test('should work with an object for `callback`', 1, function() {
deepEqual(_.last(objects, {
'b': 2 }), objects.slice(-1)); });

test('should work with a string for `callback`', 1, function() {
// "_.pluck"-style shorthand: trailing elements with truthy `b`
deepEqual(_.last(objects, 'b'), objects.slice(-2));
});

test('should chain when passing `n`, `callback`, or `thisArg`', 3, function() {
if (!isNpm) {
var actual = _(array).last(2);
ok(actual instanceof _);

actual = _(array).last(function(num) {
return num > 1;
});
ok(actual instanceof _);

actual = _(array).last(function(num, index) {
return this[index] > 1;
}, array);
ok(actual instanceof _);
} else {
skipTest(3);
}
});

test('should not chain when arguments are not provided', 1, function() {
// zero-argument `last()` unwraps and returns the raw value
if (!isNpm) {
var actual = _(array).last();
strictEqual(actual, 3);
} else {
skipTest();
}
});
}());

/*--------------------------------------------------------------------------*/

// `_.lastIndexOf(array, value, [fromIndex])`: search backwards; `fromIndex`
// is clamped and non-numeric/falsey values default to `array.length`.
QUnit.module('lodash.lastIndexOf');

(function() {
var array = [1, 2, 3, 1, 2, 3];

test('should return the index of the last matched value', 1, function() {
strictEqual(_.lastIndexOf(array, 3), 5);
});

test('should return `-1` for an unmatched value', 1, function() {
strictEqual(_.lastIndexOf(array, 4), -1);
});

test('should work with a positive `fromIndex`', 1, function() {
strictEqual(_.lastIndexOf(array, 1, 2), 0);
});

test('should work with `fromIndex` >= `array.length`', 12, function() {
_.each([6, 8, Math.pow(2, 32), Infinity], function(fromIndex) {
strictEqual(_.lastIndexOf(array, undefined, fromIndex), -1);
strictEqual(_.lastIndexOf(array, 1, fromIndex), 3);
strictEqual(_.lastIndexOf(array, '', fromIndex), -1);
});
});

test('should treat falsey `fromIndex` values, except `0` and `NaN`, as `array.length`', 1, function() {
// numeric falsey values (0, NaN) start the search at index 0 => -1
var expected = _.map(falsey, function(value) { return typeof value == 'number' ? -1 : 5; });

var actual = _.map(falsey, function(fromIndex) {
return _.lastIndexOf(array, 3, fromIndex);
});

deepEqual(actual, expected);
});

test('should treat non-number `fromIndex` values as `array.length`', 2, function() {
strictEqual(_.lastIndexOf(array, 3, '1'), 5);
strictEqual(_.lastIndexOf(array, 3, true), 5);
});

test('should work with a negative `fromIndex`', 1, function() {
strictEqual(_.lastIndexOf(array, 2, -3), 1);
});

test('should work with a negative `fromIndex` <= `-array.length`', 3, function() {
_.each([-6, -8, -Infinity], function(fromIndex) {
strictEqual(_.lastIndexOf(array, 1, fromIndex), 0);
});
});
}());

/*--------------------------------------------------------------------------*/

// Shared spec for `_.indexOf` and `_.lastIndexOf`: both tolerate a falsey
// `array` argument and return `-1`.
QUnit.module('indexOf methods');

(function() {
_.each(['indexOf', 'lastIndexOf'], function(methodName) {
var func = _[methodName];

test('`_.' + methodName + '` should accept a falsey `array` argument', 1, function() {
var expected = _.map(falsey, _.constant(-1));

var actual = _.map(falsey, function(value, index) {
try {
return index ? func(value) : func();
} catch(e) { }
});

deepEqual(actual, expected);
});
});
}());

/*--------------------------------------------------------------------------*/

// `_.map(collection, [callback], [thisArg])`: arrays, plain objects, and
// array-like hosts (nodelists); `_.collect` is an alias.
QUnit.module('lodash.map');

(function() {
var array = [1, 2, 3];

test('should pass the correct `callback` arguments', 1, function() {
var args;

_.map(array, function() {
args || (args = slice.call(arguments));
});

deepEqual(args, [1, 0, array]);
});

test('should support the `thisArg` argument', 2, function() {
function callback(num, index) {
return this[index] + num;
}

var actual = _.map([1], callback, [2]);
deepEqual(actual, [3]);

actual = _.map({ 'a': 1 }, callback, { 'a': 2 });
deepEqual(actual, [3]);
});

test('should iterate over own properties of objects', 1, function() {
function Foo() { this.a = 1; }
Foo.prototype.b = 2;

var actual = _.map(new Foo, function(value, key) { return key; });
deepEqual(actual, ['a']);
});

test('should work on an object with no `callback`', 1, function() {
// identity mapping: returns the object's values as an array
var actual = _.map({ 'a': 1, 'b': 2, 'c': 3 });
deepEqual(actual, array);
});

test('should handle object arguments with non-numeric length properties', 1, function() {
if (defineProperty) {
var object = {};
defineProperty(object, 'length', { 'value': 'x' });
deepEqual(_.map(object, _.identity), []);
} else {
skipTest();
}
});

test('should treat a nodelist as an array-like object', 1, function() {
if (document) {
var actual = _.map(document.getElementsByTagName('body'), function(element) {
return element.nodeName.toLowerCase();
});

deepEqual(actual, ['body']);
} else {
skipTest();
}
});

test('should accept a falsey `collection` argument', 1, function() {
var expected = _.map(falsey, _.constant([]));

var actual = _.map(falsey, function(value, index) {
try {
return index ?
_.map(value) : _.map(); } catch(e) { } }); deepEqual(actual, expected); }); test('should treat number values for `collection` as empty', 1, function() { deepEqual(_.map(1), []); }); test('should return a wrapped value when chaining', 1, function() { if (!isNpm) { ok(_(array).map(_.noop) instanceof _); } else { skipTest(); } }); test('should be aliased', 1, function() { strictEqual(_.collect, _.map); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.mapValues'); (function() { var object = { 'a': 1, 'b': 2, 'c': 3 }; test('should pass the correct `callback` arguments', 1, function() { var args; _.mapValues(object, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 'a', object]); }); test('should support the `thisArg` argument', 2, function() { function callback(num, key) { return this[key] + num; } var actual = _.mapValues({ 'a': 1 }, callback, { 'a': 2 }); deepEqual(actual, { 'a': 3 }); actual = _.mapValues([1], callback, [2]); deepEqual(actual, { '0': 3 }); }); test('should iterate over own properties of objects', 1, function() { function Foo() { this.a = 1; } Foo.prototype.b = 2; var actual = _.mapValues(new Foo, function(value, key) { return key; }); deepEqual(actual, { 'a': 'a' }); }); test('should work on an object with no `callback`', 1, function() { var actual = _.mapValues({ 'a': 1, 'b': 2, 'c': 3 }); deepEqual(actual, object); }); test('should accept a falsey `object` argument', 1, function() { var expected = _.map(falsey, _.constant({})); var actual = _.map(falsey, function(value, index) { try { return index ? 
_.mapValues(value) : _.mapValues(); } catch(e) { } }); deepEqual(actual, expected); }); test('should return a wrapped value when chaining', 1, function() { if (!isNpm) { ok(_(object).mapValues(_.noop) instanceof _); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.matches'); (function() { var object = { 'a': 1, 'b': 2, 'c': 3 }, sources = [{ 'a': 1 }, { 'a': 1, 'c': 3 }]; test('should create a function that performs a deep comparison between a given object and the `source` object', 6, function() { _.each(sources, function(source, index) { var matches = _.matches(source); strictEqual(matches.length, 1); strictEqual(matches(object), true); matches = _.matches(index ? { 'c': 3, 'd': 4 } : { 'b': 1 }); strictEqual(matches(object), false); }); }); test('should return `true` when comparing an empty `source`', 1, function() { var expected = _.map(empties, _.constant(true)); var actual = _.map(empties, function(value) { var matches = _.matches(value); return matches(object) === true; }); deepEqual(actual, expected); }); test('should not error error for falsey `object` values', 2, function() { var expected = _.map(falsey, _.constant(true)); _.each(sources, function(source) { var matches = _.matches(source); var actual = _.map(falsey, function(value, index) { try { var result = index ? matches(value) : matches(); return result === false; } catch(e) { } }); deepEqual(actual, expected); }); }); test('should return `true` when comparing an empty `source` to a falsey `object`', 1, function() { var expected = _.map(falsey, _.constant(true)), matches = _.matches({}); var actual = _.map(falsey, function(value, index) { try { var result = index ? 
matches(value) : matches(); return result === true; } catch(e) { } }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.max'); (function() { test('should return the largest value from a collection', 1, function() { strictEqual(3, _.max([1, 2, 3])); }); test('should return `-Infinity` for empty collections', 1, function() { var expected = _.map(empties, function() { return -Infinity; }); var actual = _.map(empties, function(value) { try { return _.max(value); } catch(e) { } }); deepEqual(actual, expected); }); test('should return `-Infinity` for non-numeric collection values', 1, function() { var collections = [['a', 'b'], { 'a': 'a', 'b': 'b' }], expected = _.map(collections, function() { return -Infinity; }); var actual = _.map(collections, function(value) { try { return _.max(value); } catch(e) { } }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.memoize'); (function() { test('should memoize results based on the first argument provided', 2, function() { var memoized = _.memoize(function(a, b, c) { return a + b + c; }); strictEqual(memoized(1, 2, 3), 6); strictEqual(memoized(1, 3, 5), 6); }); test('should support a `resolver` argument', 2, function() { var fn = function(a, b, c) { return a + b + c; }, memoized = _.memoize(fn, fn); strictEqual(memoized(1, 2, 3), 6); strictEqual(memoized(1, 3, 5), 9); }); test('should not set a `this` binding', 2, function() { var memoized = _.memoize(function(a, b, c) { return a + this.b + this.c; }); var object = { 'b': 2, 'c': 3, 'memoized': memoized }; strictEqual(object.memoized(1), 6); strictEqual(object.memoized(2), 7); }); test('should throw a TypeError if `resolve` is truthy and not a function', function() { raises(function() { _.memoize(_.noop, {}); }, TypeError); }); test('should not throw a TypeError if `resolve` is falsey', function() { 
/* Body of the 'should not throw a TypeError if `resolve` is falsey' memoize test (continued). */
var expected = _.map(falsey, _.constant(true)); var actual = _.map(falsey, function(value, index) { try { return _.isFunction(index ? _.memoize(_.noop, value) : _.memoize(_.noop)); } catch(e) { } }); deepEqual(actual, expected); });
test('should check cache for own properties', 1, function() { var actual = [], memoized = _.memoize(_.identity); _.each(shadowedProps, function(value) { actual.push(memoized(value)); }); deepEqual(actual, shadowedProps); });
test('should expose a `cache` object on the `memoized` function', 4, function() { _.times(2, function(index) { var resolver = index && _.identity, memoized = _.memoize(_.identity, resolver); memoized('a'); strictEqual(memoized.cache.a, 'a'); memoized.cache.a = 'b'; strictEqual(memoized('a'), 'b'); }); });
test('should skip the `__proto__` key', 4, function() { _.times(2, function(index) { var count = 0, resolver = index && _.identity; var memoized = _.memoize(function() { count++; return []; }, resolver); memoized('__proto__'); memoized('__proto__'); strictEqual(count, 2); ok(!(memoized.cache instanceof Array)); }); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.merge ===== */
QUnit.module('lodash.merge');
(function() {
/* The enclosing IIFE is invoked with (1, 2, 3), so `args` is a real arguments object. */
var args = arguments;
test('should merge `source` into the destination object', 1, function() { var names = { 'characters': [ { 'name': 'barney' }, { 'name': 'fred' } ] }; var ages = { 'characters': [ { 'age': 36 }, { 'age': 40 } ] }; var heights = { 'characters': [ { 'height': '5\'4"' }, { 'height': '5\'5"' } ] }; var expected = { 'characters': [ { 'name': 'barney', 'age': 36, 'height': '5\'4"' }, { 'name': 'fred', 'age': 40, 'height': '5\'5"' } ] }; deepEqual(_.merge(names, ages, heights), expected); });
test('should merge sources containing circular references', 1, function() { var object = { 'foo': { 'a': 1 }, 'bar': { 'a': 2 } }; var source = { 'foo': { 'b': { 'foo': { 'c': { } } } }, 'bar': { } }; source.foo.b.foo.c = source; source.bar.b = source.foo.b; var actual =
/* Tail of the circular-references merge test (continued from the previous line). */
_.merge(object, source); ok(actual.bar.b === actual.foo.b && actual.foo.b.foo.c === actual.foo.b.foo.c.foo.b.foo.c); });
test('should not treat `arguments` objects as plain objects', 1, function() { var object = { 'args': args }; var source = { 'args': { '3': 4 } }; var actual = _.merge(object, source); strictEqual(_.isArguments(actual.args), false); });
test('should work with four arguments', 1, function() { var expected = { 'a': 4 }; deepEqual(_.merge({ 'a': 1 }, { 'a': 2 }, { 'a': 3 }, expected), expected); });
test('should assign `null` values', 1, function() { var actual = _.merge({ 'a': 1 }, { 'a': null }); strictEqual(actual.a, null); });
test('should not assign `undefined` values', 1, function() { var actual = _.merge({ 'a': 1 }, { 'a': undefined }); strictEqual(actual.a, 1); });
test('should handle merging if `callback` returns `undefined`', 1, function() { var actual = _.merge({ 'a': { 'b': [1, 1] } }, { 'a': { 'b': [0] } }, function() {}); deepEqual(actual, { 'a': { 'b': [0, 1] } });  });
test('should defer to `callback` when it returns a value other than `undefined`', 1, function() { var actual = _.merge({ 'a': { 'b': [0, 1] } }, { 'a': { 'b': [2] } }, function(a, b) { return _.isArray(a) ?
/* Tail of the merge `callback` test; the IIFE invocation (1, 2, 3) supplies `args` above. */
a.concat(b) : undefined; }); deepEqual(actual, { 'a': { 'b': [0, 1, 2] } }); });
}(1, 2, 3));
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.min ===== */
QUnit.module('lodash.min');
(function() {
test('should return the smallest value from a collection', 1, function() { strictEqual(1, _.min([1, 2, 3])); });
test('should return `Infinity` for empty collections', 1, function() { var expected = _.map(empties, function() { return Infinity; }); var actual = _.map(empties, function(value) { try { return _.min(value); } catch(e) { } }); deepEqual(actual, expected); });
test('should return `Infinity` for non-numeric collection values', 1, function() { var collections = [['a', 'b'], { 'a': 'a', 'b': 'b' }], expected = _.map(collections, function() { return Infinity; }); var actual = _.map(collections, function(value) { try { return _.min(value); } catch(e) { } }); deepEqual(actual, expected); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.max and lodash.min (shared behavior, parameterized over methodName) ===== */
QUnit.module('lodash.max and lodash.min');
_.each(['max', 'min'], function(methodName) {
var array = [1, 2, 3], func = _[methodName], isMax = methodName == 'max';
test('`_.' + methodName + '` should work with Date objects', 1, function() { var now = new Date, past = new Date(0); strictEqual(func([now, past]), isMax ? now : past); });
test('`_.' + methodName + '` should work with a callback argument', 1, function() { var actual = func(array, function(num) { return -num; }); strictEqual(actual, isMax ? 1 : 3); });
test('`_.' + methodName + '` should pass the correct `callback` arguments when iterating an array', 1, function() { var args; func(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 0, array]); });
test('`_.' + methodName + '` should pass the correct `callback` arguments when iterating an object', 1, function() { var args, object = { 'a': 1, 'b': 2 }, firstKey = _.first(_.keys(object)); var expected = firstKey == 'a' ?
/* Shared max/min tests (methodName, func, isMax are bound by the enclosing _.each). */
[1, 'a', object] : [2, 'b', object]; func(object, function() { args || (args = slice.call(arguments)); }, 0); deepEqual(args, expected); });
test('`_.' + methodName + '` should support the `thisArg` argument', 1, function() { var actual = func(array, function(num, index) { return -this[index]; }, array); strictEqual(actual, isMax ? 1 : 3); });
test('`_.' + methodName + '` should work when used as a callback for `_.map`', 1, function() { var array = [[2, 3, 1], [5, 6, 4], [8, 9, 7]], actual = _.map(array, func); deepEqual(actual, isMax ? [3, 6, 9] : [1, 4, 7]); });
test('`_.' + methodName + '` should iterate an object', 1, function() { var actual = func({ 'a': 1, 'b': 2, 'c': 3 }); strictEqual(actual, isMax ? 3 : 1); });
test('`_.' + methodName + '` should iterate a string', 2, function() { _.each(['abc', Object('abc')], function(value) { var actual = func(value); strictEqual(actual, isMax ? 'c' : 'a'); }); });
test('`_.' + methodName + '` should work when `callback` returns +/-Infinity', 1, function() { var object = { 'a': (isMax ? -Infinity : Infinity) }; var actual = func([object, { 'a': object.a }], function(object) { return object.a; }); strictEqual(actual, object); });
test('`_.' + methodName + '` should work with extremely large arrays', 1, function() { var array = _.range(0, 5e5); strictEqual(func(array), isMax ? 499999 : 0); });
test('`_.'
/* Last shared max/min test (its name string continues from the previous line), then the mixin module. */
+ methodName + '` should work when chaining on an array with only one value', 1, function() { if (!isNpm) { var actual = _([40])[methodName]().value(); strictEqual(actual, 40); } else { skipTest(); } });
});
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.mixin ===== */
QUnit.module('lodash.mixin');
(function() {
function wrapper(value) { if (!(this instanceof wrapper)) { return new wrapper(value); } this.__wrapped__ = value; }
var value = ['a'], source = { 'a': function(array) { return array[0]; }, 'b': 'B' };
test('should mixin `source` methods into lodash', 4, function() { _.mixin(source); strictEqual(_.a(value), 'a'); strictEqual(_(value).a().__wrapped__, 'a'); delete _.a; delete _.prototype.a; ok(!('b' in _)); ok(!('b' in _.prototype)); delete _.b; delete _.prototype.b; });
test('should use `this` as the default `object` value', 3, function() { var object = _.create(_); object.mixin(source); strictEqual(object.a(value), 'a'); ok(!('a' in _)); ok(!('a' in _.prototype)); delete wrapper.a; delete wrapper.prototype.a; delete wrapper.b; delete wrapper.prototype.b; });
test('should accept an `object` argument', 1, function() { var object = {}; _.mixin(object, source); strictEqual(object.a(value), 'a'); });
test('should return `object`', 2, function() { var object = {}; strictEqual(_.mixin(object, source), object); strictEqual(_.mixin(), _); });
test('should work with a function for `object`', 2, function() { _.mixin(wrapper, source); var wrapped = wrapper(value), actual = wrapped.a(); strictEqual(actual.__wrapped__, 'a'); ok(actual instanceof wrapper); delete wrapper.a; delete wrapper.prototype.a; delete wrapper.b; delete wrapper.prototype.b; });
test('should not assign inherited `source` properties', 1, function() { function Foo() {} Foo.prototype = { 'a': _.noop }; deepEqual(_.mixin({}, new Foo, {}), {}); });
test('should accept an `options` argument', 16, function() { function message(func, chain) { return (func === _ ?
/* Continuation of the mixin `options` test, remaining mixin tests, then the noop module. */
'lodash' : 'provided') + ' function should ' + (chain ? '' : 'not ') + 'chain'; }
_.each([_, wrapper], function(func) { _.each([false, true, { 'chain': false }, { 'chain': true }], function(options) { if (func === _) { _.mixin(source, options); } else { _.mixin(func, source, options); } var wrapped = func(value), actual = wrapped.a(); if (options === true || (options && options.chain)) { strictEqual(actual.__wrapped__, 'a', message(func, true)); ok(actual instanceof func, message(func, true)); } else { strictEqual(actual, 'a', message(func, false)); ok(!(actual instanceof func), message(func, false)); } delete func.a; delete func.prototype.a; delete func.b; delete func.prototype.b; }); }); });
test('should not error for non-object `options` values', 2, function() { var pass = true; try { _.mixin({}, source, 1); } catch(e) { pass = false; } ok(pass); pass = true; try { _.mixin(source, 1); } catch(e) { pass = false; } delete _.a; delete _.prototype.a; delete _.b; delete _.prototype.b; ok(pass); });
test('should return the existing wrapper when chaining', 2, function() { if (!isNpm) { _.each([_, wrapper], function(func) { if (func === _) { var wrapper = _(source), actual = wrapper.mixin(); strictEqual(actual.value(), _); } else { wrapper = _(func); actual = wrapper.mixin(source); strictEqual(actual, wrapper); } delete func.a; delete func.prototype.a; delete func.b; delete func.prototype.b; }); } else { skipTest(2); } });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.noop ===== */
QUnit.module('lodash.noop');
(function() {
test('should always return `undefined`', 1, function() { var values = falsey.concat([], true, new Date, _, {}, /x/, 'a'), expected = _.map(values, _.constant()); var actual = _.map(values, function(value, index) { return index ?
/* Tail of the noop test, then the now and omit modules. */
_.noop(value) : _.noop(); }); deepEqual(actual, expected); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.now ===== */
QUnit.module('lodash.now');
(function() {
asyncTest('should return the number of milliseconds that have elapsed since the Unix epoch', 2, function() { var stamp = +new Date, actual = _.now(); ok(actual >= stamp); if (!(isRhino && isModularize)) { setTimeout(function() { ok(_.now() > actual); QUnit.start(); }, 32); } else { skipTest(); QUnit.start(); } });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.omit (the IIFE is invoked with ('a', 'c') on a later line) ===== */
QUnit.module('lodash.omit');
(function() {
var args = arguments, object = { 'a': 1, 'b': 2, 'c': 3 }, expected = { 'b': 2 };
test('should create an object with omitted properties', 2, function() { deepEqual(_.omit(object, 'a'), { 'b': 2, 'c': 3 }); deepEqual(_.omit(object, 'a', 'c'), expected); });
test('should support picking an array of properties', 1, function() { deepEqual(_.omit(object, ['a', 'c']), expected); });
test('should support picking an array of properties and individual properties', 1, function() { deepEqual(_.omit(object, ['a'], 'c'), expected); });
test('should iterate over inherited properties', 1, function() { function Foo() {} Foo.prototype = object; deepEqual(_.omit(new Foo, 'a', 'c'), expected); });
test('should work with `arguments` objects as secondary arguments', 1, function() { deepEqual(_.omit(object, args), expected); });
test('should work with an array `object` argument', 1, function() { deepEqual(_.omit([1, 2, 3], '0', '2'), { '1': 2 }); });
test('should work with a callback argument', 1, function() { var actual = _.omit(object, function(num) { return num != 2; }); deepEqual(actual, expected); });
test('should pass the correct `callback` arguments', 1, function() { var args, object = { 'a': 1, 'b': 2 }, lastKey = _.keys(object).pop(); var expected = lastKey == 'b' ?
/* Tail of the omit callback-arguments test, then the once and pad modules. */
[1, 'a', object] : [2, 'b', object]; _.omit(object, function() { args || (args = slice.call(arguments)); }); deepEqual(args, expected); });
test('should correctly set the `this` binding', 1, function() { var actual = _.omit(object, function(num) { return num != this.b; }, { 'b': 2 }); deepEqual(actual, expected); });
test('should coerce property names to strings', 1, function() { deepEqual(_.omit({ '0': 'a' }, 0), {}); });
}('a', 'c'));
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.once ===== */
QUnit.module('lodash.once');
(function() {
test('should execute `func` once', 1, function() { var count = 0, once = _.once(function() { count++; }); once(); once(); strictEqual(count, 1); });
test('should not set a `this` binding', 1, function() { var once = _.once(function() { this.count++; }), object = { 'count': 0, 'once': once }; object.once(); object.once(); strictEqual(object.count, 1); });
test('should ignore recursive calls', 1, function() { var count = 0; var once = _.once(function() { count++; once(); }); once(); strictEqual(count, 1); });
test('should not throw more than once', 2, function() { var once = _.once(function() { throw new Error; }); raises(function() { once(); }, Error); var pass = true; try { once(); } catch(e) { pass = false; } ok(pass); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.pad =====
   NOTE(review): the expected pad strings below contain runs of spaces that the
   mangled source renders ambiguously; canonical lodash expectations are used
   ('   abc   ' for length 9, '  abc   ' for length 8) — verify against upstream. */
QUnit.module('lodash.pad');
(function() {
test('should pad a string to a given length', 1, function() { strictEqual(_.pad('abc', 9), '   abc   '); });
test('should truncate pad characters to fit the pad length', 2, function() { strictEqual(_.pad('abc', 8), '  abc   '); strictEqual(_.pad('abc', 8, '_-'), '_-abc_-_'); });
test('should coerce `string` to a string', 2, function() { strictEqual(_.pad(Object('abc'), 4), 'abc '); strictEqual(_.pad({ 'toString': _.constant('abc') }, 5), ' abc '); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.padLeft (body begins on the next line) ===== */
QUnit.module('lodash.padLeft');
(function() { test('should pad a string to a given length', 1, function() { strictEqual(_.padLeft('abc', 6), ' abc'); }); test('should truncate pad characters to fit the pad length', 1, function() { strictEqual(_.padLeft('abc', 6, '_-'), '_-_abc'); }); test('should coerce `string` to a string', 2, function() { strictEqual(_.padLeft(Object('abc'), 4), ' abc'); strictEqual(_.padLeft({ 'toString': _.constant('abc') }, 5), ' abc'); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.padRight'); (function() { test('should pad a string to a given length', 1, function() { strictEqual(_.padRight('abc', 6), 'abc '); }); test('should truncate pad characters to fit the pad length', 1, function() { strictEqual(_.padRight('abc', 6, '_-'), 'abc_-_'); }); test('should coerce `string` to a string', 2, function() { strictEqual(_.padRight(Object('abc'), 4), 'abc '); strictEqual(_.padRight({ 'toString': _.constant('abc') }, 5), 'abc '); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('pad methods'); _.each(['pad', 'padLeft', 'padRight'], function(methodName, index) { var func = _[methodName]; test('`_.' + methodName + '` should not pad is string is >= `length`', 2, function() { strictEqual(func('abc', 2), 'abc'); strictEqual(func('abc', 3), 'abc'); }); test('`_.' + methodName + '` should treat negative `length` as `0`', 2, function() { _.each([0, -2], function(length) { strictEqual(func('abc', length), 'abc'); }); }); test('`_.' + methodName + '` should coerce `length` to a number', 2, function() { _.each(['', '4'], function(length) { var actual = length ? (index == 1 ? ' abc' : 'abc ') : 'abc'; strictEqual(func('abc', length), actual); }); }); test('`_.' 
/* Remaining shared pad tests (the name string of the first continues from the previous line),
   then the pairs and parseInt modules. */
+ methodName + '` should return an empty string when provided `null`, `undefined`, or empty string and `chars`', 6, function() { _.each([null, '_-'], function(chars) { strictEqual(func(null, 0, chars), ''); strictEqual(func(undefined, 0, chars), ''); strictEqual(func('', 0, chars), ''); }); });
test('`_.' + methodName + '` should work with `null`, `undefined`, or empty string for `chars`', 3, function() { notStrictEqual(func('abc', 6, null), 'abc'); notStrictEqual(func('abc', 6, undefined), 'abc'); strictEqual(func('abc', 6, ''), 'abc'); });
});
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.pairs ===== */
QUnit.module('lodash.pairs');
(function() {
test('should create a two dimensional array of an object\'s key-value pairs', 1, function() { var object = { 'a': 1, 'b': 2 }; deepEqual(_.pairs(object), [['a', 1], ['b', 2]]); });
test('should work with an object that has a `length` property', 1, function() { var object = { '0': 'a', '1': 'b', 'length': 2 }; deepEqual(_.pairs(object), [['0', 'a'], ['1', 'b'], ['length', 2]]); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.parseInt ===== */
QUnit.module('lodash.parseInt');
(function() {
test('should accept a `radix` argument', 1, function() { var expected = _.range(2, 37); var actual = _.map(expected, function(radix) { return _.parseInt('10', radix); }); deepEqual(actual, expected); });
test('should use a radix of `10`, for non-hexadecimals, if `radix` is `undefined` or `0`', 4, function() { strictEqual(_.parseInt('10'), 10); strictEqual(_.parseInt('10', 0), 10); strictEqual(_.parseInt('10', 10), 10); strictEqual(_.parseInt('10', undefined), 10); });
test('should use a radix of `16`, for hexadecimals, if `radix` is `undefined` or `0`', 8, function() { _.each(['0x20', '0X20'], function(string) { strictEqual(_.parseInt(string), 32); strictEqual(_.parseInt(string, 0), 32); strictEqual(_.parseInt(string, 16), 32); strictEqual(_.parseInt(string, undefined), 32); }); });
test('should
use a radix of `10` for string with leading zeros', 2, function() { strictEqual(_.parseInt('08'), 8); strictEqual(_.parseInt('08', 10), 8); });
/* Remaining 'lodash.parseInt' tests (the first test name above continues a string split by
   the line wrapping), then the shared _.partial/_.partialRight tests. */
test('should parse strings with leading whitespace (test in Chrome, Firefox, and Opera)', 8, function() { strictEqual(_.parseInt(whitespace + '10'), 10); strictEqual(_.parseInt(whitespace + '10', 10), 10); strictEqual(_.parseInt(whitespace + '08'), 8); strictEqual(_.parseInt(whitespace + '08', 10), 8); _.each(['0x20', '0X20'], function(string) { strictEqual(_.parseInt(whitespace + string), 32); strictEqual(_.parseInt(whitespace + string, 16), 32); }); });
test('should coerce `radix` to a number', 2, function() { var object = { 'valueOf': function() { return 0; } }; strictEqual(_.parseInt('08', object), 8); strictEqual(_.parseInt('0x20', object), 32); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: partial methods (shared tests for _.partial and _.partialRight) ===== */
QUnit.module('partial methods');
_.each(['partial', 'partialRight'], function(methodName) {
var func = _[methodName], isPartial = methodName == 'partial';
test('`_.' + methodName + '` partially applies arguments', 1, function() { var par = func(_.identity, 'a'); strictEqual(par(), 'a'); });
test('`_.' + methodName + '` creates a function that can be invoked with additional arguments', 1, function() { var fn = function(a, b) { return [a, b]; }, expected = ['a', 'b'], par = func(fn, 'a'); deepEqual(par('b'), isPartial ? expected : expected.reverse()); });
test('`_.' + methodName + '` works when there are no partially applied arguments and the created function is invoked without additional arguments', 1, function() { var fn = function() { return arguments.length; }, par = func(fn); strictEqual(par(), 0); });
test('`_.' + methodName + '` works when there are no partially applied arguments and the created function is invoked with additional arguments', 1, function() { var par = func(_.identity); strictEqual(par('a'), 'a'); });
test('`_.'
+ methodName + '` should support placeholders', 4, function() { if (!isModularize) { var fn = function() { return slice.call(arguments); }, par = func(fn, _, 'b', _); deepEqual(par('a', 'c'), ['a', 'b', 'c']); deepEqual(par('a'), ['a', 'b', undefined]); deepEqual(par(), [undefined, 'b', undefined]); if (isPartial) { deepEqual(par('a', 'c', 'd'), ['a', 'b', 'c', 'd']); } else { par = func(fn, _, 'c', _); deepEqual(par('a', 'b', 'd'), ['a', 'b', 'c', 'd']); } } else { skipTest(4); } }); test('`_.' + methodName + '` should not alter the `this` binding', 3, function() { var fn = function() { return this.a; }, object = { 'a': 1 }; var par = func(_.bind(fn, object)); strictEqual(par(), object.a); par = _.bind(func(fn), object); strictEqual(par(), object.a); object.par = func(fn); strictEqual(object.par(), object.a); }); test('`_.' + methodName + '` creates a function with a `length` of `0`', 1, function() { var fn = function(a, b, c) {}, par = func(fn, 'a'); strictEqual(par.length, 0); }); test('`_.' + methodName + '` ensure `new partialed` is an instance of `func`', 2, function() { function Foo(value) { return value && object; } var object = {}, par = func(Foo); ok(new par instanceof Foo); strictEqual(new par(true), object); }); test('`_.' + methodName + '` should clone metadata for created functions', 3, function() { var greet = function(greeting, name) { return greeting + ' ' + name; }; var par1 = func(greet, 'hi'), par2 = func(par1, 'barney'), par3 = func(par1, 'pebbles'); strictEqual(par1('fred'), isPartial ? 'hi fred' : 'fred hi') strictEqual(par2(), isPartial ? 'hi barney' : 'barney hi'); strictEqual(par3(), isPartial ? 'hi pebbles' : 'pebbles hi'); }); test('`_.' 
/* Last shared partial test (name string continues from the previous line), then the
   partialRight, createWrapper-combination, and partition modules. */
+ methodName + '` should work with curried methods', 2, function() { var fn = function(a, b, c) { return a + b + c; }, curried = _.curry(func(fn, 1), 2); strictEqual(curried(2, 3), 6); strictEqual(curried(2)(3), 6); });
});
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.partialRight ===== */
QUnit.module('lodash.partialRight');
(function() {
test('should work as a deep `_.defaults`', 1, function() { var object = { 'a': { 'b': 1 } }, source = { 'a': { 'b': 2, 'c': 3 } }, expected = { 'a': { 'b': 1, 'c': 3 } }; var defaultsDeep = _.partialRight(_.merge, function deep(value, other) { return _.merge(value, other, deep); }); deepEqual(defaultsDeep(object, source), expected); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: methods using `createWrapper` (bound/partial combinations) ===== */
QUnit.module('methods using `createWrapper`');
(function() {
test('combinations of partial functions should work', 1, function() { function fn() { return slice.call(arguments); } var a = _.partial(fn), b = _.partialRight(a, 3), c = _.partial(b, 1); deepEqual(c(2), [1, 2, 3]); });
test('combinations of bound and partial functions should work', 3, function() { function fn() { var result = [this.a]; push.apply(result, arguments); return result; } var expected = [1, 2, 3, 4], object = { 'a': 1, 'fn': fn }; var a = _.bindKey(object, 'fn'), b = _.partialRight(a, 4), c = _.partial(b, 2); deepEqual(c(3), expected); a = _.bind(fn, object); b = _.partialRight(a, 4); c = _.partial(b, 2); deepEqual(c(3), expected); a = _.partial(fn, 2); b = _.bind(a, object); c = _.partialRight(b, 4); deepEqual(c(3), expected); });
test('recursively bound functions should work', 1, function() { function fn() { return this.a; } var a = _.bind(fn, { 'a': 1 }), b = _.bind(a, { 'a': 2 }), c = _.bind(b, { 'a': 3 }); strictEqual(c(), 1); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.partition ===== */
QUnit.module('lodash.partition');
(function() {
var array = [1, 0, 1];
test('should always return two groups of
elements', 3, function() { deepEqual(_.partition([], _.identity), [[], []]); deepEqual(_.partition(array, _.constant(true)), [array, []]); deepEqual(_.partition(array, _.constant(false)), [[], array]); });
/* Remaining 'lodash.partition' tests (the first test name above continues a string split by
   the line wrapping), then the start of 'lodash.pick'. */
test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.partition(array); deepEqual(actual, [[1, 1], [0]]); });
test('should pass the correct `callback` arguments', 1, function() { var args; _.partition(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 0, array]); });
test('should support the `thisArg` argument', 1, function() { var actual = _.partition([1.1, 0.2, 1.3], function(num) { return this.floor(num); }, Math); deepEqual(actual, [[1.1, 1.3], [0.2]]); });
test('should work with an object for `collection`', 1, function() { var actual = _.partition({ 'a': 1.1, 'b': 0.2, 'c': 1.3 }, function(num) { return Math.floor(num); }); deepEqual(actual, [[1.1, 1.3], [0.2]]); });
test('should work with a number for `callback`', 2, function() { var array = [ [1, 0], [0, 1], [1, 0] ]; deepEqual(_.partition(array, 0), [[array[0], array[2]], [array[1]]]); deepEqual(_.partition(array, 1), [[array[1]], [array[0], array[2]]]); });
test('should work with a string for `callback`', 1, function() { var objects = [{ 'a': 1 }, { 'a': 1 }, { 'b': 2 }], actual = _.partition(objects, 'a'); deepEqual(actual, [objects.slice(0, 2), objects.slice(2)]); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.pick (the IIFE is invoked with ('a', 'c') on a later line) ===== */
QUnit.module('lodash.pick');
(function() {
var args = arguments, object = { 'a': 1, 'b': 2, 'c': 3 }, expected = { 'a': 1, 'c': 3 };
test('should create an object of picked properties', 2, function() { deepEqual(_.pick(object, 'a'), { 'a': 1 }); deepEqual(_.pick(object, 'a', 'c'), expected); });
test('should support picking an array of properties', 1, function() { deepEqual(_.pick(object, ['a', 'c']), expected); });
test('should support picking an array of properties and individual
properties', 1, function() { deepEqual(_.pick(object, ['a'], 'c'), expected); });
/* Remaining 'lodash.pick' tests (the first test name above continues a string split by the
   line wrapping), then the start of 'lodash.pluck'. */
test('should iterate over inherited properties', 1, function() { function Foo() {} Foo.prototype = object; deepEqual(_.pick(new Foo, 'a', 'c'), expected); });
test('should work with `arguments` objects as secondary arguments', 1, function() { deepEqual(_.pick(object, args), expected); });
test('should work with an array `object` argument', 1, function() { deepEqual(_.pick([1, 2, 3], '1'), { '1': 2 }); });
test('should work with a callback argument', 1, function() { var actual = _.pick(object, function(num) { return num != 2; }); deepEqual(actual, expected); });
test('should pass the correct `callback` arguments', 1, function() { var args, object = { 'a': 1, 'b': 2 }, lastKey = _.keys(object).pop(); var expected = lastKey == 'b' ? [1, 'a', object] : [2, 'b', object]; _.pick(object, function() { args || (args = slice.call(arguments)); }); deepEqual(args, expected); });
test('should correctly set the `this` binding', 1, function() { var actual = _.pick(object, function(num) { return num != this.b; }, { 'b': 2 }); deepEqual(actual, expected); });
test('should coerce property names to strings', 1, function() { deepEqual(_.pick({ '0': 'a', '1': 'b' }, 0), { '0': 'a' }); });
}('a', 'c'));
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.pluck ===== */
QUnit.module('lodash.pluck');
(function() {
test('should return an array of property values from each element of a collection', 1, function() { var objects = [{ 'name': 'barney', 'age': 36 }, { 'name': 'fred', 'age': 40 }], actual = _.pluck(objects, 'name'); deepEqual(actual, ['barney', 'fred']); });
test('should work with an object for `collection`', 1, function() { var object = { 'a': [1], 'b': [1, 2], 'c': [1, 2, 3] }; deepEqual(_.pluck(object, 'length'), [1, 2, 3]); });
test('should work with nullish elements', 1, function() { var objects = [{ 'a': 1 }, null, undefined, { 'a': 4 }]; deepEqual(_.pluck(objects, 'a'), [1,
undefined, undefined, 4]); }); test('should coerce `key` to a string', 1, function() { function fn() {} fn.toString = _.constant('fn'); var objects = [{ 'null': 1 }, { 'undefined': 2 }, { 'fn': 3 }, { '[object Object]': 4 }], values = [null, undefined, fn, {}] var actual = _.map(objects, function(object, index) { return _.pluck([object], values[index]); }); deepEqual(actual, [[1], [2], [3], [4]]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.property'); (function() { test('should create a function that plucks a property value of a given object', 3, function() { var object = { 'a': 1, 'b': 2 }, property = _.property('a'); strictEqual(property.length, 1); strictEqual(property(object), 1); property = _.property('b'); strictEqual(property(object), 2); }); test('should work with non-string `prop` arguments', 1, function() { var array = [1, 2, 3], property = _.property(1); strictEqual(property(array), 2); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.pull'); (function() { test('should modify and return the array', 2, function() { var array = [1, 2, 3], actual = _.pull(array, 1, 3); deepEqual(array, [2]); ok(actual === array); }); test('should preserve holes in arrays', 2, function() { var array = [1, 2, 3, 4]; delete array[1]; delete array[3]; _.pull(array, 1); ok(!('0' in array)); ok(!('2' in array)); }); test('should treat holes as `undefined`', 1, function() { var array = [1, 2, 3]; delete array[1]; _.pull(array, undefined); deepEqual(array, [1, 3]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.pullAt'); (function() { test('should modify the array and return removed elements', 2, function() { var array = [1, 2, 3], actual = _.pullAt(array, [0, 1]); deepEqual(array, [3]); deepEqual(actual, [1, 2]); }); test('should work with unsorted indexes', 2, function() { var array = [1, 2, 
/* Tail of the pullAt unsorted-indexes test (the array literal continues from the previous
   line), remaining pullAt tests, then the start of 'lodash.random'. */
3, 4], actual = _.pullAt(array, [1, 3, 0]); deepEqual(array, [3]); deepEqual(actual, [2, 4, 1]); });
test('should work with repeated indexes', 2, function() { var array = [1, 2, 3, 4], actual = _.pullAt(array, [0, 2, 0, 1, 0, 2]); deepEqual(array, [4]); deepEqual(actual, [1, 3, 1, 2, 1, 3]); });
test('should return `undefined` for nonexistent keys', 2, function() { var array = ['a', 'b', 'c'], actual = _.pullAt(array, [2, 4, 0]); deepEqual(array, ['b']); deepEqual(actual, ['c', undefined, 'a']); });
test('should return an empty array when no keys are provided', 2, function() { var array = ['a', 'b', 'c'], actual = _.pullAt(array); deepEqual(array, ['a', 'b', 'c']); deepEqual(actual, []); });
test('should accept multiple index arguments', 2, function() { var array = ['a', 'b', 'c', 'd'], actual = _.pullAt(array, 3, 0, 2); deepEqual(array, ['b']); deepEqual(actual, ['d', 'a', 'c']); });
test('should ignore non-index values', 2, function() { var array = ['a', 'b', 'c'], clone = array.slice(); var values = _.reject(empties, function(value) { return value === 0 || _.isArray(value); }).concat(-1, 1.1); var expected = _.map(values, _.constant(undefined)), actual = _.pullAt.apply(_, [array].concat(values)); deepEqual(actual, expected); deepEqual(array, clone); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.random (statistical tests over 1000 samples) ===== */
QUnit.module('lodash.random');
(function() {
var array = Array(1000);
test('should return `0` or `1` when arguments are not provided', 1, function() { var actual = _.map(array, function() { return _.random(); }); deepEqual(_.uniq(actual).sort(), [0, 1]); });
test('supports not passing a `max` argument', 1, function() { ok(_.some(array, function() { return _.random(5) !== 5; })); });
test('supports large integer values', 2, function() { var min = Math.pow(2, 31), max = Math.pow(2, 62); ok(_.every(array, function() { return _.random(min, max) >= min; })); ok(_.some(array, function() { return _.random(Number.MAX_VALUE) > 0; })); });
/* Remaining 'lodash.random' tests, then the complete 'lodash.range' module. */
test('should coerce arguments to numbers', 1, function() { strictEqual(_.random('1', '1'), 1); });
test('should support floats', 2, function() { var min = 1.5, max = 1.6, actual = _.random(min, max); ok(actual % 1); ok(actual >= min && actual <= max); });
test('supports passing a `floating` argument', 3, function() { var actual = _.random(true); ok(actual % 1 && actual >= 0 && actual <= 1); actual = _.random(2, true); ok(actual % 1 && actual >= 0 && actual <= 2); actual = _.random(2, 4, true); ok(actual % 1 && actual >= 2 && actual <= 4); });
}());
/*--------------------------------------------------------------------------*/
/* ===== QUnit module: lodash.range ===== */
QUnit.module('lodash.range');
(function() {
test('should work when passing a single `end` argument', 1, function() { deepEqual(_.range(4), [0, 1, 2, 3]); });
test('should work when passing `start` and `end` arguments', 1, function() { deepEqual(_.range(1, 5), [1, 2, 3, 4]); });
test('should work when passing `start`, `end`, and `step` arguments', 1, function() { deepEqual(_.range(0, 20, 5), [0, 5, 10, 15]); });
test('should support a `step` of `0`', 1, function() { deepEqual(_.range(1, 4, 0), [1, 1, 1]); });
test('should work when passing `step` larger than `end`', 1, function() { deepEqual(_.range(1, 5, 20), [1]); });
test('should work when passing a negative `step` argument', 2, function() { deepEqual(_.range(0, -4, -1), [0, -1, -2, -3]); deepEqual(_.range(21, 10, -3), [21, 18, 15, 12]); });
test('should treat falsey `start` arguments as `0`', 13, function() { _.each(falsey, function(value, index) { if (index) { deepEqual(_.range(value), []); deepEqual(_.range(value, 1), [0]); } else { deepEqual(_.range(), []); } }); });
test('should coerce arguments to finite numbers', 1, function() { var actual = [_.range('0', 1), _.range('1'), _.range(0, 1, '1'), _.range(NaN), _.range(NaN, NaN)]; deepEqual(actual, [[0], [0], [0], [], []]); });
}());
/*--------------------------------------------------------------------------*/
// Covers: `lodash.reduce` and `lodash.reduceRight` modules (default
// accumulator = first/last element, exact callback-argument order for arrays,
// objects, and strings — string iteration is the IE < 9 regression check),
// the shared "reduce methods" parameterized suite (empty/falsey collections
// with and without an initial accumulator, `undefined` accumulator,
// `__proto__`-swapped arrays, unwrapped chaining), the `lodash.reject` module,
// and the start of the shared "filter methods" suite. The `args = null`
// resets between calls reuse one capture variable per test, so statement
// order inside each test body is load-bearing.
QUnit.module('lodash.reduce'); (function() { var array = [1, 2, 3]; test('should use the first element of a collection as the default `accumulator`', 1, function() { strictEqual(_.reduce(array), 1); }); test('should pass the correct `callback` arguments when iterating an array', 2, function() { var args; _.reduce(array, function() { args || (args = slice.call(arguments)); }, 0); deepEqual(args, [0, 1, 0, array]); args = null; _.reduce(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 2, 1, array]); }); test('should pass the correct `callback` arguments when iterating an object', 2, function() { var args, object = { 'a': 1, 'b': 2 }, firstKey = _.first(_.keys(object)); var expected = firstKey == 'a' ? [0, 1, 'a', object] : [0, 2, 'b', object]; _.reduce(object, function() { args || (args = slice.call(arguments)); }, 0); deepEqual(args, expected); args = null; expected = firstKey == 'a' ? [1, 2, 'b', object] : [2, 1, 'a', object]; _.reduce(object, function() { args || (args = slice.call(arguments)); }); deepEqual(args, expected); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection` (test in IE < 9)', 2, function() { var args; var actual = _.reduce(collection, function(accumulator, value) { args || (args = slice.call(arguments)); return accumulator + value; }); deepEqual(args, ['a', 'b', 1, collection]); strictEqual(actual, 'abc'); }); }); test('should be aliased', 2, function() { strictEqual(_.foldl, _.reduce); strictEqual(_.inject, _.reduce); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.reduceRight'); (function() { var array = [1, 2, 3]; test('should use the last element of a collection as the default `accumulator`', 1, function() { strictEqual(_.reduceRight(array), 3); }); test('should pass the correct `callback` arguments when iterating an array', 2, function() { var args; 
_.reduceRight(array, function() { args || (args = slice.call(arguments)); }, 0); deepEqual(args, [0, 3, 2, array]); args = null; _.reduceRight(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [3, 2, 1, array]); }); test('should pass the correct `callback` arguments when iterating an object', 2, function() { var args, object = { 'a': 1, 'b': 2 }, lastKey = _.last(_.keys(object)); var expected = lastKey == 'b' ? [0, 2, 'b', object] : [0, 1, 'a', object]; _.reduceRight(object, function() { args || (args = slice.call(arguments)); }, 0); deepEqual(args, expected); args = null; expected = lastKey == 'b' ? [2, 1, 'a', object] : [1, 2, 'b', object]; _.reduceRight(object, function() { args || (args = slice.call(arguments)); }); deepEqual(args, expected); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection` (test in IE < 9)', 2, function() { var args; var actual = _.reduceRight(collection, function(accumulator, value) { args || (args = slice.call(arguments)); return accumulator + value; }); deepEqual(args, ['c', 'b', 1, collection]); strictEqual(actual, 'cba'); }); }); test('should be aliased', 1, function() { strictEqual(_.foldr, _.reduceRight); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('reduce methods'); _.each(['reduce', 'reduceRight'], function(methodName) { var array = [1, 2, 3], func = _[methodName]; test('`_.' + methodName + '` should reduce a collection to a single value', 1, function() { var actual = func(['a', 'b', 'c'], function(accumulator, value) { return accumulator + value; }, ''); strictEqual(actual, methodName == 'reduce' ? 'abc' : 'cba'); }); test('`_.' + methodName + '` should support the `thisArg` argument', 1, function() { var actual = func(array, function(sum, num, index) { return sum + this[index]; }, 0, array); deepEqual(actual, 6); }); test('`_.' 
+ methodName + '` should support empty or falsey collections without an initial `accumulator` value', 1, function() { var actual = [], expected = _.map(empties, _.constant()); _.each(empties, function(value) { try { actual.push(func(value, _.noop)); } catch(e) { } }); deepEqual(actual, expected); }); test('`_.' + methodName + '` should support empty or falsey collections with an initial `accumulator` value', 1, function() { var expected = _.map(empties, _.constant('x')); var actual = _.map(empties, function(value) { try { return func(value, _.noop, 'x'); } catch(e) { } }); deepEqual(actual, expected); }); test('`_.' + methodName + '` should handle an initial `accumulator` value of `undefined`', 1, function() { var actual = func([], _.noop, undefined); strictEqual(actual, undefined); }); test('`_.' + methodName + '` should return `undefined` for empty collections when no `accumulator` is provided (test in IE > 9 and modern browsers)', 2, function() { var array = [], object = { '0': 1, 'length': 0 }; if ('__proto__' in array) { array.__proto__ = object; strictEqual(_.reduce(array, _.noop), undefined); } else { skipTest(); } strictEqual(_.reduce(object, _.noop), undefined); }); test('`_.' + methodName + '` should return an unwrapped value when chaining', 1, function() { if (!isNpm) { var actual = _(array)[methodName](function(sum, num) { return sum + num; }); strictEqual(actual, 6); } else { skipTest(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.reject'); (function() { test('should return elements the `callback` returns falsey for', 1, function() { var actual = _.reject([1, 2, 3], function(num) { return num % 2; }); deepEqual(actual, [2]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('filter methods'); _.each(['filter', 'reject'], function(methodNames) { var func = _[methodNames]; test('`_.' 
// Covers: the tail of the shared "filter methods" suite (callback mutation of
// the source array must not affect the result — note the parameterized
// variable is named `methodNames` though it holds a single name), the
// `lodash.remove` module (in-place removal, returned removed elements,
// callback args, `thisArg`, sparse-array hole preservation and hole-as-
// `undefined` semantics via `delete array[i]`), `lodash.repeat` (n-times
// repetition, negative/zero `n`, coercion of `n` and `string` through
// `valueOf`/`toString`), `lodash.result` (property resolution, function
// invocation with `this`, nullish objects, default values), and most of
// `lodash.rest` (falsey arrays, `n` handling, callback/object/string
// shorthand forms).
+ methodNames + '` should not modify the resulting value from within `callback`', 1, function() { var actual = func([0], function(num, index, array) { array[index] = 1; return methodNames == 'filter'; }); deepEqual(actual, [0]); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.remove'); (function() { test('should modify the array and return removed elements', 2, function() { var array = [1, 2, 3]; var actual = _.remove(array, function(num) { return num < 3; }); deepEqual(array, [3]); deepEqual(actual, [1, 2]); }); test('should pass the correct `callback` arguments', 1, function() { var args, array = [1, 2, 3]; _.remove(array, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [1, 0, array]); }); test('should support the `thisArg` argument', 1, function() { var array = [1, 2, 3]; var actual = _.remove(array, function(num, index) { return this[index] < 3; }, array); deepEqual(actual, [1, 2]); }); test('should preserve holes in arrays', 2, function() { var array = [1, 2, 3, 4]; delete array[1]; delete array[3]; _.remove(array, function(num) { return num === 1; }); ok(!('0' in array)); ok(!('2' in array)); }); test('should treat holes as `undefined`', 1, function() { var array = [1, 2, 3]; delete array[1]; _.remove(array, function(num) { return num == null; }); deepEqual(array, [1, 3]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.repeat'); (function() { test('should repeat a string `n` times', 2, function() { strictEqual(_.repeat('*', 3), '***'); strictEqual(_.repeat('abc', 2), 'abcabc'); }); test('should return an empty string for negative `n` or `n` of `0`', 2, function() { strictEqual(_.repeat('abc', 0), ''); strictEqual(_.repeat('abc', -2), ''); }); test('should coerce `n` to a number', 3, function() { strictEqual(_.repeat('abc'), ''); strictEqual(_.repeat('abc', '2'), 'abcabc'); strictEqual(_.repeat('*', { 'valueOf': 
_.constant(3) }), '***'); }); test('should coerce `string` to a string', 2, function() { strictEqual(_.repeat(Object('abc'), 2), 'abcabc'); strictEqual(_.repeat({ 'toString': _.constant('*') }, 3), '***'); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.result'); (function() { var object = { 'a': 1, 'b': null, 'c': function() { return this.a; } }; test('should resolve property values', 4, function() { strictEqual(_.result(object, 'a'), 1); strictEqual(_.result(object, 'b'), null); strictEqual(_.result(object, 'c'), 1); strictEqual(_.result(object, 'd'), undefined); }); test('should return `undefined` when `object` is nullish', 2, function() { strictEqual(_.result(null, 'a'), undefined); strictEqual(_.result(undefined, 'a'), undefined); }); test('should return the specified default value for undefined properties', 1, function() { var values = falsey.concat(1, _.constant(1)); var expected = _.transform(values, function(result, value) { result.push(value, value); }); var actual = _.transform(values, function(result, value) { result.push( _.result(object, 'd', value), _.result(null, 'd', value) ); }); deepEqual(actual, expected); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.rest'); (function() { var array = [1, 2, 3]; var objects = [ { 'a': 2, 'b': 2 }, { 'a': 1, 'b': 1 }, { 'a': 0, 'b': 0 } ]; test('should accept a falsey `array` argument', 1, function() { var expected = _.map(falsey, _.constant([])); var actual = _.map(falsey, function(value, index) { try { return index ? 
_.rest(value) : _.rest(); } catch(e) { } }); deepEqual(actual, expected); }); test('should exclude the first element', 1, function() { deepEqual(_.rest(array), [2, 3]); }); test('should exclude the first two elements', 1, function() { deepEqual(_.rest(array, 2), [3]); }); test('should treat falsey `n` values, except nullish, as `0`', 1, function() { var expected = _.map(falsey, function(value) { return value == null ? [2, 3] : array; }); var actual = _.map(falsey, function(n) { return _.rest(array, n); }); deepEqual(actual, expected); }); test('should return all elements when `n` < `1`', 3, function() { _.each([0, -1, -Infinity], function(n) { deepEqual(_.rest(array, n), array); }); }); test('should return an empty array when `n` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(n) { deepEqual(_.rest(array, n), []); }); }); test('should return an empty when querying empty arrays', 1, function() { deepEqual(_.rest([]), []); }); test('should work when used as a callback for `_.map`', 1, function() { var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]], actual = _.map(array, _.rest); deepEqual(actual, [[2, 3], [5, 6], [8, 9]]); }); test('should work with a callback', 1, function() { var actual = _.rest(array, function(num) { return num < 3; }); deepEqual(actual, [3]); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.rest(array, function() { args = slice.call(arguments); }); deepEqual(args, [1, 0, array]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.rest(array, function(num, index) { return this[index] < 3; }, array); deepEqual(actual, [3]); }); test('should work with an object for `callback`', 1, function() { deepEqual(_.rest(objects, { 'b': 2 }), objects.slice(-2)); }); test('should work with a string for `callback`', 1, function() { deepEqual(_.rest(objects, 'b'), objects.slice(-1)); }); test('should be aliased', 2, function() { strictEqual(_.drop, _.rest); 
// Covers: the tail of `lodash.rest` (the `_.tail` alias check), the
// `lodash.runInContext` module (a stubbed `setTimeout` proves the returned
// lodash copy uses the provided context; skipped under modularize builds),
// the `lodash.sample` module (single/multi-element sampling, `n` handling,
// object and string collections, chaining only when `n` is passed), the
// `lodash.shuffle` module (new array, same elements, object/string/number
// collections), and the start of `lodash.size`.
// NOTE(review): the "empty or falsey collections" test inside the *sample*
// module calls `_.shuffle(value)` / `_.shuffle(value, 1)`, not `_.sample` —
// this looks like a copy-paste from the shuffle suite; the assertions pass
// either way (shuffle of an empty collection is []), but it does not exercise
// `_.sample`. Verify against upstream lodash before changing, since
// `_.sample(value)` (no `n`) returns `undefined`, not `[]`.
strictEqual(_.tail, _.rest); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.runInContext'); (function() { test('should not require a fully populated `context` object', 1, function() { if (!isModularize) { var lodash = _.runInContext({ 'setTimeout': function(callback) { callback(); } }); var pass = false; lodash.delay(function() { pass = true; }, 32); ok(pass); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.sample'); (function() { var array = [1, 2, 3]; test('should return a random element', 1, function() { var actual = _.sample(array); ok(_.contains(array, actual)); }); test('should return two random elements', 1, function() { var actual = _.sample(array, 2); ok(actual[0] !== actual[1] && _.contains(array, actual[0]) && _.contains(array, actual[1])); }); test('should contain elements of the collection', 1, function() { var actual = _.sample(array, array.length); deepEqual(actual.sort(), array); }); test('should treat falsey `n` values, except nullish, as `0`', 1, function() { var expected = _.map(falsey, function(value) { return value == null ? 
1 : []; }); var actual = _.map(falsey, function(n) { return _.sample([1], n); }); deepEqual(actual, expected); }); test('should return an empty array when `n` < `1` or `NaN`', 3, function() { _.each([0, -1, -Infinity], function(n) { deepEqual(_.sample(array, n), []); }); }); test('should return all elements when `n` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(n) { deepEqual(_.sample(array, n).sort(), array); }); }); test('should return `undefined` when sampling an empty array', 1, function() { strictEqual(_.sample([]), undefined); }); test('should return an empty array for empty or falsey collections', 1, function() { var actual = []; var expected = _.transform(empties, function(result) { result.push([], []); }); _.each(empties, function(value) { try { actual.push(_.shuffle(value), _.shuffle(value, 1)); } catch(e) { } }); deepEqual(actual, expected); }); test('should sample an object', 2, function() { var object = { 'a': 1, 'b': 2, 'c': 3 }, actual = _.sample(object); ok(_.contains(array, actual)); actual = _.sample(object, 2); ok(actual[0] !== actual[1] && _.contains(array, actual[0]) && _.contains(array, actual[1])); }); test('should work when used as a callback for `_.map`', 1, function() { var a = [1, 2, 3], b = [4, 5, 6], c = [7, 8, 9], actual = _.map([a, b, c], _.sample); ok(_.contains(a, actual[0]) && _.contains(b, actual[1]) && _.contains(c, actual[2])); }); test('should chain when passing `n`', 1, function() { if (!isNpm) { var actual = _(array).sample(2); ok(actual instanceof _); } else { skipTest(); } }); test('should not chain when arguments are not provided', 1, function() { if (!isNpm) { var actual = _(array).sample(); ok(_.contains(array, actual)); } else { skipTest(); } }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection`', 2, function() { var actual = _.sample(collection); ok(_.contains(collection, actual)); 
actual = _.sample(collection, 2); ok(actual[0] !== actual[1] && _.contains(collection, actual[0]) && _.contains(collection, actual[1])); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.shuffle'); (function() { var array = [1, 2, 3], object = { 'a': 1, 'b': 2, 'c': 3 }; test('should return a new array', 1, function() { notStrictEqual(_.shuffle(array), array); }); test('should contain the same elements after a collection is shuffled', 2, function() { deepEqual(_.shuffle(array).sort(), array); deepEqual(_.shuffle(object).sort(), array); }); test('should shuffle an object', 1, function() { var actual = _.shuffle(object); deepEqual(actual.sort(), array); }); test('should treat number values for `collection` as empty', 1, function() { deepEqual(_.shuffle(1), []); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection`', 1, function() { var actual = _.shuffle(collection); deepEqual(actual.sort(), ['a','b', 'c']); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.size'); (function() { var args = arguments, array = [1, 2, 3]; test('should return the number of own enumerable properties of an object', 1, function() { strictEqual(_.size({ 'one': 1, 'two': 2, 'three': 3 }), 3); }); test('should return the length of an array', 1, function() { strictEqual(_.size(array), 3); }); test('should accept a falsey `object` argument', 1, function() { var expected = _.map(falsey, _.constant(0)); var actual = _.map(falsey, function(value, index) { try { return index ? 
// Covers: the tail of `lodash.size` (arguments objects, jQuery/MooTools-style
// array-likes, rejection of negative / oversized / non-number `length`
// properties, the IE < 9 `[[DontEnum]]` shadowed-property bug via the harness
// global `shadowedObject`, string collections), `lodash.slice`
// (positive/negative/falsey `start` and `end`, coercion to finite numbers),
// `lodash.some` (falsey collections, short-circuit on first truthy result,
// `_.identity` default, the `_.any` alias), `lodash.sortBy` (ascending order,
// stable sort verified via the `Pair`/`stableOrder` fixtures, `undefined`
// values sorted last, `thisArg`, string/array-of-properties callbacks,
// toString-coerced array return values), and the start of
// `lodash.sortedIndex`. Some line breaks below fall inside string literals
// (artifact of line collapsing) — preserve lines byte-for-byte.
_.size(value) : _.size(); } catch(e) { } }); deepEqual(actual, expected); }); test('should work with `arguments` objects (test in IE < 9)', 1, function() { strictEqual(_.size(args), 3); }); test('should work with jQuery/MooTools DOM query collections', 1, function() { function Foo(elements) { push.apply(this, elements); } Foo.prototype = { 'length': 0, 'splice': Array.prototype.splice }; strictEqual(_.size(new Foo(array)), 3); }); test('should not treat objects with negative lengths as array-like', 1, function() { strictEqual(_.size({ 'length': -1 }), 1); }); test('should not treat objects with lengths larger than `maxSafeInteger` as array-like', 1, function() { strictEqual(_.size({ 'length': maxSafeInteger + 1 }), 1); }); test('should not treat objects with non-number lengths as array-like', 1, function() { strictEqual(_.size({ 'length': '0' }), 1); }); test('fixes the JScript `[[DontEnum]]` bug (test in IE < 9)', 1, function() { strictEqual(_.size(shadowedObject), 7); }); _.each({ 'literal': 'abc', 'object': Object('abc') }, function(collection, key) { test('should work with a string ' + key + ' for `collection`', 1, function() { deepEqual(_.size(collection), 3); }); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.slice'); (function() { var array = [1, 2, 3]; test('should work with a positive `start`', 1, function() { deepEqual(_.slice(array, 1), [2, 3]); }); test('should work with a `start` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(start) { deepEqual(_.slice(array, start), []); }); }); test('should treat falsey `start` values as `0`', 1, function() { var expected = _.map(falsey, _.constant(array)); var actual = _.map(falsey, function(start) { return _.slice(array, start); }); deepEqual(actual, expected); }); test('should work with a negative `start`', 1, function() { deepEqual(_.slice(array, -1), [3]); }); test('should work with a negative `start` <= 
negative `array.length`', 3, function() { _.each([-3, -4, -Infinity], function(start) { deepEqual(_.slice(array, start), [1, 2, 3]); }); }); test('should work with a positive `end`', 1, function() { deepEqual(_.slice(array, 0, 1), [1]); }); test('should work with a `end` >= `array.length`', 4, function() { _.each([3, 4, Math.pow(2, 32), Infinity], function(end) { deepEqual(_.slice(array, 0, end), [1, 2, 3]); }); }); test('should treat falsey `end` values, except `undefined`, as `0`', 1, function() { var expected = _.map(falsey, function(value) { return value === undefined ? array : []; }); var actual = _.map(falsey, function(end) { return _.slice(array, 0, end); }); deepEqual(actual, expected); }); test('should work with a negative `end`', 1, function() { deepEqual(_.slice(array, 0, -1), [1, 2]); }); test('should work with a negative `end` <= negative `array.length`', 3, function() { _.each([-3, -4, -Infinity], function(end) { deepEqual(_.slice(array, 0, end), []); }); }); test('should coerce `start` and `end` to finite numbers', 1, function() { var actual = [_.slice(array, '0', 1), _.slice(array, 0, '1'), _.slice(array, '1'), _.slice(array, NaN, 1), _.slice(array, 1, NaN)]; deepEqual(actual, [[1], [1], [2, 3], [1], []]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.some'); (function() { test('should return `false` for empty or falsey collections', 1, function() { var expected = _.map(empties, _.constant(false)); var actual = _.map(empties, function(value) { try { return _.some(value, _.identity); } catch(e) { } }); deepEqual(actual, expected); }); test('should return `true` if the callback returns truthy for any element in the collection', 2, function() { strictEqual(_.some([false, 1, ''], _.identity), true); strictEqual(_.some([null, 'x', 0], _.identity), true); }); test('should return `false` if the callback returns falsey for all elements in the collection', 2, function() { 
strictEqual(_.some([false, false, false], _.identity), false); strictEqual(_.some([null, 0, ''], _.identity), false); }); test('should return `true` as soon as the `callback` result is truthy', 1, function() { strictEqual(_.some([null, true, null], _.identity), true); }); test('should use `_.identity` when no callback is provided', 2, function() { strictEqual(_.some([0, 1]), true); strictEqual(_.some([0, 0]), false); }); test('should be aliased', 1, function() { strictEqual(_.any, _.some); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.sortBy'); (function() { function Pair(a, b, c) { this.a = a; this.b = b; this.c = c; } var objects = [ { 'a': 'x', 'b': 3 }, { 'a': 'y', 'b': 4 }, { 'a': 'x', 'b': 1 }, { 'a': 'y', 'b': 2 } ]; var stableOrder = [ new Pair(1, 1, 1), new Pair(1, 2, 1), new Pair(1, 1, 1), new Pair(1, 2, 1), new Pair(1, 3, 1), new Pair(1, 4, 1), new Pair(1, 5, 1), new Pair(1, 6, 1), new Pair(2, 1, 2), new Pair(2, 2, 2), new Pair(2, 3, 2), new Pair(2, 4, 2), new Pair(2, 5, 2), new Pair(2, 6, 2), new Pair(undefined, 1, 1), new Pair(undefined, 2, 1), new Pair(undefined, 3, 1), new Pair(undefined, 4, 1), new Pair(undefined, 5, 1), new Pair(undefined, 6, 1) ]; test('should sort in ascending order', 1, function() { var actual = _.pluck(_.sortBy(objects, function(object) { return object.b; }), 'b'); deepEqual(actual, [1, 2, 3, 4]); }); test('should perform a stable sort (test in IE > 8, Opera, and V8)', 1, function() { var actual = _.sortBy(stableOrder, function(pair) { return pair.a; }); deepEqual(actual, stableOrder); }); test('should work with `undefined` values', 1, function() { var array = [undefined, 4, 1, undefined, 3, 2]; deepEqual(_.sortBy(array, _.identity), [1, 2, 3, 4, undefined, undefined]); }); test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.sortBy([3, 2, 1]); deepEqual(actual, [1, 2, 3]); }); test('should pass the correct `callback` 
arguments', 1, function() { var args; _.sortBy(objects, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [objects[0], 0, objects]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.sortBy([1, 2, 3], function(num) { return this.sin(num); }, Math); deepEqual(actual, [3, 1, 2]); }); test('should work with a string for `callback`', 1, function() { var actual = _.pluck(_.sortBy(objects, 'b'), 'b'); deepEqual(actual, [1, 2, 3, 4]); }); test('should work with an object for `collection`', 1, function() { var actual = _.sortBy({ 'a': 1, 'b': 2, 'c': 3 }, function(num) { return Math.sin(num); }); deepEqual(actual, [3, 1, 2]); }); test('should treat number values for `collection` as empty', 1, function() { deepEqual(_.sortBy(1), []); }); test('should support sorting by an array of properties', 1, function() { var actual = _.sortBy(objects, ['a', 'b']); deepEqual(actual, [objects[2], objects[0], objects[3], objects[1]]); }); test('should perform a stable sort when sorting by multiple properties (test in IE > 8, Opera, and V8)', 1, function() { var actual = _.sortBy(stableOrder, ['a', 'c']); deepEqual(actual, stableOrder); }); test('should coerce arrays returned from a callback', 1, function() { var actual = _.sortBy(objects, function(object) { var result = [object.a, object.b]; result.toString = function() { return String(this[0]); }; return result; }); deepEqual(actual, [objects[0], objects[2], objects[1], objects[3]]); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.sortedIndex'); (function() { var array = [20, 30, 50], objects = [{ 'x': 20 }, { 'x': 30 }, { 'x': 50 }]; test('should return the insert index of a given value', 2, function() { strictEqual(_.sortedIndex(array, 40), 2); strictEqual(_.sortedIndex(array, 30), 1); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.sortedIndex(array, 40, function() { args || (args = 
// Covers: the tail of `lodash.sortedIndex` (`thisArg`, string-shorthand
// callback, and the huge-array test that counts callback invocations — 33
// steps for a binary search over a 2^32 - 1 length array, skipped on engines
// that cap array length), the `lodash.support` module (all flags boolean; the
// minification check asserts the known property-name whitelist survives
// production builds), the `lodash.startsWith` module (`position` handling:
// >= length, falsey-as-0, negative-as-0, empty `target` always true), and the
// start of the combined startsWith/endsWith parameterized suite.
slice.call(arguments)); }); deepEqual(args, [40]); }); test('should support the `thisArg` argument', 1, function() { var actual = _.sortedIndex(array, 40, function(num) { return this[num]; }, { '20': 20, '30': 30, '40': 40 }); strictEqual(actual, 2); }); test('should work with a string for `callback`', 1, function() { var actual = _.sortedIndex(objects, { 'x': 40 }, 'x'); strictEqual(actual, 2); }); test('supports arrays with lengths larger than `Math.pow(2, 31) - 1`', 1, function() { var length = Math.pow(2, 32) - 1, index = length - 1, array = Array(length), steps = 0; if (array.length == length) { array[index] = index; _.sortedIndex(array, index, function() { steps++; }); strictEqual(steps, 33); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.support'); (function() { test('should contain properties with boolean values', 1, function() { ok(_.every(_.values(_.support), function(value) { return value === true || value === false; })); }); test('should not contain minified properties (test production builds)', 1, function() { var props = [ 'argsClass', 'argsObject', 'dom', 'enumErrorProps', 'enumPrototypes', 'fastBind', 'funcDecomp', 'funcNames', 'nodeClass', 'nonEnumArgs', 'nonEnumShadows', 'nonEnumStrings', 'ownLast', 'spliceObjects', 'unindexedChars' ]; ok(_.isEmpty(_.difference(_.keys(_.support), props))); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.startsWith'); (function() { var string = 'abc'; test('should return `true` if a string starts with `target`', 1, function() { strictEqual(_.startsWith(string, 'a'), true); }); test('should return `false` if a string does not start with `target`', 1, function() { strictEqual(_.startsWith(string, 'b'), false); }); test('should work with a `position` argument', 1, function() { strictEqual(_.startsWith(string, 'b', 1), true); }); test('should work with `position` >= 
`string.length`', 4, function() { _.each([3, 5, maxSafeInteger, Infinity], function(position) { strictEqual(_.startsWith(string, 'a', position), false); }); }); test('should treat falsey `position` values as `0`', 1, function() { var expected = _.map(falsey, _.constant(true)); var actual = _.map(falsey, function(position) { return _.startsWith(string, 'a', position); }); deepEqual(actual, expected); }); test('should treat a negative `position` as `0`', 6, function() { _.each([-1, -3, -Infinity], function(position) { strictEqual(_.startsWith(string, 'a', position), true); strictEqual(_.startsWith(string, 'b', position), false); }); }); test('should always return `true` when `target` is an empty string regardless of `position`', 1, function() { ok(_.every([-Infinity, NaN, -3, -1, 0, 1, 2, 3, 5, maxSafeInteger, Infinity], function(position) { return _.startsWith(string, '', position, true); })); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.startsWith and lodash.endsWith'); _.each(['startsWith', 'endsWith'], function(methodName) { var func = _[methodName], isEndsWith = methodName == 'endsWith', chr = isEndsWith ? 'c' : 'a', string = 'abc'; test('`_.' + methodName + '` should coerce `string` to a string', 2, function() { strictEqual(func(Object(string), chr), true); strictEqual(func({ 'toString': _.constant(string) }, chr), true); }); test('`_.' + methodName + '` should coerce `target` to a string', 2, function() { strictEqual(func(string, Object(chr)), true); strictEqual(func(string, { 'toString': _.constant(chr) }), true); }); test('`_.' + methodName + '` should coerce `position` to a number', 2, function() { var position = isEndsWith ? 
// Covers: the tail of the combined startsWith/endsWith suite (`position`
// coerced via Object wrapper and `toString`), the `lodash.tap` module
// (intercepts and returns the value, wrapped vs unwrapped chaining,
// `thisArg`; skipped under npm builds via `isNpm`/`skipTest`), and the
// `lodash.template` module: escape/evaluate/interpolate delimiters, ES6
// `${}` delimiters, custom and special-character `_.templateSettings`,
// the "imports" and "variable" options, default `with`-statement scoping,
// `this` references, backslash and escaped-character handling. The template
// module is cut off mid-test at the end of this chunk (the \u2028/\u2029
// test continues past the last line shown), so it is preserved verbatim.
2 : 1; strictEqual(func(string, 'b', Object(position)), true); strictEqual(func(string, 'b', { 'toString': _.constant(String(position)) }), true); }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.tap'); (function() { test('should intercept and return the given value', 2, function() { if (!isNpm) { var intercepted, array = [1, 2, 3]; var actual = _.tap(array, function(value) { intercepted = value; }); strictEqual(actual, array); strictEqual(intercepted, array); } else { skipTest(2); } }); test('should return intercept unwrapped values and return wrapped values when chaining', 2, function() { if (!isNpm) { var intercepted, array = [1, 2, 3]; var actual = _(array).tap(function(value) { intercepted = value; value.pop(); }); ok(actual instanceof _); strictEqual(intercepted, array); } else { skipTest(2); } }); test('should support the `thisArg` argument', 1, function() { if (!isNpm) { var array = [1, 2]; var actual = _(array.slice()).tap(function(value) { value.push(this[0]); }, array); deepEqual(actual.value(), [1, 2, 1]); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.template'); (function() { test('should escape values in "escape" delimiters', 1, function() { var escaped = '<p>&amp;&lt;&gt;&quot;&#39;\/</p>', unescaped = '&<>"\'\/'; var compiled = _.template('<p><%- value %></p>'); strictEqual(compiled({ 'value': unescaped }), escaped); }); test('should evaluate JavaScript in "evaluate" delimiters', 1, function() { var compiled = _.template( '<ul><%\ for (var key in collection) {\ %><li><%= collection[key] %></li><%\ } %></ul>' ); var actual = compiled({ 'collection': { 'a': 'A', 'b': 'B' } }); strictEqual(actual, '<ul><li>A</li><li>B</li></ul>'); }); test('should interpolate data object properties', 1, function() { var compiled = _.template('<%= a %>BC'); strictEqual(compiled({ 'a': 'A' }), 'ABC'); }); test('should support 
escaped values in "interpolation" delimiters', 1, function() { var compiled = _.template('<%= a ? "a=\\"A\\"" : "" %>'); strictEqual(compiled({ 'a': true }), 'a="A"'); }); test('should work with "interpolate" delimiters containing ternary operators', 1, function() { var compiled = _.template('<%= value ? value : "b" %>'), data = { 'value': 'a' }; strictEqual(compiled(data), 'a'); }); test('should work with "interpolate" delimiters containing global values', 1, function() { var compiled = _.template('<%= typeof Math.abs %>'); try { var actual = compiled(); } catch(e) { } strictEqual(actual, 'function'); }); test('should work with complex "interpolate" delimiters', 22, function() { _.each({ '<%= a + b %>': '3', '<%= b - a %>': '1', '<%= a = b %>': '2', '<%= !a %>': 'false', '<%= ~a %>': '-2', '<%= a * b %>': '2', '<%= a / b %>': '0.5', '<%= a % b %>': '1', '<%= a >> b %>': '0', '<%= a << b %>': '4', '<%= a & b %>': '0', '<%= a ^ b %>': '3', '<%= a | b %>': '3', '<%= {}.toString.call(0) %>': '[object Number]', '<%= a.toFixed(2) %>': '1.00', '<%= obj["a"] %>': '1', '<%= delete a %>': 'true', '<%= "a" in obj %>': 'true', '<%= obj instanceof Object %>': 'true', '<%= new Boolean %>': 'false', '<%= typeof a %>': 'number', '<%= void a %>': '' }, function(value, key) { var compiled = _.template(key), data = { 'a': 1, 'b': 2 }; strictEqual(compiled(data), value, key); }); }); test('should parse ES6 template delimiters', 2, function() { var data = { 'value': 2 }; strictEqual(_.template('1${value}3', data), '123'); strictEqual(_.template('${"{" + value + "\\}"}', data), '{2}'); }); test('should not reference `_.escape` when "escape" delimiters are not used', 1, function() { var compiled = _.template('<%= typeof __e %>'); strictEqual(compiled({}), 'undefined'); }); test('should allow referencing variables declared in "evaluate" delimiters from other delimiters', 1, function() { var compiled = _.template('<% var b = a; %><%= b.value %>'), data = { 'a': { 'value': 1 } }; 
strictEqual(compiled(data), '1'); }); test('should support single line comments in "evaluate" delimiters (test production builds)', 1, function() { var compiled = _.template('<% // comment %><% if (value) { %>yap<% } else { %>nope<% } %>'); strictEqual(compiled({ 'value': true }), 'yap'); }); test('should work with custom `_.templateSettings` delimiters', 1, function() { var settings = _.clone(_.templateSettings); _.assign(_.templateSettings, { 'escape': /\{\{-([\s\S]+?)\}\}/g, 'evaluate': /\{\{([\s\S]+?)\}\}/g, 'interpolate': /\{\{=([\s\S]+?)\}\}/g }); var compiled = _.template('<ul>{{ _.each(collection, function(value, index) { }}<li>{{= index }}: {{- value }}</li>{{ }); }}</ul>'), expected = '<ul><li>0: a &amp; A</li><li>1: b &amp; B</li></ul>'; strictEqual(compiled({ 'collection': ['a & A', 'b & B'] }), expected); _.assign(_.templateSettings, settings); }); test('should work with `_.templateSettings` delimiters containing special characters', 1, function() { var settings = _.clone(_.templateSettings); _.assign(_.templateSettings, { 'escape': /<\?-([\s\S]+?)\?>/g, 'evaluate': /<\?([\s\S]+?)\?>/g, 'interpolate': /<\?=([\s\S]+?)\?>/g }); var compiled = _.template('<ul><? _.each(collection, function(value, index) { ?><li><?= index ?>: <?- value ?></li><? 
}); ?></ul>'), expected = '<ul><li>0: a &amp; A</li><li>1: b &amp; B</li></ul>'; strictEqual(compiled({ 'collection': ['a & A', 'b & B'] }), expected); _.assign(_.templateSettings, settings); }); test('should work with no delimiters', 1, function() { var expected = 'abc'; strictEqual(_.template(expected, {}), expected); }); test('should support the "imports" option', 1, function() { var options = { 'imports': { 'a': 1 } }, compiled = _.template('<%= a %>', null, options); strictEqual(compiled({}), '1'); }); test('should support the "variable" options', 1, function() { var compiled = _.template( '<% _.each( data.a, function( value ) { %>' + '<%= value.valueOf() %>' + '<% }) %>', null, { 'variable': 'data' } ); try { var data = { 'a': [1, 2, 3] }; strictEqual(compiled(data), '123'); } catch(e) { ok(false); } }); test('should use a `with` statement by default', 1, function() { var compiled = _.template('<%= index %><%= collection[index] %><% _.each(collection, function(value, index) { %><%= index %><% }); %>'), actual = compiled({ 'index': 1, 'collection': ['a', 'b', 'c'] }); strictEqual(actual, '1b012'); }); test('should work correctly with `this` references', 2, function() { var compiled = _.template('a<%= this.String("b") %>c'); strictEqual(compiled(), 'abc'); var object = { 'b': 'B' }; object.compiled = _.template('A<%= this.b %>C', null, { 'variable': 'obj' }); strictEqual(object.compiled(), 'ABC'); }); test('should work with backslashes', 1, function() { var compiled = _.template('<%= a %> \\b'); strictEqual(compiled({ 'a': 'A' }), 'A \\b'); }); test('should work with escaped characters in string literals', 2, function() { var compiled = _.template('<% print("\'\\n\\r\\t\\u2028\\u2029\\\\") %>'); strictEqual(compiled(), "'\n\r\t\u2028\u2029\\"); compiled = _.template('\'\n\r\t<%= a %>\u2028\u2029\\"'); strictEqual(compiled({ 'a': 'A' }), '\'\n\r\tA\u2028\u2029\\"'); }); test('should handle \\u2028 & \\u2029 characters', 1, function() { var compiled = 
_.template('\u2028<%= "\\u2028\\u2029" %>\u2029'); strictEqual(compiled(), '\u2028\u2028\u2029\u2029'); }); test('should work with statements containing quotes', 1, function() { var compiled = _.template("<%\ if (a == 'A' || a == \"a\") {\ %>'a',\"A\"<%\ } %>" ); strictEqual(compiled({ 'a': 'A' }), "'a',\"A\""); }); test('should work with templates containing newlines and comments', 1, function() { var compiled = _.template('<%\n\ // comment\n\ if (value) { value += 3; }\n\ %><p><%= value %></p>' ); strictEqual(compiled({ 'value': 3 }), '<p>6</p>'); }); test('should not error with IE conditional comments enabled (test with development build)', 1, function() { var compiled = _.template(''), pass = true; /*@cc_on @*/ try { compiled(); } catch(e) { pass = false; } ok(pass); }); test('should tokenize delimiters', 1, function() { var compiled = _.template('<span class="icon-<%= type %>2"></span>'), data = { 'type': 1 }; strictEqual(compiled(data), '<span class="icon-12"></span>'); }); test('should evaluate delimiters once', 1, function() { var actual = [], compiled = _.template('<%= func("a") %><%- func("b") %><% func("c") %>'); compiled({ 'func': function(value) { actual.push(value); } }); deepEqual(actual, ['a', 'b', 'c']); }); test('should match delimiters before escaping text', 1, function() { var compiled = _.template('<<\n a \n>>', null, { 'evaluate': /<<(.*?)>>/g }); strictEqual(compiled(), '<<\n a \n>>'); }); test('should resolve `null` and `undefined` values to an empty string', 4, function() { var compiled = _.template('<%= a %><%- a %>'); strictEqual(compiled({ 'a': null }), ''); strictEqual(compiled({ 'a': undefined }), ''); compiled = _.template('<%= a.b %><%- a.b %>'); strictEqual(compiled({ 'a': {} }), ''); strictEqual(compiled({ 'a': {} }), ''); }); test('should parse delimiters with newlines', 1, function() { var expected = '<<\nprint("<p>" + (value ? 
"yes" : "no") + "</p>")\n>>', compiled = _.template(expected, null, { 'evaluate': /<<(.+?)>>/g }), data = { 'value': true }; strictEqual(compiled(data), expected); }); test('should support recursive calls', 1, function() { var compiled = _.template('<%= a %><% a = _.template(c, obj) %><%= a %>'), data = { 'a': 'A', 'b': 'B', 'c': '<%= b %>' }; strictEqual(compiled(data), 'AB'); }); test('should coerce `text` argument to a string', 1, function() { var data = { 'a': 1 }, object = { 'toString': function() { return '<%= a %>'; } }; strictEqual(_.template(object, data), '1'); }); test('should not augment the `options` object', 1, function() { var options = {}; _.template('', {}, options); deepEqual(options, {}); }); test('should not modify `_.templateSettings` when `options` are provided', 2, function() { ok(!('a' in _.templateSettings)); _.template('', {}, { 'a': 1 }); ok(!('a' in _.templateSettings)); delete _.templateSettings.a; }); test('should not error for non-object `data` and `options` values', 2, function() { var pass = true; try { _.template('', 1); } catch(e) { pass = false; } ok(pass); pass = true; try { _.template('', 1, 1); } catch(e) { pass = false; } ok(pass); }); test('should provide the template source when a SyntaxError occurs', 1, function() { try { _.template('<% if x %>'); } catch(e) { var source = e.source; } ok(/__p/.test(source)); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.truncate'); (function() { var string = 'hi-diddly-ho there, neighborino'; test('should truncate to a length of `30` by default', 1, function() { strictEqual(_.truncate(string), 'hi-diddly-ho there, neighbo...'); }); test('should not truncate if `string` is <= `length`', 2, function() { strictEqual(_.truncate(string, string.length), string); strictEqual(_.truncate(string, string.length + 2), string); }); test('should truncate string the given length', 1, function() { strictEqual(_.truncate(string, 24), 
'hi-diddly-ho there, n...'); }); test('should support a `omission` option', 1, function() { strictEqual(_.truncate(string, { 'omission': ' [...]' }), 'hi-diddly-ho there, neig [...]'); }); test('should support a `length` option', 1, function() { strictEqual(_.truncate(string, { 'length': 4 }), 'h...'); }); test('should support a `separator` option', 2, function() { strictEqual(_.truncate(string, { 'length': 24, 'separator': ' ' }), 'hi-diddly-ho there,...'); strictEqual(_.truncate(string, { 'length': 24, 'separator': /,? +/ }), 'hi-diddly-ho there...'); }); test('should treat negative `length` as `0`', 4, function() { _.each([0, -2], function(length) { strictEqual(_.truncate(string, length), '...'); strictEqual(_.truncate(string, { 'length': length }), '...'); }); }); test('should coerce `length` to a number', 4, function() { _.each(['', '4'], function(length, index) { var actual = index ? 'h...' : '...'; strictEqual(_.truncate(string, length), actual); strictEqual(_.truncate(string, { 'length': { 'valueOf': _.constant(length) } }), actual); }); }); test('should coerce `string` to a string', 2, function() { strictEqual(_.truncate(Object(string), 4), 'h...'); strictEqual(_.truncate({ 'toString': _.constant(string) }, 5), 'hi...'); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.throttle'); (function() { asyncTest('should throttle a function', 2, function() { if (!(isRhino && isModularize)) { var count = 0; var throttled = _.throttle(function() { count++; }, 32); throttled(); throttled(); throttled(); var lastCount = count; ok(count > 0); setTimeout(function() { ok(count > lastCount); QUnit.start(); }, 64); } else { skipTest(2); QUnit.start(); } }); asyncTest('subsequent calls should return the result of the first call', 5, function() { if (!(isRhino && isModularize)) { var throttled = _.throttle(_.identity, 32), result = [throttled('a'), throttled('b')]; deepEqual(result, ['a', 'a']); 
setTimeout(function() { var result = [throttled('x'), throttled('y')]; notEqual(result[0], 'a'); notStrictEqual(result[0], undefined); notEqual(result[1], 'y'); notStrictEqual(result[1], undefined); QUnit.start(); }, 64); } else { skipTest(5); QUnit.start(); } }); asyncTest('should clear timeout when `func` is called', 1, function() { if (!isModularize) { var callCount = 0, dateCount = 0; var getTime = function() { return ++dateCount < 3 ? +new Date : Infinity; }; var lodash = _.runInContext(_.assign({}, root, { 'Date': function() { return { 'getTime': getTime, 'valueOf': getTime }; } })); var throttled = lodash.throttle(function() { callCount++; }, 32); throttled(); throttled(); throttled(); setTimeout(function() { strictEqual(callCount, 2); QUnit.start(); }, 64); } else { skipTest(); QUnit.start(); } }); asyncTest('should not trigger a trailing call when invoked once', 2, function() { if (!(isRhino && isModularize)) { var count = 0, throttled = _.throttle(function() { count++; }, 32); throttled(); strictEqual(count, 1); setTimeout(function() { strictEqual(count, 1); QUnit.start(); }, 64); } else { skipTest(2); QUnit.start(); } }); _.times(2, function(index) { test('should trigger a call when invoked repeatedly' + (index ? ' and `leading` is `false`' : ''), 1, function() { if (!(isRhino && isModularize)) { var count = 0, limit = 256, options = index ? 
{ 'leading': false } : {}; var throttled = _.throttle(function() { count++; }, 32, options); var start = +new Date; while ((new Date - start) < limit) { throttled(); } ok(count > 1); } else { skipTest(); } }); }); asyncTest('should apply default options correctly', 3, function() { if (!(isRhino && isModularize)) { var count = 0; var throttled = _.throttle(function(value) { count++; return value; }, 32, {}); strictEqual(throttled('a'), 'a'); strictEqual(throttled('b'), 'a'); setTimeout(function() { strictEqual(count, 2); QUnit.start(); }, 256); } else { skipTest(3); QUnit.start(); } }); test('should support a `leading` option', 4, function() { if (!(isRhino && isModularize)) { _.each([true, { 'leading': true }], function(options) { var withLeading = _.throttle(_.identity, 32, options); strictEqual(withLeading('a'), 'a'); }); _.each([false, { 'leading': false }], function(options) { var withoutLeading = _.throttle(_.identity, 32, options); strictEqual(withoutLeading('a'), undefined); }); } else { skipTest(4); } }); asyncTest('should support a `trailing` option', 6, function() { if (!(isRhino && isModularize)) { var withCount = 0, withoutCount = 0; var withTrailing = _.throttle(function(value) { withCount++; return value; }, 64, { 'trailing': true }); var withoutTrailing = _.throttle(function(value) { withoutCount++; return value; }, 64, { 'trailing': false }); strictEqual(withTrailing('a'), 'a'); strictEqual(withTrailing('b'), 'a'); strictEqual(withoutTrailing('a'), 'a'); strictEqual(withoutTrailing('b'), 'a'); setTimeout(function() { strictEqual(withCount, 2); strictEqual(withoutCount, 1); QUnit.start(); }, 256); } else { skipTest(6); QUnit.start(); } }); asyncTest('should not update `lastCalled`, at the end of the timeout, when `trailing` is `false`', 1, function() { if (!(isRhino && isModularize)) { var count = 0; var throttled = _.throttle(function() { count++; }, 64, { 'trailing': false }); throttled(); throttled(); setTimeout(function() { throttled(); 
throttled(); }, 96); setTimeout(function() { ok(count > 1); QUnit.start(); }, 192); } else { skipTest(); QUnit.start(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.debounce and lodash.throttle'); _.each(['debounce', 'throttle'], function(methodName) { var func = _[methodName], isThrottle = methodName == 'throttle'; test('_.' + methodName + ' should not error for non-object `options` values', 1, function() { var pass = true; try { func(_.noop, 32, 1); } catch(e) { pass = false; } ok(pass); }); asyncTest('_.' + methodName + ' should call `func` with the correct `this` binding', 1, function() { if (!(isRhino && isModularize)) { var object = { 'funced': func(function() { actual.push(this); }, 32) }; var actual = [], expected = _.times(isThrottle ? 2 : 1, _.constant(object)); object.funced(); if (isThrottle) { object.funced(); } setTimeout(function() { deepEqual(actual, expected); QUnit.start(); }, 64); } else { skipTest(); QUnit.start(); } }); asyncTest('_.' + methodName + ' supports recursive calls', 2, function() { if (!(isRhino && isModularize)) { var actual = [], args = _.map(['a', 'b', 'c'], function(chr) { return [{}, chr]; }), length = isThrottle ? 2 : 1, expected = args.slice(0, length), queue = args.slice(); var funced = func(function() { var current = [this]; push.apply(current, arguments); actual.push(current); var next = queue.shift(); if (next) { funced.call(next[0], next[1]); } }, 32); var next = queue.shift(); funced.call(next[0], next[1]); deepEqual(actual, expected.slice(0, length - 1)); setTimeout(function() { deepEqual(actual, expected); QUnit.start(); }, 32); } else { skipTest(2); QUnit.start(); } }); asyncTest('_.' + methodName + ' should work if the system time is set backwards', 1, function() { if (!isModularize) { var callCount = 0, dateCount = 0; var getTime = function() { return ++dateCount < 2 ? 
+new Date : +new Date(2012, 3, 23, 23, 27, 18); }; var lodash = _.runInContext(_.assign({}, root, { 'Date': function() { return { 'getTime': getTime, 'valueOf': getTime }; } })); var funced = lodash[methodName](function() { callCount++; }, 32); funced(); setTimeout(function() { funced(); strictEqual(callCount, isThrottle ? 2 : 1); QUnit.start(); }, 64); } else { skipTest(); QUnit.start(); } }); asyncTest('_.' + methodName + ' should support cancelling delayed calls', 1, function() { if (!(isRhino && isModularize)) { var callCount = 0; var funced = func(function() { callCount++; }, 32, { 'leading': false }); funced(); funced.cancel(); setTimeout(function() { strictEqual(callCount, 0); QUnit.start(); }, 64); } else { skipTest(); QUnit.start(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.toArray'); (function() { test('should return the values of objects', 1, function() { var array = [1, 2, 3], object = { 'a': 1, 'b': 2, 'c': 3 }; deepEqual(_.toArray(object), array); }); test('should work with a string for `collection` (test in Opera < 10.52)', 2, function() { deepEqual(_.toArray('abc'), ['a', 'b', 'c']); deepEqual(_.toArray(Object('abc')), ['a', 'b', 'c']); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.slice and lodash.toArray'); _.each(['slice', 'toArray'], function(methodName) { var args = (function() { return arguments; }(1, 2, 3)), array = [1, 2, 3], func = _[methodName]; test('should return a dense array', 3, function() { var sparse = Array(3); sparse[1] = 2; var actual = func(sparse); ok('0' in actual); ok('2' in actual); deepEqual(actual, sparse); }); test('should treat array-like objects like arrays', 2, function() { var object = { '0': 'a', '1': 'b', '2': 'c', 'length': 3 }; deepEqual(func(object), ['a', 'b', 'c']); deepEqual(func(args), array); }); test('should return a shallow clone of arrays', 2, function() { var actual = 
func(array); notStrictEqual(actual, array); deepEqual(func(array), array); }); test('should work with a node list for `collection` (test in IE < 9)', 1, function() { if (document) { try { var nodeList = document.getElementsByTagName('body'), actual = func(nodeList); } catch(e) { } deepEqual(actual, [body]); } else { skipTest(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.times'); (function() { test('should rollover large `n` values', 1, function() { var actual = _.times(Math.pow(2, 32) + 1); deepEqual(actual, [0]); }); test('should coerce non-finite `n` values to `0`', 3, function() { _.each([-Infinity, NaN, Infinity], function(n) { deepEqual(_.times(n), []); }); }); test('should pass the correct `callback` arguments', 1, function() { var args; _.times(1, function() { args || (args = slice.call(arguments)); }); deepEqual(args, [0]); }); test('should support the `thisArg` argument', 1, function() { var expect = [1, 2, 3]; var actual = _.times(3, function(num) { return this[num]; }, expect); deepEqual(actual, expect); }); test('should use `_.identity` when no `callback` is provided', 1, function() { var actual = _.times(3); deepEqual(actual, [0, 1, 2]); }); test('should return an array of the results of each `callback` execution', 1, function() { deepEqual(_.times(3, function(n) { return n * 2; }), [0, 2, 4]); }); test('should return an empty array for falsey and negative `n` arguments', 1, function() { var values = falsey.concat(-1, -Infinity), expected = _.map(values, _.constant([])); var actual = _.map(values, function(value, index) { return index ? 
_.times(value) : _.times(); }); deepEqual(actual, expected); }); test('should return a wrapped value when chaining', 2, function() { if (!isNpm) { var actual = _(3).times(); ok(actual instanceof _); deepEqual(actual.value(), [0, 1, 2]); } else { skipTest(2); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.transform'); (function() { test('should produce an that is an instance of the given object\'s constructor', 2, function() { function Foo() { this.a = 1; this.b = 2; this.c = 3; } var actual = _.transform(new Foo, function(result, value, key) { result[key] = value * value; }); ok(actual instanceof Foo); deepEqual(_.clone(actual), { 'a': 1, 'b': 4, 'c': 9 }); }); test('should treat sparse arrays as dense', 1, function() { var actual = _.transform(Array(1), function(result, value, index) { result[index] = String(value); }); deepEqual(actual, ['undefined']); }); test('should work without a callback argument', 1, function() { function Foo() {} ok(_.transform(new Foo) instanceof Foo); }); test('should check that `object` is an object before using it as the `accumulator` `[[Prototype]]', 1, function() { ok(!(_.transform(1) instanceof Number)); }); _.each({ 'array': [1, 2, 3], 'object': { 'a': 1, 'b': 2, 'c': 3 } }, function(object, key) { test('should pass the correct `callback` arguments when transforming an ' + key, 2, function() { var args; _.transform(object, function() { args || (args = slice.call(arguments)); }); var first = args[0]; if (key == 'array') { ok(first !== object && _.isArray(first)); deepEqual(args, [first, 1, 0, object]); } else { ok(first !== object && _.isPlainObject(first)); deepEqual(args, [first, 1, 'a', object]); } }); test('should support the `thisArg` argument when transforming an ' + key, 2, function() { var actual = _.transform(object, function(result, value, key) { result[key] = this[key]; }, null, object); notStrictEqual(actual, object); deepEqual(actual, object); }); }); 
}()); /*--------------------------------------------------------------------------*/ QUnit.module('trim methods'); _.each(['trim', 'trimLeft', 'trimRight'], function(methodName, index) { var func = _[methodName]; var parts = []; if (index != 2) { parts.push('leading'); } if (index != 1) { parts.push('trailing'); } parts = parts.join(' and '); test('`_.' + methodName + '` should remove ' + parts + ' whitespace', 1, function() { var string = whitespace + 'a b c' + whitespace, expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : ''); strictEqual(func(string), expected); }); test('`_.' + methodName + '` should not remove non-whitespace characters', 1, function() { var problemChars = '\x85\u200b\ufffe', string = problemChars + 'a b c' + problemChars; strictEqual(func(string), string); }); test('`_.' + methodName + '` should coerce `string` to a string', 1, function() { var object = { 'toString': function() { return whitespace + 'a b c' + whitespace; } }, expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : ''); strictEqual(func(object), expected); }); test('`_.' + methodName + '` should remove ' + parts + ' `chars`', 1, function() { var string = '-_-a-b-c-_-', expected = (index == 2 ? '-_-' : '') + 'a-b-c' + (index == 1 ? '-_-' : ''); strictEqual(func(string, '_-'), expected); }); test('`_.' + methodName + '` should coerce `chars` to a string', 1, function() { var object = { 'toString': function() { return '_-'; } }, string = '-_-a-b-c-_-', expected = (index == 2 ? '-_-' : '') + 'a-b-c' + (index == 1 ? '-_-' : ''); strictEqual(func(string, object), expected); }); test('`_.' + methodName + '` should return an empty string when provided `null`, `undefined`, or empty string and `chars`', 6, function() { _.each([null, '_-'], function(chars) { strictEqual(func(null, chars), ''); strictEqual(func(undefined, chars), ''); strictEqual(func('', chars), ''); }); }); test('`_.' 
+ methodName + '` should work with `null`, `undefined`, or empty string for `chars`', 3, function() { var string = whitespace + 'a b c' + whitespace, expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : ''); strictEqual(func(string, null), expected); strictEqual(func(string, undefined), expected); strictEqual(func(string, ''), string); }); test('`_.' + methodName + '` should return an unwrapped value when chaining', 1, function() { if (!isNpm) { var string = whitespace + 'a b c' + whitespace, expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : ''), actual = _(string)[methodName](); strictEqual(actual, expected); } else { skipTest(); } }); }); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.unescape'); (function() { var escaped = '&amp;&lt;&gt;&quot;&#39;\/', unescaped = '&<>"\'\/'; test('should unescape entities in the correct order', 1, function() { strictEqual(_.unescape('&amp;lt;'), '&lt;'); }); test('should unescape the proper entities', 1, function() { strictEqual(_.unescape(escaped), unescaped); }); test('should not unescape the "&#x2F;" entity', 1, function() { strictEqual(_.unescape('&#x2F;'), '&#x2F;'); }); test('should handle strings with nothing to unescape', 1, function() { strictEqual(_.unescape('abc'), 'abc'); }); test('should unescape the same characters escaped by `_.escape`', 1, function() { strictEqual(_.unescape(_.escape(unescaped)), unescaped); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.union'); (function() { var args = arguments; test('should return the union of the given arrays', 1, function() { var actual = _.union([1, 3, 2], [5, 2, 1, 4], [2, 1]); deepEqual(actual, [1, 3, 2, 5, 4]); }); test('should not flatten nested arrays', 1, function() { var actual = _.union([1, 3, 2], [1, [5]], [2, [4]]); deepEqual(actual, [1, 3, 2, [5], [4]]); }); test('should ignore values that 
are not arrays or `arguments` objects', 3, function() { var array = [0]; deepEqual(_.union(array, 3, null, { '0': 1 }), array); deepEqual(_.union(null, array, null, [2, 1]), [0, 2, 1]); deepEqual(_.union(null, array, null, args), [0, 1, 2, 3]); }); }(1, 2, 3)); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.uniq'); (function() { var objects = [{ 'a': 2 }, { 'a': 3 }, { 'a': 1 }, { 'a': 2 }, { 'a': 3 }, { 'a': 1 }]; test('should return unique values of an unsorted array', 1, function() { var array = [2, 3, 1, 2, 3, 1]; deepEqual(_.uniq(array), [2, 3, 1]); }); test('should return unique values of a sorted array', 1, function() { var array = [1, 1, 2, 2, 3]; deepEqual(_.uniq(array), [1, 2, 3]); }); test('should work with `isSorted`', 1, function() { var array = [1, 1, 2, 2, 3]; deepEqual(_.uniq([1, 1, 2, 2, 3], true), [1, 2, 3]); }); test('should work with a callback', 1, function() { var actual = _.uniq(objects, false, function(object) { return object.a; }); deepEqual(actual, objects.slice(0, 3)); }); test('should work with a callback without specifying `isSorted`', 1, function() { var actual = _.uniq(objects, function(object) { return object.a; }); deepEqual(actual, objects.slice(0, 3)); }); test('should support the `thisArg` argument', 1, function() { var actual = _.uniq([1, 2, 1.5, 3, 2.5], function(num) { return this.floor(num); }, Math); deepEqual(actual, [1, 2, 3]); }); test('should perform an unsorted uniq operation when used as a callback for `_.map`', 1, function() { var array = [[2, 1, 2], [1, 2, 1]], actual = _.map(array, _.uniq); deepEqual(actual, [[2, 1], [1, 2]]); }); test('should work with large arrays', 1, function() { var object = {}; var largeArray = _.times(largeArraySize, function(index) { switch (index % 3) { case 0: return 0; case 1: return 'a'; case 2: return object; } }); deepEqual(_.uniq(largeArray), [0, 'a', object]); }); test('should work with large arrays of boolean, `null`, and 
`undefined` values', 1, function() { var array = [], expected = [true, false, null, undefined], count = Math.ceil(largeArraySize / expected.length); _.times(count, function() { push.apply(array, expected); }); deepEqual(_.uniq(array), expected); }); test('should distinguish between numbers and numeric strings', 1, function() { var array = [], expected = ['2', 2, Object('2'), Object(2)], count = Math.ceil(largeArraySize / expected.length); _.times(count, function() { push.apply(array, expected); }); deepEqual(_.uniq(array), expected); }); _.each({ 'an object': ['a'], 'a number': 0, 'a string': '0' }, function(callback, key) { test('should work with ' + key + ' for `callback`', 1, function() { var actual = _.uniq([['a'], ['b'], ['a']], callback); deepEqual(actual, [['a'], ['b']]); }); }); test('should be aliased', 1, function() { strictEqual(_.unique, _.uniq); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.uniqueId'); (function() { test('should generate unique ids', 1, function() { var actual = []; _.times(1000, function() { actual.push(_.uniqueId()); }); strictEqual(_.uniq(actual).length, actual.length); }); test('should return a string value when not passing a prefix argument', 1, function() { strictEqual(typeof _.uniqueId(), 'string'); }); test('should coerce the prefix argument to a string', 1, function() { var actual = [_.uniqueId(3), _.uniqueId(2), _.uniqueId(1)]; ok(/3\d+,2\d+,1\d+/.test(actual)); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash.values'); (function() { test('should get the values of an object', 1, function() { var object = { 'a': 1, 'b': 2 }; deepEqual(_.values(object), [1, 2]); }); test('should work with an object that has a `length` property', 1, function() { var object = { '0': 'a', '1': 'b', 'length': 2 }; deepEqual(_.values(object), ['a', 'b', 2]); }); }()); 
/*--------------------------------------------------------------------------*/

// Tests for `_.where`: filters a collection by a deep partial comparison
// against the own enumerable properties of a `source` object.
QUnit.module('lodash.where');

(function() {
  var objects = [
    { 'a': 1 },
    { 'a': 1 },
    { 'a': 1, 'b': 2 },
    { 'a': 2, 'b': 2 },
    { 'a': 3 }
  ];

  test('should filter by `source` properties', 6, function() {
    deepEqual(_.where(objects, { 'a': 1 }), [{ 'a': 1 }, { 'a': 1 }, { 'a': 1, 'b': 2 }]);
    deepEqual(_.where(objects, { 'a': 2 }), [{ 'a': 2, 'b': 2 }]);
    deepEqual(_.where(objects, { 'a': 3 }), [{ 'a': 3 }]);
    deepEqual(_.where(objects, { 'b': 1 }), []);
    deepEqual(_.where(objects, { 'b': 2 }), [{ 'a': 1, 'b': 2 }, { 'a': 2, 'b': 2 }]);
    deepEqual(_.where(objects, { 'a': 1, 'b': 2 }), [{ 'a': 1, 'b': 2 }]);
  });

  test('should not filter by inherited `source` properties', 2, function() {
    function Foo() {}
    Foo.prototype = { 'a': 2 };

    // `a` is inherited; only the own property `b` may participate in the match
    var source = new Foo;
    source.b = 2;

    var expected = [objects[2], objects[3]],
        actual = _.where(objects, source);

    deepEqual(actual, expected);
    ok(_.isEmpty(_.difference(actual, objects)));
  });

  test('should filter by problem JScript properties (test in IE < 9)', 1, function() {
    // `shadowedObject` is defined earlier in this file; presumably it carries
    // keys like `constructor`/`toString` that old IE fails to enumerate — the
    // match must still see them (TODO confirm against its definition)
    var collection = [shadowedObject];
    deepEqual(_.where(collection, shadowedObject), [shadowedObject]);
  });

  test('should work with an object for `collection`', 2, function() {
    var collection = {
      'x': { 'a': 1 },
      'y': { 'a': 3 },
      'z': { 'a': 1, 'b': 2 }
    };

    var expected = [collection.x, collection.z],
        actual = _.where(collection, { 'a': 1 });

    deepEqual(actual, expected);
    ok(_.isEmpty(_.difference(actual, _.values(collection))));
  });

  test('should work with a function for `source`', 1, function() {
    // own properties attached to a function should be matched like any object's
    function source() {}
    source.a = 2;
    deepEqual(_.where(objects, source), [{ 'a': 2, 'b': 2 }]);
  });

  test('should match all elements when provided an empty `source`', 1, function() {
    // `empties` is a shared fixture defined earlier in this file
    var expected = _.map(empties, _.constant(objects));

    var actual = _.map(empties, function(value) {
      var result = _.where(objects, value);
      // also assert a new array is returned, not the collection itself
      return result !== objects && result;
    });

    deepEqual(actual, expected);
  });

  test('should perform a deep partial comparison of `source`', 2, function() {
    var collection = [{ 'a': { 'b': { 'c': 1, 'd': 2 }, 'e': 3 }, 'f': 4 }],
        expected = collection.slice(),
        actual = _.where(collection, { 'a': { 'b': { 'c': 1 } } });

    deepEqual(actual, expected);
    ok(_.isEmpty(_.difference(actual, collection)));
  });

  // NOTE(review): test name reads "search of arrays" in the original; the
  // string is retained verbatim since it is a runtime value (test title).
  test('should search of arrays for values', 2, function() {
    var collection = [{ 'a': [1, 2] }],
        expected = collection.slice();

    deepEqual(_.where(collection, { 'a': [] }), []);
    deepEqual(_.where(collection, { 'a': [2] }), expected);
  });

  test('should perform a partial comparison of *all* objects within arrays of `source`', 2, function() {
    var collection = [
      { 'a': [{ 'b': 1, 'c': 2, 'd': 3 }, { 'b': 4, 'c': 5, 'd': 6 }] },
      { 'a': [{ 'b': 1, 'c': 2, 'd': 3 }, { 'b': 4, 'c': 6, 'd': 7 }] }
    ];

    // the second element differs in `c` of its second sub-object, so only
    // the first element matches both sub-object patterns
    var actual = _.where(collection, { 'a': [{ 'b': 1, 'c': 2 }, { 'b': 4, 'c': 5 }] });

    deepEqual(actual, [collection[0]]);
    ok(_.isEmpty(_.difference(actual, collection)));
  });

  test('should handle a `source` with `undefined` values', 4, function() {
    // a key explicitly set to `undefined` in `source` only matches elements
    // that themselves carry that key with the value `undefined`
    var source = { 'b': undefined },
        actual = _.where([{ 'a': 1 }, { 'a': 1, 'b': 1 }], source);

    deepEqual(actual, []);

    var object = { 'a': 1, 'b': undefined };
    actual = _.where([object], source);
    deepEqual(actual, [object]);

    // same rule one level deep
    source = { 'a': { 'c': undefined } };
    actual = _.where([{ 'a': { 'b': 1 } }, { 'a': { 'b': 1, 'c': 1 } }], source);
    deepEqual(actual, []);

    object = { 'a': { 'b': 1, 'c': undefined } };
    actual = _.where([object], source);
    deepEqual(actual, [object]);
  });
}());

/*--------------------------------------------------------------------------*/

// Tests for `_.without`: removes all occurrences of the given values,
// compared with strict equality.
QUnit.module('lodash.without');

(function() {
  test('should use strict equality to determine the values to reject', 2, function() {
    var object1 = { 'a': 1 },
        object2 = { 'b': 2 },
        array = [object1, object2];

    // an equivalent-but-distinct object must not match
    deepEqual(_.without(array, { 'a': 1 }), array);
    deepEqual(_.without(array, object1), [object2]);
  });

  test('should remove all occurrences of each value from an array', 1, function() {
    var array = [1, 2, 3, 1, 2, 3];
    deepEqual(_.without(array, 1, 2), [3, 3]);
  });
}());

/*--------------------------------------------------------------------------*/

// Tests for `_.wrap`: wraps `func` inside `wrapper`, passing `func` as the
// wrapper's first argument.
QUnit.module('lodash.wrap');

(function() {
  test('should create a wrapped function', 1, function() {
    var p = _.wrap(_.escape, function(func, text) {
      return '<p>' + func(text) + '</p>';
    });

    strictEqual(p('fred, barney, & pebbles'), '<p>fred, barney, &amp; pebbles</p>');
  });

  test('should pass the correct `wrapper` arguments', 1, function() {
    var args;

    var wrapped = _.wrap(_.noop, function() {
      // capture only the first invocation's arguments
      args || (args = slice.call(arguments));
    });

    wrapped(1, 2, 3);
    // wrapped func comes first, then the call's own arguments
    deepEqual(args, [_.noop, 1, 2, 3]);
  });

  test('should not set a `this` binding', 1, function() {
    // `this` inside the wrapper must be the call-site receiver (`object`)
    var p = _.wrap(_.escape, function(func) {
      return '<p>' + func(this.text) + '</p>';
    });

    var object = { 'p': p, 'text': 'fred, barney, & pebbles' };
    strictEqual(object.p(), '<p>fred, barney, &amp; pebbles</p>');
  });
}());

/*--------------------------------------------------------------------------*/

// Tests for `_.xor`: symmetric difference of the given arrays.
// The IIFE is invoked with (1, 2, 3) so `args` is a real `arguments` object.
QUnit.module('lodash.xor');

(function() {
  var args = arguments;

  test('should return the symmetric difference of the given arrays', 1, function() {
    var actual = _.xor([1, 2, 5], [2, 3, 5], [3, 4, 5]);
    deepEqual(actual, [1, 4, 5]);
  });

  test('should return an array of unique values', 2, function() {
    var actual = _.xor([1, 1, 2, 5], [2, 2, 3, 5], [3, 4, 5, 5]);
    deepEqual(actual, [1, 4, 5]);

    actual = _.xor([1, 1]);
    deepEqual(actual, [1]);
  });

  test('should return a new array when a single array is provided', 1, function() {
    var array = [1];
    notStrictEqual(_.xor(array), array);
  });

  test('should ignore individual secondary arguments', 1, function() {
    var array = [0];
    deepEqual(_.xor(array, 3, null, { '0': 1 }), array);
  });

  test('should ignore values that are not arrays or `arguments` objects', 3, function() {
    var array = [1, 2];
    deepEqual(_.xor(array, 3, null, { '0': 1 }), array);
    deepEqual(_.xor(null, array, null, [2, 3]), [1, 3]);
    deepEqual(_.xor(null, array, null, args), [3]);
  });

  test('should return a wrapped value when chaining', 2, function() {
    if (!isNpm) {
      var actual = _([1, 2, 3]).xor([5, 2, 1, 4]);
      ok(actual instanceof _);
      deepEqual(actual.value(), [3, 5, 4]);
    }
    else {
      skipTest(2);
    }
  });
}(1, 2, 3));

/*--------------------------------------------------------------------------*/

// Tests for `_.zip` (and its alias `_.unzip`): groups the elements of the
// given arrays positionally; applying it twice round-trips the input.
QUnit.module('lodash.zip');

(function() {
  // map of scenario name -> [input tuple arrays, expected zipped result]
  var object = {
    'an empty array': [
      [],
      []
    ],
    '0-tuples': [
      [[], []],
      []
    ],
    '2-tuples': [
      [['barney', 'fred'], [36, 40]],
      [['barney', 36], ['fred', 40]]
    ],
    '3-tuples': [
      [['barney', 'fred'], [36, 40], [true, false]],
      [['barney', 36, true], ['fred', 40, false]]
    ]
  };

  _.forOwn(object, function(pair, key) {
    test('should work with ' + key, 2, function() {
      var actual = _.zip.apply(_, pair[0]);
      deepEqual(actual, pair[1]);
      // zipping the result again should restore the original tuples
      deepEqual(_.zip.apply(_, actual), actual.length ? pair[0] : []);
    });
  });

  test('should work with tuples of different lengths', 4, function() {
    var pair = [
      [['barney', 36], ['fred', 40, false]],
      [['barney', 'fred'], [36, 40], [undefined, false]]
    ];

    var actual = _.zip(pair[0]);
    // missing positions must be filled with dense `undefined` holes
    ok('0' in actual[2]);
    deepEqual(actual, pair[1]);

    actual = _.zip.apply(_, actual);
    ok('2' in actual[0]);
    deepEqual(actual, [['barney', 36, undefined], ['fred', 40, false]]);
  });

  test('should support consuming its return value', 1, function() {
    var expected = [['barney', 'fred'], [36, 40]];
    deepEqual(_.zip(_.zip(_.zip(_.zip(expected)))), expected);
  });

  test('should be aliased', 1, function() {
    strictEqual(_.unzip, _.zip);
  });
}());

/*--------------------------------------------------------------------------*/

// Tests for `_.zipObject` (and its alias `_.object`): builds an object from
// a keys/values pair of arrays or from a single array of [key, value] pairs.
QUnit.module('lodash.zipObject');

(function() {
  var object = { 'barney': 36, 'fred': 40 },
      array = [['barney', 36], ['fred', 40]];

  test('should skip falsey elements in a given two dimensional array', 1, function() {
    // `falsey` is a shared fixture defined earlier in this file
    var actual = _.zipObject(array.concat(falsey));
    deepEqual(actual, object);
  });

  test('should zip together key/value arrays into an object', 1, function() {
    var actual = _.zipObject(['barney', 'fred'], [36, 40]);
    deepEqual(actual, object);
  });

  test('should ignore extra `values`', 1, function() {
    deepEqual(_.zipObject(['a'], [1, 2]), { 'a': 1 });
  });

  test('should accept a two dimensional array', 1, function() {
    var actual = _.zipObject(array);
    deepEqual(actual, object);
  });

  test('should not assume `keys` is two dimensional if `values` is not provided', 1, function() {
    var actual = _.zipObject(['barney', 'fred']);
    deepEqual(actual, { 'barney': undefined, 'fred': undefined });
  });

  test('should accept a falsey `array` argument', 1, function() {
    var expected = _.map(falsey, _.constant({}));

    var actual = _.map(falsey, function(value, index) {
      try {
        return index ? _.zipObject(value) : _.zipObject();
      } catch(e) { }
    });

    deepEqual(actual, expected);
  });

  test('should support consuming the return value of `_.pairs`', 1, function() {
    // round trip: pairs -> zipObject restores the original object
    deepEqual(_.zipObject(_.pairs(object)), object);
  });

  test('should be aliased', 1, function() {
    strictEqual(_.object, _.zipObject);
  });
}());

/*--------------------------------------------------------------------------*/

// Tests for `Array.prototype.shift` applied through a lodash wrapper on an
// array-like object (regression for old-IE length handling).
QUnit.module('lodash(...).shift');

(function() {
  test('should remove the value at index `0` when length is `0` (test in IE 8 compatibility mode)', 2, function() {
    if (!isNpm) {
      var wrapped = _({ '0': 1, 'length': 1 });
      wrapped.shift();

      deepEqual(wrapped.keys().value(), ['length']);
      strictEqual(wrapped.first(), undefined);
    }
    else {
      skipTest(2);
    }
  });
}());

/*--------------------------------------------------------------------------*/

// Tests for `Array.prototype.splice` applied through a lodash wrapper on an
// array-like object (regression for old-IE length handling).
QUnit.module('lodash(...).splice');

(function() {
  test('should remove the value at index `0` when length is `0` (test in IE < 9, and in compatibility mode for IE 9)', 2, function() {
    if (!isNpm) {
      var wrapped = _({ '0': 1, 'length': 1 });
      wrapped.splice(0, 1);

      deepEqual(wrapped.keys().value(), ['length']);
      strictEqual(wrapped.first(), undefined);
    }
    else {
      skipTest(2);
    }
  });
}());

/*--------------------------------------------------------------------------*/

QUnit.module('lodash(...).toString');

(function() {
test('should return the `toString` result of the wrapped value', 1, function() { if (!isNpm) { var wrapped = _([1, 2, 3]); strictEqual(String(wrapped), '1,2,3'); } else { skipTest(); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...).valueOf'); (function() { test('should return the `valueOf` result of the wrapped value', 1, function() { if (!isNpm) { var wrapped = _(123); strictEqual(Number(wrapped), 123); } else { skipTest(); } }); test('should stringify the wrapped value when passed to `JSON.stringify`', 1, function() { if (!isNpm && JSON) { var wrapped = _([1, 2, 3]); strictEqual(JSON.stringify(wrapped), '[1,2,3]'); } else { skipTest(); } }); test('should be aliased', 2, function() { if (!isNpm) { var expected = _.prototype.valueOf; strictEqual(_.prototype.toJSON, expected); strictEqual(_.prototype.value, expected); } else { skipTest(2); } }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...) methods that return existing wrapped values'); (function() { var array = [1, 2, 3], wrapped = _(array); var funcs = [ 'push', 'reverse', 'sort', 'unshift' ]; _.each(funcs, function(methodName) { test('`_(...).' + methodName + '` should return the existing wrapped value', 1, function() { if (!isNpm) { strictEqual(wrapped[methodName](), wrapped); } else { skipTest(); } }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...) methods that return new wrapped values'); (function() { var array = [1, 2, 3], wrapped = _(array); var funcs = [ 'concat', 'slice', 'splice' ]; _.each(funcs, function(methodName) { test('`_(...).' + methodName + '` should return a new wrapped value', 1, function() { if (!isNpm) { ok(wrapped[methodName]() instanceof _); } else { skipTest(); } }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...) 
methods that return unwrapped values'); (function() { var array = [1, 2, 3], wrapped = _(array); var funcs = [ 'clone', 'contains', 'every', 'find', 'first', 'has', 'isArguments', 'isArray', 'isBoolean', 'isDate', 'isElement', 'isEmpty', 'isEqual', 'isFinite', 'isFunction', 'isNaN', 'isNull', 'isNumber', 'isObject', 'isPlainObject', 'isRegExp', 'isString', 'isUndefined', 'join', 'last', 'pop', 'shift', 'reduce', 'reduceRight', 'some' ]; _.each(funcs, function(methodName) { test('`_(...).' + methodName + '` should return an unwrapped value', 1, function() { if (!isNpm) { var actual = methodName == 'reduceRight' ? wrapped[methodName](_.identity) : wrapped[methodName](); ok(!(actual instanceof _)); } else { skipTest(); } }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash(...) methods capable of returning wrapped and unwrapped values'); (function() { var array = [1, 2, 3], wrapped = _(array); var funcs = [ 'first', 'last', 'sample' ]; _.each(funcs, function(methodName) { test('`_(...).' + methodName + '` called without an `n` argument should return an unwrapped value', 1, function() { if (!isNpm) { strictEqual(typeof wrapped[methodName](), 'number'); } else { skipTest(); } }); test('`_(...).' + methodName + '` called with an `n` argument should return a wrapped value', 1, function() { if (!isNpm) { ok(wrapped[methodName](1) instanceof _); } else { skipTest(); } }); test('`_.' + methodName + '` should return `undefined` when querying falsey arguments without an `n` argument', 1, function() { if (!isNpm) { var actual = [], expected = _.map(falsey, _.constant()), func = _[methodName]; _.each(falsey, function(value, index) { try { actual.push(index ? func(value) : func()); } catch(e) { } }); deepEqual(actual, expected); } else { skipTest(); } }); test('`_.' 
+ methodName + '` should return an empty array when querying falsey arguments with an `n` argument', 1, function() { if (!isNpm) { var expected = _.map(falsey, _.constant([])), func = _[methodName]; var actual = _.map(falsey, function(value, index) { try { return func(value, 2); } catch(e) { } }); deepEqual(actual, expected); } else { skipTest(); } }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('"Arrays" category methods'); (function() { var args = arguments, array = [1, 2, 3, 4, 5, 6]; test('should work with `arguments` objects', 23, function() { function message(methodName) { return '`_.' + methodName + '` should work with `arguments` objects'; } deepEqual(_.at(args, 0, 4), [1, 5], message('at')); deepEqual(_.at(array, args), [2, undefined, 4, undefined, 6], '_.at should work with `arguments` objects as secondary arguments'); deepEqual(_.difference(args, [null]), [1, [3], 5], message('difference')); deepEqual(_.difference(array, args), [2, 3, 4, 6], '_.difference should work with `arguments` objects as secondary arguments'); deepEqual(_.union(args, [null, 6]), [1, null, [3], 5, 6], message('union')); deepEqual(_.union(array, args), array.concat([null, [3]]), '_.union should work with `arguments` objects as secondary arguments'); deepEqual(_.compact(args), [1, [3], 5], message('compact')); deepEqual(_.findIndex(args, _.identity), 0, message('findIndex')); deepEqual(_.findLastIndex(args, _.identity), 4, message('findLastIndex')); deepEqual(_.first(args), 1, message('first')); deepEqual(_.flatten(args), [1, null, 3, null, 5], message('flatten')); deepEqual(_.indexOf(args, 5), 4, message('indexOf')); deepEqual(_.initial(args, 4), [1], message('initial')); deepEqual(_.intersection(args, [1]), [1], message('intersection')); deepEqual(_.last(args), 5, message('last')); deepEqual(_.lastIndexOf(args, 1), 0, message('lastIndexOf')); deepEqual(_.rest(args, 4), [5], message('rest')); deepEqual(_.sortedIndex(args, 6), 
5, message('sortedIndex')); deepEqual(_.uniq(args), [1, null, [3], 5], message('uniq')); deepEqual(_.without(args, null), [1, [3], 5], message('without')); deepEqual(_.zip(args, args), [[1, 1], [null, null], [[3], [3]], [null, null], [5, 5]], message('zip')); if (_.support.argsClass && _.support.argsObject && !_.support.nonEnumArgs) { _.pull(args, null); deepEqual([args[0], args[1], args[2]], [1, [3], 5], message('pull')); _.remove(args, function(value) { return typeof value == 'number'; }); ok(args.length === 1 && _.isEqual(args[0], [3]), message('remove')); } else { skipTest(2) } }); test('should accept falsey primary arguments', 4, function() { function message(methodName) { return '`_.' + methodName + '` should accept falsey primary arguments'; } deepEqual(_.difference(null, array), array, message('difference')); deepEqual(_.intersection(null, array), array, message('intersection')); deepEqual(_.union(null, array), array, message('union')); deepEqual(_.xor(null, array), array, message('xor')); }); test('should accept falsey secondary arguments', 3, function() { function message(methodName) { return '`_.' + methodName + '` should accept falsey secondary arguments'; } deepEqual(_.difference(array, null), array, message('difference')); deepEqual(_.intersection(array, null), array, message('intersection')); deepEqual(_.union(array, null), array, message('union')); }); }(1, null, [3], null, 5)); /*--------------------------------------------------------------------------*/ /*--------------------------------------------------------------------------*/ QUnit.module('"Strings" category methods'); (function() { var stringMethods = [ 'camelCase', 'capitalize', 'escape', 'escapeRegExp', 'kebabCase', 'pad', 'padLeft', 'padRight', 'repeat', 'snakeCase', 'trim', 'trimLeft', 'trimRight', 'truncate', 'unescape' ]; _.each(stringMethods, function(methodName) { var func = _[methodName]; test('`_.' 
+ methodName + '` should return an empty string when provided `null`, `undefined`, or empty string', 3, function() { strictEqual(func(null), ''); strictEqual(func(undefined), ''); strictEqual(func(''), ''); }); }); }()); /*--------------------------------------------------------------------------*/ QUnit.module('lodash methods'); (function() { var allMethods = _.reject(_.functions(_), function(methodName) { return /^_/.test(methodName); }); var returnArrays = [ 'at', 'compact', 'difference', 'filter', 'first', 'flatten', 'functions', 'initial', 'intersection', 'invoke', 'last', 'keys', 'map', 'pairs', 'pluck', 'pull', 'pullAt', 'range', 'reject', 'remove', 'rest', 'sample', 'shuffle', 'sortBy', 'times', 'toArray', 'union', 'uniq', 'values', 'where', 'without', 'xor', 'zip' ]; var rejectFalsey = [ 'after', 'bind', 'compose', 'curry', 'debounce', 'defer', 'delay', 'memoize', 'negate', 'once', 'partial', 'partialRight', 'tap', 'throttle', 'wrap' ]; var acceptFalsey = _.difference(allMethods, rejectFalsey); test('should accept falsey arguments', 187, function() { var emptyArrays = _.map(falsey, _.constant([])), isExposed = '_' in root, oldDash = root._; _.each(acceptFalsey, function(methodName) { var expected = emptyArrays, func = _[methodName], pass = true; var actual = _.map(falsey, function(value, index) { try { return index ? func(value) : func(); } catch(e) { pass = false; } }); if (methodName == 'noConflict') { if (isExposed) { root._ = oldDash; } else { delete root._; } } else if (methodName == 'pull') { expected = falsey; } if (_.contains(returnArrays, methodName) && !_.contains(['first', 'last', 'sample'], methodName)) { deepEqual(actual, expected, '_.' + methodName + ' returns an array'); } ok(pass, '`_.' 
+ methodName + '` accepts falsey arguments'); }); // skip tests for missing methods of modularized builds _.each(['noConflict', 'runInContext', 'tap'], function(methodName) { if (!_[methodName]) { skipTest(); } }); }); test('should return an array', 66, function() { var array = [1, 2, 3]; _.each(returnArrays, function(methodName) { var actual, func = _[methodName]; switch (methodName) { case 'invoke': actual = func(array, 'toFixed'); break; case 'first': case 'last': case 'sample': actual = func(array, 1); break; default: actual = func(array); } ok(_.isArray(actual), '_.' + methodName + ' returns an array'); var isPull = methodName == 'pull'; strictEqual(actual === array, isPull, '_.' + methodName + ' should ' + (isPull ? '' : 'not ') + 'return the provided array'); }); }); test('should throw a TypeError for falsey arguments', 15, function() { _.each(rejectFalsey, function(methodName) { var expected = _.map(falsey, _.constant(true)), func = _[methodName]; var actual = _.map(falsey, function(value, index) { var pass = !index && methodName == 'compose'; try { index ? func(value) : func(); } catch(e) { pass = !pass; } return pass; }); deepEqual(actual, expected, '`_.' + methodName + '` rejects falsey arguments'); }); }); test('should handle `null` `thisArg` arguments', 44, function() { var expected = (function() { return this; }).call(null); var funcs = [ 'assign', 'clone', 'cloneDeep', 'countBy', 'dropWhile', 'dropRightWhile', 'every', 'flatten', 'filter', 'find', 'findIndex', 'findKey', 'findLast', 'findLastIndex', 'findLastKey', 'forEach', 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight', 'groupBy', 'isEqual', 'map', 'mapValues', 'max', 'merge', 'min', 'omit', 'partition', 'pick', 'reduce', 'reduceRight', 'reject', 'remove', 'some', 'sortBy', 'sortedIndex', 'takeWhile', 'takeRightWhile', 'tap', 'times', 'transform', 'uniq' ]; _.each(funcs, function(methodName) { var actual, array = ['a'], func = _[methodName], message = '`_.' 
+ methodName + '` handles `null` `thisArg` arguments'; function callback() { actual = this; } if (func) { if (/^reduce/.test(methodName) || methodName == 'transform') { func(array, callback, 0, null); } else if (_.contains(['assign', 'merge'], methodName)) { func(array, array, callback, null); } else if (_.contains(['isEqual', 'sortedIndex'], methodName)) { func(array, 'a', callback, null); } else if (methodName == 'times') { func(1, callback, null); } else { func(array, callback, null); } strictEqual(actual, expected, message); } else { skipTest(); } }); }); test('should not contain minified method names (test production builds)', 1, function() { ok(_.every(_.functions(_), function(methodName) { return methodName.length > 2 || methodName === 'at'; })); }); }()); /*--------------------------------------------------------------------------*/ QUnit.config.asyncRetries = 10; QUnit.config.hidepassed = true; if (!document) { QUnit.config.noglobals = true; QUnit.start(); } }.call(this));
Minor source nit in test/test.js.
test/test.js
Minor source nit in test/test.js.
<ide><path>est/test.js <ide> }()); <ide> <ide> /** Load and install QUnit Extras */ <del> var qa = load('../vendor/qunit-extras/qunit-extras.js'); <del> if (qa) { <del> qa.runInContext(root); <add> var qe = load('../vendor/qunit-extras/qunit-extras.js'); <add> if (qe) { <add> qe.runInContext(root); <ide> } <ide> <ide> /*--------------------------------------------------------------------------*/
Java
apache-2.0
5201873b60eb30de2b84754d66fa9b14cd771570
0
GideonLeGrange/panstamp-java
package me.legrange.panstamp; import me.legrange.panstamp.event.AbstractPanStampListener; import me.legrange.panstamp.event.AbstractRegisterListener; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutorService; import java.util.logging.Level; import java.util.logging.Logger; import me.legrange.panstamp.definition.DeviceDefinition; import me.legrange.panstamp.definition.EndpointDefinition; import me.legrange.panstamp.definition.ParameterDefinition; import me.legrange.panstamp.definition.RegisterDefinition; import me.legrange.swap.SwapMessage; /** * An implementation of a panStamp abstraction. Instances of this class * represent instances of panStamp devices connected to the network behind the * gateway. * * @since 1.0 * @author Gideon le Grange https://github.com/GideonLeGrange * */ public final class PanStamp { /** * Get the address of this device. 
* * @return The address */ public int getAddress() { return address; } /** * Return the network channel * * @return The channel * @throws NetworkException Thrown if there is a problem getting the channel * value */ public int getChannel() throws NetworkException { Integer v = getIntValue(StandardEndpoint.FREQUENCY_CHANNEL); if (v != null) { return v; } return getGateway().getChannel(); } /** * Return the transmit interval * * @return The interval * @throws NetworkException Thrown if there is a problem getting the * interval value */ public int getTxInterval() throws NetworkException { return getIntValue(StandardEndpoint.PERIODIC_TX_INTERVAL, 0); } /** * Return the current security option * * @return The security option * @throws NetworkException Thrown if there is a problem getting the * security option value */ public int getSecurityOption() throws NetworkException { return getIntValue(StandardEndpoint.SECURITY_OPTION, 0); } /** * Return the network ID * * @return The network ID * @throws NetworkException Thrown if there is a problem getting the network * ID value */ public int getNetwork() throws NetworkException { Integer v = getIntValue(StandardEndpoint.NETWORK_ID); if (v != null) { return v; } return getGateway().getNetworkId(); } /** * Set the address of the panStamp * * @param addr The address to set * @throws NetworkException Thrown if there is a problem reading the * interval. */ public void setAddress(int addr) throws NetworkException { if (addr != getAddress()) { setIntValue(StandardEndpoint.DEVICE_ADDRESS, addr); } } /** * Set the network id of the device * * @param network The network id * @throws NetworkException Thrown if there is a problem setting the ID */ public void setNetwork(int network) throws NetworkException { if (network != getNetwork()) { setIntValue(StandardEndpoint.NETWORK_ID, network); } } /** * Set the network channel of the device * * @param channel The channel to set. 
* @throws NetworkException Thrown if there is a problem setting the channel */ public void setChannel(int channel) throws NetworkException { if (channel != getChannel()) { setIntValue(StandardEndpoint.FREQUENCY_CHANNEL, channel); } } /** * Set the security option of the device. * * @param option The security option to set. * @throws NetworkException Thrown if there is a problem setting the option. */ public void setSecurityOption(int option) throws NetworkException { if (option != getSecurityOption()) { setIntValue(StandardEndpoint.SECURITY_OPTION, option); } } /** * Set the transmit interval (in seconds) of the device. * * @param txInterval The interval to set. * @throws NetworkException Thrown if there is a problem setting the * interval. */ public void setTxInterval(int txInterval) throws NetworkException { if (txInterval != getTxInterval()) { setIntValue(StandardEndpoint.PERIODIC_TX_INTERVAL, txInterval); } } /** * Get the device manufacturer id. * * @return The manufacturer Id * @throws NetworkException Thrown if there is a problem reading the id. */ public int getManufacturerId() throws NetworkException { return manufacturerId; } /** * Get the device product id. * * @return The product Id * @throws NetworkException Thrown if there is a problem reading the id. */ public int getProductId() throws NetworkException { return productId; } /** * Get the gateway this device is attached to. * * @return The gateway */ public Network getGateway() { return gw; } /** * Get the device name (as defined by the endpoint definition). * * @return The name of the device */ public String getName() { if (def != null) { return def.getProduct(); } return "Unknown"; } /** * Get the register with the given register ID for this device. 
* * * @return the register for the given id * @param id ID of register to return */ public Register getRegister(int id) { Register reg; synchronized (registers) { reg = registers.get(id); if (reg == null) { reg = new Register(this, id); registers.put(id, reg); } } return reg; } /** * Get the list of registers defined for this device * * @return The list of registers. */ public List<Register> getRegisters() { List<Register> all = new ArrayList<>(); all.addAll(registers.values()); Collections.sort(all, new Comparator() { @Override public int compare(Object o1, Object o2) { return ((Register) o1).getId() - ((Register) o2).getId(); } }); return all; } /** * Determine if the device has a register with the given ID. * * @param id The id of the register required. * @return True if the panStamp has the register. */ public boolean hasRegister(int id) { return registers.get(id) != null; } /** * add an event listener * * @param l The listener to add */ public void addListener(PanStampListener l) { listeners.add(l); } /** * remove an event listener * * @param l The listener to remove */ public void removeListener(PanStampListener l) { listeners.remove(l); } /** * create a new mote for the given address in the given network * * @param gw The gateway to which this device is connected * @param address The address of the device * @throws me.legrange.panstamp.NetworkException Thrown if there is a * problem creating the device. 
*/ public PanStamp(Network gw, int address) throws NetworkException { this.gw = gw; this.address = address; extended = address > 255; for (StandardRegister reg : StandardRegister.ALL) { Register impl = new Register(this, reg); registers.put(reg.getId(), impl); } getRegister(StandardRegister.PRODUCT_CODE.getId()).addListener(productCodeListener()); getRegister(StandardRegister.SYSTEM_STATE.getId()).getEndpoint(StandardEndpoint.SYSTEM_STATE.getName()).addListener(systemStateListener()); } void destroy() { for (Register reg : registers.values()) { reg.destroy(); } listeners.clear(); registers.clear(); } DeviceDefinition getDefinition() { return def; } /** * send a query message to the remote node */ void sendQueryMessage(int id) throws ModemException { gw.sendQueryMessage(this, id); } /** * send a command message to the remote node * * @param value Value to send */ void sendCommandMessage(int id, byte[] value) throws NetworkException { if (isSleeper()) { queue(id, value); } else { gw.sendCommandMessage(this, id, value); } } /** * Receive a status message from the remote node. 
*/ void statusMessageReceived(SwapMessage msg) { Register reg = (Register) getRegister(msg.getRegisterID()); boolean isNew = !reg.hasValue(); reg.valueReceived(msg.getRegisterValue()); if (isNew) { fireRegisterDetected(reg); } } boolean hasExtendedAddress() { return extended; } ExecutorService getPool() { return gw.getPool(); } private void fireRegisterDetected(final Register reg) { for (final PanStampListener l : listeners) { getPool().submit(new Runnable() { @Override public void run() { l.registerDetected(PanStamp.this, reg); } }); } } private int getIntValue(StandardEndpoint epDef, int defaultValue) throws NetworkException { Integer v = getIntValue(epDef); if (v != null) { return v; } return defaultValue; } private void setIntValue(StandardEndpoint epDef, int val) throws NetworkException { Register reg = getRegister(epDef.getRegister().getId()); Endpoint<Integer> ep = reg.getEndpoint(epDef.getName()); ep.setValue(val); } private Integer getIntValue(StandardEndpoint epDef) throws NetworkException { Register reg = getRegister(epDef.getRegister().getId()); if (reg.hasValue()) { Endpoint<Integer> ep = reg.getEndpoint(epDef.getName()); return ep.getValue(); } return null; } private void queue(int id, byte[] value) { addListener(new UpdateOnSync(id, value)); fireSyncRequired(); } private boolean isSleeper() throws NetworkException { if (def != null) { return def.isPowerDownMode(); } else { Endpoint<Integer> ep = getRegister(StandardRegister.SYSTEM_STATE.getId()).getEndpoint(StandardEndpoint.SYSTEM_STATE.getName()); if (!ep.hasValue()) { // if we can't confirm sleep mode, we assume it is true so we rather ask for sync return true; } int v = ep.getValue(); return (v != 3) && (v != 1); } } private EndpointListener systemStateListener() { return new EndpointListener<Integer>() { @Override public void valueReceived(Endpoint<Integer> ep, Integer syncState) { fireSyncStateChanged(syncState); } }; } private void fireSyncRequired() { for (final PanStampListener l : listeners) 
{ getPool().submit(new Runnable() { @Override public void run() { l.syncStateChange(PanStamp.this, syncState); } }); } } private void fireSyncStateChanged(final int syncState) { for (final PanStampListener l : listeners) { getPool().submit(new Runnable() { @Override public void run() { l.syncStateChange(PanStamp.this, syncState); } }); } } private void fireProductCodeChange(final int manufacturerId, final int productId) { for (final PanStampListener l : listeners) { getPool().submit(new Runnable() { @Override public void run() { l.productCodeChange(PanStamp.this, manufacturerId, productId); } }); } } private RegisterListener productCodeListener() { return new AbstractRegisterListener() { @Override public void valueReceived(Register reg, byte value[]) { try { int mfId = getManufacturerIdFromRegister(); int pdId = getProductIdFromRegister(); if ((mfId != getManufacturerId()) || (pdId != getProductId())) { manufacturerId = mfId; productId = pdId; if ((manufacturerId != 0) && (productId != 0)) { loadDefinition(); } fireProductCodeChange(mfId, pdId); } } catch (NetworkException ex) { Logger.getLogger(PanStamp.class.getName()).log(Level.SEVERE, null, ex); } } @Override public void valueSet(Register reg, byte[] value) { try { int mfId = getManufacturerIdFromRegister(); int pdId = getProductIdFromRegister(); if ((mfId != getManufacturerId()) || (pdId != getProductId())) { manufacturerId = mfId; productId = pdId; if ((manufacturerId != 0) && (productId != 0)) { loadDefinition(); } fireProductCodeChange(manufacturerId, productId); } } catch (NetworkException ex) { Logger.getLogger(PanStamp.class.getName()).log(Level.SEVERE, null, ex); } } }; } /** * load all endpoints and parameters */ private void loadDefinition() throws NetworkException { def = gw.getDeviceDefinition(getManufacturerId(), getProductId()); List<RegisterDefinition> rpDefs = def.getRegisters(); for (RegisterDefinition rpDef : rpDefs) { Register reg = (Register) getRegister(rpDef.getId()); for 
(EndpointDefinition epDef : rpDef.getEndpoints()) { reg.addEndpoint(epDef); } for (ParameterDefinition par : rpDef.getParameters()) { reg.addParameter(par); } } } /** * get the manufacturer id for this panStamp from the actual register */ private int getManufacturerIdFromRegister() throws NetworkException { Register reg = getRegister(StandardRegister.PRODUCT_CODE.getId()); if (reg.hasValue()) { byte val[] = reg.getValue(); return val[0] << 24 | val[1] << 16 | val[2] << 8 | val[3]; } return 0; } /** * get the product id for this panStamp from the actual register */ private int getProductIdFromRegister() throws NetworkException { Register reg = getRegister(StandardRegister.PRODUCT_CODE.getId()); if (reg.hasValue()) { byte val[] = reg.getValue(); return val[4] << 24 | val[5] << 16 | val[6] << 8 | val[7]; } return 0; } private final int address; private DeviceDefinition def; private final Network gw; private int manufacturerId; private int productId; private int syncState; private boolean extended; private final Map<Integer, Register> registers = new ConcurrentHashMap<>(); private transient final List<PanStampListener> listeners = new CopyOnWriteArrayList<>(); private class UpdateOnSync extends AbstractPanStampListener { private UpdateOnSync(int id, byte[] val) { this.id = id; this.val = val; } private final int id; private final byte[] val; @Override public void syncStateChange(PanStamp dev, int syncState) { switch (syncState) { case 1: case 3: try { gw.sendCommandMessage(PanStamp.this, id, val); } catch (ModemException ex) { Logger.getLogger(PanStamp.class.getName()).log(Level.SEVERE, null, ex); } finally { removeListener(this); } break; default: } } } }
src/main/java/me/legrange/panstamp/PanStamp.java
package me.legrange.panstamp; import me.legrange.panstamp.event.AbstractPanStampListener; import me.legrange.panstamp.event.AbstractRegisterListener; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutorService; import java.util.logging.Level; import java.util.logging.Logger; import me.legrange.panstamp.definition.DeviceDefinition; import me.legrange.panstamp.definition.EndpointDefinition; import me.legrange.panstamp.definition.ParameterDefinition; import me.legrange.panstamp.definition.RegisterDefinition; import me.legrange.swap.SwapMessage; /** * An implementation of a panStamp abstraction. Instances of this class * represent instances of panStamp devices connected to the network behind the * gateway. * * @since 1.0 * @author Gideon le Grange https://github.com/GideonLeGrange * */ public final class PanStamp { /** * Get the address of this device. 
* * @return The address */ public int getAddress() { return address; } /** * Return the network channel * * @return The channel * @throws NetworkException Thrown if there is a problem getting the channel * value */ public int getChannel() throws NetworkException { Integer v = getIntValue(StandardEndpoint.FREQUENCY_CHANNEL); if (v != null) { return v; } return getGateway().getChannel(); } /** * Return the transmit interval * * @return The interval * @throws NetworkException Thrown if there is a problem getting the * interval value */ public int getTxInterval() throws NetworkException { return getIntValue(StandardEndpoint.PERIODIC_TX_INTERVAL, 0); } /** * Return the current security option * * @return The security option * @throws NetworkException Thrown if there is a problem getting the * security option value */ public int getSecurityOption() throws NetworkException { return getIntValue(StandardEndpoint.SECURITY_OPTION, 0); } /** * Return the network ID * * @return The network ID * @throws NetworkException Thrown if there is a problem getting the network * ID value */ public int getNetwork() throws NetworkException { Integer v = getIntValue(StandardEndpoint.NETWORK_ID); if (v != null) { return v; } return getGateway().getNetworkId(); } /** * Set the address of the panStamp * * @param addr The address to set * @throws NetworkException Thrown if there is a problem reading the * interval. */ public void setAddress(int addr) throws NetworkException { if (addr != getAddress()) { setIntValue(StandardEndpoint.DEVICE_ADDRESS, addr); } } /** * Set the network id of the device * * @param network The network id * @throws NetworkException Thrown if there is a problem setting the ID */ public void setNetwork(int network) throws NetworkException { if (network != getNetwork()) { setIntValue(StandardEndpoint.NETWORK_ID, network); } } /** * Set the network channel of the device * * @param channel The channel to set. 
* @throws NetworkException Thrown if there is a problem setting the channel */ public void setChannel(int channel) throws NetworkException { if (channel != getChannel()) { setIntValue(StandardEndpoint.FREQUENCY_CHANNEL, channel); } } /** * Set the security option of the device. * * @param option The security option to set. * @throws NetworkException Thrown if there is a problem setting the option. */ public void setSecurityOption(int option) throws NetworkException { if (option != getSecurityOption()) { setIntValue(StandardEndpoint.SECURITY_OPTION, option); } } /** * Set the transmit interval (in seconds) of the device. * * @param txInterval The interval to set. * @throws NetworkException Thrown if there is a problem setting the * interval. */ public void setTxInterval(int txInterval) throws NetworkException { if (txInterval != getTxInterval()) { setIntValue(StandardEndpoint.PERIODIC_TX_INTERVAL, txInterval); } } /** * Get the device manufacturer id. * * @return The manufacturer Id * @throws NetworkException Thrown if there is a problem reading the id. */ public int getManufacturerId() throws NetworkException { return manufacturerId; } /** * Get the device product id. * * @return The product Id * @throws NetworkException Thrown if there is a problem reading the id. */ public int getProductId() throws NetworkException { return productId; } /** * Get the gateway this device is attached to. * * @return The gateway */ public Network getGateway() { return gw; } /** * Get the device name (as defined by the endpoint definition). * * @return The name of the device */ public String getName() { if (def != null) { return def.getProduct(); } return "Unknown"; } /** * Get the register with the given register ID for this device. 
* * * @return the register for the given id * @param id ID of register to return */ public Register getRegister(int id) { Register reg; synchronized (registers) { reg = registers.get(id); if (reg == null) { reg = new Register(this, id); registers.put(id, reg); } } return reg; } /** * Get the list of registers defined for this device * * @return The list of registers. */ public List<Register> getRegisters() { List<Register> all = new ArrayList<>(); all.addAll(registers.values()); Collections.sort(all, new Comparator() { @Override public int compare(Object o1, Object o2) { return ((Register) o1).getId() - ((Register) o2).getId(); } }); return all; } /** * Determine if the device has a register with the given ID. * * @param id The id of the register required. * @return True if the panStamp has the register. */ public boolean hasRegister(int id) { return registers.get(id) != null; } /** * add an event listener * * @param l The listener to add */ public void addListener(PanStampListener l) { listeners.add(l); } /** * remove an event listener * * @param l The listener to remove */ public void removeListener(PanStampListener l) { listeners.remove(l); } /** * create a new mote for the given address in the given network * * @param gw The gateway to which this device is connected * @param address The address of the device * @throws me.legrange.panstamp.NetworkException Thrown if there is a * problem creating the device. 
*/ public PanStamp(Network gw, int address) throws NetworkException { this.gw = gw; this.address = address; extended = address > 255; for (StandardRegister reg : StandardRegister.ALL) { Register impl = new Register(this, reg); registers.put(reg.getId(), impl); if (StandardRegister.PRODUCT_CODE.getId() == reg.getId()) { impl.addListener(productCodeListener()); } else if (StandardRegister.SYSTEM_STATE.getId() == reg.getId()) { impl.getEndpoint(StandardEndpoint.SYSTEM_STATE.getName()).addListener(systemStateListener()); } } } void destroy() { for (Register reg : registers.values()) { reg.destroy(); } listeners.clear(); registers.clear(); } DeviceDefinition getDefinition() { return def; } /** * send a query message to the remote node */ void sendQueryMessage(int id) throws ModemException { gw.sendQueryMessage(this, id); } /** * send a command message to the remote node * * @param value Value to send */ void sendCommandMessage(int id, byte[] value) throws NetworkException { if (isSleeper()) { queue(id, value); } else { gw.sendCommandMessage(this, id, value); } } /** * Receive a status message from the remote node. 
*/ void statusMessageReceived(SwapMessage msg) { Register reg = (Register) getRegister(msg.getRegisterID()); boolean isNew = !reg.hasValue(); reg.valueReceived(msg.getRegisterValue()); if (isNew) { fireRegisterDetected(reg); } } boolean hasExtendedAddress() { return extended; } ExecutorService getPool() { return gw.getPool(); } private void fireRegisterDetected(final Register reg) { for (final PanStampListener l : listeners) { getPool().submit(new Runnable() { @Override public void run() { l.registerDetected(PanStamp.this, reg); } }); } } private int getIntValue(StandardEndpoint epDef, int defaultValue) throws NetworkException { Integer v = getIntValue(epDef); if (v != null) { return v; } return defaultValue; } private void setIntValue(StandardEndpoint epDef, int val) throws NetworkException { Register reg = getRegister(epDef.getRegister().getId()); Endpoint<Integer> ep = reg.getEndpoint(epDef.getName()); ep.setValue(val); } private Integer getIntValue(StandardEndpoint epDef) throws NetworkException { Register reg = getRegister(epDef.getRegister().getId()); if (reg.hasValue()) { Endpoint<Integer> ep = reg.getEndpoint(epDef.getName()); return ep.getValue(); } return null; } private void queue(int id, byte[] value) { addListener(new UpdateOnSync(id, value)); fireSyncRequired(); } private boolean isSleeper() throws NetworkException { if (def != null) { return def.isPowerDownMode(); } else { Endpoint<Integer> ep = getRegister(StandardRegister.SYSTEM_STATE.getId()).getEndpoint(StandardEndpoint.SYSTEM_STATE.getName()); if (!ep.hasValue()) { // if we can't confirm sleep mode, we assume it is true so we rather ask for sync return true; } int v = ep.getValue(); return (v != 3) && (v != 1); } } private EndpointListener systemStateListener() { return new EndpointListener<Integer>() { @Override public void valueReceived(Endpoint<Integer> ep, Integer syncState) { fireSyncStateChanged(syncState); } }; } private void fireSyncRequired() { for (final PanStampListener l : listeners) 
{ getPool().submit(new Runnable() { @Override public void run() { l.syncStateChange(PanStamp.this, syncState); } }); } } private void fireSyncStateChanged(final int syncState) { for (final PanStampListener l : listeners) { getPool().submit(new Runnable() { @Override public void run() { l.syncStateChange(PanStamp.this, syncState); } }); } } private void fireProductCodeChange(final int manufacturerId, final int productId) { for (final PanStampListener l : listeners) { getPool().submit(new Runnable() { @Override public void run() { l.productCodeChange(PanStamp.this, manufacturerId, productId); } }); } } private RegisterListener productCodeListener() { return new AbstractRegisterListener() { @Override public void valueReceived(Register reg, byte value[]) { try { int mfId = getManufacturerIdFromRegister(); int pdId = getProductIdFromRegister(); if ((mfId != getManufacturerId()) || (pdId != getProductId())) { manufacturerId = mfId; productId = pdId; if ((manufacturerId != 0) && (productId != 0)) { loadDefinition(); } fireProductCodeChange(mfId, pdId); } } catch (NetworkException ex) { Logger.getLogger(PanStamp.class.getName()).log(Level.SEVERE, null, ex); } } @Override public void valueSet(Register reg, byte[] value) { try { int mfId = getManufacturerIdFromRegister(); int pdId = getProductIdFromRegister(); if ((mfId != getManufacturerId()) || (pdId != getProductId())) { manufacturerId = mfId; productId = pdId; if ((manufacturerId != 0) && (productId != 0)) { loadDefinition(); } } } catch (NetworkException ex) { Logger.getLogger(PanStamp.class.getName()).log(Level.SEVERE, null, ex); } } }; } /** * load all endpoints and parameters */ private void loadDefinition() throws NetworkException { def = gw.getDeviceDefinition(getManufacturerId(), getProductId()); List<RegisterDefinition> rpDefs = def.getRegisters(); for (RegisterDefinition rpDef : rpDefs) { Register reg = (Register) getRegister(rpDef.getId()); for (EndpointDefinition epDef : rpDef.getEndpoints()) { 
reg.addEndpoint(epDef); } for (ParameterDefinition par : rpDef.getParameters()) { reg.addParameter(par); } } } /** * get the manufacturer id for this panStamp from the actual register */ private int getManufacturerIdFromRegister() throws NetworkException { Register reg = getRegister(StandardRegister.PRODUCT_CODE.getId()); if (reg.hasValue()) { byte val[] = reg.getValue(); return val[0] << 24 | val[1] << 16 | val[2] << 8 | val[3]; } return 0; } /** * get the product id for this panStamp from the actual register */ private int getProductIdFromRegister() throws NetworkException { Register reg = getRegister(StandardRegister.PRODUCT_CODE.getId()); if (reg.hasValue()) { byte val[] = reg.getValue(); return val[4] << 24 | val[5] << 16 | val[6] << 8 | val[7]; } return 0; } private final int address; private DeviceDefinition def; private final Network gw; private int manufacturerId; private int productId; private int syncState; private boolean extended; private final Map<Integer, Register> registers = new ConcurrentHashMap<>(); private transient final List<PanStampListener> listeners = new CopyOnWriteArrayList<>(); private class UpdateOnSync extends AbstractPanStampListener { private UpdateOnSync(int id, byte[] val) { this.id = id; this.val = val; } private final int id; private final byte[] val; @Override public void syncStateChange(PanStamp dev, int syncState) { switch (syncState) { case 1: case 3: try { gw.sendCommandMessage(PanStamp.this, id, val); } catch (ModemException ex) { Logger.getLogger(PanStamp.class.getName()).log(Level.SEVERE, null, ex); } finally { removeListener(this); } break; default: } } } }
Modified PanStamp constructor to be a bit more understandable
src/main/java/me/legrange/panstamp/PanStamp.java
Modified PanStamp constructor to be a bit more understandable
<ide><path>rc/main/java/me/legrange/panstamp/PanStamp.java <ide> for (StandardRegister reg : StandardRegister.ALL) { <ide> Register impl = new Register(this, reg); <ide> registers.put(reg.getId(), impl); <del> if (StandardRegister.PRODUCT_CODE.getId() == reg.getId()) { <del> impl.addListener(productCodeListener()); <del> <del> } else if (StandardRegister.SYSTEM_STATE.getId() == reg.getId()) { <del> impl.getEndpoint(StandardEndpoint.SYSTEM_STATE.getName()).addListener(systemStateListener()); <del> <del> } <del> } <add> } <add> getRegister(StandardRegister.PRODUCT_CODE.getId()).addListener(productCodeListener()); <add> getRegister(StandardRegister.SYSTEM_STATE.getId()).getEndpoint(StandardEndpoint.SYSTEM_STATE.getName()).addListener(systemStateListener()); <ide> } <ide> <ide> void destroy() { <ide> if ((manufacturerId != 0) && (productId != 0)) { <ide> loadDefinition(); <ide> } <add> fireProductCodeChange(manufacturerId, productId); <ide> } <ide> } catch (NetworkException ex) { <ide> Logger.getLogger(PanStamp.class.getName()).log(Level.SEVERE, null, ex); <ide> } <del> <ide> } <ide> }; <ide>
Java
apache-2.0
2a0b8725c9242639e05b837702b1184e176412ab
0
mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData
/******************************************************************************* * Copyright 2015 EMBL - European Bioinformatics Institute * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. *******************************************************************************/ package org.mousephenotype.cda.indexers; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.response.QueryResponse; import org.mousephenotype.cda.constants.ParameterConstants; import org.mousephenotype.cda.db.pojo.OntologyTerm; import org.mousephenotype.cda.db.pojo.Parameter; import org.mousephenotype.cda.db.pojo.PhenotypeAnnotationType; import org.mousephenotype.cda.db.repositories.OntologyTermRepository; import org.mousephenotype.cda.db.repositories.ParameterRepository; import org.mousephenotype.cda.db.statistics.MpTermService; import org.mousephenotype.cda.db.statistics.ResultDTO; import org.mousephenotype.cda.db.utilities.SqlUtils; import org.mousephenotype.cda.enumerations.SexType; import org.mousephenotype.cda.enumerations.ZygosityType; import org.mousephenotype.cda.indexers.exceptions.IndexerException; import org.mousephenotype.cda.indexers.utils.IndexerMap; import org.mousephenotype.cda.owl.OntologyParser; import org.mousephenotype.cda.owl.OntologyParserFactory; import 
org.mousephenotype.cda.owl.OntologyTermDTO; import org.mousephenotype.cda.solr.service.GenotypePhenotypeService; import org.mousephenotype.cda.solr.service.StatisticalResultService; import org.mousephenotype.cda.solr.service.dto.ImpressBaseDTO; import org.mousephenotype.cda.solr.service.dto.ParameterDTO; import org.mousephenotype.cda.solr.service.dto.StatisticalResultDTO; import org.mousephenotype.cda.utilities.RunStatus; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyStorageException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.Banner; import org.springframework.boot.CommandLineRunner; import org.springframework.boot.WebApplicationType; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.context.ConfigurableApplicationContext; import javax.inject.Inject; import javax.sql.DataSource; import javax.validation.constraints.NotNull; import java.io.IOException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; /** * Load documents into the statistical-results SOLR core */ @EnableAutoConfiguration public class StatisticalResultsIndexer extends AbstractIndexer implements CommandLineRunner { private final Logger logger = LoggerFactory.getLogger(StatisticalResultsIndexer.class); private Boolean SAVE = Boolean.TRUE; private Map<String, List<String>> impressAbnormals = new HashMap<>(); private Double SIGNIFICANCE_THRESHOLD = GenotypePhenotypeService.P_VALUE_THRESHOLD; private final double REPORT_INTERVAL = 100000; static final String RESOURCE_3I = "3i"; private final List<String> EMBRYO_PROCEDURES_NO_VIA = Arrays.asList("IMPC_GPL", "IMPC_GEL", "IMPC_GPM", 
"IMPC_GEM", "IMPC_GPO", "IMPC_GEO", "IMPC_GPP", "IMPC_GEP"); private final List<String> EMBRYO_PROCEDURES_VIA = Arrays.asList("IMPC_EVL_001_001", "IMPC_EVM_001_001", "IMPC_EVO_001_001", "IMPC_EVP_001_001"); private Map<Long, ImpressBaseDTO> pipelineMap = new HashMap<>(); private Map<Long, ImpressBaseDTO> procedureMap = new HashMap<>(); private Map<Long, ParameterDTO> parameterMap = new HashMap<>(); private Map<String, ResourceBean> resourceMap = new HashMap<>(); private Map<String, List<String>> sexesMap = new HashMap<>(); private Set<String> alreadyReported = new HashSet<>(); private Map<Long, BiologicalDataBean> biologicalDataMap = new HashMap<>(); private Map<String, Set<String>> parameterMpTermMap = new HashMap<>(); private Map<String, String> embryoSignificantResults = new HashMap<>(); private Set<String> VIA_SIGNIFICANT = new HashSet<>(); private Set<String> MALE_FER_SIGNIFICANT = new HashSet<>(); private Set<String> FEMALE_FER_SIGNIFICANT = new HashSet<>(); private List<String> shouldHaveAdded = new ArrayList<>(); private Set<String> uniqueSRKeys = new ConcurrentSkipListSet<>(); public void setPipelineMap(Map<Long, ImpressBaseDTO> pipelineMap) { this.pipelineMap = pipelineMap; } public void setProcedureMap(Map<Long, ImpressBaseDTO> procedureMap) { this.procedureMap = procedureMap; } public void setParameterMap(Map<Long, ParameterDTO> parameterMap) { this.parameterMap = parameterMap; } private OntologyParser mpParser; private OntologyParser mpMaParser; private OntologyParser maParser; private OntologyParserFactory ontologyParserFactory; private MpTermService mpTermService; private ParameterRepository parameterRepository; private SolrClient statisticalResultCore; protected StatisticalResultsIndexer() { } @Inject public StatisticalResultsIndexer( @NotNull DataSource komp2DataSource, @NotNull OntologyTermRepository ontologyTermRepository, @NotNull MpTermService mpTermService, @NotNull ParameterRepository parameterRepository, @NotNull SolrClient 
statisticalResultCore) { super(komp2DataSource, ontologyTermRepository); this.mpTermService = mpTermService; this.parameterRepository = parameterRepository; this.statisticalResultCore = statisticalResultCore; } public void setMpParser(OntologyParser mpParser) { this.mpParser = mpParser; } public void setMpMaParser(OntologyParser mpMaParser) { this.mpMaParser = mpMaParser; } public void setMaParser(OntologyParser maParser) { this.maParser = maParser; } public OntologyParserFactory getOntologyParserFactory() { return ontologyParserFactory; } public void setOntologyParserFactory(OntologyParserFactory ontologyParserFactory) { this.ontologyParserFactory = ontologyParserFactory; } @Override public RunStatus validateBuild() throws IndexerException { return super.validateBuild(statisticalResultCore); } @Override public RunStatus run() throws IndexerException, IOException { long start = System.currentTimeMillis(); RunStatus runStatus = new RunStatus(); try { Connection connection = komp2DataSource.getConnection(); synchronized(this) { ontologyParserFactory = new OntologyParserFactory(komp2DataSource, owlpath); mpParser = ontologyParserFactory.getMpParser(); mpMaParser = ontologyParserFactory.getMpMaParser(); maParser = ontologyParserFactory.getMaParser(); pipelineMap = IndexerMap.getImpressPipelines(connection); procedureMap = IndexerMap.getImpressProcedures(connection); parameterMap = IndexerMap.getImpressParameters(connection); } populateBiologicalDataMap(); populateResourceDataMap(); populateSexesMap(); populateParameterMpTermMap(); populateEmbryoSignificanceMap(); populateAdultLineLevelSignificanceMap(); } catch (SQLException | OWLOntologyCreationException | OWLOntologyStorageException e) { throw new IndexerException(e); } expectedDocumentCount = populateStatisticalResultsSolrCore(); logger.info((SAVE?"":"Would have") + " Added {} total beans in {}", expectedDocumentCount, commonUtils.msToHms(System.currentTimeMillis() - start)); return runStatus; } private int 
populateStatisticalResultsSolrCore() throws IndexerException { int count = 0; try { if (SAVE) statisticalResultCore.deleteByQuery("*:*"); if (SAVE) statisticalResultCore.commit(); List<Callable<List<StatisticalResultDTO>>> resultGenerators = Arrays.asList( getViabilityResults() , getFertilityResults() , getReferenceRangePlusResults() , getEmbryoViabilityResults() , getEmbryoResults() , getGrossPathologyResults() , getUnidimensionalResults() , getCategoricalResults() ); ExecutorService pool = Executors.newFixedThreadPool(4); List<Future<List<StatisticalResultDTO>>> producers = new ArrayList<>(); for (Callable<List<StatisticalResultDTO>> r : resultGenerators) { Future<List<StatisticalResultDTO>> future = pool.submit(r); producers.add(future); } AtomicInteger atomicInt = new AtomicInteger(0); for (Future<List<StatisticalResultDTO>> future : producers) { try { atomicInt.addAndGet(future.get().size()); } catch (ExecutionException | InterruptedException e) { e.printStackTrace(); } } // Stop threadpool pool.shutdown(); count = atomicInt.get(); if (SAVE) statisticalResultCore.commit(); checkSolrCount(count); logger.info((SAVE?"":"Would have") + " Added {} statistical result documents", count); } catch (IOException | SolrServerException e) { throw new IndexerException(e); } return count; } public ViabilityResults getViabilityResults() {return new ViabilityResults(); } public FertilityResults getFertilityResults() {return new FertilityResults(); } public ReferenceRangePlusResults getReferenceRangePlusResults() {return new ReferenceRangePlusResults(); } public UnidimensionalResults getUnidimensionalResults() {return new UnidimensionalResults(); } public CategoricalResults getCategoricalResults() {return new CategoricalResults(); } public EmbryoViabilityResults getEmbryoViabilityResults() {return new EmbryoViabilityResults(); } public EmbryoResults getEmbryoResults() {return new EmbryoResults(); } public GrossPathologyResults getGrossPathologyResults() {return new 
GrossPathologyResults(); } /** * Check to see if the count of documents we think have been added actually matches * the number of documents in solr * * @param documentsAddedCount The number of documents added */ private void checkSolrCount(Integer documentsAddedCount) throws SolrServerException, IOException { SolrQuery query = new SolrQuery(); query.setQuery("*:*").setRows(0); QueryResponse response = statisticalResultCore.query(query); Long solrDocumentCount = response.getResults().getNumFound(); if (documentsAddedCount - solrDocumentCount != 0) { logger.warn(" Count of documents in solr: {}, count added by indexer: {}, Difference: {}", solrDocumentCount, documentsAddedCount, documentsAddedCount - solrDocumentCount); } if (documentsAddedCount - solrDocumentCount > 0) { // The java Set.add() method returns false when attempting to add an element that already exists in // the set so the filter will remove all non-duplicate elements leaving only those document IDs that // have been added twice Set<String> uniques = new HashSet<>(); Set<String> diff = shouldHaveAdded .stream() .filter(e -> ! uniques.add(e)) .collect(Collectors.toSet()); logger.warn(" Should have added these {} doc IDs, but missing from solr {}", diff.size(), StringUtils.join(diff, ", ")); } } private Double nullCheckResult(ResultSet r, String field) throws SQLException { double v = r.getDouble(field); return r.wasNull() ? 
null : v; } private StatisticalResultDTO parseResultCommonFields(ResultSet r) throws SQLException { StatisticalResultDTO doc = new StatisticalResultDTO(); doc.setDocId(r.getString("doc_id")); doc.setDataType(r.getString("data_type")); // Experiment details // Use the procedure prefix to associated with the result to find the procedure prefix String procedurePrefix = StringUtils.join(Arrays.asList(procedureMap.get(r.getLong("procedure_id")).getStableId().split("_")).subList(0, 2), "_"); if (ParameterConstants.source3iProcedurePrefixes.contains(procedurePrefix)) { // Override the resource for the 3i procedures doc.setResourceId(resourceMap.get(RESOURCE_3I).id); doc.setResourceName(resourceMap.get(RESOURCE_3I).shortName); doc.setResourceFullname(resourceMap.get(RESOURCE_3I).name); } else { doc.setResourceId(r.getLong("resource_id")); doc.setResourceName(r.getString("resource_name")); doc.setResourceFullname(r.getString("resource_fullname")); } doc.setProjectId(r.getLong("project_id")); doc.setProjectName(r.getString("project_name")); doc.setPhenotypingCenter(r.getString("phenotyping_center")); doc.setControlBiologicalModelId(r.getLong("control_id")); doc.setMutantBiologicalModelId(r.getLong("experimental_id")); doc.setZygosity(r.getString("experimental_zygosity")); doc.setDependentVariable(r.getString("dependent_variable")); doc.setExternalDbId(r.getLong("external_db_id")); doc.setDbId(r.getLong("db_id")); doc.setOrganisationId(r.getLong("organisation_id")); doc.setPhenotypingCenterId(r.getLong("phenotyping_center_id")); doc.setControlSelectionMethod(r.getString("control_selection_strategy")); doc.setStatisticalMethod(r.getString("statistical_method")); doc.setWorkflow(r.getString("workflow")); doc.setMaleControlCount(r.getInt("male_controls")); doc.setFemaleControlCount(r.getInt("female_controls")); doc.setMaleMutantCount(r.getInt("male_mutants")); doc.setFemaleMutantCount(r.getInt("female_mutants")); doc.setColonyId(r.getString("colony_id")); 
doc.setStatus(r.getString("status")); if (doc.getPhenotypeSex() == null) { doc.setPhenotypeSex(new ArrayList<>()); } if (doc.getMaleMutantCount()>0) { doc.getPhenotypeSex().add(SexType.male.getName()); } if (doc.getFemaleMutantCount()>0) { doc.getPhenotypeSex().add(SexType.female.getName()); } // Always set a metadata group here to allow for simpler searching for // unique results and to maintain parity with the observation index // where "empty string" metadata group means no required metadata. if (StringUtils.isNotEmpty(r.getString("metadata_group"))) { doc.setMetadataGroup(r.getString("metadata_group")); } else { doc.setMetadataGroup(""); } addImpressData(r, doc); // Biological details addBiologicalData(doc, doc.getMutantBiologicalModelId()); final OntologyTerm lifeStage = getLifeStage(doc.getParameterStableId()); if (lifeStage != null) { doc.setLifeStageAcc(lifeStage.getId().getAccession()); doc.setLifeStageName(lifeStage.getName()); } else { logger.info(" Life stage is NULL for doc id " + doc.getDocId()); } // MP Terms must come after setting the stage as it's used for selecting MA or EMAPA addMpTermData(r, doc); return doc; } /** * parseLineResult changes a database result set for a line into a solr document * * @param r the result set * @return a solr document */ private StatisticalResultDTO parseLineResult(ResultSet r) throws SQLException { StatisticalResultDTO doc = new StatisticalResultDTO(); String docId = r.getString("doc_id"); if (docId == null) { docId = String.valueOf(Math.random()); } doc.setDocId(docId); doc.setDataType(r.getString("data_type")); doc.setResourceId(r.getLong("resource_id")); doc.setResourceName(r.getString("resource_name")); doc.setResourceFullname(r.getString("resource_fullname")); doc.setProjectId(r.getLong("project_id")); doc.setProjectName(r.getString("project_name")); doc.setPhenotypingCenter(r.getString("phenotyping_center")); doc.setMutantBiologicalModelId(r.getLong("biological_model_id")); 
doc.setZygosity(r.getString("experimental_zygosity")); doc.setDependentVariable(r.getString("dependent_variable")); doc.setExternalDbId(r.getLong("external_db_id")); doc.setDbId(r.getLong("db_id")); doc.setPhenotypingCenterId(r.getLong("phenotyping_center_id")); doc.setStatisticalMethod("Supplied as data"); doc.setColonyId(r.getString("colony_id")); doc.setStatus("Success"); // Need to set sgnificance if a phenotype association has been made for this data set doc.setSignificant(false); // Always set a metadata group here to allow for simpler searching for // unique results and to maintain parity with the observation index // where "empty string" metadata group means no required metadata. if (StringUtils.isNotEmpty(r.getString("metadata_group"))) { doc.setMetadataGroup(r.getString("metadata_group")); } else { doc.setMetadataGroup(""); } // Fertility results DO NOT contain the counts of controls/mutants switch (r.getString("dependent_variable")) { case "IMPC_VIA_001_001": doc.setMaleMutantCount(r.getInt("male_mutants")); doc.setFemaleMutantCount(r.getInt("female_mutants")); // Viability parameter significant for both sexes doc.setPhenotypeSex(Arrays.asList("female", "male")); if (VIA_SIGNIFICANT.contains(doc.getColonyId())) { doc.setSignificant(true); } break; case "IMPC_FER_001_001": // Fertility significant for Males doc.setPhenotypeSex(Collections.singletonList("male")); if (MALE_FER_SIGNIFICANT.contains(doc.getColonyId())) { doc.setSignificant(true); } break; case "IMPC_FER_019_001": // Fertility significant for females doc.setPhenotypeSex(Collections.singletonList("female")); if (FEMALE_FER_SIGNIFICANT.contains(doc.getColonyId())) { doc.setSignificant(true); } break; } // Impress pipeline data details addImpressData(r, doc); // Biological details addBiologicalData(doc, doc.getMutantBiologicalModelId()); OntologyTerm lifeStage = getLifeStage(doc.getParameterStableId()); if (lifeStage != null) { doc.setLifeStageAcc(lifeStage.getId().getAccession()); 
doc.setLifeStageName(lifeStage.getName()); } else { logger.info(" Line result stage is NULL for doc id " + doc.getDocId()); } // MP Term details addMpTermData(r, doc); try { String category = r.getString("category"); if (!r.wasNull() && category.equals("Insufficient numbers to make a call")) { doc.setStatus("Failed - " + category); } } catch (java.sql.SQLException e) { // do nothing. Result set did not have "category" in it } try { r.getString("experimental_zygosity"); if (r.wasNull()) { String category = r.getString("category"); if (!r.wasNull()) { String[] fields = category.split("-"); ZygosityType zygosity; switch (fields[0].trim().toLowerCase()) { case "heterozygous": zygosity = ZygosityType.heterozygote; break; case "hemizygous": zygosity = ZygosityType.hemizygote; break; case "homozygous": default: zygosity = ZygosityType.homozygote; break; } doc.setZygosity(zygosity.getName()); } } } catch (java.sql.SQLException e) { // do nothing. Result set did not have "category" in it } String sex = r.getString("sex"); if (!r.wasNull()) { doc.setSex(sex); // Do not attempt to add to the phenotye_sex fuield if it has been manually set as it is for // the viability and fertility parameters listed here if ( ! 
Arrays.asList("IMPC_VIA_001_001", "IMPC_FER_001_001", "IMPC_FER_019_001").contains(doc.getDependentVariable())) {
                // Add the sex to the phenotype_sexes field
                if (doc.getPhenotypeSex() == null) {
                    doc.setPhenotypeSex(new ArrayList<>());
                }
                if (!doc.getPhenotypeSex().contains(sex)) {
                    doc.getPhenotypeSex().add(sex);
                }
            }
        }

        // p_value / effect_size are only copied onto the document when an MP
        // term has already been associated with it
        Double p_value = r.getDouble("p_value");
        if (!r.wasNull() && doc.getMpTermId()!=null) {
            doc.setpValue(p_value);
        }

        Double effect_size = r.getDouble("effect_size");
        if (!r.wasNull() && doc.getMpTermId()!=null) {
            doc.setEffectSize(effect_size);
        }

        return doc;
    }

    /**
     * Add the appropriate MP term associations to the document.
     * This is only used for the embryo data for the moment (2016-04-07).
     *
     * @param mpId the mp term accession id (null is tolerated; nothing is added)
     * @param doc  the solr document to update
     */
    private void addMpTermData(String mpId, StatisticalResultDTO doc) {

        // Add the appropriate fields for the global MP term
        if (mpId != null) {
            OntologyTermDTO mpTerm = mpParser.getOntologyTerm(mpId);
            if (mpTerm != null) {
                doc.setMpTermId(mpTerm.getAccessionId());
                doc.setMpTermName(mpTerm.getName());

                if (mpTerm.getTopLevelIds() == null ){
                    // if the mpId itself is a top level, add itself as a top level
                    doc.addTopLevelMpTermId(mpTerm.getAccessionId());
                    doc.addTopLevelMpTermName(mpTerm.getName());
                } else {
                    doc.addTopLevelMpTermId(mpTerm.getTopLevelIds());
                    doc.addTopLevelMpTermName(mpTerm.getTopLevelNames());
                }

                doc.addIntermediateMpTermId(mpTerm.getIntermediateIds());
                doc.addIntermediateMpTermName(mpTerm.getIntermediateNames());

                addAnatomyMapping(doc, mpTerm);
            }
        }
    }

    /**
     * Map the document's MP term (and its MP ancestors) onto anatomy (MA) terms
     * via the mp-ma cross-ontology references.
     *
     * @param doc    the solr document to update (supplies the MP term id and life stage)
     * @param mpTerm the already-resolved MP ontology term for the document
     */
    private void addAnatomyMapping(StatisticalResultDTO doc, OntologyTermDTO mpTerm){

        // mp-anatomy mappings (all MA at the moment)
        // For all non-embryo life stages indicated by not containing a digit
        // NOTE(review): matches("[0-9]") is only true when the accession is exactly
        // one digit; if the intent is "contains a digit", the pattern would need to
        // be ".*[0-9].*" -- confirm against the life stage accession format.
        if (doc.getLifeStageAcc() != null && ! doc.getLifeStageAcc().matches("[0-9]")) {

            Set<String> referencedClasses = mpMaParser.getReferencedClasses(doc.getMpTermId(), OntologyParserFactory.VIA_PROPERTIES, "MA");
            if (referencedClasses != null && referencedClasses.size() > 0) {
                for (String id : referencedClasses) {
                    OntologyTermDTO maTerm = maParser.getOntologyTerm(id);
                    if (maTerm != null) {
                        doc.addAnatomyTermId(id);
                        doc.addAnatomyTermName(maTerm.getName());
                        if (maTerm.getIntermediateIds() != null) {
                            doc.addIntermediateAnatomyTermId(maTerm.getIntermediateIds());
                            doc.addIntermediateAnatomyTermName(maTerm.getIntermediateNames());
                        }
                        if (maTerm.getTopLevelIds() != null) {
                            doc.addTopLevelAnatomyTermId(maTerm.getTopLevelIds());
                            doc.addTopLevelAnatomyTermName(maTerm.getTopLevelNames());
                        }
                    }else{
                        logger.info("MA term is null for id:"+doc.getMpTermId());
                    }
                }
            }

            // Also check mappings up the tree, as a leaf term might not have a
            // mapping, but the parents might.
            Set<String> anatomyIdsForAncestors = new HashSet<>();
            for (String mpAncestorId : mpTerm.getIntermediateIds()) {
                if (mpMaParser.getReferencedClasses(mpAncestorId, OntologyParserFactory.VIA_PROPERTIES, "MA") != null) {
                    anatomyIdsForAncestors.addAll( mpMaParser.getReferencedClasses(mpAncestorId, OntologyParserFactory.VIA_PROPERTIES, "MA"));
                }
            }

            for (String id : anatomyIdsForAncestors) {
                OntologyTermDTO maTerm = maParser.getOntologyTerm(id);
                if (maTerm != null) {
                    // Ancestor-derived mappings are recorded as intermediate terms only
                    doc.addIntermediateAnatomyTermId(id);
                    doc.addIntermediateAnatomyTermName(maTerm.getName());
                    if (maTerm.getIntermediateIds() != null) {
                        doc.addIntermediateAnatomyTermId(maTerm.getIntermediateIds());
                        doc.addIntermediateAnatomyTermName(maTerm.getIntermediateNames());
                    }
                    if (maTerm.getTopLevelIds() != null) {
                        doc.addTopLevelAnatomyTermId(maTerm.getTopLevelIds());
                        doc.addTopLevelAnatomyTermName(maTerm.getTopLevelNames());
                    }
                }else{
                    logger.info("maTerm is null when looking for anatomyIdsForAncestors id:"+id);
                }
            }
        }
    }

    /**
     * Re-run the MP term lookup for a categorical result that was loaded without
     * one: rebuild a ResultDTO from the current row and ask mpTermService for
     * the matching term at significance threshold 0.0001.
     *
     * @param doc       the solr document being built (supplies parameter stable id and sex)
     * @param resultSet the current statistical result row
     * @return the MP accession id, or null if none could be determined
     */
    String getResult(StatisticalResultDTO doc, ResultSet resultSet) {

        String mpTerm = null;
        ResultDTO result = new ResultDTO();

        try {
            result.setPipelineId(resultSet.getLong("pipeline_id"));
            result.setProcedureId(resultSet.getLong("procedure_id"));
            result.setParameterId(resultSet.getLong("parameter_id"));
            result.setParameterStableId(resultSet.getString("dependent_variable"));
            result.setNullTestPvalue(resultSet.getDouble("categorical_p_value"));
            result.setGenotypeEffectSize(resultSet.getDouble("categorical_effect_size"));

            try {
                result.setSex(SexType.valueOf(resultSet.getString("sex")));
            } catch (Exception e) {
                // Missing or unparseable sex is treated as "not specified"
                result.setSex(null);
            }

            SqlUtils sqlUtils = new SqlUtils();
            Boolean additionalColumns;
            try (Connection conn = komp2DataSource.getConnection()) {
                // NOTE(review): table name here is singular ("stats_categorical_result")
                // while queries elsewhere in this file use "stats_categorical_results" --
                // confirm which spelling the schema check expects.
                additionalColumns = sqlUtils.columnInSchemaMysql(conn, "stats_categorical_result", "male_p_value");
            }

            if (additionalColumns) {
                result.setMalePvalue(resultSet.getDouble("male_p_value"));
                result.setMaleEffectSize(resultSet.getDouble("male_effect_size"));
                result.setFemalePvalue(resultSet.getDouble("female_p_value"));
                result.setFemaleEffectSize(resultSet.getDouble("female_effect_size"));
            }

            result.setCategoryA(resultSet.getString("category_a"));
            result.setCategoryB(resultSet.getString("category_b"));
            if (result.getCategoryA() == null) {
                // Default categories when none were recorded on the row
                result.setCategoryA("abnormal");
                result.setCategoryB("normal");
            }

            try (Connection connection = komp2DataSource.getConnection()) {
                OntologyTerm term = mpTermService.getMPTerm(
                        doc.getParameterStableId(),
                        result,
                        doc.getSex() == null ? null : SexType.valueOf(doc.getSex()),
                        connection, 0.0001f, Boolean.TRUE);
                if (term != null) {
                    mpTerm = term.getId().getAccession();
                }
            }

        } catch (SQLException e) {
            // NOTE(review): failure here silently yields null (stack trace only)
            e.printStackTrace();
        }

        return mpTerm;
    }

    /**
     * Add the appropriate MP term associations to the document
     *
     * @param r   the result set to pull the relevant fields from
     * @param doc the solr document to update
     * @throws SQLException if the query fields do not exist
     */
    private void addMpTermData(ResultSet r, StatisticalResultDTO doc) throws SQLException {

        String mpTerm = r.getString("mp_acc");

        // For reference range plus results only, test that the MP term has been set,
        // if not, try to set the abnormal term
        if (doc.getStatisticalMethod() != null && doc.getStatisticalMethod().equals("Reference Ranges Plus framework")) {

            // Sometimes, the stats result generator doesn't set the MP term (also not
            // for either sex), in that case, try to set the abnormal term for the parameter
            if (r.wasNull()) {
                // If there is not a male MP term set
                r.getString("male_mp_acc");
                if (r.wasNull()) {
                    // And, if there is not a female MP term set
                    r.getString("female_mp_acc");
                    if (r.wasNull()) {

                        // Lookup and cache the impress object corresponding to the parameter in question
                        if (!impressAbnormals.containsKey(doc.getParameterStableId())) {
                            Parameter parameter = parameterRepository.getFirstByStableId(doc.getParameterStableId());
                            List<String> abnormalMpIds = parameter.getAnnotations()
                                    .stream()
                                    .filter(x -> x.getType().equals(PhenotypeAnnotationType.abnormal))
                                    .map(x -> x.getOntologyTerm().getId().getAccession())
                                    .collect(Collectors.toList());
                            impressAbnormals.put(doc.getParameterStableId(), abnormalMpIds);
                        }

                        // Get the first abnormal term ID as that is likely the real "abnormal" term
                        if (impressAbnormals.containsKey(doc.getParameterStableId())) {
                            List<String> abnormals = impressAbnormals.get(doc.getParameterStableId());
                            if (CollectionUtils.isNotEmpty(abnormals)) {
                                mpTerm = abnormals.get(0);
                            }
                        }
                    }
                }
            }
        }

        // If after all that the mp_term is still
// null, this is probably a poorly loaded statistical result.
        // Try to determine the correct MP term by parsing the result again
        if (mpTerm == null && doc.getDataType().contains("categorical")) {
            mpTerm = getResult(doc, r);
        }

        // Add the appropriate fields for the global MP term
        if (mpTerm != null) {
            addMpTermData(mpTerm, doc);
        }

        // Process the male MP term: copy the term plus its top-level and
        // intermediate ancestors onto the male-specific document fields
        mpTerm = r.getString("male_mp_acc");
        if (!r.wasNull()) {
            OntologyTermDTO term = mpParser.getOntologyTerm(mpTerm);
            if (term != null) {
                doc.setMaleMpTermId(term.getAccessionId());
                doc.setMaleMpTermName(term.getName());
                doc.addMaleTopLevelMpTermId(term.getTopLevelIds());
                doc.addMaleTopLevelMpTermName(term.getTopLevelNames());
                doc.addMaleIntermediateMpTermId(term.getIntermediateIds());
                doc.addMaleIntermediateMpTermName(term.getIntermediateNames());
            }
        }

        // Process the female MP term (same treatment as the male term above)
        mpTerm = r.getString("female_mp_acc");
        if (!r.wasNull()) {
            OntologyTermDTO term = mpParser.getOntologyTerm(mpTerm);
            if (term != null) {
                doc.setFemaleMpTermId(term.getAccessionId());
                doc.setFemaleMpTermName(term.getName());
                doc.addFemaleTopLevelMpTermId(term.getTopLevelIds());
                doc.addFemaleTopLevelMpTermName(term.getTopLevelNames());
                doc.addFemaleIntermediateMpTermId(term.getIntermediateIds());
                doc.addFemaleIntermediateMpTermName(term.getIntermediateNames());
            }
        }
    }

    /**
     * Copy the pipeline/procedure/parameter details for this result onto the
     * Solr document (from the cached pipeline/procedure/parameter maps), and
     * populate the set of all MP terms this parameter could possibly produce
     * (each annotated term plus its intermediate ancestors).
     *
     * @param r   result set positioned at the current row; must expose
     *            pipeline_id, procedure_id and parameter_id columns
     * @param doc the solr document to update
     * @throws SQLException if the query fields do not exist
     */
    private void addImpressData(ResultSet r, StatisticalResultDTO doc) throws SQLException {

        doc.setPipelineId(pipelineMap.get(r.getLong("pipeline_id")).getId());
        doc.setPipelineStableKey(pipelineMap.get(r.getLong("pipeline_id")).getStableKey());
        doc.setPipelineName(pipelineMap.get(r.getLong("pipeline_id")).getName());
        doc.setPipelineStableId(pipelineMap.get(r.getLong("pipeline_id")).getStableId());
        doc.setProcedureId(procedureMap.get(r.getLong("procedure_id")).getId());
        doc.setProcedureStableKey(procedureMap.get(r.getLong("procedure_id")).getStableKey());
        doc.setProcedureName(procedureMap.get(r.getLong("procedure_id")).getName());
        doc.setProcedureStableId(procedureMap.get(r.getLong("procedure_id")).getStableId());
        doc.setParameterId(parameterMap.get(r.getLong("parameter_id")).getId());
        doc.setParameterStableKey(parameterMap.get(r.getLong("parameter_id")).getStableKey());
        doc.setParameterName(parameterMap.get(r.getLong("parameter_id")).getName());
        doc.setParameterStableId(parameterMap.get(r.getLong("parameter_id")).getStableId());

        // Create field that contains all possible MP terms (including intermediate
        // and top level terms) that this parameter can produce
        Set<String> mpIds = parameterMpTermMap.get(doc.getParameterStableId());
        if (mpIds != null) {
            mpIds.forEach(mpId -> {
                OntologyTermDTO term = mpParser.getOntologyTerm(mpId);
                if (term !=null && term.getAccessionId() != null){
                    doc.addMpTermIdOptions(term.getAccessionId());
                    doc.addMpTermNameOptions(term.getName());
                    doc.addMpTermIdOptions(term.getIntermediateIds());
                    doc.addMpTermNameOptions(term.getIntermediateNames());
                }else{
                    logger.debug("term is null in indexer for mpId"+mpId);
                }
            });
        } else {
            // Report each parameter without MP terms only once, to avoid log spam
            String p = doc.getParameterStableId();
            if ( !
alreadyReported.contains(p)) {
                alreadyReported.add(p);
                logger.debug(" Cannot find MP terms for parameter {}", p);
            }
        }
    }

    /**
     * Copy the gene/allele/strain details of the supplied biological model onto
     * the Solr document. Logs an error and leaves the document untouched when
     * the model id is not present in biologicalDataMap (populated by
     * populateBiologicalDataMap()).
     *
     * @param doc               the solr document to update
     * @param biologicalModelId key into biologicalDataMap
     */
    private void addBiologicalData(StatisticalResultDTO doc, Long biologicalModelId) {

        BiologicalDataBean b = biologicalDataMap.get(biologicalModelId);
        if (b == null) {
            logger.error(" Cannot find genomic information for biological_model_id {}", biologicalModelId);
            return;
        }

        doc.setMarkerAccessionId(b.geneAcc);
        doc.setMarkerSymbol(b.geneSymbol);
        doc.setAlleleAccessionId(b.alleleAccession);
        doc.setAlleleName(b.alleleName);
        doc.setAlleleSymbol(b.alleleSymbol);
        doc.setStrainAccessionId(b.strainAcc);
        doc.setStrainName(b.strainName);
        doc.setGeneticBackground(b.geneticBackground);
    }

    /**
     * Add all the relevant data required for quickly looking up biological data
     * associated to a biological sample. Populates biologicalDataMap, keyed by
     * biological model id; only models with an associated genomic feature are loaded.
     *
     * @throws SQLException when a database exception occurs
     */
    public void populateBiologicalDataMap() throws SQLException {

        String query = "SELECT bm.id, "
                + "strain.acc AS strain_acc, strain.name AS strain_name, bm.genetic_background, "
                + "(SELECT DISTINCT allele_acc FROM biological_model_allele bma WHERE bma.biological_model_id=bm.id) AS allele_accession, "
                + "(SELECT DISTINCT a.symbol FROM biological_model_allele bma INNER JOIN allele a ON (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bm.id) AS allele_symbol, "
                + "(SELECT DISTINCT a.name FROM biological_model_allele bma INNER JOIN allele a ON (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bm.id) AS allele_name, "
                + "(SELECT DISTINCT gf_acc FROM biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=bm.id) AS acc, "
                + "(SELECT DISTINCT gf.symbol FROM biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf ON gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=bm.id) AS symbol "
                + "FROM biological_model bm "
                + "INNER JOIN biological_model_strain bmstrain ON bmstrain.biological_model_id=bm.id "
                + "INNER JOIN strain ON strain.acc=bmstrain.strain_acc "
                + "WHERE exists(SELECT DISTINCT gf.symbol FROM biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf ON gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=bm.id)";

        try (Connection connection = komp2DataSource.getConnection();
             PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {

            // Integer.MIN_VALUE fetch size asks the MySQL JDBC driver to stream rows
            // instead of buffering the whole result set in memory
            p.setFetchSize(Integer.MIN_VALUE);
            ResultSet resultSet = p.executeQuery();

            while (resultSet.next()) {
                BiologicalDataBean b = new BiologicalDataBean();
                b.alleleAccession = resultSet.getString("allele_accession");
                b.alleleSymbol = resultSet.getString("allele_symbol");
                b.alleleName = resultSet.getString("allele_name");
                b.geneAcc = resultSet.getString("acc");
                b.geneSymbol = resultSet.getString("symbol");
                b.strainAcc = resultSet.getString("strain_acc");
                b.strainName = resultSet.getString("strain_name");
                b.geneticBackground = resultSet.getString("genetic_background");
                biologicalDataMap.put(resultSet.getLong("id"), b);
            }
        }

        logger.info(" Mapped {} biological data entries", biologicalDataMap.size());
    }

    /**
     * Populate resourceMap (keyed by external_db short name) for quickly
     * looking up resource details.
     * NOTE(review): the previous javadoc here was a copy/paste of the
     * biological-data comment; this method actually loads the external_db table.
     *
     * @throws SQLException when a database exception occurs
     */
    public void populateResourceDataMap() throws SQLException {

        String query = "SELECT id, name, short_name FROM external_db";

        try (Connection connection = komp2DataSource.getConnection();
             PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {

            p.setFetchSize(Integer.MIN_VALUE);
            ResultSet resultSet = p.executeQuery();

            while (resultSet.next()) {
                ResourceBean b = new ResourceBean();
                b.id = resultSet.getLong("id");
                b.name = resultSet.getString("name");
                b.shortName = resultSet.getString("short_name");
                resourceMap.put(resultSet.getString("short_name"), b);
            }
        }

        logger.info(" Mapped {} resource data entries", resourceMap.size());
    }

    /**
     *
* Add all the relevant data required for quickly looking up which sexes
     * contributed to each statistical result. Populates sexesMap, keyed by
     * "&lt;datatype&gt;-&lt;result id&gt;" (unidimensional-/categorical-/rrplus- prefixes).
     *
     * @throws SQLException when a database exception occurs
     */
    public void populateSexesMap() throws SQLException {

        List<String> queries = Arrays.asList(
                "SELECT CONCAT('unidimensional-', s.id) AS id, GROUP_CONCAT(distinct p.sex) as sexes FROM stats_unidimensional_results s INNER JOIN stat_result_phenotype_call_summary r ON r.unidimensional_result_id=s.id INNER JOIN phenotype_call_summary p ON p.id=r.phenotype_call_summary_id GROUP BY s.id",
                "SELECT CONCAT('categorical-', s.id) AS id, GROUP_CONCAT(distinct p.sex) as sexes FROM stats_categorical_results s INNER JOIN stat_result_phenotype_call_summary r ON r.categorical_result_id=s.id INNER JOIN phenotype_call_summary p ON p.id=r.phenotype_call_summary_id GROUP BY s.id",
                "SELECT CONCAT('rrplus-', s.id) AS id, GROUP_CONCAT(distinct p.sex) as sexes FROM stats_rrplus_results s INNER JOIN stat_result_phenotype_call_summary r ON r.rrplus_result_id=s.id INNER JOIN phenotype_call_summary p ON p.id=r.phenotype_call_summary_id GROUP BY s.id"
        );

        for (String query : queries) {
            try (Connection connection = komp2DataSource.getConnection();
                 PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {

                p.setFetchSize(Integer.MIN_VALUE);
                ResultSet resultSet = p.executeQuery();

                while (resultSet.next()) {
                    // "sexes" is a GROUP_CONCAT, e.g. "female, male" -> [female, male]
                    List<String> sexes = new ArrayList<>(
                            Arrays.asList(
                                    resultSet.getString("sexes")
                                            .replaceAll(" ", "")
                                            .split(",")));
                    sexesMap.put(resultSet.getString("id"), sexes);
                }
            }
        }

        logger.info(" Mapped {} sexes data entries", sexesMap.size());
    }

    /**
     * The embryo significance map keys are document IDs that should match the
     * embryo documents; the value is the MP accession id of the significant call.
     */
    public void populateEmbryoSignificanceMap() throws SQLException {

        // Generate MySQL REGEX string to include all embryo parameters
        Set<String> allEmbryoProcedures = new HashSet<>();
        allEmbryoProcedures.addAll(EMBRYO_PROCEDURES_NO_VIA);
        allEmbryoProcedures.addAll(EMBRYO_PROCEDURES_VIA);
        String embryoProcedures = StringUtils.join(allEmbryoProcedures, "|");

        // Populate the significant results map with this query
        String sigResultsQuery = "SELECT CONCAT(parameter.stable_id, '_', pcs.colony_id, pcs.organisation_id) AS doc_id, mp_acc "
                + "FROM phenotype_call_summary pcs "
                + "INNER JOIN phenotype_parameter parameter ON parameter.id = pcs.parameter_id "
                + "WHERE parameter.stable_id REGEXP '" + embryoProcedures + "' AND pcs.mp_acc IS NOT NULL";

        try (Connection connection = komp2DataSource.getConnection();
             PreparedStatement p = connection.prepareStatement(sigResultsQuery)) {

            ResultSet r = p.executeQuery();
            while (r.next()) {
                String docId = r.getString("doc_id");
                String mpAcc = r.getString("mp_acc");
                embryoSignificantResults.put(docId, mpAcc);
            }
        }

        logger.info(" Mapped {} embryo significant data entries", embryoSignificantResults.size());
    }

    /**
     * Populate the colony-id sets (VIA_SIGNIFICANT, MALE_FER_SIGNIFICANT,
     * FEMALE_FER_SIGNIFICANT) used to flag line-level viability/fertility
     * results as significant.
     *
     * @throws SQLException when a database exception occurs
     */
    void populateAdultLineLevelSignificanceMap() throws SQLException {

        // Local holder tying a human-readable label and a SQL query to the
        // destination set it fills
        class SignificantQuery {
            private String label;
            private String query;
            private Set<String> set;

            private SignificantQuery(String label, String query, Set<String> set) {
                this.label = label;
                this.query = query;
                this.set = set;
            }
        }

        List<SignificantQuery> sigQueries = new ArrayList<>();

        String query = "SELECT colony_id "
                + "FROM phenotype_call_summary pcs "
                + "INNER JOIN phenotype_parameter parameter ON parameter.id = pcs.parameter_id "
                + "WHERE parameter.stable_id = 'IMPC_VIA_001_001' ";
        sigQueries.add(new SignificantQuery("Viability", query, VIA_SIGNIFICANT));

        query = "SELECT colony_id "
                + "FROM phenotype_call_summary pcs "
                + "INNER JOIN phenotype_parameter parameter ON parameter.id = pcs.parameter_id "
                + "WHERE parameter.stable_id = 'IMPC_FER_001_001' ";
        sigQueries.add(new SignificantQuery("Male fertility", query, MALE_FER_SIGNIFICANT));

        query = "SELECT colony_id "
                + "FROM phenotype_call_summary pcs "
                + "INNER JOIN phenotype_parameter parameter ON parameter.id = pcs.parameter_id "
                + "WHERE parameter.stable_id = 'IMPC_FER_019_001' ";
        sigQueries.add(new SignificantQuery("Female fertility", query, FEMALE_FER_SIGNIFICANT));

        for (SignificantQuery sq : sigQueries) {
            try (Connection connection = komp2DataSource.getConnection();
                 PreparedStatement p = connection.prepareStatement(sq.query)) {

                ResultSet r = p.executeQuery();
                while (r.next()) {
                    sq.set.add(r.getString("colony_id"));
                }
            }
            logger.info(" Mapped {} {} significant data entries", sq.set.size(), sq.label);
        }
    }

    /**
     * Map each parameter stable id to the set of MP accession ids annotated on
     * it in IMPReSS (populates parameterMpTermMap).
     *
     * @throws SQLException when a database exception occurs
     */
    public void populateParameterMpTermMap() throws SQLException {

        String query = "SELECT stable_id, ontology_acc FROM phenotype_parameter p "
                + "INNER JOIN phenotype_parameter_lnk_ontology_annotation l ON l.parameter_id=p.id "
                + "INNER JOIN phenotype_parameter_ontology_annotation o ON o.id=l.annotation_id WHERE ontology_acc like 'MP:%'";

        try (Connection connection = komp2DataSource.getConnection();
             PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {

            p.setFetchSize(Integer.MIN_VALUE);
            ResultSet resultSet = p.executeQuery();

            while (resultSet.next()) {
                String parameter = resultSet.getString("stable_id");
                String ontologyTerm = resultSet.getString("ontology_acc");

                if( !
parameterMpTermMap.containsKey(parameter)) { parameterMpTermMap.put(parameter, new HashSet<>()); } parameterMpTermMap.get(parameter).add(ontologyTerm); } } logger.info(" Mapped {} parameterMpTerm data entries", parameterMpTermMap.size()); } static class ResourceBean { Long id; String name; String shortName; @Override public String toString() { return "ResourceBean{" + "id=" + id + ", name='" + name + '\'' + ", shortName='" + shortName + '\'' + '}'; } } /** * Internal class to act as Map value DTO for biological data */ private static class BiologicalDataBean { private String alleleAccession; private String alleleSymbol; private String alleleName; private String geneAcc; private String geneSymbol; private String strainAcc; private String strainName; private String geneticBackground; } public class CategoricalResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT CONCAT(dependent_variable, '_CAT_', sr.id) AS doc_id, " + " 'categorical' AS data_type, sr.id AS db_id, control_id, " + " experimental_id, experimental_sex AS sex, experimental_zygosity, " + " external_db_id, organisation_id, " + " pipeline_id, procedure_id, parameter_id, colony_id, " + " dependent_variable, control_selection_strategy, male_controls, " + " male_mutants, female_controls, female_mutants, " + " metadata_group, statistical_method, workflow, status, " + " category_a, category_b, " + " SEX_SPECIFIC_STATS " + " p_value AS categorical_p_value, effect_size AS categorical_effect_size, " + " mp_acc, NULL AS male_mp_acc, NULL AS female_mp_acc, " + " db.short_name AS resource_name, db.name AS resource_fullname, db.id AS resource_id, " + " proj.name AS project_name, proj.id AS project_id, " + " org.name AS phenotyping_center, org.id AS phenotyping_center_id " + "FROM stats_categorical_results sr " + "INNER JOIN external_db db ON db.id=sr.external_db_id " + "INNER JOIN project proj ON proj.id=sr.project_id " + "INNER JOIN organisation org ON org.id=sr.organisation_id " + "WHERE 
dependent_variable NOT LIKE '%FER%' AND dependent_variable NOT LIKE '%VIA%'"; @Override public List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); String additionalColumns = "male_p_value, male_effect_size, female_p_value, female_effect_size, classification_tag, "; try (Connection connection = komp2DataSource.getConnection()) { SqlUtils sqlUtils = new SqlUtils(); Boolean sexSpecificStats = sqlUtils.columnInSchemaMysql(connection, "stats_categorical_result", "male_p_value"); query = query.replaceAll("SEX_SPECIFIC_STATS", sexSpecificStats ? additionalColumns : ""); try (PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { StatisticalResultDTO doc = parseCategoricalResult(r, sexSpecificStats); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); if (docs.size() % REPORT_INTERVAL == 0) { logger.info((SAVE?"":"Would have") + " Added {} categorical doucments", docs.size()); } } } catch (Exception e) { logger.warn(" Error occurred getting categorical results", e); } } catch (Exception e) { logger.warn(" Error occurred getting categorical results", e); } logger.info((SAVE?"":"Would have") + " Added {} categorical documents", docs.size()); return docs; } private StatisticalResultDTO parseCategoricalResult(ResultSet r, Boolean additionalColumns) throws SQLException { StatisticalResultDTO doc = parseResultCommonFields(r); if (sexesMap.containsKey("categorical-" + doc.getDbId())) { doc.setPhenotypeSex(sexesMap.get("categorical-" + doc.getDbId())); } doc.setSex(r.getString("sex")); doc.setpValue(r.getDouble("categorical_p_value")); doc.setEffectSize(r.getDouble("categorical_effect_size")); 
if (additionalColumns) { doc.setMaleKoEffectPValue(r.getDouble("male_p_value")); doc.setMaleKoParameterEstimate(r.getDouble("male_effect_size")); doc.setFemaleKoEffectPValue(r.getDouble("female_p_value")); doc.setFemaleKoParameterEstimate(r.getDouble("female_effect_size")); doc.setClassificationTag(r.getString("classification_tag")); } setSignificantFlag(SIGNIFICANCE_THRESHOLD, doc); Set<String> categories = new HashSet<>(); if (StringUtils.isNotEmpty(r.getString("category_a"))) { categories.addAll(Arrays.asList(r.getString("category_a").split("\\|"))); } if (StringUtils.isNotEmpty(r.getString("category_b"))) { categories.addAll(Arrays.asList(r.getString("category_b") .split("\\|"))); } doc.setCategories(new ArrayList<>(categories)); if (! doc.getStatus().equals("Success")) { doc.setpValue(1.0); doc.setEffectSize(0.0); } return doc; } } /** * Populate unidimensional statistic results */ public class UnidimensionalResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT CONCAT(dependent_variable, '_CONT_', sr.id) as doc_id, " + " 'unidimensional' AS data_type, " + " sr.id AS db_id, control_id, experimental_id, experimental_zygosity, " + " external_db_id, organisation_id, " + " pipeline_id, procedure_id, parameter_id, colony_id, " + " dependent_variable, control_selection_strategy, " + " male_controls, male_mutants, female_controls, female_mutants, " + " male_control_mean, male_experimental_mean, female_control_mean, female_experimental_mean, " + " metadata_group, statistical_method, workflow, status, " + " batch_significance, " + " variance_significance, null_test_significance, genotype_parameter_estimate, " + " genotype_percentage_change, " + " genotype_stderr_estimate, genotype_effect_pvalue, gender_parameter_estimate, " + " gender_stderr_estimate, gender_effect_pvalue, weight_parameter_estimate, " + " weight_stderr_estimate, weight_effect_pvalue, gp1_genotype, " + " gp1_residuals_normality_test, gp2_genotype, gp2_residuals_normality_test, 
" + " blups_test, rotated_residuals_normality_test, intercept_estimate, " + " intercept_stderr_estimate, interaction_significance, interaction_effect_pvalue, " + " gender_female_ko_estimate, gender_female_ko_stderr_estimate, gender_female_ko_pvalue, " + " gender_male_ko_estimate, gender_male_ko_stderr_estimate, gender_male_ko_pvalue, " + " classification_tag, additional_information, " + " mp_acc, male_mp_acc, female_mp_acc, " + " db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + " proj.name as project_name, proj.id as project_id, " + " org.name as phenotyping_center, org.id as phenotyping_center_id " + "FROM stats_unidimensional_results sr " + "INNER JOIN external_db db on db.id=sr.external_db_id " + "INNER JOIN project proj on proj.id=sr.project_id " + "INNER JOIN organisation org on org.id=sr.organisation_id " + "WHERE dependent_variable NOT LIKE '%FER%' AND dependent_variable NOT LIKE '%VIA%'"; @Override public List<StatisticalResultDTO> call() { logger.info(" Starting unidimensional documents generation"); List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { StatisticalResultDTO doc = parseUnidimensionalResult(r); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); if (docs.size()% REPORT_INTERVAL ==0) { logger.info((SAVE?"":"Would have") + " Added {} unidimensional doucments", docs.size()); } } } catch (Exception e) { logger.warn(" Error occurred getting unidimensional results", e); } logger.info((SAVE?"":"Would have") + " Added {} unidimensional documents", 
docs.size()); return docs; } private StatisticalResultDTO parseUnidimensionalResult(ResultSet r) throws SQLException { StatisticalResultDTO doc = parseResultCommonFields(r); if (sexesMap.containsKey("unidimensional-" + doc.getDbId())) { doc.setPhenotypeSex(sexesMap.get("unidimensional-" + doc.getDbId())); } // Index the mean fields doc.setMaleControlMean(r.getDouble("male_control_mean")); doc.setMaleMutantMean(r.getDouble("male_experimental_mean")); doc.setFemaleControlMean(r.getDouble("female_control_mean")); doc.setFemaleMutantMean(r.getDouble("female_experimental_mean")); doc.setNullTestPValue(nullCheckResult(r, "null_test_significance")); // If PhenStat did not run, then the result will have a NULL for the null_test_significance field // In that case, fall back to Wilcoxon test Double pv = doc.getNullTestPValue(); if (pv==null && doc.getStatus().equals("Success") && doc.getStatisticalMethod() != null && doc.getStatisticalMethod().startsWith("Wilcoxon")) { // Wilcoxon test. Choose the most significant pvalue from the sexes pv = 1.0; double fPv = r.getDouble("gender_female_ko_pvalue"); if (!r.wasNull() && fPv < pv) { pv = fPv; } double mPv = r.getDouble("gender_male_ko_pvalue"); if (!r.wasNull() && mPv < pv) { pv = mPv; } } if ( ! 
doc.getStatus().equals("Success")) { pv = 1.0; } doc.setpValue(pv); setSignificantFlag(SIGNIFICANCE_THRESHOLD, doc); doc.setGroup1Genotype(r.getString("gp1_genotype")); doc.setGroup1ResidualsNormalityTest(nullCheckResult(r, "gp1_residuals_normality_test")); doc.setGroup2Genotype(r.getString("gp2_genotype")); doc.setGroup2ResidualsNormalityTest(nullCheckResult(r, "gp2_residuals_normality_test")); doc.setBatchSignificant(r.getBoolean("batch_significance")); doc.setVarianceSignificant(r.getBoolean("variance_significance")); doc.setInteractionSignificant(r.getBoolean("interaction_significance")); doc.setGenotypeEffectParameterEstimate(nullCheckResult(r, "genotype_parameter_estimate")); String percentageChange = r.getString("genotype_percentage_change"); if (!r.wasNull()) { Double femalePercentageChange = StatisticalResultService.getFemalePercentageChange(percentageChange); if (femalePercentageChange != null) { doc.setFemalePercentageChange(femalePercentageChange.toString() + "%"); } Double malePercentageChange = StatisticalResultService.getMalePercentageChange(percentageChange); if (malePercentageChange != null) { doc.setMalePercentageChange(malePercentageChange.toString() + "%"); } } doc.setGenotypeEffectStderrEstimate(nullCheckResult(r, "genotype_stderr_estimate")); doc.setGenotypeEffectPValue(nullCheckResult(r, "genotype_effect_pvalue")); doc.setSexEffectParameterEstimate(nullCheckResult(r, "gender_parameter_estimate")); doc.setSexEffectStderrEstimate(nullCheckResult(r, "gender_stderr_estimate")); doc.setSexEffectPValue(nullCheckResult(r, "gender_effect_pvalue")); doc.setWeightEffectParameterEstimate(nullCheckResult(r, "weight_parameter_estimate")); doc.setWeightEffectStderrEstimate(nullCheckResult(r, "weight_stderr_estimate")); doc.setWeightEffectPValue(nullCheckResult(r, "weight_effect_pvalue")); doc.setInterceptEstimate(nullCheckResult(r, "intercept_estimate")); doc.setInterceptEstimateStderrEstimate(nullCheckResult(r, "intercept_stderr_estimate")); 
doc.setInteractionEffectPValue(nullCheckResult(r, "interaction_effect_pvalue")); doc.setFemaleKoParameterEstimate(nullCheckResult(r, "gender_female_ko_estimate")); doc.setFemaleKoEffectStderrEstimate(nullCheckResult(r, "gender_female_ko_stderr_estimate")); doc.setFemaleKoEffectPValue(nullCheckResult(r, "gender_female_ko_pvalue")); doc.setMaleKoParameterEstimate(nullCheckResult(r, "gender_male_ko_estimate")); doc.setMaleKoEffectStderrEstimate(nullCheckResult(r, "gender_male_ko_stderr_estimate")); doc.setMaleKoEffectPValue(nullCheckResult(r, "gender_male_ko_pvalue")); doc.setBlupsTest(nullCheckResult(r, "blups_test")); doc.setRotatedResidualsTest(nullCheckResult(r, "rotated_residuals_normality_test")); doc.setClassificationTag(r.getString("classification_tag")); doc.setAdditionalInformation(r.getString("additional_information")); return doc; } } /** * Generate reference range plus statistic result DTOs */ public class ReferenceRangePlusResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT CONCAT(dependent_variable, '_RR_', sr.id) as doc_id, " + " 'unidimensional-ReferenceRange' AS data_type, " + " sr.id AS db_id, control_id, experimental_id, experimental_zygosity, " + " external_db_id, organisation_id, " + " pipeline_id, procedure_id, parameter_id, colony_id, " + " dependent_variable, control_selection_strategy, " + " male_controls, male_mutants, female_controls, female_mutants, " + " male_control_mean, male_experimental_mean, female_control_mean, female_experimental_mean, " + " metadata_group, statistical_method, workflow, status, " + " genotype_parameter_estimate, genotype_effect_pvalue, " + " gp1_genotype, gp2_genotype, " + " gender_female_ko_estimate, gender_female_ko_pvalue, " + " gender_male_ko_estimate, gender_male_ko_pvalue, " + " classification_tag, additional_information, " + " mp_acc, male_mp_acc, female_mp_acc, " + " db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + " proj.name as 
project_name, proj.id as project_id, " + " org.name as phenotyping_center, org.id as phenotyping_center_id " + "FROM stats_rrplus_results sr " + "INNER JOIN external_db db on db.id=sr.external_db_id " + "INNER JOIN project proj on proj.id=sr.project_id " + "INNER JOIN organisation org on org.id=sr.organisation_id "; @Override public List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { StatisticalResultDTO doc = parseReferenceRangeResult(r); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { logger.warn(" Error occurred getting RR plus results", e); } logger.info((SAVE?"":"Would have") + " Added {} RR plus documents", docs.size()); return docs; } private StatisticalResultDTO parseReferenceRangeResult(ResultSet r) throws SQLException { List<Double> mins = new ArrayList<>(); StatisticalResultDTO doc = parseResultCommonFields(r); if (sexesMap.containsKey("rrplus-" + doc.getDbId())) { doc.setPhenotypeSex(sexesMap.get("rrplus-" + doc.getDbId())); } // Index the mean fields doc.setMaleControlMean(r.getDouble("male_control_mean")); doc.setMaleMutantMean(r.getDouble("male_experimental_mean")); doc.setFemaleControlMean(r.getDouble("female_control_mean")); doc.setFemaleMutantMean(r.getDouble("female_experimental_mean")); doc.setGroup1Genotype(r.getString("gp1_genotype")); doc.setGroup2Genotype(r.getString("gp2_genotype")); // Set the overall genotype effect fields String genotypePvalue = r.getString("genotype_effect_pvalue"); if (! 
r.wasNull()) { String [] fields = genotypePvalue.split(","); // Low vs normal&high genotype pvalue Double pvalue = Double.parseDouble(fields[0]); doc.setGenotypePvalueLowVsNormalHigh(pvalue); // High vs low&normal genotype pvalue pvalue = Double.parseDouble(fields[1]); doc.setGenotypePvalueLowNormalVsHigh(pvalue); doc.setNullTestPValue(Math.min(doc.getGenotypePvalueLowNormalVsHigh(), doc.getGenotypePvalueLowVsNormalHigh())); doc.setpValue(doc.getNullTestPValue()); mins.add(pvalue); String genotypeEffectSize = r.getString("genotype_parameter_estimate"); if (! r.wasNull()) { fields = genotypeEffectSize.replaceAll("%", "").split(","); // Low vs normal&high genotype effect size double es = Double.parseDouble(fields[0]); doc.setGenotypeEffectSizeLowVsNormalHigh(es); // High vs low&normal genotype effect size es = Double.parseDouble(fields[1]); doc.setGenotypeEffectSizeLowNormalVsHigh(es); } } // Set the female female effect fields genotypePvalue = r.getString("gender_female_ko_pvalue"); if (! r.wasNull() && ! genotypePvalue.equals("NA")) { String [] fields = genotypePvalue.split(","); // Low vs normal&high female pvalue Double pvalue = Double.parseDouble(fields[0]); doc.setFemalePvalueLowVsNormalHigh(pvalue); mins.add(pvalue); // High vs low&normal female pvalue pvalue = Double.parseDouble(fields[1]); doc.setFemalePvalueLowNormalVsHigh(pvalue); mins.add(pvalue); String genotypeEffectSize = r.getString("gender_female_ko_estimate"); if (! r.wasNull()) { fields = genotypeEffectSize.replaceAll("%", "").split(","); // Low vs normal&high female effect size double es = Double.parseDouble(fields[0]); doc.setFemaleEffectSizeLowVsNormalHigh(es); // High vs low&normal female effect size es = Double.parseDouble(fields[1]); doc.setFemaleEffectSizeLowNormalVsHigh(es); } } // Set the male effect fields genotypePvalue = r.getString("gender_male_ko_pvalue"); if (! r.wasNull() && ! 
genotypePvalue.equals("NA")) { String [] fields = genotypePvalue.split(","); // Low vs normal&high male pvalue Double pvalue = Double.parseDouble(fields[0]); doc.setMalePvalueLowVsNormalHigh(pvalue); mins.add(pvalue); // High vs low&normal male pvalue pvalue = Double.parseDouble(fields[1]); doc.setMalePvalueLowNormalVsHigh(pvalue); mins.add(pvalue); String genotypeEffectSize = r.getString("gender_male_ko_estimate"); if (! r.wasNull()) { fields = genotypeEffectSize.replaceAll("%", "").split(","); // Low vs normal&high male effect size double es = Double.parseDouble(fields[0]); doc.setMaleEffectSizeLowVsNormalHigh(es); // High vs low&normal male effect size es = Double.parseDouble(fields[1]); doc.setMaleEffectSizeLowNormalVsHigh(es); } } Double minimumPvalue = Collections.min(mins); doc.setpValue(minimumPvalue); setSignificantFlag(SIGNIFICANCE_THRESHOLD, doc); // If not already set, ensure that the document has all possible top level MP terms defined if (doc.getTopLevelMpTermId() == null && mpParser.getOntologyTerm(doc.getMpTermId()) != null) { OntologyTermDTO term = mpParser.getOntologyTerm(doc.getMpTermId()); doc.addTopLevelMpTermIds(term.getTopLevelIds()); doc.addTopLevelMpTermNames(term.getTopLevelNames()); } if (! doc.getStatus().equals("Success")) { doc.setpValue(1.0); doc.setEffectSize(0.0); } doc.setClassificationTag(r.getString("classification_tag")); doc.setAdditionalInformation(r.getString("additional_information")); return doc; } } /** * If the result is significant (indicated by having a more significant p_value than pValueThreshold) * then if there has not been a previous result (sex specific or genotype effect) which is significant * then mark this as significant, otherwise, not. 
* * @param pValueThreshold The p value to indicate significance threshould * @param doc the solr document to update */ private void setSignificantFlag(Double pValueThreshold, StatisticalResultDTO doc) { doc.setSignificant(false); // do not override significant == true if (doc.getSignificant()!=null && doc.getSignificant()) { return; } if (doc.getNullTestPValue() != null) { // PhenStat result if (doc.getNullTestPValue() <= pValueThreshold) { doc.setSignificant(true); } else if (doc.getStatus().equals("Success") && doc.getSignificant() == null) { doc.setSignificant(false); } } else if (doc.getStatus().equals("Success") && doc.getStatisticalMethod() != null && doc.getStatisticalMethod().startsWith("Wilcoxon")) { // Wilcoxon test. Choose the most significant pvalue from the sexes, already tcalculated and stored // in the Pvalue field of the doc if (doc.getpValue() <= pValueThreshold) { doc.setSignificant(true); } else { doc.setSignificant(false); } } else if (doc.getNullTestPValue() == null && doc.getStatus().equals("Success") && doc.getStatisticalMethod() != null && doc.getStatisticalMethod().startsWith("Fisher")) { // Fisher's exact test. 
Choose the most significant pvalue from the sexes, already tcalculated and stored // in the Pvalue field of the doc if (doc.getpValue() <= pValueThreshold) { doc.setSignificant(true); } else { doc.setSignificant(false); } } } class FertilityResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT CONCAT(parameter.stable_id, '_', exp.id, '_', IF(sex IS NULL,'both',sex)) as doc_id, co.category, " + "'line' AS data_type, db.id AS db_id, " + "zygosity as experimental_zygosity, db.id AS external_db_id, exp.pipeline_id, exp.procedure_id, obs.parameter_id, exp.colony_id, sex, " + "parameter.stable_id as dependent_variable, " + "'Success' as status, exp.biological_model_id, " + "p_value as p_value, effect_size AS effect_size, " + "mp_acc, null as male_mp_acc, null as female_mp_acc, exp.metadata_group, " + "db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + "proj.name as project_name, proj.id as project_id, " + "org.name as phenotyping_center, org.id as phenotyping_center_id " + "FROM phenotype_parameter parameter " + "INNER JOIN phenotype_procedure_parameter pproc ON pproc.parameter_id=parameter.id " + "INNER JOIN phenotype_procedure proc ON proc.id=pproc.procedure_id " + "INNER JOIN observation obs ON obs.parameter_stable_id=parameter.stable_id AND obs.parameter_stable_id IN ('IMPC_FER_001_001', 'IMPC_FER_019_001') " + "INNER JOIN categorical_observation co ON co.id=obs.id " + "INNER JOIN experiment_observation eo ON eo.observation_id=obs.id " + "INNER JOIN experiment exp ON eo.experiment_id=exp.id " + "INNER JOIN external_db db ON db.id=obs.db_id " + "INNER JOIN project proj ON proj.id=exp.project_id " + "INNER JOIN organisation org ON org.id=exp.organisation_id " + "LEFT OUTER JOIN phenotype_call_summary sr ON (exp.colony_id=sr.colony_id AND sr.parameter_id=parameter.id) " + "WHERE parameter.stable_id IN ('IMPC_FER_001_001', 'IMPC_FER_019_001') AND exp.procedure_id=proc.id"; @Override public 
List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { // Skip processing females for male infertility parameter if (r.getString("dependent_variable") != null && r.getString("dependent_variable").equals("IMPC_FER_001_001") && r.getString("sex") != null && r.getString("sex").equals("female")) { continue; } // Skip processing males for female infertility parameter if (r.getString("dependent_variable") != null && r.getString("dependent_variable").equals("IMPC_FER_019_001") && r.getString("sex") != null && r.getString("sex").equals("male")) { continue; } StatisticalResultDTO doc = parseLineResult(r); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); doc.setCategories(Collections.singletonList(r.getString("category"))); r.getString("p_value"); if (r.wasNull()) { doc.setpValue(1.0); doc.setEffectSize(0.0); } docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { logger.warn(" Error occurred getting fertility results", e); } logger.info((SAVE?"":"Would have") + " Added {} fertility parameter documents", docs.size()); return docs; } } class ViabilityResults implements Callable<List<StatisticalResultDTO>> { // Populate viability results String query = "SELECT CONCAT(parameter.stable_id, '_', exp.id, '_', CASE WHEN sex IS NULL THEN 'na' ELSE sex END) as doc_id, co.category, " + "'line' AS data_type, db.id AS db_id, " + "zygosity as experimental_zygosity, db.id AS external_db_id, exp.pipeline_id, exp.procedure_id, obs.parameter_id, exp.colony_id, sex, " + "parameter.stable_id as dependent_variable, " + 
"'Success' as status, exp.biological_model_id, " + "p_value as p_value, effect_size AS effect_size, " + "mp_acc, null as male_mp_acc, null as female_mp_acc, exp.metadata_group, " + "db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + "proj.name as project_name, proj.id as project_id, " + "org.name as phenotyping_center, org.id as phenotyping_center_id, " + "0 AS male_controls, " + "(SELECT uobs2.data_point " + " FROM observation obs2 " + " INNER JOIN unidimensional_observation uobs2 ON obs2.id=uobs2.id " + " INNER JOIN experiment_observation eo2 ON eo2.observation_id=obs2.id " + " INNER JOIN experiment exp2 ON eo2.experiment_id=exp2.id " + " WHERE exp2.colony_id=exp.colony_id AND obs2.parameter_stable_id='IMPC_VIA_010_001' limit 1) AS male_mutants, " + "0 AS female_controls, " + "(SELECT uobs2.data_point " + " FROM observation obs2 " + " INNER JOIN unidimensional_observation uobs2 ON obs2.id=uobs2.id " + " INNER JOIN experiment_observation eo2 ON eo2.observation_id=obs2.id " + " INNER JOIN experiment exp2 ON eo2.experiment_id=exp2.id " + " WHERE exp2.colony_id=exp.colony_id AND obs2.parameter_stable_id='IMPC_VIA_014_001' limit 1) AS female_mutants " + "FROM phenotype_parameter parameter " + "INNER JOIN phenotype_procedure_parameter pproc ON pproc.parameter_id=parameter.id " + "INNER JOIN phenotype_procedure proc ON proc.id=pproc.procedure_id " + "INNER JOIN observation obs ON obs.parameter_stable_id=parameter.stable_id AND obs.parameter_stable_id = 'IMPC_VIA_001_001' " + "INNER JOIN categorical_observation co ON co.id=obs.id " + "INNER JOIN experiment_observation eo ON eo.observation_id=obs.id " + "INNER JOIN experiment exp ON eo.experiment_id=exp.id " + "INNER JOIN external_db db ON db.id=obs.db_id " + "INNER JOIN project proj ON proj.id=exp.project_id " + "INNER JOIN organisation org ON org.id=exp.organisation_id " + "LEFT OUTER JOIN phenotype_call_summary sr ON (exp.colony_id=sr.colony_id AND sr.parameter_id=parameter.id) " + 
"WHERE parameter.stable_id = 'IMPC_VIA_001_001' AND exp.procedure_id=proc.id" ; @Override public List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { StatisticalResultDTO doc = parseLineResult(r); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); doc.setCategories(Collections.singletonList(r.getString("category"))); docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { logger.warn(" Error occurred getting viability results", e); } logger.info((SAVE?"":"Would have") + " Added {} viability parameter documents", docs.size()); return docs; } } public class EmbryoResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT DISTINCT " + " CONCAT_WS('-', exp.procedure_stable_id, parameter.stable_id, ls.colony_id, bm.zygosity, sex, exp.organisation_id, exp.metadata_group) AS doc_id, " + " CONCAT(parameter.stable_id, '_', ls.colony_id, exp.organisation_id) AS significant_id, " + " 'embryo' AS data_type, 'Success' AS status, " + " exp.metadata_group, exp.pipeline_id, exp.procedure_id, obs.parameter_id, parameter.stable_id AS dependent_variable, " + " bm.id AS biological_model_id, bm.zygosity AS experimental_zygosity, ls.colony_id, sex, " + " NULL AS p_value, NULL AS effect_size, NULL AS mp_acc, NULL AS male_mp_acc, NULL AS female_mp_acc, " + " db.short_name AS resource_name, db.name AS resource_fullname, db.id AS db_id, db.id AS resource_id, db.id AS external_db_id, " + " proj.name AS project_name, proj.id AS project_id, " + " org.name AS phenotyping_center, org.id AS 
phenotyping_center_id " + "FROM observation obs INNER JOIN phenotype_parameter parameter ON parameter.id = obs.parameter_id " + " INNER JOIN live_sample ls ON ls.id = obs.biological_sample_id " + " INNER JOIN biological_sample bs ON bs.id = obs.biological_sample_id " + " INNER JOIN biological_model_sample bms ON bms.biological_sample_id = obs.biological_sample_id " + " INNER JOIN biological_model bm ON bm.id = bms.biological_model_id " + " INNER JOIN experiment_observation eo ON eo.observation_id = obs.id " + " INNER JOIN experiment exp ON exp.id = eo.experiment_id " + " INNER JOIN (SELECT id FROM phenotype_procedure WHERE stable_id REGEXP '" + StringUtils.join(EMBRYO_PROCEDURES_NO_VIA, "|") + "') B ON B.id = exp.procedure_id " + " INNER JOIN external_db db ON db.id = obs.db_id " + " INNER JOIN project proj ON proj.id = exp.project_id " + " INNER JOIN organisation org ON org.id = exp.organisation_id " + "WHERE bs.sample_group = 'experimental' "; @Override public List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); int i = 0; while (r.next()) { StatisticalResultDTO doc = parseLineResult(r); doc.setDocId(doc.getDocId() + "-" + (i++)); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); if (embryoSignificantResults.containsKey(r.getString("significant_id"))) { addMpTermData(embryoSignificantResults.get(r.getString("significant_id")), doc); doc.setSignificant(true); } else { doc.setSignificant(false); } docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { e.printStackTrace(); logger.warn(" Error occurred getting 
embryo results", e); } logger.info(" Generated {} embryo parameter documents", docs.size()); return docs; } } public class EmbryoViabilityResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT co.category, " + " CONCAT(parameter.stable_id, '_', exp.id, '_embryo') as doc_id, " + " CONCAT(parameter.stable_id, '_', exp.colony_id, org.id) as significant_id, " + "'embryo' AS data_type, db.id AS db_id, " + "zygosity as experimental_zygosity, db.id AS external_db_id, exp.pipeline_id, exp.procedure_id, " + "parameter.id as parameter_id, exp.colony_id, null as sex, " + "parameter.stable_id as dependent_variable, " + "'Success' as status, exp.biological_model_id, " + "0.0 as p_value, 1.0 AS effect_size, " + "ontology_acc AS mp_acc, null as male_mp_acc, null as female_mp_acc, exp.metadata_group, " + "db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + "proj.name as project_name, proj.id as project_id, " + "org.name as phenotyping_center, org.id as phenotyping_center_id " + "FROM phenotype_parameter parameter " + " INNER JOIN observation o ON o.parameter_stable_id=parameter.stable_id " + " INNER JOIN categorical_observation co ON co.id=o.id " + " INNER JOIN experiment_observation eo ON eo.observation_id=o.id " + " INNER JOIN experiment exp ON eo.experiment_id=exp.id " + " INNER JOIN biological_model bm ON bm.id=exp.biological_model_id " + " INNER JOIN external_db db ON db.id=o.db_id " + " INNER JOIN project proj ON proj.id=exp.project_id " + " INNER JOIN organisation org ON org.id=exp.organisation_id " + " LEFT OUTER JOIN ( " + " SELECT parameter_id, name, ontology_acc FROM phenotype_parameter_lnk_option lnkopt " + " INNER JOIN phenotype_parameter_option opt ON opt.id=lnkopt.option_id " + " INNER JOIN phenotype_parameter_ontology_annotation oa ON oa.option_id=opt.id " + " ) b ON b.parameter_id=parameter.id AND b.name=co.category " + "WHERE parameter.stable_id in ('" + StringUtils.join(EMBRYO_PROCEDURES_VIA, "','") + 
"') "; @Override public List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { StatisticalResultDTO doc = parseLineResult(r); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); if (embryoSignificantResults.containsKey(r.getString("significant_id"))) { addMpTermData(embryoSignificantResults.get(r.getString("significant_id")), doc); doc.setSignificant(true); } else { doc.setSignificant(false); } docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { logger.warn(" Error occurred getting embryo results", e); } logger.info( (SAVE?"":"Would have") + " Added {} embryo viability parameter documents", docs.size()); return docs; } } public class GrossPathologyResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT DISTINCT CONCAT(parameter.stable_id, '_', o.id, '_', term, '_', ls.sex, '_grosspath') as doc_id, " + "'adult-gross-path' AS data_type, db.id AS db_id, " + "ls.zygosity as experimental_zygosity, ls.id, bs.sample_group, db.id AS external_db_id, exp.pipeline_id, exp.procedure_id, " + "parameter.id as parameter_id, ls.colony_id, ls.sex as sex, " + "parameter.stable_id as dependent_variable, " + "'Success' as status, bm.id AS biological_model_id, " + "null as p_value, null AS effect_size, " + "oe.term as mp_acc , null as male_mp_acc, null as female_mp_acc, exp.metadata_group, " + "db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + "proj.name as project_name, proj.id as project_id, " + "org.name as phenotyping_center, org.id as 
phenotyping_center_id " + "FROM observation o " + "INNER JOIN ontology_entity oe on oe.ontology_observation_id=o.id " + "INNER JOIN biological_model_sample bms ON bms.biological_sample_id = o.biological_sample_id " + "INNER JOIN biological_model bm ON bms.biological_model_id = bm.id " + "INNER JOIN biological_sample bs ON bs.id = bms.biological_sample_id " + "INNER JOIN live_sample ls ON bms.biological_sample_id = ls.id " + "INNER JOIN experiment_observation eo ON eo.observation_id = o.id " + "INNER JOIN experiment exp ON exp.id = eo.experiment_id " + "INNER JOIN external_db db ON db.id=o.db_id " + "INNER JOIN project proj ON proj.id=exp.project_id " + "INNER JOIN organisation org ON org.id=exp.organisation_id " + "INNER JOIN phenotype_parameter parameter ON parameter.id = o.parameter_id " + "WHERE o.parameter_stable_id like '%PAT%' and term_value != 'normal' and term like 'MP%' AND bs.sample_group!='control' " ; @Override public List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); int i = 0; while (r.next()) { StatisticalResultDTO doc = parseLineResult(r); doc.setDocId(doc.getDocId()+"-"+(i++)); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); doc.setSignificant(true); docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { logger.warn(" Error occurred getting gross pathology results", e); } logger.info((SAVE?"":"Would have") + " Added {} gross pathology parameter documents", docs.size()); return docs; } } public void setSAVE(Boolean SAVE) { this.SAVE = SAVE; } public Map<String, String> 
getEmbryoSignificantResults() { return embryoSignificantResults; } public static void main(String[] args) { ConfigurableApplicationContext context = new SpringApplicationBuilder(StatisticalResultsIndexer.class) .web(WebApplicationType.NONE) .bannerMode(Banner.Mode.OFF) .logStartupInfo(false) .run(args); context.close(); } }
indexers/src/main/java/org/mousephenotype/cda/indexers/StatisticalResultsIndexer.java
/******************************************************************************* * Copyright 2015 EMBL - European Bioinformatics Institute * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. *******************************************************************************/ package org.mousephenotype.cda.indexers; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.response.QueryResponse; import org.mousephenotype.cda.constants.ParameterConstants; import org.mousephenotype.cda.db.pojo.OntologyTerm; import org.mousephenotype.cda.db.pojo.Parameter; import org.mousephenotype.cda.db.pojo.PhenotypeAnnotationType; import org.mousephenotype.cda.db.repositories.OntologyTermRepository; import org.mousephenotype.cda.db.repositories.ParameterRepository; import org.mousephenotype.cda.db.statistics.MpTermService; import org.mousephenotype.cda.db.statistics.ResultDTO; import org.mousephenotype.cda.db.utilities.SqlUtils; import org.mousephenotype.cda.enumerations.SexType; import org.mousephenotype.cda.enumerations.ZygosityType; import org.mousephenotype.cda.indexers.exceptions.IndexerException; import org.mousephenotype.cda.indexers.utils.IndexerMap; import org.mousephenotype.cda.owl.OntologyParser; import org.mousephenotype.cda.owl.OntologyParserFactory; import 
org.mousephenotype.cda.owl.OntologyTermDTO; import org.mousephenotype.cda.solr.service.GenotypePhenotypeService; import org.mousephenotype.cda.solr.service.StatisticalResultService; import org.mousephenotype.cda.solr.service.dto.ImpressBaseDTO; import org.mousephenotype.cda.solr.service.dto.ParameterDTO; import org.mousephenotype.cda.solr.service.dto.StatisticalResultDTO; import org.mousephenotype.cda.utilities.RunStatus; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyStorageException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.Banner; import org.springframework.boot.CommandLineRunner; import org.springframework.boot.WebApplicationType; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.context.ConfigurableApplicationContext; import javax.inject.Inject; import javax.sql.DataSource; import javax.validation.constraints.NotNull; import java.io.IOException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; /** * Load documents into the statistical-results SOLR core */ @EnableAutoConfiguration public class StatisticalResultsIndexer extends AbstractIndexer implements CommandLineRunner { private final Logger logger = LoggerFactory.getLogger(StatisticalResultsIndexer.class); private Boolean SAVE = Boolean.TRUE; private Map<String, List<String>> impressAbnormals = new HashMap<>(); private Double SIGNIFICANCE_THRESHOLD = GenotypePhenotypeService.P_VALUE_THRESHOLD; private final double REPORT_INTERVAL = 100000; static final String RESOURCE_3I = "3i"; private final List<String> EMBRYO_PROCEDURES_NO_VIA = Arrays.asList("IMPC_GPL", "IMPC_GEL", "IMPC_GPM", 
"IMPC_GEM", "IMPC_GPO", "IMPC_GEO", "IMPC_GPP", "IMPC_GEP"); private final List<String> EMBRYO_PROCEDURES_VIA = Arrays.asList("IMPC_EVL_001_001", "IMPC_EVM_001_001", "IMPC_EVO_001_001", "IMPC_EVP_001_001"); private Map<Long, ImpressBaseDTO> pipelineMap = new HashMap<>(); private Map<Long, ImpressBaseDTO> procedureMap = new HashMap<>(); private Map<Long, ParameterDTO> parameterMap = new HashMap<>(); private Map<String, ResourceBean> resourceMap = new HashMap<>(); private Map<String, List<String>> sexesMap = new HashMap<>(); private Set<String> alreadyReported = new HashSet<>(); private Map<Long, BiologicalDataBean> biologicalDataMap = new HashMap<>(); private Map<String, Set<String>> parameterMpTermMap = new HashMap<>(); private Map<String, String> embryoSignificantResults = new HashMap<>(); private Set<String> VIA_SIGNIFICANT = new HashSet<>(); private Set<String> MALE_FER_SIGNIFICANT = new HashSet<>(); private Set<String> FEMALE_FER_SIGNIFICANT = new HashSet<>(); private List<String> shouldHaveAdded = new ArrayList<>(); private Set<String> uniqueSRKeys = new ConcurrentSkipListSet<>(); public void setPipelineMap(Map<Long, ImpressBaseDTO> pipelineMap) { this.pipelineMap = pipelineMap; } public void setProcedureMap(Map<Long, ImpressBaseDTO> procedureMap) { this.procedureMap = procedureMap; } public void setParameterMap(Map<Long, ParameterDTO> parameterMap) { this.parameterMap = parameterMap; } private OntologyParser mpParser; private OntologyParser mpMaParser; private OntologyParser maParser; private OntologyParserFactory ontologyParserFactory; private MpTermService mpTermService; private ParameterRepository parameterRepository; private SolrClient statisticalResultCore; protected StatisticalResultsIndexer() { } @Inject public StatisticalResultsIndexer( @NotNull DataSource komp2DataSource, @NotNull OntologyTermRepository ontologyTermRepository, @NotNull MpTermService mpTermService, @NotNull ParameterRepository parameterRepository, @NotNull SolrClient 
statisticalResultCore) { super(komp2DataSource, ontologyTermRepository); this.mpTermService = mpTermService; this.parameterRepository = parameterRepository; this.statisticalResultCore = statisticalResultCore; } public void setMpParser(OntologyParser mpParser) { this.mpParser = mpParser; } public void setMpMaParser(OntologyParser mpMaParser) { this.mpMaParser = mpMaParser; } public void setMaParser(OntologyParser maParser) { this.maParser = maParser; } public OntologyParserFactory getOntologyParserFactory() { return ontologyParserFactory; } public void setOntologyParserFactory(OntologyParserFactory ontologyParserFactory) { this.ontologyParserFactory = ontologyParserFactory; } @Override public RunStatus validateBuild() throws IndexerException { return super.validateBuild(statisticalResultCore); } @Override public RunStatus run() throws IndexerException, IOException { long start = System.currentTimeMillis(); RunStatus runStatus = new RunStatus(); try { Connection connection = komp2DataSource.getConnection(); synchronized(this) { ontologyParserFactory = new OntologyParserFactory(komp2DataSource, owlpath); mpParser = ontologyParserFactory.getMpParser(); mpMaParser = ontologyParserFactory.getMpMaParser(); maParser = ontologyParserFactory.getMaParser(); pipelineMap = IndexerMap.getImpressPipelines(connection); procedureMap = IndexerMap.getImpressProcedures(connection); parameterMap = IndexerMap.getImpressParameters(connection); } populateBiologicalDataMap(); populateResourceDataMap(); populateSexesMap(); populateParameterMpTermMap(); populateEmbryoSignificanceMap(); populateAdultLineLevelSignificanceMap(); } catch (SQLException | OWLOntologyCreationException | OWLOntologyStorageException e) { throw new IndexerException(e); } expectedDocumentCount = populateStatisticalResultsSolrCore(); logger.info((SAVE?"":"Would have") + " Added {} total beans in {}", expectedDocumentCount, commonUtils.msToHms(System.currentTimeMillis() - start)); return runStatus; } private int 
populateStatisticalResultsSolrCore() throws IndexerException { int count = 0; try { if (SAVE) statisticalResultCore.deleteByQuery("*:*"); if (SAVE) statisticalResultCore.commit(); List<Callable<List<StatisticalResultDTO>>> resultGenerators = Arrays.asList( getViabilityResults() , getFertilityResults() , getReferenceRangePlusResults() , getEmbryoViabilityResults() , getEmbryoResults() , getGrossPathologyResults() , getUnidimensionalResults() , getCategoricalResults() ); ExecutorService pool = Executors.newFixedThreadPool(4); List<Future<List<StatisticalResultDTO>>> producers = new ArrayList<>(); for (Callable<List<StatisticalResultDTO>> r : resultGenerators) { Future<List<StatisticalResultDTO>> future = pool.submit(r); producers.add(future); } AtomicInteger atomicInt = new AtomicInteger(0); for (Future<List<StatisticalResultDTO>> future : producers) { try { atomicInt.addAndGet(future.get().size()); } catch (ExecutionException | InterruptedException e) { e.printStackTrace(); } } // Stop threadpool pool.shutdown(); count = atomicInt.get(); if (SAVE) statisticalResultCore.commit(); checkSolrCount(count); logger.info((SAVE?"":"Would have") + " Added {} statistical result documents", count); } catch (IOException | SolrServerException e) { throw new IndexerException(e); } return count; } public ViabilityResults getViabilityResults() {return new ViabilityResults(); } public FertilityResults getFertilityResults() {return new FertilityResults(); } public ReferenceRangePlusResults getReferenceRangePlusResults() {return new ReferenceRangePlusResults(); } public UnidimensionalResults getUnidimensionalResults() {return new UnidimensionalResults(); } public CategoricalResults getCategoricalResults() {return new CategoricalResults(); } public EmbryoViabilityResults getEmbryoViabilityResults() {return new EmbryoViabilityResults(); } public EmbryoResults getEmbryoResults() {return new EmbryoResults(); } public GrossPathologyResults getGrossPathologyResults() {return new 
GrossPathologyResults(); } /** * Check to see if the count of documents we think have been added actually matches * the number of documents in solr * * @param documentsAddedCount The number of documents added */ private void checkSolrCount(Integer documentsAddedCount) throws SolrServerException, IOException { SolrQuery query = new SolrQuery(); query.setQuery("*:*").setRows(0); QueryResponse response = statisticalResultCore.query(query); Long solrDocumentCount = response.getResults().getNumFound(); logger.info(" Count of documents in solr: {}, count added by indexer: {}, Difference: {}", solrDocumentCount, documentsAddedCount, documentsAddedCount - solrDocumentCount); if (documentsAddedCount - solrDocumentCount > 0) { // The java Set.add() method returns false when attempting to add an element that already exists in // the set so the filter will remove all non-duplicate elements leaving only those document IDs that // have been added twice Set<String> uniques = new HashSet<>(); Set<String> diff = shouldHaveAdded .stream() .filter(e -> ! uniques.add(e)) .collect(Collectors.toSet()); logger.warn(" Should have added these {} doc IDs, but missing from solr {}", diff.size(), StringUtils.join(diff, ", ")); } } private Double nullCheckResult(ResultSet r, String field) throws SQLException { double v = r.getDouble(field); return r.wasNull() ? 
null : v; }

/**
 * Populate a StatisticalResultDTO with the fields common to all statistically-analysed
 * result rows (experiment, resource, counts, metadata group, IMPReSS, biological model,
 * life stage and MP terms).
 *
 * @param r result-set row positioned on a statistical result
 * @return a partially-populated document; data-type-specific fields are added by callers
 * @throws SQLException if an expected column is missing
 */
private StatisticalResultDTO parseResultCommonFields(ResultSet r) throws SQLException {

    StatisticalResultDTO doc = new StatisticalResultDTO();
    doc.setDocId(r.getString("doc_id"));
    doc.setDataType(r.getString("data_type"));

    // Experiment details
    // Use the first two underscore-delimited tokens of the procedure stable ID as the procedure prefix
    String procedurePrefix = StringUtils.join(Arrays.asList(procedureMap.get(r.getLong("procedure_id")).getStableId().split("_")).subList(0, 2), "_");
    if (ParameterConstants.source3iProcedurePrefixes.contains(procedurePrefix)) {
        // Override the resource for the 3i procedures
        doc.setResourceId(resourceMap.get(RESOURCE_3I).id);
        doc.setResourceName(resourceMap.get(RESOURCE_3I).shortName);
        doc.setResourceFullname(resourceMap.get(RESOURCE_3I).name);
    } else {
        doc.setResourceId(r.getLong("resource_id"));
        doc.setResourceName(r.getString("resource_name"));
        doc.setResourceFullname(r.getString("resource_fullname"));
    }

    doc.setProjectId(r.getLong("project_id"));
    doc.setProjectName(r.getString("project_name"));
    doc.setPhenotypingCenter(r.getString("phenotyping_center"));

    doc.setControlBiologicalModelId(r.getLong("control_id"));
    doc.setMutantBiologicalModelId(r.getLong("experimental_id"));
    doc.setZygosity(r.getString("experimental_zygosity"));
    doc.setDependentVariable(r.getString("dependent_variable"));
    doc.setExternalDbId(r.getLong("external_db_id"));
    doc.setDbId(r.getLong("db_id"));
    doc.setOrganisationId(r.getLong("organisation_id"));
    doc.setPhenotypingCenterId(r.getLong("phenotyping_center_id"));
    doc.setControlSelectionMethod(r.getString("control_selection_strategy"));
    doc.setStatisticalMethod(r.getString("statistical_method"));
    doc.setWorkflow(r.getString("workflow"));
    doc.setMaleControlCount(r.getInt("male_controls"));
    doc.setFemaleControlCount(r.getInt("female_controls"));
    doc.setMaleMutantCount(r.getInt("male_mutants"));
    doc.setFemaleMutantCount(r.getInt("female_mutants"));
    doc.setColonyId(r.getString("colony_id"));
    doc.setStatus(r.getString("status"));

    // Record which sexes contributed mutant data to this result.
    if (doc.getPhenotypeSex() == null) {
        doc.setPhenotypeSex(new ArrayList<>());
    }
    if (doc.getMaleMutantCount()>0) {
        doc.getPhenotypeSex().add(SexType.male.getName());
    }
    if (doc.getFemaleMutantCount()>0) {
        doc.getPhenotypeSex().add(SexType.female.getName());
    }

    // Always set a metadata group here to allow for simpler searching for
    // unique results and to maintain parity with the observation index
    // where "empty string" metadata group means no required metadata.
    if (StringUtils.isNotEmpty(r.getString("metadata_group"))) {
        doc.setMetadataGroup(r.getString("metadata_group"));
    } else {
        doc.setMetadataGroup("");
    }

    addImpressData(r, doc);

    // Biological details
    addBiologicalData(doc, doc.getMutantBiologicalModelId());

    final OntologyTerm lifeStage = getLifeStage(doc.getParameterStableId());
    if (lifeStage != null) {
        doc.setLifeStageAcc(lifeStage.getId().getAccession());
        doc.setLifeStageName(lifeStage.getName());
    } else {
        logger.info(" Life stage is NULL for doc id " + doc.getDocId());
    }

    // MP Terms must come after setting the stage as it's used for selecting MA or EMAPA
    addMpTermData(r, doc);

    return doc;
}

/**
 * parseLineResult changes a database result set for a line into a solr document
 *
 * @param r the result set
 * @return a solr document
 */
private StatisticalResultDTO parseLineResult(ResultSet r) throws SQLException {

    StatisticalResultDTO doc = new StatisticalResultDTO();

    String docId = r.getString("doc_id");
    if (docId == null) {
        // NOTE(review): falling back to Math.random() produces a non-deterministic doc ID —
        // confirm this is intended only as a last-resort uniqueness hack.
        docId = String.valueOf(Math.random());
    }

    doc.setDocId(docId);
    doc.setDataType(r.getString("data_type"));
    doc.setResourceId(r.getLong("resource_id"));
    doc.setResourceName(r.getString("resource_name"));
    doc.setResourceFullname(r.getString("resource_fullname"));
    doc.setProjectId(r.getLong("project_id"));
    doc.setProjectName(r.getString("project_name"));
    doc.setPhenotypingCenter(r.getString("phenotyping_center"));
    doc.setMutantBiologicalModelId(r.getLong("biological_model_id"));
    doc.setZygosity(r.getString("experimental_zygosity"));
    doc.setDependentVariable(r.getString("dependent_variable"));
    doc.setExternalDbId(r.getLong("external_db_id"));
    doc.setDbId(r.getLong("db_id"));
    doc.setPhenotypingCenterId(r.getLong("phenotyping_center_id"));
    doc.setStatisticalMethod("Supplied as data");
    doc.setColonyId(r.getString("colony_id"));
    doc.setStatus("Success");

    // Need to set significance if a phenotype association has been made for this data set
    doc.setSignificant(false);

    // Always set a metadata group here to allow for simpler searching for
    // unique results and to maintain parity with the observation index
    // where "empty string" metadata group means no required metadata.
    if (StringUtils.isNotEmpty(r.getString("metadata_group"))) {
        doc.setMetadataGroup(r.getString("metadata_group"));
    } else {
        doc.setMetadataGroup("");
    }

    // Fertility results DO NOT contain the counts of controls/mutants
    switch (r.getString("dependent_variable")) {
        case "IMPC_VIA_001_001":
            doc.setMaleMutantCount(r.getInt("male_mutants"));
            doc.setFemaleMutantCount(r.getInt("female_mutants"));

            // Viability parameter significant for both sexes
            doc.setPhenotypeSex(Arrays.asList("female", "male"));
            if (VIA_SIGNIFICANT.contains(doc.getColonyId())) {
                doc.setSignificant(true);
            }
            break;
        case "IMPC_FER_001_001":
            // Fertility significant for Males
            doc.setPhenotypeSex(Collections.singletonList("male"));
            if (MALE_FER_SIGNIFICANT.contains(doc.getColonyId())) {
                doc.setSignificant(true);
            }
            break;
        case "IMPC_FER_019_001":
            // Fertility significant for females
            doc.setPhenotypeSex(Collections.singletonList("female"));
            if (FEMALE_FER_SIGNIFICANT.contains(doc.getColonyId())) {
                doc.setSignificant(true);
            }
            break;
    }

    // Impress pipeline data details
    addImpressData(r, doc);

    // Biological details
    addBiologicalData(doc, doc.getMutantBiologicalModelId());

    OntologyTerm lifeStage = getLifeStage(doc.getParameterStableId());
    if (lifeStage != null) {
        doc.setLifeStageAcc(lifeStage.getId().getAccession());
        doc.setLifeStageName(lifeStage.getName());
    } else {
        logger.info(" Line result stage is NULL for doc id " + doc.getDocId());
    }

    // MP Term details
    addMpTermData(r, doc);

    // Mark the record failed when too few animals were observed to make a call.
    try {
        String category = r.getString("category");
        if (!r.wasNull() && category.equals("Insufficient numbers to make a call")) {
            doc.setStatus("Failed - " + category);
        }
    } catch (java.sql.SQLException e) {
        // do nothing. Result set did not have "category" in it
    }

    // When zygosity is missing, attempt to recover it from the category string
    // (e.g. "Homozygous - ..."), defaulting to homozygote.
    try {
        r.getString("experimental_zygosity");
        if (r.wasNull()) {
            String category = r.getString("category");
            if (!r.wasNull()) {
                String[] fields = category.split("-");
                ZygosityType zygosity;
                switch (fields[0].trim().toLowerCase()) {
                    case "heterozygous":
                        zygosity = ZygosityType.heterozygote;
                        break;
                    case "hemizygous":
                        zygosity = ZygosityType.hemizygote;
                        break;
                    case "homozygous":
                    default:
                        zygosity = ZygosityType.homozygote;
                        break;
                }
                doc.setZygosity(zygosity.getName());
            }
        }
    } catch (java.sql.SQLException e) {
        // do nothing. Result set did not have "category" in it
    }

    String sex = r.getString("sex");
    if (!r.wasNull()) {
        doc.setSex(sex);
        // Do not attempt to add to the phenotype_sex field if it has been manually set as it is for
        // the viability and fertility parameters listed here
        if ( ! Arrays.asList("IMPC_VIA_001_001", "IMPC_FER_001_001", "IMPC_FER_019_001").contains(doc.getDependentVariable())) {
            // Add the sex to the phenotype_sexes field
            if (doc.getPhenotypeSex() == null) {
                doc.setPhenotypeSex(new ArrayList<>());
            }
            if (!doc.getPhenotypeSex().contains(sex)) {
                doc.getPhenotypeSex().add(sex);
            }
        }
    }

    // p-value / effect size are only recorded when an MP term has been assigned.
    Double p_value = r.getDouble("p_value");
    if (!r.wasNull() && doc.getMpTermId()!=null) {
        doc.setpValue(p_value);
    }

    Double effect_size = r.getDouble("effect_size");
    if (!r.wasNull() && doc.getMpTermId()!=null) {
        doc.setEffectSize(effect_size);
    }

    return doc;
}

/**
 * Add the appropriate MP term associations to the document
 * This is only used for the embryo data for the moment (2016-04-07)
 *
 * @param mpId the mp term accession id
 * @param doc the solr document to update
 */
private void addMpTermData(String mpId, StatisticalResultDTO doc) {

    // Add the appropriate fields for the global MP term
    if (mpId != null) {

        OntologyTermDTO mpTerm = mpParser.getOntologyTerm(mpId);
        if (mpTerm != null) {

            doc.setMpTermId(mpTerm.getAccessionId());
            doc.setMpTermName(mpTerm.getName());

            if (mpTerm.getTopLevelIds() == null ){
                // if the mpId itself is a top level, add itself as a top level
                doc.addTopLevelMpTermId(mpTerm.getAccessionId());
                doc.addTopLevelMpTermName(mpTerm.getName());
            } else {
                doc.addTopLevelMpTermId(mpTerm.getTopLevelIds());
                doc.addTopLevelMpTermName(mpTerm.getTopLevelNames());
            }

            doc.addIntermediateMpTermId(mpTerm.getIntermediateIds());
            doc.addIntermediateMpTermName(mpTerm.getIntermediateNames());

            addAnatomyMapping(doc, mpTerm);
        }
    }
}

/**
 * Copy MP→anatomy (MA) cross-references onto the document, including mappings
 * found on the MP term's ancestors.
 */
private void addAnatomyMapping(StatisticalResultDTO doc, OntologyTermDTO mpTerm){

    // mp-anatomy mappings (all MA at the moment)
    // For all non-embryo life stages indicated by not containing a digit
    if (doc.getLifeStageAcc() != null && !
doc.getLifeStageAcc().matches("[0-9]")) { Set<String> referencedClasses = mpMaParser.getReferencedClasses(doc.getMpTermId(), OntologyParserFactory.VIA_PROPERTIES, "MA"); if (referencedClasses != null && referencedClasses.size() > 0) { for (String id : referencedClasses) { OntologyTermDTO maTerm = maParser.getOntologyTerm(id); if (maTerm != null) { doc.addAnatomyTermId(id); doc.addAnatomyTermName(maTerm.getName()); if (maTerm.getIntermediateIds() != null) { doc.addIntermediateAnatomyTermId(maTerm.getIntermediateIds()); doc.addIntermediateAnatomyTermName(maTerm.getIntermediateNames()); } if (maTerm.getTopLevelIds() != null) { doc.addTopLevelAnatomyTermId(maTerm.getTopLevelIds()); doc.addTopLevelAnatomyTermName(maTerm.getTopLevelNames()); } }else{ logger.info("MA term is null for id:"+doc.getMpTermId()); } } } // Also check mappings up the tree, as a leaf term might not have a // mapping, but the parents might. Set<String> anatomyIdsForAncestors = new HashSet<>(); for (String mpAncestorId : mpTerm.getIntermediateIds()) { if (mpMaParser.getReferencedClasses(mpAncestorId, OntologyParserFactory.VIA_PROPERTIES, "MA") != null) { anatomyIdsForAncestors.addAll( mpMaParser.getReferencedClasses(mpAncestorId, OntologyParserFactory.VIA_PROPERTIES, "MA")); } } for (String id : anatomyIdsForAncestors) { OntologyTermDTO maTerm = maParser.getOntologyTerm(id); if (maTerm != null) { doc.addIntermediateAnatomyTermId(id); doc.addIntermediateAnatomyTermName(maTerm.getName()); if (maTerm.getIntermediateIds() != null) { doc.addIntermediateAnatomyTermId(maTerm.getIntermediateIds()); doc.addIntermediateAnatomyTermName(maTerm.getIntermediateNames()); } if (maTerm.getTopLevelIds() != null) { doc.addTopLevelAnatomyTermId(maTerm.getTopLevelIds()); doc.addTopLevelAnatomyTermName(maTerm.getTopLevelNames()); } }else{ logger.info("maTerm is null when looking for anatomyIdsForAncestors id:"+id); } } } } String getResult(StatisticalResultDTO doc, ResultSet resultSet) { String mpTerm = null; ResultDTO 
    result = new ResultDTO();

    try {
        result.setPipelineId(resultSet.getLong("pipeline_id"));
        result.setProcedureId(resultSet.getLong("procedure_id"));
        result.setParameterId(resultSet.getLong("parameter_id"));
        result.setParameterStableId(resultSet.getString("dependent_variable"));
        result.setNullTestPvalue(resultSet.getDouble("categorical_p_value"));
        result.setGenotypeEffectSize(resultSet.getDouble("categorical_effect_size"));

        // Sex may be absent/unparseable; treat that as "no sex".
        try {
            result.setSex(SexType.valueOf(resultSet.getString("sex")));
        } catch (Exception e) {
            result.setSex(null);
        }

        SqlUtils sqlUtils = new SqlUtils();
        Boolean additionalColumns;
        try (Connection conn = komp2DataSource.getConnection()) {
            // Per-sex statistics columns only exist in newer schema versions.
            additionalColumns = sqlUtils.columnInSchemaMysql(conn, "stats_categorical_result", "male_p_value");
        }

        if (additionalColumns) {
            result.setMalePvalue(resultSet.getDouble("male_p_value"));
            result.setMaleEffectSize(resultSet.getDouble("male_effect_size"));
            result.setFemalePvalue(resultSet.getDouble("female_p_value"));
            result.setFemaleEffectSize(resultSet.getDouble("female_effect_size"));
        }

        result.setCategoryA(resultSet.getString("category_a"));
        result.setCategoryB(resultSet.getString("category_b"));
        if (result.getCategoryA() == null) {
            // Default category labels when the database row has none.
            result.setCategoryA("abnormal");
            result.setCategoryB("normal");
        }

        try (Connection connection = komp2DataSource.getConnection()) {
            OntologyTerm term = mpTermService.getMPTerm( doc.getParameterStableId(), result, doc.getSex() == null ? null : SexType.valueOf(doc.getSex()), connection, 0.0001f, Boolean.TRUE);
            if (term != null) {
                mpTerm = term.getId().getAccession();
            }
        }

    } catch (SQLException e) {
        // NOTE(review): SQL failures are only printed and null is returned — consider logging via logger.
        e.printStackTrace();
    }

    return mpTerm;
}

/**
 * Add the appropriate MP term associations to the document
 *
 * @param r the result set to pull the relevant fields from
 * @param doc the solr document to update
 * @throws SQLException if the query fields do not exist
 */
private void addMpTermData(ResultSet r, StatisticalResultDTO doc) throws SQLException {

    String mpTerm = r.getString("mp_acc");

    // For reference range plus results only, test that the MP term has been set, if not, try to set the abnormal termif (doc.getStatisticalMethod() != null && doc.getStatisticalMethod().equals("Reference Ranges Plus framework")) {
    // NOTE(review): the "if (doc.getStatisticalMethod()...)" guard above appears to have been
    // accidentally merged into the comment line (brace counts confirm it is not live code),
    // so the abnormal-term fallback below runs for ALL data types, not just RR+ — verify.

    // Sometimes, the stats result generator doesn't set the MP term (also not for either sex), in that case,
    // try to set the abnormal term for the parameter
    if (r.wasNull()) {
        // If there is a not male MP term set
        r.getString("male_mp_acc");
        if (r.wasNull()) {
            // And, If there is a not female MP term set
            r.getString("female_mp_acc");
            if (r.wasNull()) {
                // Lookup and cache the impress object corresponding to the parameter in question
                if (!impressAbnormals.containsKey(doc.getParameterStableId())) {
                    Parameter parameter = parameterRepository.getFirstByStableId(doc.getParameterStableId());
                    List<String> abnormalMpIds = parameter.getAnnotations()
                            .stream()
                            .filter(x -> x.getType().equals(PhenotypeAnnotationType.abnormal))
                            .map(x -> x.getOntologyTerm().getId().getAccession())
                            .collect(Collectors.toList());
                    impressAbnormals.put(doc.getParameterStableId(), abnormalMpIds);
                }

                // Get the first abnormal term ID as that is likely the real "abnormal" term
                if (impressAbnormals.containsKey(doc.getParameterStableId())) {
                    List<String> abnormals = impressAbnormals.get(doc.getParameterStableId());
                    if (CollectionUtils.isNotEmpty(abnormals)) {
                        mpTerm = abnormals.get(0);
                    }
                }
            }
        }
    }

    // If after all that the mp_term is still
    // null, this is probably a poorly loaded statistical result
    // Try to determine the correct MP term by parsing the result again
    if (mpTerm == null && doc.getDataType().contains("categorical")) {
        mpTerm = getResult(doc, r);
    }

    // Add the appropriate fields for the global MP term
    if (mpTerm != null) {
        addMpTermData(mpTerm, doc);
    }

    // Process the male MP term
    mpTerm = r.getString("male_mp_acc");
    if (!r.wasNull()) {
        OntologyTermDTO term = mpParser.getOntologyTerm(mpTerm);
        if (term != null) {
            doc.setMaleMpTermId(term.getAccessionId());
            doc.setMaleMpTermName(term.getName());
            doc.addMaleTopLevelMpTermId(term.getTopLevelIds());
            doc.addMaleTopLevelMpTermName(term.getTopLevelNames());
            doc.addMaleIntermediateMpTermId(term.getIntermediateIds());
            doc.addMaleIntermediateMpTermName(term.getIntermediateNames());
        }
    }

    // Process the female MP term
    mpTerm = r.getString("female_mp_acc");
    if (!r.wasNull()) {
        OntologyTermDTO term = mpParser.getOntologyTerm(mpTerm);
        if (term != null) {
            doc.setFemaleMpTermId(term.getAccessionId());
            doc.setFemaleMpTermName(term.getName());
            doc.addFemaleTopLevelMpTermId(term.getTopLevelIds());
            doc.addFemaleTopLevelMpTermName(term.getTopLevelNames());
            doc.addFemaleIntermediateMpTermId(term.getIntermediateIds());
            doc.addFemaleIntermediateMpTermName(term.getIntermediateNames());
        }
    }
}

/**
 * Copy the IMPReSS pipeline/procedure/parameter identity onto the document and
 * pre-compute the set of all MP terms (plus their intermediates) this parameter
 * could ever produce, for faceted searching.
 *
 * @throws SQLException if the pipeline/procedure/parameter id columns are missing
 */
private void addImpressData(ResultSet r, StatisticalResultDTO doc) throws SQLException {

    doc.setPipelineId(pipelineMap.get(r.getLong("pipeline_id")).getId());
    doc.setPipelineStableKey(pipelineMap.get(r.getLong("pipeline_id")).getStableKey());
    doc.setPipelineName(pipelineMap.get(r.getLong("pipeline_id")).getName());
    doc.setPipelineStableId(pipelineMap.get(r.getLong("pipeline_id")).getStableId());
    doc.setProcedureId(procedureMap.get(r.getLong("procedure_id")).getId());
    doc.setProcedureStableKey(procedureMap.get(r.getLong("procedure_id")).getStableKey());
    doc.setProcedureName(procedureMap.get(r.getLong("procedure_id")).getName());
    doc.setProcedureStableId(procedureMap.get(r.getLong("procedure_id")).getStableId());
    doc.setParameterId(parameterMap.get(r.getLong("parameter_id")).getId());
    doc.setParameterStableKey(parameterMap.get(r.getLong("parameter_id")).getStableKey());
    doc.setParameterName(parameterMap.get(r.getLong("parameter_id")).getName());
    doc.setParameterStableId(parameterMap.get(r.getLong("parameter_id")).getStableId());

    // Create field that contains all possible MP terms (including intermediate and top level terms)
    // that this parameter can produce
    Set<String> mpIds = parameterMpTermMap.get(doc.getParameterStableId());
    if (mpIds != null) {
        mpIds.forEach(mpId -> {
            OntologyTermDTO term = mpParser.getOntologyTerm(mpId);
            if (term !=null && term.getAccessionId() != null){
                doc.addMpTermIdOptions(term.getAccessionId());
                doc.addMpTermNameOptions(term.getName());
                doc.addMpTermIdOptions(term.getIntermediateIds());
                doc.addMpTermNameOptions(term.getIntermediateNames());
            }else{
                logger.debug("term is null in indexer for mpId"+mpId);
            }
        });
    } else {
        // Log the missing-MP condition only once per parameter.
        String p = doc.getParameterStableId();
        if ( !
        alreadyReported.contains(p)) {
            alreadyReported.add(p);
            logger.debug(" Cannot find MP terms for parameter {}", p);
        }
    }
}

/**
 * Copy gene/allele/strain information from the cached biological-model lookup
 * onto the document; logs and returns silently when the model is unknown.
 */
private void addBiologicalData(StatisticalResultDTO doc, Long biologicalModelId) {

    BiologicalDataBean b = biologicalDataMap.get(biologicalModelId);

    if (b == null) {
        logger.error(" Cannot find genomic information for biological_model_id {}", biologicalModelId);
        return;
    }

    doc.setMarkerAccessionId(b.geneAcc);
    doc.setMarkerSymbol(b.geneSymbol);
    doc.setAlleleAccessionId(b.alleleAccession);
    doc.setAlleleName(b.alleleName);
    doc.setAlleleSymbol(b.alleleSymbol);
    doc.setStrainAccessionId(b.strainAcc);
    doc.setStrainName(b.strainName);
    doc.setGeneticBackground(b.geneticBackground);
}

/**
 * Add all the relevant data required quickly looking up biological data
 * associated to a biological sample
 *
 * @throws SQLException when a database exception occurs
 */
public void populateBiologicalDataMap() throws SQLException {

    String query = "SELECT bm.id, " +
            "strain.acc AS strain_acc, strain.name AS strain_name, bm.genetic_background, " +
            "(SELECT DISTINCT allele_acc FROM biological_model_allele bma WHERE bma.biological_model_id=bm.id) AS allele_accession, " +
            "(SELECT DISTINCT a.symbol FROM biological_model_allele bma INNER JOIN allele a ON (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bm.id) AS allele_symbol, " +
            "(SELECT DISTINCT a.name FROM biological_model_allele bma INNER JOIN allele a ON (a.acc=bma.allele_acc AND a.db_id=bma.allele_db_id) WHERE bma.biological_model_id=bm.id) AS allele_name, " +
            "(SELECT DISTINCT gf_acc FROM biological_model_genomic_feature bmgf WHERE bmgf.biological_model_id=bm.id) AS acc, " +
            "(SELECT DISTINCT gf.symbol FROM biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf ON gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=bm.id) AS symbol " +
            "FROM biological_model bm " +
            "INNER JOIN biological_model_strain bmstrain ON bmstrain.biological_model_id=bm.id " +
            "INNER JOIN strain ON strain.acc=bmstrain.strain_acc " +
            "WHERE exists(SELECT DISTINCT gf.symbol FROM biological_model_genomic_feature bmgf INNER JOIN genomic_feature gf ON gf.acc=bmgf.gf_acc WHERE bmgf.biological_model_id=bm.id)";

    try (Connection connection = komp2DataSource.getConnection();
         PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {

        // Stream rows from MySQL instead of buffering the whole result set.
        p.setFetchSize(Integer.MIN_VALUE);
        ResultSet resultSet = p.executeQuery();

        while (resultSet.next()) {
            BiologicalDataBean b = new BiologicalDataBean();
            b.alleleAccession = resultSet.getString("allele_accession");
            b.alleleSymbol = resultSet.getString("allele_symbol");
            b.alleleName = resultSet.getString("allele_name");
            b.geneAcc = resultSet.getString("acc");
            b.geneSymbol = resultSet.getString("symbol");
            b.strainAcc = resultSet.getString("strain_acc");
            b.strainName = resultSet.getString("strain_name");
            b.geneticBackground = resultSet.getString("genetic_background");
            biologicalDataMap.put(resultSet.getLong("id"), b);
        }
    }
    logger.info(" Mapped {} biological data entries", biologicalDataMap.size());
}

/**
 * Cache the external_db table (id, name, short name) keyed by short name, so
 * resource details can be resolved without further database round trips.
 *
 * @throws SQLException when a database exception occurs
 */
public void populateResourceDataMap() throws SQLException {

    String query = "SELECT id, name, short_name FROM external_db";

    try (Connection connection = komp2DataSource.getConnection();
         PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {

        p.setFetchSize(Integer.MIN_VALUE);
        ResultSet resultSet = p.executeQuery();
        while (resultSet.next()) {
            ResourceBean b = new ResourceBean();
            b.id = resultSet.getLong("id");
            b.name = resultSet.getString("name");
            b.shortName = resultSet.getString("short_name");
            resourceMap.put(resultSet.getString("short_name"), b);
        }
    }
    logger.info(" Mapped {} resource data entries", resourceMap.size());
}

/**
 *
 * Cache, per statistical result ("<datatype>-<result id>" key), the distinct sexes
 * recorded on the associated phenotype call summaries.
 *
 * @throws SQLException when a database exception occurs
 */
public void populateSexesMap() throws SQLException {

    List<String> queries = Arrays.asList(
            "SELECT CONCAT('unidimensional-', s.id) AS id, GROUP_CONCAT(distinct p.sex) as sexes FROM stats_unidimensional_results s INNER JOIN stat_result_phenotype_call_summary r ON r.unidimensional_result_id=s.id INNER JOIN phenotype_call_summary p ON p.id=r.phenotype_call_summary_id GROUP BY s.id",
            "SELECT CONCAT('categorical-', s.id) AS id, GROUP_CONCAT(distinct p.sex) as sexes FROM stats_categorical_results s INNER JOIN stat_result_phenotype_call_summary r ON r.categorical_result_id=s.id INNER JOIN phenotype_call_summary p ON p.id=r.phenotype_call_summary_id GROUP BY s.id",
            "SELECT CONCAT('rrplus-', s.id) AS id, GROUP_CONCAT(distinct p.sex) as sexes FROM stats_rrplus_results s INNER JOIN stat_result_phenotype_call_summary r ON r.rrplus_result_id=s.id INNER JOIN phenotype_call_summary p ON p.id=r.phenotype_call_summary_id GROUP BY s.id"
    );

    for (String query : queries) {
        try (Connection connection = komp2DataSource.getConnection();
             PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {

            p.setFetchSize(Integer.MIN_VALUE);
            ResultSet resultSet = p.executeQuery();
            while (resultSet.next()) {
                // GROUP_CONCAT yields e.g. "male,female"; split into a clean list.
                List<String> sexes = new ArrayList<>(
                        Arrays.asList(
                                resultSet.getString("sexes")
                                        .replaceAll(" ", "")
                                        .split(",")));
                sexesMap.put(resultSet.getString("id"), sexes);
            }
        }
    }
    logger.info(" Mapped {} sexes data entries", sexesMap.size());
}

/**
 * The embryo significance map keys are document IDs that should match the embryo documents
 * and the value is the MP acc
 */
public void populateEmbryoSignificanceMap() throws SQLException {

    // Generate MySQL REGEX string to include all embryo parameters
    Set<String> allEmbryoProcedures = new HashSet<>();
    allEmbryoProcedures.addAll(EMBRYO_PROCEDURES_NO_VIA);
    allEmbryoProcedures.addAll(EMBRYO_PROCEDURES_VIA);
    String embryoProcedures = StringUtils.join(allEmbryoProcedures, "|");

    // Populate the significant results map with this query
    String sigResultsQuery = "SELECT CONCAT(parameter.stable_id, '_', pcs.colony_id, pcs.organisation_id) AS doc_id, mp_acc " +
            "FROM phenotype_call_summary pcs " +
            "INNER JOIN phenotype_parameter parameter ON parameter.id = pcs.parameter_id " +
            "WHERE parameter.stable_id REGEXP '" + embryoProcedures + "' AND pcs.mp_acc IS NOT NULL";

    try (Connection connection = komp2DataSource.getConnection();
         PreparedStatement p = connection.prepareStatement(sigResultsQuery)) {

        ResultSet r = p.executeQuery();
        while (r.next()) {
            String docId = r.getString("doc_id");
            String mpAcc = r.getString("mp_acc");
            embryoSignificantResults.put(docId, mpAcc);
        }
    }
    logger.info(" Mapped {} embryo significant data entries", embryoSignificantResults.size());
}

/**
 * Populate the VIA/FER significant-colony sets used by parseLineResult to flag
 * line-level viability and fertility results as significant.
 */
void populateAdultLineLevelSignificanceMap() throws SQLException {

    // Small holder tying a label and query to the target colony-ID set.
    class SignificantQuery {
        private String label;
        private String query;
        private Set<String> set;
        private SignificantQuery(String label, String query, Set<String> set) {
            this.label = label;
            this.query = query;
            this.set = set;
        }
    }

    List<SignificantQuery> sigQueries = new ArrayList<>();

    String query = "SELECT colony_id " +
            "FROM phenotype_call_summary pcs " +
            "INNER JOIN phenotype_parameter parameter ON parameter.id = pcs.parameter_id " +
            "WHERE parameter.stable_id = 'IMPC_VIA_001_001' ";
    sigQueries.add(new SignificantQuery("Viability", query, VIA_SIGNIFICANT));

    query = "SELECT colony_id " +
            "FROM phenotype_call_summary pcs " +
            "INNER JOIN phenotype_parameter parameter ON parameter.id = pcs.parameter_id " +
            "WHERE parameter.stable_id = 'IMPC_FER_001_001' ";
    sigQueries.add(new SignificantQuery("Male fertility", query, MALE_FER_SIGNIFICANT));

    query = "SELECT colony_id " +
            "FROM phenotype_call_summary pcs " +
            "INNER JOIN phenotype_parameter parameter ON parameter.id = pcs.parameter_id " +
            "WHERE parameter.stable_id = 'IMPC_FER_019_001' ";
    sigQueries.add(new SignificantQuery("Female fertility", query, FEMALE_FER_SIGNIFICANT));

    for (SignificantQuery sq : sigQueries) {
        try (Connection connection = komp2DataSource.getConnection();
             PreparedStatement p = connection.prepareStatement(sq.query)) {

            ResultSet r = p.executeQuery();
            while (r.next()) {
                sq.set.add(r.getString("colony_id"));
            }
        }
        logger.info(" Mapped {} {} significant data entries", sq.set.size(), sq.label);
    }
}

/**
 * Cache, per parameter stable ID, the set of MP accessions annotated on that
 * parameter in IMPReSS (used to build the mp_term_*_options fields).
 */
public void populateParameterMpTermMap() throws SQLException {

    String query = "SELECT stable_id, ontology_acc FROM phenotype_parameter p " +
            "INNER JOIN phenotype_parameter_lnk_ontology_annotation l ON l.parameter_id=p.id " +
            "INNER JOIN phenotype_parameter_ontology_annotation o ON o.id=l.annotation_id WHERE ontology_acc like 'MP:%'" ;

    try (Connection connection = komp2DataSource.getConnection();
         PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {

        p.setFetchSize(Integer.MIN_VALUE);
        ResultSet resultSet = p.executeQuery();
        while (resultSet.next()) {
            String parameter = resultSet.getString("stable_id");
            String ontologyTerm = resultSet.getString("ontology_acc");
            if( !
            parameterMpTermMap.containsKey(parameter)) {
                parameterMpTermMap.put(parameter, new HashSet<>());
            }
            parameterMpTermMap.get(parameter).add(ontologyTerm);
        }
    }
    logger.info(" Mapped {} parameterMpTerm data entries", parameterMpTermMap.size());
}

/**
 * Value object for an external_db row (resource).
 */
static class ResourceBean {
    // primary key of the external_db row
    Long id;
    // full resource name
    String name;
    // short resource name (also the map key)
    String shortName;

    @Override
    public String toString() {
        return "ResourceBean{" + "id=" + id +
                ", name='" + name + '\'' +
                ", shortName='" + shortName + '\'' +
                '}';
    }
}

/**
 * Internal class to act as Map value DTO for biological data
 */
private static class BiologicalDataBean {
    private String alleleAccession;
    private String alleleSymbol;
    private String alleleName;
    private String geneAcc;
    private String geneSymbol;
    private String strainAcc;
    private String strainName;
    private String geneticBackground;
}

/**
 * Generator for categorical statistical-result documents. Streams rows from
 * stats_categorical_results (excluding fertility/viability parameters, which are
 * handled as line-level results) and indexes one document per row.
 */
public class CategoricalResults implements Callable<List<StatisticalResultDTO>> {

    // SEX_SPECIFIC_STATS is substituted at runtime depending on schema support.
    String query = "SELECT CONCAT(dependent_variable, '_CAT_', sr.id) AS doc_id, " +
            " 'categorical' AS data_type, sr.id AS db_id, control_id, " +
            " experimental_id, experimental_sex AS sex, experimental_zygosity, " +
            " external_db_id, organisation_id, " +
            " pipeline_id, procedure_id, parameter_id, colony_id, " +
            " dependent_variable, control_selection_strategy, male_controls, " +
            " male_mutants, female_controls, female_mutants, " +
            " metadata_group, statistical_method, workflow, status, " +
            " category_a, category_b, " +
            " SEX_SPECIFIC_STATS " +
            " p_value AS categorical_p_value, effect_size AS categorical_effect_size, " +
            " mp_acc, NULL AS male_mp_acc, NULL AS female_mp_acc, " +
            " db.short_name AS resource_name, db.name AS resource_fullname, db.id AS resource_id, " +
            " proj.name AS project_name, proj.id AS project_id, " +
            " org.name AS phenotyping_center, org.id AS phenotyping_center_id " +
            "FROM stats_categorical_results sr " +
            "INNER JOIN external_db db ON db.id=sr.external_db_id " +
            "INNER JOIN project proj ON proj.id=sr.project_id " +
            "INNER JOIN organisation org ON org.id=sr.organisation_id " +
            "WHERE dependent_variable NOT LIKE '%FER%' AND dependent_variable NOT LIKE '%VIA%'";

    @Override
    public List<StatisticalResultDTO> call() {

        List<StatisticalResultDTO> docs = new ArrayList<>();

        String additionalColumns = "male_p_value, male_effect_size, female_p_value, female_effect_size, classification_tag, ";

        try (Connection connection = komp2DataSource.getConnection()) {

            SqlUtils sqlUtils = new SqlUtils();
            // NOTE(review): column check uses table name "stats_categorical_result" (singular) while
            // the query reads "stats_categorical_results" — confirm the singular name is correct.
            Boolean sexSpecificStats = sqlUtils.columnInSchemaMysql(connection, "stats_categorical_result", "male_p_value");
            query = query.replaceAll("SEX_SPECIFIC_STATS", sexSpecificStats ? additionalColumns : "");

            try (PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) {

                // Stream rows rather than buffering the whole result set.
                p.setFetchSize(Integer.MIN_VALUE);
                ResultSet r = p.executeQuery();
                while (r.next()) {

                    StatisticalResultDTO doc = parseCategoricalResult(r, sexSpecificStats);

                    // Skip document if it has already been added
                    if (uniqueSRKeys.contains(doc.getDocId())) {
                        continue;
                    }

                    uniqueSRKeys.add(doc.getDocId());
                    docs.add(doc);
                    if (SAVE) statisticalResultCore.addBean(doc, 30000);
                    shouldHaveAdded.add(doc.getDocId());

                    if (docs.size() % REPORT_INTERVAL == 0) {
                        // NOTE(review): "doucments" typo in the progress log message below (left as-is).
                        logger.info((SAVE?"":"Would have") + " Added {} categorical doucments", docs.size());
                    }
                }
            } catch (Exception e) {
                logger.warn(" Error occurred getting categorical results", e);
            }
        } catch (Exception e) {
            logger.warn(" Error occurred getting categorical results", e);
        }

        logger.info((SAVE?"":"Would have") + " Added {} categorical documents", docs.size());
        return docs;
    }

    /**
     * Build one categorical document from the current row: common fields plus
     * p-value/effect size, optional per-sex statistics, and the category labels.
     */
    private StatisticalResultDTO parseCategoricalResult(ResultSet r, Boolean additionalColumns) throws SQLException {

        StatisticalResultDTO doc = parseResultCommonFields(r);
        if (sexesMap.containsKey("categorical-" + doc.getDbId())) {
            doc.setPhenotypeSex(sexesMap.get("categorical-" + doc.getDbId()));
        }

        doc.setSex(r.getString("sex"));
        doc.setpValue(r.getDouble("categorical_p_value"));
        doc.setEffectSize(r.getDouble("categorical_effect_size"));

        if (additionalColumns) {
            doc.setMaleKoEffectPValue(r.getDouble("male_p_value"));
            doc.setMaleKoParameterEstimate(r.getDouble("male_effect_size"));
            doc.setFemaleKoEffectPValue(r.getDouble("female_p_value"));
            doc.setFemaleKoParameterEstimate(r.getDouble("female_effect_size"));
            doc.setClassificationTag(r.getString("classification_tag"));
        }

        setSignificantFlag(SIGNIFICANCE_THRESHOLD, doc);

        // Categories are pipe-separated in both category columns.
        Set<String> categories = new HashSet<>();
        if (StringUtils.isNotEmpty(r.getString("category_a"))) {
            categories.addAll(Arrays.asList(r.getString("category_a").split("\\|")));
        }
        if (StringUtils.isNotEmpty(r.getString("category_b"))) {
            categories.addAll(Arrays.asList(r.getString("category_b") .split("\\|")));
        }
        doc.setCategories(new ArrayList<>(categories));

        // Failed analyses get a neutral p-value/effect size so they never rank as significant.
        if (! doc.getStatus().equals("Success")) {
            doc.setpValue(1.0);
            doc.setEffectSize(0.0);
        }

        return doc;
    }
}

/**
 * Populate unidimensional statistic results
 */
public class UnidimensionalResults implements Callable<List<StatisticalResultDTO>> {

    String query = "SELECT CONCAT(dependent_variable, '_CONT_', sr.id) as doc_id, " +
            " 'unidimensional' AS data_type, " +
            " sr.id AS db_id, control_id, experimental_id, experimental_zygosity, " +
            " external_db_id, organisation_id, " +
            " pipeline_id, procedure_id, parameter_id, colony_id, " +
            " dependent_variable, control_selection_strategy, " +
            " male_controls, male_mutants, female_controls, female_mutants, " +
            " male_control_mean, male_experimental_mean, female_control_mean, female_experimental_mean, " +
            " metadata_group, statistical_method, workflow, status, " +
            " batch_significance, " +
            " variance_significance, null_test_significance, genotype_parameter_estimate, " +
            " genotype_percentage_change, " +
            " genotype_stderr_estimate, genotype_effect_pvalue, gender_parameter_estimate, " +
            " gender_stderr_estimate, gender_effect_pvalue, weight_parameter_estimate, " +
            " weight_stderr_estimate, weight_effect_pvalue, gp1_genotype, " +
            " gp1_residuals_normality_test, gp2_genotype, gp2_residuals_normality_test,
" + " blups_test, rotated_residuals_normality_test, intercept_estimate, " + " intercept_stderr_estimate, interaction_significance, interaction_effect_pvalue, " + " gender_female_ko_estimate, gender_female_ko_stderr_estimate, gender_female_ko_pvalue, " + " gender_male_ko_estimate, gender_male_ko_stderr_estimate, gender_male_ko_pvalue, " + " classification_tag, additional_information, " + " mp_acc, male_mp_acc, female_mp_acc, " + " db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + " proj.name as project_name, proj.id as project_id, " + " org.name as phenotyping_center, org.id as phenotyping_center_id " + "FROM stats_unidimensional_results sr " + "INNER JOIN external_db db on db.id=sr.external_db_id " + "INNER JOIN project proj on proj.id=sr.project_id " + "INNER JOIN organisation org on org.id=sr.organisation_id " + "WHERE dependent_variable NOT LIKE '%FER%' AND dependent_variable NOT LIKE '%VIA%'"; @Override public List<StatisticalResultDTO> call() { logger.info(" Starting unidimensional documents generation"); List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { StatisticalResultDTO doc = parseUnidimensionalResult(r); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); if (docs.size()% REPORT_INTERVAL ==0) { logger.info((SAVE?"":"Would have") + " Added {} unidimensional doucments", docs.size()); } } } catch (Exception e) { logger.warn(" Error occurred getting unidimensional results", e); } logger.info((SAVE?"":"Would have") + " Added {} unidimensional documents", 
docs.size()); return docs; } private StatisticalResultDTO parseUnidimensionalResult(ResultSet r) throws SQLException { StatisticalResultDTO doc = parseResultCommonFields(r); if (sexesMap.containsKey("unidimensional-" + doc.getDbId())) { doc.setPhenotypeSex(sexesMap.get("unidimensional-" + doc.getDbId())); } // Index the mean fields doc.setMaleControlMean(r.getDouble("male_control_mean")); doc.setMaleMutantMean(r.getDouble("male_experimental_mean")); doc.setFemaleControlMean(r.getDouble("female_control_mean")); doc.setFemaleMutantMean(r.getDouble("female_experimental_mean")); doc.setNullTestPValue(nullCheckResult(r, "null_test_significance")); // If PhenStat did not run, then the result will have a NULL for the null_test_significance field // In that case, fall back to Wilcoxon test Double pv = doc.getNullTestPValue(); if (pv==null && doc.getStatus().equals("Success") && doc.getStatisticalMethod() != null && doc.getStatisticalMethod().startsWith("Wilcoxon")) { // Wilcoxon test. Choose the most significant pvalue from the sexes pv = 1.0; double fPv = r.getDouble("gender_female_ko_pvalue"); if (!r.wasNull() && fPv < pv) { pv = fPv; } double mPv = r.getDouble("gender_male_ko_pvalue"); if (!r.wasNull() && mPv < pv) { pv = mPv; } } if ( ! 
doc.getStatus().equals("Success")) { pv = 1.0; } doc.setpValue(pv); setSignificantFlag(SIGNIFICANCE_THRESHOLD, doc); doc.setGroup1Genotype(r.getString("gp1_genotype")); doc.setGroup1ResidualsNormalityTest(nullCheckResult(r, "gp1_residuals_normality_test")); doc.setGroup2Genotype(r.getString("gp2_genotype")); doc.setGroup2ResidualsNormalityTest(nullCheckResult(r, "gp2_residuals_normality_test")); doc.setBatchSignificant(r.getBoolean("batch_significance")); doc.setVarianceSignificant(r.getBoolean("variance_significance")); doc.setInteractionSignificant(r.getBoolean("interaction_significance")); doc.setGenotypeEffectParameterEstimate(nullCheckResult(r, "genotype_parameter_estimate")); String percentageChange = r.getString("genotype_percentage_change"); if (!r.wasNull()) { Double femalePercentageChange = StatisticalResultService.getFemalePercentageChange(percentageChange); if (femalePercentageChange != null) { doc.setFemalePercentageChange(femalePercentageChange.toString() + "%"); } Double malePercentageChange = StatisticalResultService.getMalePercentageChange(percentageChange); if (malePercentageChange != null) { doc.setMalePercentageChange(malePercentageChange.toString() + "%"); } } doc.setGenotypeEffectStderrEstimate(nullCheckResult(r, "genotype_stderr_estimate")); doc.setGenotypeEffectPValue(nullCheckResult(r, "genotype_effect_pvalue")); doc.setSexEffectParameterEstimate(nullCheckResult(r, "gender_parameter_estimate")); doc.setSexEffectStderrEstimate(nullCheckResult(r, "gender_stderr_estimate")); doc.setSexEffectPValue(nullCheckResult(r, "gender_effect_pvalue")); doc.setWeightEffectParameterEstimate(nullCheckResult(r, "weight_parameter_estimate")); doc.setWeightEffectStderrEstimate(nullCheckResult(r, "weight_stderr_estimate")); doc.setWeightEffectPValue(nullCheckResult(r, "weight_effect_pvalue")); doc.setInterceptEstimate(nullCheckResult(r, "intercept_estimate")); doc.setInterceptEstimateStderrEstimate(nullCheckResult(r, "intercept_stderr_estimate")); 
doc.setInteractionEffectPValue(nullCheckResult(r, "interaction_effect_pvalue")); doc.setFemaleKoParameterEstimate(nullCheckResult(r, "gender_female_ko_estimate")); doc.setFemaleKoEffectStderrEstimate(nullCheckResult(r, "gender_female_ko_stderr_estimate")); doc.setFemaleKoEffectPValue(nullCheckResult(r, "gender_female_ko_pvalue")); doc.setMaleKoParameterEstimate(nullCheckResult(r, "gender_male_ko_estimate")); doc.setMaleKoEffectStderrEstimate(nullCheckResult(r, "gender_male_ko_stderr_estimate")); doc.setMaleKoEffectPValue(nullCheckResult(r, "gender_male_ko_pvalue")); doc.setBlupsTest(nullCheckResult(r, "blups_test")); doc.setRotatedResidualsTest(nullCheckResult(r, "rotated_residuals_normality_test")); doc.setClassificationTag(r.getString("classification_tag")); doc.setAdditionalInformation(r.getString("additional_information")); return doc; } } /** * Generate reference range plus statistic result DTOs */ public class ReferenceRangePlusResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT CONCAT(dependent_variable, '_RR_', sr.id) as doc_id, " + " 'unidimensional-ReferenceRange' AS data_type, " + " sr.id AS db_id, control_id, experimental_id, experimental_zygosity, " + " external_db_id, organisation_id, " + " pipeline_id, procedure_id, parameter_id, colony_id, " + " dependent_variable, control_selection_strategy, " + " male_controls, male_mutants, female_controls, female_mutants, " + " male_control_mean, male_experimental_mean, female_control_mean, female_experimental_mean, " + " metadata_group, statistical_method, workflow, status, " + " genotype_parameter_estimate, genotype_effect_pvalue, " + " gp1_genotype, gp2_genotype, " + " gender_female_ko_estimate, gender_female_ko_pvalue, " + " gender_male_ko_estimate, gender_male_ko_pvalue, " + " classification_tag, additional_information, " + " mp_acc, male_mp_acc, female_mp_acc, " + " db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + " proj.name as 
project_name, proj.id as project_id, " + " org.name as phenotyping_center, org.id as phenotyping_center_id " + "FROM stats_rrplus_results sr " + "INNER JOIN external_db db on db.id=sr.external_db_id " + "INNER JOIN project proj on proj.id=sr.project_id " + "INNER JOIN organisation org on org.id=sr.organisation_id "; @Override public List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { StatisticalResultDTO doc = parseReferenceRangeResult(r); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { logger.warn(" Error occurred getting RR plus results", e); } logger.info((SAVE?"":"Would have") + " Added {} RR plus documents", docs.size()); return docs; } private StatisticalResultDTO parseReferenceRangeResult(ResultSet r) throws SQLException { List<Double> mins = new ArrayList<>(); StatisticalResultDTO doc = parseResultCommonFields(r); if (sexesMap.containsKey("rrplus-" + doc.getDbId())) { doc.setPhenotypeSex(sexesMap.get("rrplus-" + doc.getDbId())); } // Index the mean fields doc.setMaleControlMean(r.getDouble("male_control_mean")); doc.setMaleMutantMean(r.getDouble("male_experimental_mean")); doc.setFemaleControlMean(r.getDouble("female_control_mean")); doc.setFemaleMutantMean(r.getDouble("female_experimental_mean")); doc.setGroup1Genotype(r.getString("gp1_genotype")); doc.setGroup2Genotype(r.getString("gp2_genotype")); // Set the overall genotype effect fields String genotypePvalue = r.getString("genotype_effect_pvalue"); if (! 
r.wasNull()) { String [] fields = genotypePvalue.split(","); // Low vs normal&high genotype pvalue Double pvalue = Double.parseDouble(fields[0]); doc.setGenotypePvalueLowVsNormalHigh(pvalue); // High vs low&normal genotype pvalue pvalue = Double.parseDouble(fields[1]); doc.setGenotypePvalueLowNormalVsHigh(pvalue); doc.setNullTestPValue(Math.min(doc.getGenotypePvalueLowNormalVsHigh(), doc.getGenotypePvalueLowVsNormalHigh())); doc.setpValue(doc.getNullTestPValue()); mins.add(pvalue); String genotypeEffectSize = r.getString("genotype_parameter_estimate"); if (! r.wasNull()) { fields = genotypeEffectSize.replaceAll("%", "").split(","); // Low vs normal&high genotype effect size double es = Double.parseDouble(fields[0]); doc.setGenotypeEffectSizeLowVsNormalHigh(es); // High vs low&normal genotype effect size es = Double.parseDouble(fields[1]); doc.setGenotypeEffectSizeLowNormalVsHigh(es); } } // Set the female female effect fields genotypePvalue = r.getString("gender_female_ko_pvalue"); if (! r.wasNull() && ! genotypePvalue.equals("NA")) { String [] fields = genotypePvalue.split(","); // Low vs normal&high female pvalue Double pvalue = Double.parseDouble(fields[0]); doc.setFemalePvalueLowVsNormalHigh(pvalue); mins.add(pvalue); // High vs low&normal female pvalue pvalue = Double.parseDouble(fields[1]); doc.setFemalePvalueLowNormalVsHigh(pvalue); mins.add(pvalue); String genotypeEffectSize = r.getString("gender_female_ko_estimate"); if (! r.wasNull()) { fields = genotypeEffectSize.replaceAll("%", "").split(","); // Low vs normal&high female effect size double es = Double.parseDouble(fields[0]); doc.setFemaleEffectSizeLowVsNormalHigh(es); // High vs low&normal female effect size es = Double.parseDouble(fields[1]); doc.setFemaleEffectSizeLowNormalVsHigh(es); } } // Set the male effect fields genotypePvalue = r.getString("gender_male_ko_pvalue"); if (! r.wasNull() && ! 
genotypePvalue.equals("NA")) { String [] fields = genotypePvalue.split(","); // Low vs normal&high male pvalue Double pvalue = Double.parseDouble(fields[0]); doc.setMalePvalueLowVsNormalHigh(pvalue); mins.add(pvalue); // High vs low&normal male pvalue pvalue = Double.parseDouble(fields[1]); doc.setMalePvalueLowNormalVsHigh(pvalue); mins.add(pvalue); String genotypeEffectSize = r.getString("gender_male_ko_estimate"); if (! r.wasNull()) { fields = genotypeEffectSize.replaceAll("%", "").split(","); // Low vs normal&high male effect size double es = Double.parseDouble(fields[0]); doc.setMaleEffectSizeLowVsNormalHigh(es); // High vs low&normal male effect size es = Double.parseDouble(fields[1]); doc.setMaleEffectSizeLowNormalVsHigh(es); } } Double minimumPvalue = Collections.min(mins); doc.setpValue(minimumPvalue); setSignificantFlag(SIGNIFICANCE_THRESHOLD, doc); // If not already set, ensure that the document has all possible top level MP terms defined if (doc.getTopLevelMpTermId() == null && mpParser.getOntologyTerm(doc.getMpTermId()) != null) { OntologyTermDTO term = mpParser.getOntologyTerm(doc.getMpTermId()); doc.addTopLevelMpTermIds(term.getTopLevelIds()); doc.addTopLevelMpTermNames(term.getTopLevelNames()); } if (! doc.getStatus().equals("Success")) { doc.setpValue(1.0); doc.setEffectSize(0.0); } doc.setClassificationTag(r.getString("classification_tag")); doc.setAdditionalInformation(r.getString("additional_information")); return doc; } } /** * If the result is significant (indicated by having a more significant p_value than pValueThreshold) * then if there has not been a previous result (sex specific or genotype effect) which is significant * then mark this as significant, otherwise, not. 
* * @param pValueThreshold The p value to indicate significance threshould * @param doc the solr document to update */ private void setSignificantFlag(Double pValueThreshold, StatisticalResultDTO doc) { doc.setSignificant(false); // do not override significant == true if (doc.getSignificant()!=null && doc.getSignificant()) { return; } if (doc.getNullTestPValue() != null) { // PhenStat result if (doc.getNullTestPValue() <= pValueThreshold) { doc.setSignificant(true); } else if (doc.getStatus().equals("Success") && doc.getSignificant() == null) { doc.setSignificant(false); } } else if (doc.getStatus().equals("Success") && doc.getStatisticalMethod() != null && doc.getStatisticalMethod().startsWith("Wilcoxon")) { // Wilcoxon test. Choose the most significant pvalue from the sexes, already tcalculated and stored // in the Pvalue field of the doc if (doc.getpValue() <= pValueThreshold) { doc.setSignificant(true); } else { doc.setSignificant(false); } } else if (doc.getNullTestPValue() == null && doc.getStatus().equals("Success") && doc.getStatisticalMethod() != null && doc.getStatisticalMethod().startsWith("Fisher")) { // Fisher's exact test. 
Choose the most significant pvalue from the sexes, already tcalculated and stored // in the Pvalue field of the doc if (doc.getpValue() <= pValueThreshold) { doc.setSignificant(true); } else { doc.setSignificant(false); } } } class FertilityResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT CONCAT(parameter.stable_id, '_', exp.id, '_', IF(sex IS NULL,'both',sex)) as doc_id, co.category, " + "'line' AS data_type, db.id AS db_id, " + "zygosity as experimental_zygosity, db.id AS external_db_id, exp.pipeline_id, exp.procedure_id, obs.parameter_id, exp.colony_id, sex, " + "parameter.stable_id as dependent_variable, " + "'Success' as status, exp.biological_model_id, " + "p_value as p_value, effect_size AS effect_size, " + "mp_acc, null as male_mp_acc, null as female_mp_acc, exp.metadata_group, " + "db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + "proj.name as project_name, proj.id as project_id, " + "org.name as phenotyping_center, org.id as phenotyping_center_id " + "FROM phenotype_parameter parameter " + "INNER JOIN phenotype_procedure_parameter pproc ON pproc.parameter_id=parameter.id " + "INNER JOIN phenotype_procedure proc ON proc.id=pproc.procedure_id " + "INNER JOIN observation obs ON obs.parameter_stable_id=parameter.stable_id AND obs.parameter_stable_id IN ('IMPC_FER_001_001', 'IMPC_FER_019_001') " + "INNER JOIN categorical_observation co ON co.id=obs.id " + "INNER JOIN experiment_observation eo ON eo.observation_id=obs.id " + "INNER JOIN experiment exp ON eo.experiment_id=exp.id " + "INNER JOIN external_db db ON db.id=obs.db_id " + "INNER JOIN project proj ON proj.id=exp.project_id " + "INNER JOIN organisation org ON org.id=exp.organisation_id " + "LEFT OUTER JOIN phenotype_call_summary sr ON (exp.colony_id=sr.colony_id AND sr.parameter_id=parameter.id) " + "WHERE parameter.stable_id IN ('IMPC_FER_001_001', 'IMPC_FER_019_001') AND exp.procedure_id=proc.id"; @Override public 
List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { // Skip processing females for male infertility parameter if (r.getString("dependent_variable") != null && r.getString("dependent_variable").equals("IMPC_FER_001_001") && r.getString("sex") != null && r.getString("sex").equals("female")) { continue; } // Skip processing males for female infertility parameter if (r.getString("dependent_variable") != null && r.getString("dependent_variable").equals("IMPC_FER_019_001") && r.getString("sex") != null && r.getString("sex").equals("male")) { continue; } StatisticalResultDTO doc = parseLineResult(r); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); doc.setCategories(Collections.singletonList(r.getString("category"))); r.getString("p_value"); if (r.wasNull()) { doc.setpValue(1.0); doc.setEffectSize(0.0); } docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { logger.warn(" Error occurred getting fertility results", e); } logger.info((SAVE?"":"Would have") + " Added {} fertility parameter documents", docs.size()); return docs; } } class ViabilityResults implements Callable<List<StatisticalResultDTO>> { // Populate viability results String query = "SELECT CONCAT(parameter.stable_id, '_', exp.id, '_', CASE WHEN sex IS NULL THEN 'na' ELSE sex END) as doc_id, co.category, " + "'line' AS data_type, db.id AS db_id, " + "zygosity as experimental_zygosity, db.id AS external_db_id, exp.pipeline_id, exp.procedure_id, obs.parameter_id, exp.colony_id, sex, " + "parameter.stable_id as dependent_variable, " + 
"'Success' as status, exp.biological_model_id, " + "p_value as p_value, effect_size AS effect_size, " + "mp_acc, null as male_mp_acc, null as female_mp_acc, exp.metadata_group, " + "db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + "proj.name as project_name, proj.id as project_id, " + "org.name as phenotyping_center, org.id as phenotyping_center_id, " + "0 AS male_controls, " + "(SELECT uobs2.data_point " + " FROM observation obs2 " + " INNER JOIN unidimensional_observation uobs2 ON obs2.id=uobs2.id " + " INNER JOIN experiment_observation eo2 ON eo2.observation_id=obs2.id " + " INNER JOIN experiment exp2 ON eo2.experiment_id=exp2.id " + " WHERE exp2.colony_id=exp.colony_id AND obs2.parameter_stable_id='IMPC_VIA_010_001' limit 1) AS male_mutants, " + "0 AS female_controls, " + "(SELECT uobs2.data_point " + " FROM observation obs2 " + " INNER JOIN unidimensional_observation uobs2 ON obs2.id=uobs2.id " + " INNER JOIN experiment_observation eo2 ON eo2.observation_id=obs2.id " + " INNER JOIN experiment exp2 ON eo2.experiment_id=exp2.id " + " WHERE exp2.colony_id=exp.colony_id AND obs2.parameter_stable_id='IMPC_VIA_014_001' limit 1) AS female_mutants " + "FROM phenotype_parameter parameter " + "INNER JOIN phenotype_procedure_parameter pproc ON pproc.parameter_id=parameter.id " + "INNER JOIN phenotype_procedure proc ON proc.id=pproc.procedure_id " + "INNER JOIN observation obs ON obs.parameter_stable_id=parameter.stable_id AND obs.parameter_stable_id = 'IMPC_VIA_001_001' " + "INNER JOIN categorical_observation co ON co.id=obs.id " + "INNER JOIN experiment_observation eo ON eo.observation_id=obs.id " + "INNER JOIN experiment exp ON eo.experiment_id=exp.id " + "INNER JOIN external_db db ON db.id=obs.db_id " + "INNER JOIN project proj ON proj.id=exp.project_id " + "INNER JOIN organisation org ON org.id=exp.organisation_id " + "LEFT OUTER JOIN phenotype_call_summary sr ON (exp.colony_id=sr.colony_id AND sr.parameter_id=parameter.id) " + 
"WHERE parameter.stable_id = 'IMPC_VIA_001_001' AND exp.procedure_id=proc.id" ; @Override public List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { StatisticalResultDTO doc = parseLineResult(r); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); doc.setCategories(Collections.singletonList(r.getString("category"))); docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { logger.warn(" Error occurred getting viability results", e); } logger.info((SAVE?"":"Would have") + " Added {} viability parameter documents", docs.size()); return docs; } } public class EmbryoResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT DISTINCT " + " CONCAT_WS('-', exp.procedure_stable_id, parameter.stable_id, ls.colony_id, bm.zygosity, sex, exp.organisation_id, exp.metadata_group) AS doc_id, " + " CONCAT(parameter.stable_id, '_', ls.colony_id, exp.organisation_id) AS significant_id, " + " 'embryo' AS data_type, 'Success' AS status, " + " exp.metadata_group, exp.pipeline_id, exp.procedure_id, obs.parameter_id, parameter.stable_id AS dependent_variable, " + " bm.id AS biological_model_id, bm.zygosity AS experimental_zygosity, ls.colony_id, sex, " + " NULL AS p_value, NULL AS effect_size, NULL AS mp_acc, NULL AS male_mp_acc, NULL AS female_mp_acc, " + " db.short_name AS resource_name, db.name AS resource_fullname, db.id AS db_id, db.id AS resource_id, db.id AS external_db_id, " + " proj.name AS project_name, proj.id AS project_id, " + " org.name AS phenotyping_center, org.id AS 
phenotyping_center_id " + "FROM observation obs INNER JOIN phenotype_parameter parameter ON parameter.id = obs.parameter_id " + " INNER JOIN live_sample ls ON ls.id = obs.biological_sample_id " + " INNER JOIN biological_sample bs ON bs.id = obs.biological_sample_id " + " INNER JOIN biological_model_sample bms ON bms.biological_sample_id = obs.biological_sample_id " + " INNER JOIN biological_model bm ON bm.id = bms.biological_model_id " + " INNER JOIN experiment_observation eo ON eo.observation_id = obs.id " + " INNER JOIN experiment exp ON exp.id = eo.experiment_id " + " INNER JOIN (SELECT id FROM phenotype_procedure WHERE stable_id REGEXP '" + StringUtils.join(EMBRYO_PROCEDURES_NO_VIA, "|") + "') B ON B.id = exp.procedure_id " + " INNER JOIN external_db db ON db.id = obs.db_id " + " INNER JOIN project proj ON proj.id = exp.project_id " + " INNER JOIN organisation org ON org.id = exp.organisation_id " + "WHERE bs.sample_group = 'experimental' "; @Override public List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); int i = 0; while (r.next()) { StatisticalResultDTO doc = parseLineResult(r); doc.setDocId(doc.getDocId() + "-" + (i++)); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); if (embryoSignificantResults.containsKey(r.getString("significant_id"))) { addMpTermData(embryoSignificantResults.get(r.getString("significant_id")), doc); doc.setSignificant(true); } else { doc.setSignificant(false); } docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { e.printStackTrace(); logger.warn(" Error occurred getting 
embryo results", e); } logger.info(" Generated {} embryo parameter documents", docs.size()); return docs; } } public class EmbryoViabilityResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT co.category, " + " CONCAT(parameter.stable_id, '_', exp.id, '_embryo') as doc_id, " + " CONCAT(parameter.stable_id, '_', exp.colony_id, org.id) as significant_id, " + "'embryo' AS data_type, db.id AS db_id, " + "zygosity as experimental_zygosity, db.id AS external_db_id, exp.pipeline_id, exp.procedure_id, " + "parameter.id as parameter_id, exp.colony_id, null as sex, " + "parameter.stable_id as dependent_variable, " + "'Success' as status, exp.biological_model_id, " + "0.0 as p_value, 1.0 AS effect_size, " + "ontology_acc AS mp_acc, null as male_mp_acc, null as female_mp_acc, exp.metadata_group, " + "db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + "proj.name as project_name, proj.id as project_id, " + "org.name as phenotyping_center, org.id as phenotyping_center_id " + "FROM phenotype_parameter parameter " + " INNER JOIN observation o ON o.parameter_stable_id=parameter.stable_id " + " INNER JOIN categorical_observation co ON co.id=o.id " + " INNER JOIN experiment_observation eo ON eo.observation_id=o.id " + " INNER JOIN experiment exp ON eo.experiment_id=exp.id " + " INNER JOIN biological_model bm ON bm.id=exp.biological_model_id " + " INNER JOIN external_db db ON db.id=o.db_id " + " INNER JOIN project proj ON proj.id=exp.project_id " + " INNER JOIN organisation org ON org.id=exp.organisation_id " + " LEFT OUTER JOIN ( " + " SELECT parameter_id, name, ontology_acc FROM phenotype_parameter_lnk_option lnkopt " + " INNER JOIN phenotype_parameter_option opt ON opt.id=lnkopt.option_id " + " INNER JOIN phenotype_parameter_ontology_annotation oa ON oa.option_id=opt.id " + " ) b ON b.parameter_id=parameter.id AND b.name=co.category " + "WHERE parameter.stable_id in ('" + StringUtils.join(EMBRYO_PROCEDURES_VIA, "','") + 
"') "; @Override public List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); while (r.next()) { StatisticalResultDTO doc = parseLineResult(r); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); if (embryoSignificantResults.containsKey(r.getString("significant_id"))) { addMpTermData(embryoSignificantResults.get(r.getString("significant_id")), doc); doc.setSignificant(true); } else { doc.setSignificant(false); } docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { logger.warn(" Error occurred getting embryo results", e); } logger.info( (SAVE?"":"Would have") + " Added {} embryo viability parameter documents", docs.size()); return docs; } } public class GrossPathologyResults implements Callable<List<StatisticalResultDTO>> { String query = "SELECT DISTINCT CONCAT(parameter.stable_id, '_', o.id, '_', term, '_', ls.sex, '_grosspath') as doc_id, " + "'adult-gross-path' AS data_type, db.id AS db_id, " + "ls.zygosity as experimental_zygosity, ls.id, bs.sample_group, db.id AS external_db_id, exp.pipeline_id, exp.procedure_id, " + "parameter.id as parameter_id, ls.colony_id, ls.sex as sex, " + "parameter.stable_id as dependent_variable, " + "'Success' as status, bm.id AS biological_model_id, " + "null as p_value, null AS effect_size, " + "oe.term as mp_acc , null as male_mp_acc, null as female_mp_acc, exp.metadata_group, " + "db.short_name as resource_name, db.name as resource_fullname, db.id as resource_id, " + "proj.name as project_name, proj.id as project_id, " + "org.name as phenotyping_center, org.id as 
phenotyping_center_id " + "FROM observation o " + "INNER JOIN ontology_entity oe on oe.ontology_observation_id=o.id " + "INNER JOIN biological_model_sample bms ON bms.biological_sample_id = o.biological_sample_id " + "INNER JOIN biological_model bm ON bms.biological_model_id = bm.id " + "INNER JOIN biological_sample bs ON bs.id = bms.biological_sample_id " + "INNER JOIN live_sample ls ON bms.biological_sample_id = ls.id " + "INNER JOIN experiment_observation eo ON eo.observation_id = o.id " + "INNER JOIN experiment exp ON exp.id = eo.experiment_id " + "INNER JOIN external_db db ON db.id=o.db_id " + "INNER JOIN project proj ON proj.id=exp.project_id " + "INNER JOIN organisation org ON org.id=exp.organisation_id " + "INNER JOIN phenotype_parameter parameter ON parameter.id = o.parameter_id " + "WHERE o.parameter_stable_id like '%PAT%' and term_value != 'normal' and term like 'MP%' AND bs.sample_group!='control' " ; @Override public List<StatisticalResultDTO> call() { List<StatisticalResultDTO> docs = new ArrayList<>(); try (Connection connection = komp2DataSource.getConnection(); PreparedStatement p = connection.prepareStatement(query, java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY)) { p.setFetchSize(Integer.MIN_VALUE); ResultSet r = p.executeQuery(); int i = 0; while (r.next()) { StatisticalResultDTO doc = parseLineResult(r); doc.setDocId(doc.getDocId()+"-"+(i++)); // Skip document if it has already been added if (uniqueSRKeys.contains(doc.getDocId())) { continue; } uniqueSRKeys.add(doc.getDocId()); doc.setSignificant(true); docs.add(doc); if (SAVE) statisticalResultCore.addBean(doc, 30000); shouldHaveAdded.add(doc.getDocId()); } } catch (Exception e) { logger.warn(" Error occurred getting gross pathology results", e); } logger.info((SAVE?"":"Would have") + " Added {} gross pathology parameter documents", docs.size()); return docs; } } public void setSAVE(Boolean SAVE) { this.SAVE = SAVE; } public Map<String, String> 
getEmbryoSignificantResults() { return embryoSignificantResults; } public static void main(String[] args) { ConfigurableApplicationContext context = new SpringApplicationBuilder(StatisticalResultsIndexer.class) .web(WebApplicationType.NONE) .bannerMode(Banner.Mode.OFF) .logStartupInfo(false) .run(args); context.close(); } }
Tweaked logging.
indexers/src/main/java/org/mousephenotype/cda/indexers/StatisticalResultsIndexer.java
Tweaked logging.
<ide><path>ndexers/src/main/java/org/mousephenotype/cda/indexers/StatisticalResultsIndexer.java <ide> QueryResponse response = statisticalResultCore.query(query); <ide> Long solrDocumentCount = response.getResults().getNumFound(); <ide> <del> logger.info(" Count of documents in solr: {}, count added by indexer: {}, Difference: {}", solrDocumentCount, documentsAddedCount, documentsAddedCount - solrDocumentCount); <add> if (documentsAddedCount - solrDocumentCount != 0) { <add> logger.warn(" Count of documents in solr: {}, count added by indexer: {}, Difference: {}", solrDocumentCount, documentsAddedCount, documentsAddedCount - solrDocumentCount); <add> } <ide> <ide> if (documentsAddedCount - solrDocumentCount > 0) { <ide>
Java
apache-2.0
af0ad38f2937ad9444273c8895e5389e7e02eccc
0
NSAmelchev/ignite,shroman/ignite,xtern/ignite,daradurvs/ignite,SomeFire/ignite,daradurvs/ignite,samaitra/ignite,samaitra/ignite,xtern/ignite,ilantukh/ignite,shroman/ignite,daradurvs/ignite,shroman/ignite,ilantukh/ignite,apache/ignite,nizhikov/ignite,apache/ignite,samaitra/ignite,andrey-kuznetsov/ignite,nizhikov/ignite,xtern/ignite,apache/ignite,apache/ignite,SomeFire/ignite,apache/ignite,samaitra/ignite,ascherbakoff/ignite,NSAmelchev/ignite,ilantukh/ignite,nizhikov/ignite,daradurvs/ignite,samaitra/ignite,ilantukh/ignite,andrey-kuznetsov/ignite,nizhikov/ignite,ascherbakoff/ignite,ilantukh/ignite,andrey-kuznetsov/ignite,ilantukh/ignite,xtern/ignite,chandresh-pancholi/ignite,nizhikov/ignite,andrey-kuznetsov/ignite,shroman/ignite,andrey-kuznetsov/ignite,ilantukh/ignite,SomeFire/ignite,shroman/ignite,nizhikov/ignite,ascherbakoff/ignite,ascherbakoff/ignite,SomeFire/ignite,shroman/ignite,nizhikov/ignite,shroman/ignite,samaitra/ignite,NSAmelchev/ignite,nizhikov/ignite,daradurvs/ignite,ascherbakoff/ignite,NSAmelchev/ignite,samaitra/ignite,shroman/ignite,andrey-kuznetsov/ignite,chandresh-pancholi/ignite,andrey-kuznetsov/ignite,ascherbakoff/ignite,NSAmelchev/ignite,samaitra/ignite,samaitra/ignite,NSAmelchev/ignite,ascherbakoff/ignite,apache/ignite,apache/ignite,andrey-kuznetsov/ignite,ilantukh/ignite,NSAmelchev/ignite,chandresh-pancholi/ignite,daradurvs/ignite,samaitra/ignite,NSAmelchev/ignite,ilantukh/ignite,apache/ignite,xtern/ignite,apache/ignite,ilantukh/ignite,daradurvs/ignite,nizhikov/ignite,SomeFire/ignite,SomeFire/ignite,shroman/ignite,ascherbakoff/ignite,ascherbakoff/ignite,andrey-kuznetsov/ignite,NSAmelchev/ignite,daradurvs/ignite,chandresh-pancholi/ignite,SomeFire/ignite,chandresh-pancholi/ignite,chandresh-pancholi/ignite,xtern/ignite,shroman/ignite,xtern/ignite,chandresh-pancholi/ignite,xtern/ignite,chandresh-pancholi/ignite,SomeFire/ignite,xtern/ignite,SomeFire/ignite,SomeFire/ignite,andrey-kuznetsov/ignite,daradurvs/ignite,daradurvs/ignite,chandresh-pancholi/igni
te
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.FileChannel; import java.nio.channels.FileLock; import java.nio.channels.OverlappingFileLockException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.ForkJoinTask; import 
java.util.concurrent.ForkJoinWorkerThread; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.function.ToLongFunction; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.ignite.DataRegionMetricsProvider; import org.apache.ignite.DataStorageMetrics; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteInterruptedException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.CheckpointWriteOrder; import org.apache.ignite.configuration.DataPageEvictionMode; import org.apache.ignite.configuration.DataRegionConfiguration; import org.apache.ignite.configuration.DataStorageConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; import org.apache.ignite.failure.FailureContext; import org.apache.ignite.failure.FailureType; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteFutureTimeoutCheckedException; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.LongJVMPauseDetector; import org.apache.ignite.internal.NodeStoppingException; import 
org.apache.ignite.internal.managers.discovery.GridDiscoveryManager; import org.apache.ignite.internal.mem.DirectMemoryProvider; import org.apache.ignite.internal.mem.DirectMemoryRegion; import org.apache.ignite.internal.pagemem.FullPageId; import org.apache.ignite.internal.pagemem.PageIdAllocator; import org.apache.ignite.internal.pagemem.PageIdUtils; import org.apache.ignite.internal.pagemem.PageMemory; import org.apache.ignite.internal.pagemem.PageUtils; import org.apache.ignite.internal.pagemem.store.IgnitePageStoreManager; import org.apache.ignite.internal.pagemem.store.PageStore; import org.apache.ignite.internal.pagemem.wal.WALIterator; import org.apache.ignite.internal.pagemem.wal.WALPointer; import org.apache.ignite.internal.pagemem.wal.record.CacheState; import org.apache.ignite.internal.pagemem.wal.record.CheckpointRecord; import org.apache.ignite.internal.pagemem.wal.record.DataEntry; import org.apache.ignite.internal.pagemem.wal.record.DataRecord; import org.apache.ignite.internal.pagemem.wal.record.MemoryRecoveryRecord; import org.apache.ignite.internal.pagemem.wal.record.MetastoreDataRecord; import org.apache.ignite.internal.pagemem.wal.record.MvccDataEntry; import org.apache.ignite.internal.pagemem.wal.record.MvccTxRecord; import org.apache.ignite.internal.pagemem.wal.record.PageSnapshot; import org.apache.ignite.internal.pagemem.wal.record.WALRecord; import org.apache.ignite.internal.pagemem.wal.record.WalRecordCacheGroupAware; import org.apache.ignite.internal.pagemem.wal.record.delta.PageDeltaRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.PartitionDestroyRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.PartitionMetaStateRecord; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.CacheGroupContext; import org.apache.ignite.internal.processors.cache.CacheGroupDescriptor; import 
org.apache.ignite.internal.processors.cache.DynamicCacheDescriptor; import org.apache.ignite.internal.processors.cache.ExchangeActions; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionsExchangeFuture; import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition; import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState; import org.apache.ignite.internal.processors.cache.mvcc.txlog.TxLog; import org.apache.ignite.internal.processors.cache.mvcc.txlog.TxState; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointEntry; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointEntryType; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointHistory; import org.apache.ignite.internal.processors.cache.persistence.file.FileIO; import org.apache.ignite.internal.processors.cache.persistence.file.FileIOFactory; import org.apache.ignite.internal.processors.cache.persistence.file.FilePageStore; import org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager; import org.apache.ignite.internal.processors.cache.persistence.metastorage.MetaStorage; import org.apache.ignite.internal.processors.cache.persistence.metastorage.MetastorageLifecycleListener; import org.apache.ignite.internal.processors.cache.persistence.pagemem.CheckpointMetricsTracker; import org.apache.ignite.internal.processors.cache.persistence.pagemem.PageMemoryEx; import org.apache.ignite.internal.processors.cache.persistence.pagemem.PageMemoryImpl; import org.apache.ignite.internal.processors.cache.persistence.partstate.GroupPartitionId; import org.apache.ignite.internal.processors.cache.persistence.partstate.PartitionAllocationMap; import 
org.apache.ignite.internal.processors.cache.persistence.partstate.PartitionRecoverState; import org.apache.ignite.internal.processors.cache.persistence.snapshot.IgniteCacheSnapshotManager; import org.apache.ignite.internal.processors.cache.persistence.snapshot.SnapshotOperation; import org.apache.ignite.internal.processors.cache.persistence.tree.io.PageIO; import org.apache.ignite.internal.processors.cache.persistence.tree.io.PagePartitionMetaIO; import org.apache.ignite.internal.processors.cache.persistence.wal.FileWALPointer; import org.apache.ignite.internal.processors.cache.persistence.wal.crc.IgniteDataIntegrityViolationException; import org.apache.ignite.internal.processors.port.GridPortRecord; import org.apache.ignite.internal.processors.query.GridQueryProcessor; import org.apache.ignite.internal.stat.IoStatisticsHolderNoOp; import org.apache.ignite.internal.util.GridConcurrentHashSet; import org.apache.ignite.internal.util.GridMultiCollectionWrapper; import org.apache.ignite.internal.util.GridReadOnlyArrayView; import org.apache.ignite.internal.util.IgniteUtils; import org.apache.ignite.internal.util.StripedExecutor; import org.apache.ignite.internal.util.future.CountDownFuture; import org.apache.ignite.internal.util.future.GridCompoundFuture; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.lang.GridInClosure3X; import org.apache.ignite.internal.util.tostring.GridToStringInclude; import org.apache.ignite.internal.util.typedef.CI1; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.T2; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.LT; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.SB; import org.apache.ignite.internal.util.typedef.internal.U; import 
org.apache.ignite.internal.util.worker.GridWorker; import org.apache.ignite.lang.IgniteBiPredicate; import org.apache.ignite.lang.IgniteBiTuple; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.lang.IgniteOutClosure; import org.apache.ignite.lang.IgnitePredicate; import org.apache.ignite.mxbean.DataStorageMetricsMXBean; import org.apache.ignite.thread.IgniteThread; import org.apache.ignite.thread.IgniteThreadPoolExecutor; import org.apache.ignite.transactions.TransactionState; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jsr166.ConcurrentLinkedHashMap; import static java.nio.file.StandardOpenOption.READ; import static org.apache.ignite.IgniteSystemProperties.IGNITE_CHECKPOINT_READ_LOCK_TIMEOUT; import static org.apache.ignite.IgniteSystemProperties.IGNITE_JVM_PAUSE_DETECTOR_THRESHOLD; import static org.apache.ignite.IgniteSystemProperties.IGNITE_PDS_WAL_REBALANCE_THRESHOLD; import static org.apache.ignite.IgniteSystemProperties.IGNITE_RECOVERY_SEMAPHORE_PERMITS; import static org.apache.ignite.IgniteSystemProperties.getBoolean; import static org.apache.ignite.IgniteSystemProperties.getInteger; import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL_SNAPSHOT; import static org.apache.ignite.failure.FailureType.CRITICAL_ERROR; import static org.apache.ignite.failure.FailureType.SYSTEM_CRITICAL_OPERATION_TIMEOUT; import static org.apache.ignite.failure.FailureType.SYSTEM_WORKER_TERMINATION; import static org.apache.ignite.internal.LongJVMPauseDetector.DEFAULT_JVM_PAUSE_DETECTOR_THRESHOLD; import static org.apache.ignite.internal.pagemem.PageIdUtils.partId; import static org.apache.ignite.internal.pagemem.wal.record.WALRecord.RecordType.CHECKPOINT_RECORD; import static org.apache.ignite.internal.pagemem.wal.record.WALRecord.RecordType.METASTORE_DATA_RECORD; import static org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState.fromOrdinal; import 
static org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager.TMP_FILE_MATCHER; import static org.apache.ignite.internal.util.IgniteUtils.checkpointBufferSize; /** * */ @SuppressWarnings({"unchecked", "NonPrivateFieldAccessedInSynchronizedContext"}) public class GridCacheDatabaseSharedManager extends IgniteCacheDatabaseSharedManager implements CheckpointWriteProgressSupplier { /** */ public static final String IGNITE_PDS_CHECKPOINT_TEST_SKIP_SYNC = "IGNITE_PDS_CHECKPOINT_TEST_SKIP_SYNC"; /** */ public static final String IGNITE_PDS_SKIP_CHECKPOINT_ON_NODE_STOP = "IGNITE_PDS_SKIP_CHECKPOINT_ON_NODE_STOP"; /** MemoryPolicyConfiguration name reserved for meta store. */ public static final String METASTORE_DATA_REGION_NAME = "metastoreMemPlc"; /** Skip sync. */ private final boolean skipSync = getBoolean(IGNITE_PDS_CHECKPOINT_TEST_SKIP_SYNC); /** */ private final int walRebalanceThreshold = getInteger(IGNITE_PDS_WAL_REBALANCE_THRESHOLD, 500_000); /** Value of property for throttling policy override. */ private final String throttlingPolicyOverride = IgniteSystemProperties.getString( IgniteSystemProperties.IGNITE_OVERRIDE_WRITE_THROTTLING_ENABLED); /** */ private final boolean skipCheckpointOnNodeStop = getBoolean(IGNITE_PDS_SKIP_CHECKPOINT_ON_NODE_STOP, false); /** * Starting from this number of dirty pages in checkpoint, array will be sorted with * {@link Arrays#parallelSort(Comparable[])} in case of {@link CheckpointWriteOrder#SEQUENTIAL}. */ private final int parallelSortThreshold = IgniteSystemProperties.getInteger( IgniteSystemProperties.CHECKPOINT_PARALLEL_SORT_THRESHOLD, 512 * 1024); /** Checkpoint lock hold count. */ private static final ThreadLocal<Integer> CHECKPOINT_LOCK_HOLD_COUNT = ThreadLocal.withInitial(() -> 0); /** Assertion enabled. */ private static final boolean ASSERTION_ENABLED = GridCacheDatabaseSharedManager.class.desiredAssertionStatus(); /** Checkpoint file name pattern. 
*/ public static final Pattern CP_FILE_NAME_PATTERN = Pattern.compile("(\\d+)-(.*)-(START|END)\\.bin"); /** */ private static final String MBEAN_NAME = "DataStorageMetrics"; /** */ private static final String MBEAN_GROUP = "Persistent Store"; /** WAL marker prefix for meta store. */ private static final String WAL_KEY_PREFIX = "grp-wal-"; /** Prefix for meta store records which means that WAL was disabled globally for some group. */ private static final String WAL_GLOBAL_KEY_PREFIX = WAL_KEY_PREFIX + "disabled-"; /** Prefix for meta store records which means that WAL was disabled locally for some group. */ private static final String WAL_LOCAL_KEY_PREFIX = WAL_KEY_PREFIX + "local-disabled-"; /** Prefix for meta store records which means that checkpoint entry for some group is not applicable for WAL rebalance. */ private static final String CHECKPOINT_INAPPLICABLE_FOR_REBALANCE = "cp-wal-rebalance-inapplicable-"; /** Timeout between partition file destroy and checkpoint to handle it. */ private static final long PARTITION_DESTROY_CHECKPOINT_TIMEOUT = 30 * 1000; // 30 Seconds. /** */ private static final String CHECKPOINT_RUNNER_THREAD_PREFIX = "checkpoint-runner"; /** This number of threads will be created and used for parallel sorting. */ private static final int PARALLEL_SORT_THREADS = Math.min(Runtime.getRuntime().availableProcessors(), 8); /** Checkpoint thread. Needs to be volatile because it is created in exchange worker. */ private volatile Checkpointer checkpointer; /** Checkpointer thread instance. */ private volatile IgniteThread checkpointerThread; /** For testing only. */ private volatile boolean checkpointsEnabled = true; /** For testing only. 
*/ private volatile GridFutureAdapter<Void> enableChangeApplied; /** */ ReentrantReadWriteLock checkpointLock = new ReentrantReadWriteLock(); /** */ private long checkpointFreq; /** */ private CheckpointHistory cpHistory; /** */ private FilePageStoreManager storeMgr; /** Checkpoint metadata directory ("cp"), contains files with checkpoint start and end */ private File cpDir; /** */ private volatile boolean printCheckpointStats = true; /** Database configuration. */ private final DataStorageConfiguration persistenceCfg; /** */ private final Collection<DbCheckpointListener> lsnrs = new CopyOnWriteArrayList<>(); /** */ private boolean stopping; /** * The position of last seen WAL pointer. Used for resumming logging from this pointer. * * If binary memory recovery pefrormed on node start, the checkpoint END pointer will store * not the last WAL pointer and can't be used for resumming logging. */ private volatile WALPointer walTail; /** Checkpoint runner thread pool. If null tasks are to be run in single thread */ @Nullable private IgniteThreadPoolExecutor asyncRunner; /** Thread local with buffers for the checkpoint threads. Each buffer represent one page for durable memory. */ private ThreadLocal<ByteBuffer> threadBuf; /** Map from a cacheId to a future indicating that there is an in-progress index rebuild for the given cache. */ private final ConcurrentMap<Integer, GridFutureAdapter<Void>> idxRebuildFuts = new ConcurrentHashMap<>(); /** * Lock holder for compatible folders mode. Null if lock holder was created at start node. <br> * In this case lock is held on PDS resover manager and it is not required to manage locking here */ @Nullable private FileLockHolder fileLockHolder; /** Lock wait time. 
*/ private final long lockWaitTime; /** */ private final boolean truncateWalOnCpFinish; /** */ private Map</*grpId*/Integer, Map</*partId*/Integer, T2</*updCntr*/Long, WALPointer>>> reservedForExchange; /** */ private final ConcurrentMap<T2</*grpId*/Integer, /*partId*/Integer>, T2</*updCntr*/Long, WALPointer>> reservedForPreloading = new ConcurrentHashMap<>(); /** Snapshot manager. */ private IgniteCacheSnapshotManager snapshotMgr; /** */ private DataStorageMetricsImpl persStoreMetrics; /** Counter for written checkpoint pages. Not null only if checkpoint is running. */ private volatile AtomicInteger writtenPagesCntr = null; /** Counter for fsynced checkpoint pages. Not null only if checkpoint is running. */ private volatile AtomicInteger syncedPagesCntr = null; /** Counter for evicted checkpoint pages. Not null only if checkpoint is running. */ private volatile AtomicInteger evictedPagesCntr = null; /** Number of pages in current checkpoint at the beginning of checkpoint. */ private volatile int currCheckpointPagesCnt; /** * MetaStorage instance. Value {@code null} means storage not initialized yet. * Guarded by {@link GridCacheDatabaseSharedManager#checkpointReadLock()} */ private MetaStorage metaStorage; /** */ private List<MetastorageLifecycleListener> metastorageLifecycleLsnrs; /** Initially disabled cache groups. */ private Collection<Integer> initiallyGlobalWalDisabledGrps = new HashSet<>(); /** Initially local wal disabled groups. */ private Collection<Integer> initiallyLocalWalDisabledGrps = new HashSet<>(); /** File I/O factory for writing checkpoint markers. */ private final FileIOFactory ioFactory; /** Timeout for checkpoint read lock acquisition in milliseconds. */ private volatile long checkpointReadLockTimeout; /** Flag allows to log additional information about partitions during recovery phases. 
*/ private final boolean recoveryVerboseLogging = getBoolean(IgniteSystemProperties.IGNITE_RECOVERY_VERBOSE_LOGGING, true); /** Pointer to a memory recovery record that should be included into the next checkpoint record. */ private volatile WALPointer memoryRecoveryRecordPtr; /** * @param ctx Kernal context. */ public GridCacheDatabaseSharedManager(GridKernalContext ctx) { IgniteConfiguration cfg = ctx.config(); persistenceCfg = cfg.getDataStorageConfiguration(); assert persistenceCfg != null; checkpointFreq = persistenceCfg.getCheckpointFrequency(); truncateWalOnCpFinish = persistenceCfg.isWalHistorySizeParameterUsed() ? persistenceCfg.getWalHistorySize() != Integer.MAX_VALUE : persistenceCfg.getMaxWalArchiveSize() != Long.MAX_VALUE; lockWaitTime = persistenceCfg.getLockWaitTime(); persStoreMetrics = new DataStorageMetricsImpl( persistenceCfg.isMetricsEnabled(), persistenceCfg.getMetricsRateTimeInterval(), persistenceCfg.getMetricsSubIntervalCount() ); ioFactory = persistenceCfg.getFileIOFactory(); Long cfgCheckpointReadLockTimeout = ctx.config().getDataStorageConfiguration() != null ? ctx.config().getDataStorageConfiguration().getCheckpointReadLockTimeout() : null; checkpointReadLockTimeout = IgniteSystemProperties.getLong(IGNITE_CHECKPOINT_READ_LOCK_TIMEOUT, cfgCheckpointReadLockTimeout != null ? cfgCheckpointReadLockTimeout : (ctx.workersRegistry() != null ? ctx.workersRegistry().getSystemWorkerBlockedTimeout() : ctx.config().getFailureDetectionTimeout())); } /** * @return File store manager. 
*/ public FilePageStoreManager getFileStoreManager() { return storeMgr; } /** */ private void notifyMetastorageReadyForRead() throws IgniteCheckedException { for (MetastorageLifecycleListener lsnr : metastorageLifecycleLsnrs) lsnr.onReadyForRead(metaStorage); } /** */ private void notifyMetastorageReadyForReadWrite() throws IgniteCheckedException { for (MetastorageLifecycleListener lsnr : metastorageLifecycleLsnrs) lsnr.onReadyForReadWrite(metaStorage); } /** * */ public Checkpointer getCheckpointer() { return checkpointer; } /** * For test use only. * * @return Checkpointer thread instance. */ public IgniteThread checkpointerThread() { return checkpointerThread; } /** * For test use only. */ public IgniteInternalFuture<Void> enableCheckpoints(boolean enable) { GridFutureAdapter<Void> fut = new GridFutureAdapter<>(); enableChangeApplied = fut; checkpointsEnabled = enable; wakeupForCheckpoint("enableCheckpoints()"); return fut; } /** {@inheritDoc} */ @Override protected void initDataRegions0(DataStorageConfiguration memCfg) throws IgniteCheckedException { super.initDataRegions0(memCfg); addDataRegion( memCfg, createMetastoreDataRegionConfig(memCfg), false ); persStoreMetrics.regionMetrics(memMetricsMap.values()); } /** * Create metastorage data region configuration with enabled persistence by default. * * @param storageCfg Data storage configuration. * @return Data region configuration. 
*/ private DataRegionConfiguration createMetastoreDataRegionConfig(DataStorageConfiguration storageCfg) { DataRegionConfiguration cfg = new DataRegionConfiguration(); cfg.setName(METASTORE_DATA_REGION_NAME); cfg.setInitialSize(storageCfg.getSystemRegionInitialSize()); cfg.setMaxSize(storageCfg.getSystemRegionMaxSize()); cfg.setPersistenceEnabled(true); return cfg; } /** {@inheritDoc} */ @Override protected void start0() throws IgniteCheckedException { super.start0(); threadBuf = new ThreadLocal<ByteBuffer>() { /** {@inheritDoc} */ @Override protected ByteBuffer initialValue() { ByteBuffer tmpWriteBuf = ByteBuffer.allocateDirect(pageSize()); tmpWriteBuf.order(ByteOrder.nativeOrder()); return tmpWriteBuf; } }; snapshotMgr = cctx.snapshot(); final GridKernalContext kernalCtx = cctx.kernalContext(); if (!kernalCtx.clientNode()) { kernalCtx.internalSubscriptionProcessor().registerDatabaseListener(new MetastorageRecoveryLifecycle()); checkpointer = new Checkpointer(cctx.igniteInstanceName(), "db-checkpoint-thread", log); cpHistory = new CheckpointHistory(kernalCtx); IgnitePageStoreManager store = cctx.pageStore(); assert store instanceof FilePageStoreManager : "Invalid page store manager was created: " + store; storeMgr = (FilePageStoreManager)store; cpDir = Paths.get(storeMgr.workDir().getAbsolutePath(), "cp").toFile(); if (!U.mkdirs(cpDir)) throw new IgniteCheckedException("Could not create directory for checkpoint metadata: " + cpDir); final FileLockHolder preLocked = kernalCtx.pdsFolderResolver() .resolveFolders() .getLockedFileLockHolder(); acquireFileLock(preLocked); cleanupTempCheckpointDirectory(); persStoreMetrics.wal(cctx.wal()); } } /** * Cleanup checkpoint directory from all temporary files. 
*/ @Override public void cleanupTempCheckpointDirectory() throws IgniteCheckedException { try { try (DirectoryStream<Path> files = Files.newDirectoryStream(cpDir.toPath(), TMP_FILE_MATCHER::matches)) { for (Path path : files) Files.delete(path); } } catch (IOException e) { throw new IgniteCheckedException("Failed to cleanup checkpoint directory from temporary files: " + cpDir, e); } } /** {@inheritDoc} */ @Override public void cleanupRestoredCaches() { if (dataRegionMap.isEmpty()) return; boolean hasMvccCache = false; for (CacheGroupDescriptor grpDesc : cctx.cache().cacheGroupDescriptors().values()) { hasMvccCache |= grpDesc.config().getAtomicityMode() == TRANSACTIONAL_SNAPSHOT; String regionName = grpDesc.config().getDataRegionName(); DataRegion region = regionName != null ? dataRegionMap.get(regionName) : dfltDataRegion; if (region == null) continue; if (log.isInfoEnabled()) log.info("Page memory " + region.config().getName() + " for " + grpDesc + " has invalidated."); int partitions = grpDesc.config().getAffinity().partitions(); if (region.pageMemory() instanceof PageMemoryEx) { PageMemoryEx memEx = (PageMemoryEx)region.pageMemory(); for (int partId = 0; partId < partitions; partId++) memEx.invalidate(grpDesc.groupId(), partId); memEx.invalidate(grpDesc.groupId(), PageIdAllocator.INDEX_PARTITION); } } if (!hasMvccCache && dataRegionMap.containsKey(TxLog.TX_LOG_CACHE_NAME)) { PageMemory memory = dataRegionMap.get(TxLog.TX_LOG_CACHE_NAME).pageMemory(); if (memory instanceof PageMemoryEx) ((PageMemoryEx)memory).invalidate(TxLog.TX_LOG_CACHE_ID, PageIdAllocator.INDEX_PARTITION); } final boolean hasMvccCache0 = hasMvccCache; storeMgr.cleanupPageStoreIfMatch( new Predicate<Integer>() { @Override public boolean test(Integer grpId) { return MetaStorage.METASTORAGE_CACHE_ID != grpId && (TxLog.TX_LOG_CACHE_ID != grpId || !hasMvccCache0); } }, true); } /** {@inheritDoc} */ @Override public void cleanupCheckpointDirectory() throws IgniteCheckedException { if (cpHistory != 
null) cpHistory = new CheckpointHistory(cctx.kernalContext()); try { try (DirectoryStream<Path> files = Files.newDirectoryStream(cpDir.toPath())) { for (Path path : files) Files.delete(path); } } catch (IOException e) { throw new IgniteCheckedException("Failed to cleanup checkpoint directory: " + cpDir, e); } } /** * @param preLocked Pre-locked file lock holder. */ private void acquireFileLock(FileLockHolder preLocked) throws IgniteCheckedException { if (cctx.kernalContext().clientNode()) return; fileLockHolder = preLocked == null ? new FileLockHolder(storeMgr.workDir().getPath(), cctx.kernalContext(), log) : preLocked; if (!fileLockHolder.isLocked()) { if (log.isDebugEnabled()) log.debug("Try to capture file lock [nodeId=" + cctx.localNodeId() + " path=" + fileLockHolder.lockPath() + "]"); fileLockHolder.tryLock(lockWaitTime); } } /** * */ private void releaseFileLock() { if (cctx.kernalContext().clientNode() || fileLockHolder == null) return; if (log.isDebugEnabled()) log.debug("Release file lock [nodeId=" + cctx.localNodeId() + " path=" + fileLockHolder.lockPath() + "]"); fileLockHolder.close(); } /** * Retreives checkpoint history form specified {@code dir}. * * @return List of checkpoints. */ private List<CheckpointEntry> retreiveHistory() throws IgniteCheckedException { if (!cpDir.exists()) return Collections.emptyList(); try (DirectoryStream<Path> cpFiles = Files.newDirectoryStream( cpDir.toPath(), path -> CP_FILE_NAME_PATTERN.matcher(path.toFile().getName()).matches()) ) { List<CheckpointEntry> checkpoints = new ArrayList<>(); ByteBuffer buf = ByteBuffer.allocate(FileWALPointer.POINTER_SIZE); buf.order(ByteOrder.nativeOrder()); for (Path cpFile : cpFiles) { CheckpointEntry cp = parseFromFile(buf, cpFile.toFile()); if (cp != null) checkpoints.add(cp); } return checkpoints; } catch (IOException e) { throw new IgniteCheckedException("Failed to load checkpoint history.", e); } } /** * Parses checkpoint entry from given file. 
* * @param buf Temporary byte buffer. * @param file Checkpoint file. */ @Nullable private CheckpointEntry parseFromFile(ByteBuffer buf, File file) throws IgniteCheckedException { Matcher matcher = CP_FILE_NAME_PATTERN.matcher(file.getName()); if (!matcher.matches()) return null; CheckpointEntryType type = CheckpointEntryType.valueOf(matcher.group(3)); if (type != CheckpointEntryType.START) return null; long cpTs = Long.parseLong(matcher.group(1)); UUID cpId = UUID.fromString(matcher.group(2)); WALPointer ptr = readPointer(file, buf); return createCheckPointEntry(cpTs, ptr, cpId, null, CheckpointEntryType.START); } /** * Removes checkpoint start/end files belongs to given {@code cpEntry}. * * @param cpEntry Checkpoint entry. * * @throws IgniteCheckedException If failed to delete. */ private void removeCheckpointFiles(CheckpointEntry cpEntry) throws IgniteCheckedException { Path startFile = new File(cpDir.getAbsolutePath(), checkpointFileName(cpEntry, CheckpointEntryType.START)).toPath(); Path endFile = new File(cpDir.getAbsolutePath(), checkpointFileName(cpEntry, CheckpointEntryType.END)).toPath(); try { if (Files.exists(startFile)) Files.delete(startFile); if (Files.exists(endFile)) Files.delete(endFile); } catch (IOException e) { throw new StorageException("Failed to delete stale checkpoint files: " + cpEntry, e); } } /** */ private void readMetastore() throws IgniteCheckedException { try { CheckpointStatus status = readCheckpointStatus(); checkpointReadLock(); try { dataRegion(METASTORE_DATA_REGION_NAME).pageMemory().start(); performBinaryMemoryRestore(status, onlyMetastorageGroup(), physicalRecords(), false); metaStorage = createMetastorage(true); applyLogicalUpdates(status, onlyMetastorageGroup(), onlyMetastorageRecords(), false); fillWalDisabledGroups(); notifyMetastorageReadyForRead(); } finally { metaStorage = null; dataRegion(METASTORE_DATA_REGION_NAME).pageMemory().stop(false); cctx.pageStore().cleanupPageStoreIfMatch(new Predicate<Integer>() { @Override 
public boolean test(Integer grpId) { return MetaStorage.METASTORAGE_CACHE_ID == grpId; } }, false); checkpointReadUnlock(); } } catch (StorageException e) { cctx.kernalContext().failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e)); throw new IgniteCheckedException(e); } } /** {@inheritDoc} */ @Override public void onActivate(GridKernalContext ctx) throws IgniteCheckedException { if (log.isDebugEnabled()) log.debug("Activate database manager [id=" + cctx.localNodeId() + " topVer=" + cctx.discovery().topologyVersionEx() + " ]"); snapshotMgr = cctx.snapshot(); if (!cctx.kernalContext().clientNode() && checkpointer == null) checkpointer = new Checkpointer(cctx.igniteInstanceName(), "db-checkpoint-thread", log); super.onActivate(ctx); if (!cctx.kernalContext().clientNode()) { initializeCheckpointPool(); finishRecovery(); } } /** {@inheritDoc} */ @Override public void onDeActivate(GridKernalContext kctx) { if (log.isDebugEnabled()) log.debug("DeActivate database manager [id=" + cctx.localNodeId() + " topVer=" + cctx.discovery().topologyVersionEx() + " ]"); onKernalStop0(false); super.onDeActivate(kctx); /* Must be here, because after deactivate we can invoke activate and file lock must be already configured */ stopping = false; } /** * */ private void initializeCheckpointPool() { if (persistenceCfg.getCheckpointThreads() > 1) asyncRunner = new IgniteThreadPoolExecutor( CHECKPOINT_RUNNER_THREAD_PREFIX, cctx.igniteInstanceName(), persistenceCfg.getCheckpointThreads(), persistenceCfg.getCheckpointThreads(), 30_000, new LinkedBlockingQueue<Runnable>() ); } /** {@inheritDoc} */ @Override protected void registerMetricsMBeans(IgniteConfiguration cfg) { super.registerMetricsMBeans(cfg); registerMetricsMBean( cctx.kernalContext().config(), MBEAN_GROUP, MBEAN_NAME, persStoreMetrics, DataStorageMetricsMXBean.class ); } /** {@inheritDoc} */ @Deprecated @Override protected IgniteOutClosure<Long> freeSpaceProvider(final DataRegionConfiguration dataRegCfg) { if 
(!dataRegCfg.isPersistenceEnabled()) return super.freeSpaceProvider(dataRegCfg); final String dataRegName = dataRegCfg.getName(); return new IgniteOutClosure<Long>() { @Override public Long apply() { long freeSpace = 0L; for (CacheGroupContext grpCtx : cctx.cache().cacheGroups()) { if (!grpCtx.dataRegion().config().getName().equals(dataRegName)) continue; assert grpCtx.offheap() instanceof GridCacheOffheapManager; freeSpace += ((GridCacheOffheapManager)grpCtx.offheap()).freeSpace(); } return freeSpace; } }; } /** {@inheritDoc} */ @Override protected DataRegionMetricsProvider dataRegionMetricsProvider(final DataRegionConfiguration dataRegCfg) { if (!dataRegCfg.isPersistenceEnabled()) return super.dataRegionMetricsProvider(dataRegCfg); final String dataRegName = dataRegCfg.getName(); return new DataRegionMetricsProvider() { @Override public long partiallyFilledPagesFreeSpace() { long freeSpace = 0L; for (CacheGroupContext grpCtx : cctx.cache().cacheGroups()) { if (!grpCtx.dataRegion().config().getName().equals(dataRegName)) continue; assert grpCtx.offheap() instanceof GridCacheOffheapManager; freeSpace += ((GridCacheOffheapManager)grpCtx.offheap()).freeSpace(); } return freeSpace; } @Override public long emptyDataPages() { long emptyDataPages = 0L; for (CacheGroupContext grpCtx : cctx.cache().cacheGroups()) { if (!grpCtx.dataRegion().config().getName().equals(dataRegName)) continue; assert grpCtx.offheap() instanceof GridCacheOffheapManager; emptyDataPages += ((GridCacheOffheapManager)grpCtx.offheap()).emptyDataPages(); } return emptyDataPages; } }; } /** * Restores last valid WAL pointer and resumes logging from that pointer. * Re-creates metastorage if needed. * * @throws IgniteCheckedException If failed. 
*/ private void finishRecovery() throws IgniteCheckedException { assert !cctx.kernalContext().clientNode(); long time = System.currentTimeMillis(); checkpointReadLock(); try { for (DatabaseLifecycleListener lsnr : getDatabaseListeners(cctx.kernalContext())) lsnr.beforeResumeWalLogging(this); // Try to resume logging since last finished checkpoint if possible. if (walTail == null) { CheckpointStatus status = readCheckpointStatus(); walTail = CheckpointStatus.NULL_PTR.equals(status.endPtr) ? null : status.endPtr; } cctx.wal().resumeLogging(walTail); walTail = null; // Recreate metastorage to refresh page memory state after deactivation. if (metaStorage == null) metaStorage = createMetastorage(false); notifyMetastorageReadyForReadWrite(); U.log(log, "Finish recovery performed in " + (System.currentTimeMillis() - time) + " ms."); } catch (IgniteCheckedException e) { if (X.hasCause(e, StorageException.class, IOException.class)) cctx.kernalContext().failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e)); throw e; } finally { checkpointReadUnlock(); } } /** * @param readOnly Metastorage read-only mode. * @return Instance of Metastorage. * @throws IgniteCheckedException If failed to create metastorage. */ private MetaStorage createMetastorage(boolean readOnly) throws IgniteCheckedException { cctx.pageStore().initializeForMetastorage(); MetaStorage storage = new MetaStorage( cctx, dataRegion(METASTORE_DATA_REGION_NAME), (DataRegionMetricsImpl) memMetricsMap.get(METASTORE_DATA_REGION_NAME), readOnly ); storage.init(this); return storage; } /** * @param cacheGroupsPredicate Cache groups to restore. * @param recordTypePredicate Filter records by type. * @return Last seen WAL pointer during binary memory recovery. * @throws IgniteCheckedException If failed. 
*/ private RestoreBinaryState restoreBinaryMemory( IgnitePredicate<Integer> cacheGroupsPredicate, IgniteBiPredicate<WALRecord.RecordType, WALPointer> recordTypePredicate ) throws IgniteCheckedException { long time = System.currentTimeMillis(); try { log.info("Starting binary memory restore for: " + cctx.cache().cacheGroupDescriptors().keySet()); for (DatabaseLifecycleListener lsnr : getDatabaseListeners(cctx.kernalContext())) lsnr.beforeBinaryMemoryRestore(this); CheckpointStatus status = readCheckpointStatus(); // First, bring memory to the last consistent checkpoint state if needed. // This method should return a pointer to the last valid record in the WAL. RestoreBinaryState binaryState = performBinaryMemoryRestore( status, cacheGroupsPredicate, recordTypePredicate, true ); WALPointer restored = binaryState.lastReadRecordPointer().map(FileWALPointer::next).orElse(null); if (restored == null && !status.endPtr.equals(CheckpointStatus.NULL_PTR)) { throw new StorageException("The memory cannot be restored. The critical part of WAL archive is missing " + "[tailWalPtr=" + restored + ", endPtr=" + status.endPtr + ']'); } else if (restored != null) U.log(log, "Binary memory state restored at node startup [restoredPtr=" + restored + ']'); // Wal logging is now available. cctx.wal().resumeLogging(restored); // Log MemoryRecoveryRecord to make sure that old physical records are not replayed during // next physical recovery. 
memoryRecoveryRecordPtr = cctx.wal().log(new MemoryRecoveryRecord(U.currentTimeMillis())); for (DatabaseLifecycleListener lsnr : getDatabaseListeners(cctx.kernalContext())) lsnr.afterBinaryMemoryRestore(this, binaryState); if (log.isInfoEnabled()) log.info("Binary recovery performed in " + (System.currentTimeMillis() - time) + " ms."); return binaryState; } catch (IgniteCheckedException e) { if (X.hasCause(e, StorageException.class, IOException.class)) cctx.kernalContext().failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e)); throw e; } } /** {@inheritDoc} */ @Override protected void onKernalStop0(boolean cancel) { checkpointLock.writeLock().lock(); try { stopping = true; } finally { checkpointLock.writeLock().unlock(); } shutdownCheckpointer(cancel); lsnrs.clear(); super.onKernalStop0(cancel); unregisterMetricsMBean( cctx.gridConfig(), MBEAN_GROUP, MBEAN_NAME ); metaStorage = null; } /** {@inheritDoc} */ @Override protected void stop0(boolean cancel) { super.stop0(cancel); releaseFileLock(); } /** */ private long[] calculateFragmentSizes(int concLvl, long cacheSize, long chpBufSize) { if (concLvl < 2) concLvl = Runtime.getRuntime().availableProcessors(); long fragmentSize = cacheSize / concLvl; if (fragmentSize < 1024 * 1024) fragmentSize = 1024 * 1024; long[] sizes = new long[concLvl + 1]; for (int i = 0; i < concLvl; i++) sizes[i] = fragmentSize; sizes[concLvl] = chpBufSize; return sizes; } /** {@inheritDoc} */ @Override protected PageMemory createPageMemory( DirectMemoryProvider memProvider, DataStorageConfiguration memCfg, DataRegionConfiguration plcCfg, DataRegionMetricsImpl memMetrics, final boolean trackable ) { if (!plcCfg.isPersistenceEnabled()) return super.createPageMemory(memProvider, memCfg, plcCfg, memMetrics, trackable); memMetrics.persistenceEnabled(true); long cacheSize = plcCfg.getMaxSize(); // Checkpoint buffer size can not be greater than cache size, it does not make sense. 
long chpBufSize = checkpointBufferSize(plcCfg); if (chpBufSize > cacheSize) { U.quietAndInfo(log, "Configured checkpoint page buffer size is too big, setting to the max region size [size=" + U.readableSize(cacheSize, false) + ", memPlc=" + plcCfg.getName() + ']'); chpBufSize = cacheSize; } GridInClosure3X<Long, FullPageId, PageMemoryEx> changeTracker; if (trackable) changeTracker = new GridInClosure3X<Long, FullPageId, PageMemoryEx>() { @Override public void applyx( Long page, FullPageId fullId, PageMemoryEx pageMem ) throws IgniteCheckedException { if (trackable) snapshotMgr.onChangeTrackerPage(page, fullId, pageMem); } }; else changeTracker = null; PageMemoryImpl pageMem = new PageMemoryImpl( wrapMetricsMemoryProvider(memProvider, memMetrics), calculateFragmentSizes( memCfg.getConcurrencyLevel(), cacheSize, chpBufSize ), cctx, memCfg.getPageSize(), (fullId, pageBuf, tag) -> { memMetrics.onPageWritten(); // We can write only page from disk into snapshot. snapshotMgr.beforePageWrite(fullId); // Write page to disk. storeMgr.write(fullId.groupId(), fullId.pageId(), pageBuf, tag); AtomicInteger cntr = evictedPagesCntr; if (cntr != null) cntr.incrementAndGet(); }, changeTracker, this, memMetrics, resolveThrottlingPolicy(), this ); memMetrics.pageMemory(pageMem); return pageMem; } /** * @param memoryProvider0 Memory provider. * @param memMetrics Memory metrics. * @return Wrapped memory provider. 
*/ @Override protected DirectMemoryProvider wrapMetricsMemoryProvider( final DirectMemoryProvider memoryProvider0, final DataRegionMetricsImpl memMetrics ) { return new DirectMemoryProvider() { private AtomicInteger checkPointBufferIdxCnt = new AtomicInteger(); private final DirectMemoryProvider memProvider = memoryProvider0; @Override public void initialize(long[] chunkSizes) { memProvider.initialize(chunkSizes); checkPointBufferIdxCnt.set(chunkSizes.length); } @Override public void shutdown(boolean deallocate) { memProvider.shutdown(deallocate); } @Override public DirectMemoryRegion nextRegion() { DirectMemoryRegion nextMemoryRegion = memProvider.nextRegion(); if (nextMemoryRegion == null) return null; int idx = checkPointBufferIdxCnt.decrementAndGet(); long chunkSize = nextMemoryRegion.size(); // Checkpoint chunk last in the long[] chunkSizes. if (idx != 0) memMetrics.updateOffHeapSize(chunkSize); else memMetrics.updateCheckpointBufferSize(chunkSize); return nextMemoryRegion; } }; } /** * Resolves throttling policy according to the settings. */ @NotNull private PageMemoryImpl.ThrottlingPolicy resolveThrottlingPolicy() { PageMemoryImpl.ThrottlingPolicy plc = persistenceCfg.isWriteThrottlingEnabled() ? PageMemoryImpl.ThrottlingPolicy.SPEED_BASED : PageMemoryImpl.ThrottlingPolicy.CHECKPOINT_BUFFER_ONLY; if (throttlingPolicyOverride != null) { try { plc = PageMemoryImpl.ThrottlingPolicy.valueOf(throttlingPolicyOverride.toUpperCase()); } catch (IllegalArgumentException e) { log.error("Incorrect value of IGNITE_OVERRIDE_WRITE_THROTTLING_ENABLED property. 
" + "The default throttling policy will be used [plc=" + throttlingPolicyOverride + ", defaultPlc=" + plc + ']'); } } return plc; } /** {@inheritDoc} */ @Override protected void checkRegionEvictionProperties(DataRegionConfiguration regCfg, DataStorageConfiguration dbCfg) throws IgniteCheckedException { if (!regCfg.isPersistenceEnabled()) super.checkRegionEvictionProperties(regCfg, dbCfg); else if (regCfg.getPageEvictionMode() != DataPageEvictionMode.DISABLED) { U.warn(log, "Page eviction mode will have no effect because the oldest pages are evicted automatically " + "if Ignite persistence is enabled: " + regCfg.getName()); } } /** {@inheritDoc} */ @Override protected void checkPageSize(DataStorageConfiguration memCfg) { if (memCfg.getPageSize() == 0) { try { assert cctx.pageStore() instanceof FilePageStoreManager : "Invalid page store manager was created: " + cctx.pageStore(); Path anyIdxPartFile = IgniteUtils.searchFileRecursively( ((FilePageStoreManager)cctx.pageStore()).workDir().toPath(), FilePageStoreManager.INDEX_FILE_NAME); if (anyIdxPartFile != null) { memCfg.setPageSize(resolvePageSizeFromPartitionFile(anyIdxPartFile)); return; } } catch (IgniteCheckedException | IOException | IllegalArgumentException e) { U.quietAndWarn(log, "Attempt to resolve pageSize from store files failed: " + e.getMessage()); U.quietAndWarn(log, "Default page size will be used: " + DataStorageConfiguration.DFLT_PAGE_SIZE + " bytes"); } memCfg.setPageSize(DataStorageConfiguration.DFLT_PAGE_SIZE); } } /** * @param partFile Partition file. 
*/ private int resolvePageSizeFromPartitionFile(Path partFile) throws IOException, IgniteCheckedException { try (FileIO fileIO = ioFactory.create(partFile.toFile())) { int minimalHdr = FilePageStore.HEADER_SIZE; if (fileIO.size() < minimalHdr) throw new IgniteCheckedException("Partition file is too small: " + partFile); ByteBuffer hdr = ByteBuffer.allocate(minimalHdr).order(ByteOrder.LITTLE_ENDIAN); fileIO.readFully(hdr); hdr.rewind(); hdr.getLong(); // Read signature. hdr.getInt(); // Read version. hdr.get(); // Read type. int pageSize = hdr.getInt(); if (pageSize == 2048) { U.quietAndWarn(log, "You are currently using persistent store with 2K pages (DataStorageConfiguration#" + "pageSize). If you use SSD disk, consider migrating to 4K pages for better IO performance."); } return pageSize; } } /** * @param cancel Cancel flag. */ @SuppressWarnings("unused") private void shutdownCheckpointer(boolean cancel) { Checkpointer cp = checkpointer; if (cp != null) { if (cancel) cp.shutdownNow(); else cp.cancel(); try { U.join(cp); checkpointer = null; } catch (IgniteInterruptedCheckedException ignore) { U.warn(log, "Was interrupted while waiting for checkpointer shutdown, " + "will not wait for checkpoint to finish."); cp.shutdownNow(); while (true) { try { U.join(cp); checkpointer = null; cp.scheduledCp.cpFinishFut.onDone( new NodeStoppingException("Checkpointer is stopped during node stop.")); break; } catch (IgniteInterruptedCheckedException ignored) { //Ignore } } Thread.currentThread().interrupt(); } } if (asyncRunner != null) { asyncRunner.shutdownNow(); try { asyncRunner.awaitTermination(2, TimeUnit.MINUTES); } catch (InterruptedException ignore) { Thread.currentThread().interrupt(); } } } /** {@inheritDoc} */ @Override public void beforeExchange(GridDhtPartitionsExchangeFuture fut) throws IgniteCheckedException { // Try to restore partition states. 
if (fut.localJoinExchange() || fut.activateCluster() || (fut.exchangeActions() != null && !F.isEmpty(fut.exchangeActions().cacheGroupsToStart()))) { U.doInParallel( cctx.kernalContext().getSystemExecutorService(), cctx.cache().cacheGroups(), cacheGroup -> { if (cacheGroup.isLocal()) return null; cctx.database().checkpointReadLock(); try { cacheGroup.offheap().restorePartitionStates(Collections.emptyMap()); if (cacheGroup.localStartVersion().equals(fut.initialVersion())) cacheGroup.topology().afterStateRestored(fut.initialVersion()); fut.timeBag().finishLocalStage("Restore partition states " + "[grp=" + cacheGroup.cacheOrGroupName() + "]"); } finally { cctx.database().checkpointReadUnlock(); } return null; } ); fut.timeBag().finishGlobalStage("Restore partition states"); } if (cctx.kernalContext().query().moduleEnabled()) { ExchangeActions acts = fut.exchangeActions(); if (acts != null) { if (!F.isEmpty(acts.cacheStartRequests())) { for (ExchangeActions.CacheActionData actionData : acts.cacheStartRequests()) prepareIndexRebuildFuture(CU.cacheId(actionData.request().cacheName())); } else if (acts.localJoinContext() != null && !F.isEmpty(acts.localJoinContext().caches())) { for (T2<DynamicCacheDescriptor, NearCacheConfiguration> tup : acts.localJoinContext().caches()) prepareIndexRebuildFuture(tup.get1().cacheId()); } } } } /** * Creates a new index rebuild future that should be completed later after exchange is done. The future * has to be created before exchange is initialized to guarantee that we will capture a correct future * after activation or restore completes. * If there was an old future for the given ID, it will be completed. * * @param cacheId Cache ID. 
*/ private void prepareIndexRebuildFuture(int cacheId) { GridFutureAdapter<Void> old = idxRebuildFuts.put(cacheId, new GridFutureAdapter<>()); if (old != null) old.onDone(); } /** {@inheritDoc} */ @Override public void rebuildIndexesIfNeeded(GridDhtPartitionsExchangeFuture fut) { GridQueryProcessor qryProc = cctx.kernalContext().query(); if (qryProc.moduleEnabled()) { for (final GridCacheContext cacheCtx : (Collection<GridCacheContext>)cctx.cacheContexts()) { if (cacheCtx.startTopologyVersion().equals(fut.initialVersion())) { final int cacheId = cacheCtx.cacheId(); final GridFutureAdapter<Void> usrFut = idxRebuildFuts.get(cacheId); IgniteInternalFuture<?> rebuildFut = qryProc.rebuildIndexesFromHash(cacheCtx); if (rebuildFut != null) { log().info("Started indexes rebuilding for cache [name=" + cacheCtx.name() + ", grpName=" + cacheCtx.group().name() + ']'); assert usrFut != null : "Missing user future for cache: " + cacheCtx.name(); rebuildFut.listen(new CI1<IgniteInternalFuture>() { @Override public void apply(IgniteInternalFuture fut) { idxRebuildFuts.remove(cacheId, usrFut); Throwable err = fut.error(); usrFut.onDone(err); CacheConfiguration ccfg = cacheCtx.config(); if (ccfg != null) { if (err == null) log().info("Finished indexes rebuilding for cache [name=" + ccfg.getName() + ", grpName=" + ccfg.getGroupName() + ']'); else { if (!(err instanceof NodeStoppingException)) log().error("Failed to rebuild indexes for cache [name=" + ccfg.getName() + ", grpName=" + ccfg.getGroupName() + ']', err); } } } }); } else { if (usrFut != null) { idxRebuildFuts.remove(cacheId, usrFut); usrFut.onDone(); } } } } } } /** {@inheritDoc} */ @Nullable @Override public IgniteInternalFuture indexRebuildFuture(int cacheId) { return idxRebuildFuts.get(cacheId); } /** {@inheritDoc} */ @Override public void onCacheGroupsStopped( Collection<IgniteBiTuple<CacheGroupContext, Boolean>> stoppedGrps ) { Map<PageMemoryEx, Collection<Integer>> destroyed = new HashMap<>(); for 
(IgniteBiTuple<CacheGroupContext, Boolean> tup : stoppedGrps) { CacheGroupContext gctx = tup.get1(); if (!gctx.persistenceEnabled()) continue; snapshotMgr.onCacheGroupStop(gctx, tup.get2()); PageMemoryEx pageMem = (PageMemoryEx)gctx.dataRegion().pageMemory(); Collection<Integer> grpIds = destroyed.computeIfAbsent(pageMem, k -> new HashSet<>()); grpIds.add(tup.get1().groupId()); pageMem.onCacheGroupDestroyed(tup.get1().groupId()); if (tup.get2()) cctx.kernalContext().encryption().onCacheGroupDestroyed(gctx.groupId()); } Collection<IgniteInternalFuture<Void>> clearFuts = new ArrayList<>(destroyed.size()); for (Map.Entry<PageMemoryEx, Collection<Integer>> entry : destroyed.entrySet()) { final Collection<Integer> grpIds = entry.getValue(); clearFuts.add(entry.getKey().clearAsync((grpId, pageIdg) -> grpIds.contains(grpId), false)); } for (IgniteInternalFuture<Void> clearFut : clearFuts) { try { clearFut.get(); } catch (IgniteCheckedException e) { log.error("Failed to clear page memory", e); } } if (cctx.pageStore() != null) { for (IgniteBiTuple<CacheGroupContext, Boolean> tup : stoppedGrps) { CacheGroupContext grp = tup.get1(); try { cctx.pageStore().shutdownForCacheGroup(grp, tup.get2()); } catch (IgniteCheckedException e) { U.error(log, "Failed to gracefully clean page store resources for destroyed cache " + "[cache=" + grp.cacheOrGroupName() + "]", e); } } } } /** * Gets the checkpoint read lock. While this lock is held, checkpoint thread will not acquireSnapshotWorker memory * state. * @throws IgniteException If failed. 
*/
    @Override public void checkpointReadLock() {
        // The checkpointer thread itself (holding the write lock) may re-enter freely.
        if (checkpointLock.writeLock().isHeldByCurrentThread())
            return;

        long timeout = checkpointReadLockTimeout;

        long start = U.currentTimeMillis();

        boolean interruped = false;

        try {
            for (; ; ) {
                try {
                    // Fail fast if the configured timeout has already elapsed.
                    if (timeout > 0 && (U.currentTimeMillis() - start) >= timeout)
                        failCheckpointReadLock();

                    try {
                        if (timeout > 0) {
                            if (!checkpointLock.readLock().tryLock(timeout - (U.currentTimeMillis() - start),
                                TimeUnit.MILLISECONDS))
                                failCheckpointReadLock();
                        }
                        else
                            checkpointLock.readLock().lock();
                    }
                    catch (InterruptedException e) {
                        // Remember the interrupt and retry; the flag is restored in the finally block.
                        interruped = true;

                        continue;
                    }

                    if (stopping) {
                        checkpointLock.readLock().unlock();

                        throw new IgniteException(new NodeStoppingException("Failed to perform cache update: node is stopping."));
                    }

                    // Re-entrant acquisition, or page memories have room for updates: lock is ours.
                    if (checkpointLock.getReadHoldCount() > 1 || safeToUpdatePageMemories())
                        break;
                    else {
                        // Too many dirty pages: release the lock, force a checkpoint to begin,
                        // then loop around and try again.
                        checkpointLock.readLock().unlock();

                        if (timeout > 0 && U.currentTimeMillis() - start >= timeout)
                            failCheckpointReadLock();

                        try {
                            checkpointer.wakeupForCheckpoint(0, "too many dirty pages").cpBeginFut
                                .getUninterruptibly();
                        }
                        catch (IgniteFutureTimeoutCheckedException e) {
                            failCheckpointReadLock();
                        }
                        catch (IgniteCheckedException e) {
                            throw new IgniteException("Failed to wait for checkpoint begin.", e);
                        }
                    }
                }
                catch (CheckpointReadLockTimeoutException e) {
                    // Node was not invalidated by the failure handler: retry without a timeout.
                    log.error(e.getMessage(), e);

                    timeout = 0;
                }
            }
        }
        finally {
            if (interruped)
                Thread.currentThread().interrupt();
        }

        if (ASSERTION_ENABLED)
            CHECKPOINT_LOCK_HOLD_COUNT.set(CHECKPOINT_LOCK_HOLD_COUNT.get() + 1);
    }

    /**
     * Invokes critical failure processing. Always throws.
     *
     * @throws CheckpointReadLockTimeoutException If node was not invalidated as result of handling.
     * @throws IgniteException If node was invalidated as result of handling.
*/ private void failCheckpointReadLock() throws CheckpointReadLockTimeoutException, IgniteException { String msg = "Checkpoint read lock acquisition has been timed out."; IgniteException e = new IgniteException(msg); if (cctx.kernalContext().failure().process(new FailureContext(SYSTEM_CRITICAL_OPERATION_TIMEOUT, e))) throw e; throw new CheckpointReadLockTimeoutException(msg); } /** {@inheritDoc} */ @Override public boolean checkpointLockIsHeldByThread() { return !ASSERTION_ENABLED || checkpointLock.isWriteLockedByCurrentThread() || CHECKPOINT_LOCK_HOLD_COUNT.get() > 0 || Thread.currentThread().getName().startsWith(CHECKPOINT_RUNNER_THREAD_PREFIX); } /** * @return {@code true} if all PageMemory instances are safe to update. */ private boolean safeToUpdatePageMemories() { Collection<DataRegion> memPlcs = context().database().dataRegions(); if (memPlcs == null) return true; for (DataRegion memPlc : memPlcs) { if (!memPlc.config().isPersistenceEnabled()) continue; PageMemoryEx pageMemEx = (PageMemoryEx)memPlc.pageMemory(); if (!pageMemEx.safeToUpdate()) return false; } return true; } /** * Releases the checkpoint read lock. 
*/ @Override public void checkpointReadUnlock() { if (checkpointLock.writeLock().isHeldByCurrentThread()) return; checkpointLock.readLock().unlock(); if (checkpointer != null) { Collection<DataRegion> dataRegs = context().database().dataRegions(); if (dataRegs != null) { for (DataRegion dataReg : dataRegs) { if (!dataReg.config().isPersistenceEnabled()) continue; PageMemoryEx mem = (PageMemoryEx)dataReg.pageMemory(); if (mem != null && !mem.safeToUpdate()) { checkpointer.wakeupForCheckpoint(0, "too many dirty pages"); break; } } } } if (ASSERTION_ENABLED) CHECKPOINT_LOCK_HOLD_COUNT.set(CHECKPOINT_LOCK_HOLD_COUNT.get() - 1); } /** {@inheritDoc} */ @Override public synchronized Map<Integer, Map<Integer, Long>> reserveHistoryForExchange() { assert reservedForExchange == null : reservedForExchange; reservedForExchange = new HashMap<>(); Map</*grpId*/Integer, Set</*partId*/Integer>> applicableGroupsAndPartitions = partitionsApplicableForWalRebalance(); Map</*grpId*/Integer, Map</*partId*/Integer, CheckpointEntry>> earliestValidCheckpoints; checkpointReadLock(); try { earliestValidCheckpoints = cpHistory.searchAndReserveCheckpoints(applicableGroupsAndPartitions); } finally { checkpointReadUnlock(); } Map</*grpId*/Integer, Map</*partId*/Integer, /*updCntr*/Long>> grpPartsWithCnts = new HashMap<>(); for (Map.Entry<Integer, Map<Integer, CheckpointEntry>> e : earliestValidCheckpoints.entrySet()) { int grpId = e.getKey(); for (Map.Entry<Integer, CheckpointEntry> e0 : e.getValue().entrySet()) { CheckpointEntry cpEntry = e0.getValue(); int partId = e0.getKey(); assert cctx.wal().reserved(cpEntry.checkpointMark()) : "WAL segment for checkpoint " + cpEntry + " has not reserved"; Long updCntr = cpEntry.partitionCounter(cctx, grpId, partId); if (updCntr != null) { reservedForExchange.computeIfAbsent(grpId, k -> new HashMap<>()) .put(partId, new T2<>(updCntr, cpEntry.checkpointMark())); grpPartsWithCnts.computeIfAbsent(grpId, k -> new HashMap<>()).put(partId, updCntr); } } } return 
grpPartsWithCnts; } /** * @return Map of group id -> Set of partitions which can be used as suppliers for WAL rebalance. */ private Map<Integer, Set<Integer>> partitionsApplicableForWalRebalance() { Map<Integer, Set<Integer>> res = new HashMap<>(); for (CacheGroupContext grp : cctx.cache().cacheGroups()) { if (grp.isLocal()) continue; for (GridDhtLocalPartition locPart : grp.topology().currentLocalPartitions()) { if (locPart.state() == GridDhtPartitionState.OWNING && locPart.fullSize() > walRebalanceThreshold) res.computeIfAbsent(grp.groupId(), k -> new HashSet<>()).add(locPart.id()); } } return res; } /** {@inheritDoc} */ @Override public synchronized void releaseHistoryForExchange() { if (reservedForExchange == null) return; FileWALPointer earliestPtr = null; for (Map.Entry<Integer, Map<Integer, T2<Long, WALPointer>>> e : reservedForExchange.entrySet()) { for (Map.Entry<Integer, T2<Long, WALPointer>> e0 : e.getValue().entrySet()) { FileWALPointer ptr = (FileWALPointer) e0.getValue().get2(); if (earliestPtr == null || ptr.index() < earliestPtr.index()) earliestPtr = ptr; } } reservedForExchange = null; if (earliestPtr == null) return; assert cctx.wal().reserved(earliestPtr) : "Earliest checkpoint WAL pointer is not reserved for exchange: " + earliestPtr; try { cctx.wal().release(earliestPtr); } catch (IgniteCheckedException e) { log.error("Failed to release earliest checkpoint WAL pointer: " + earliestPtr, e); } } /** {@inheritDoc} */ @Override public boolean reserveHistoryForPreloading(int grpId, int partId, long cntr) { CheckpointEntry cpEntry = cpHistory.searchCheckpointEntry(grpId, partId, cntr); if (cpEntry == null) return false; WALPointer ptr = cpEntry.checkpointMark(); if (ptr == null) return false; boolean reserved = cctx.wal().reserve(ptr); if (reserved) reservedForPreloading.put(new T2<>(grpId, partId), new T2<>(cntr, ptr)); return reserved; } /** {@inheritDoc} */ @Override public void releaseHistoryForPreloading() { for (Map.Entry<T2<Integer, Integer>, 
T2<Long, WALPointer>> e : reservedForPreloading.entrySet()) {
            try {
                cctx.wal().release(e.getValue().get2());
            }
            catch (IgniteCheckedException ex) {
                U.error(log, "Could not release WAL reservation", ex);

                throw new IgniteException(ex);
            }
        }

        reservedForPreloading.clear();
    }

    /**
     * Wakes up the checkpointer (if running) to start a checkpoint for the given reason.
     *
     * @param reason Human-readable reason for the checkpoint wakeup.
     * @return Future that completes when the triggered checkpoint begins, or {@code null}
     *      if the checkpointer has not been started.
     */
    @Nullable @Override public IgniteInternalFuture wakeupForCheckpoint(String reason) {
        Checkpointer cp = checkpointer;

        if (cp != null)
            return cp.wakeupForCheckpoint(0, reason).cpBeginFut;

        return null;
    }

    /** {@inheritDoc} */
    @Override public void waitForCheckpoint(String reason) throws IgniteCheckedException {
        Checkpointer cp = checkpointer;

        if (cp == null)
            return;

        CheckpointProgressSnapshot progSnapshot = cp.wakeupForCheckpoint(0, reason);

        IgniteInternalFuture fut1 = progSnapshot.cpFinishFut;

        fut1.get();

        // NOTE(review): when 'started' is true a second checkpoint is scheduled and awaited,
        // presumably because the first one was already in progress when requested and may not
        // cover the caller's latest changes — confirm semantics of CheckpointProgressSnapshot.started.
        if (!progSnapshot.started)
            return;

        IgniteInternalFuture fut2 = cp.wakeupForCheckpoint(0, reason).cpFinishFut;

        assert fut1 != fut2;

        fut2.get();
    }

    /** {@inheritDoc} */
    @Override public CheckpointFuture forceCheckpoint(String reason) {
        Checkpointer cp = checkpointer;

        if (cp == null)
            return null;

        return cp.wakeupForCheckpoint(0, reason);
    }

    /** {@inheritDoc} */
    @Override public WALPointer lastCheckpointMarkWalPointer() {
        CheckpointEntry lastCheckpointEntry = cpHistory == null ? null : cpHistory.lastCheckpoint();

        return lastCheckpointEntry == null ? null : lastCheckpointEntry.checkpointMark();
    }

    /**
     * @return Checkpoint directory.
     */
    public File checkpointDirectory() {
        return cpDir;
    }

    /**
     * Registers a checkpoint lifecycle listener.
     *
     * @param lsnr Listener.
     */
    public void addCheckpointListener(DbCheckpointListener lsnr) {
        lsnrs.add(lsnr);
    }

    /**
     * Unregisters a previously added checkpoint lifecycle listener.
     *
     * @param lsnr Listener.
     */
    public void removeCheckpointListener(DbCheckpointListener lsnr) {
        lsnrs.remove(lsnr);
    }

    /**
     * @return Read checkpoint status.
     * @throws IgniteCheckedException If failed to read checkpoint status page.
*/
    @SuppressWarnings("TooBroadScope")
    private CheckpointStatus readCheckpointStatus() throws IgniteCheckedException {
        long lastStartTs = 0;
        long lastEndTs = 0;

        UUID startId = CheckpointStatus.NULL_UUID;
        UUID endId = CheckpointStatus.NULL_UUID;

        File startFile = null;
        File endFile = null;

        WALPointer startPtr = CheckpointStatus.NULL_PTR;
        WALPointer endPtr = CheckpointStatus.NULL_PTR;

        File dir = cpDir;

        if (!dir.exists()) {
            log.warning("Read checkpoint status: checkpoint directory is not found.");

            return new CheckpointStatus(0, startId, startPtr, endId, endPtr);
        }

        File[] files = dir.listFiles();

        // listFiles() may return null if the directory disappeared or an I/O error occurred
        // after the exists() check above; treat that the same as a missing directory instead
        // of failing with an NPE during node startup.
        if (files == null) {
            log.warning("Read checkpoint status: failed to list checkpoint directory.");

            return new CheckpointStatus(0, startId, startPtr, endId, endPtr);
        }

        for (File file : files) {
            Matcher matcher = CP_FILE_NAME_PATTERN.matcher(file.getName());

            if (matcher.matches()) {
                long ts = Long.parseLong(matcher.group(1));
                UUID id = UUID.fromString(matcher.group(2));
                CheckpointEntryType type = CheckpointEntryType.valueOf(matcher.group(3));

                // Track only the most recent START and the most recent END marker.
                if (type == CheckpointEntryType.START && ts > lastStartTs) {
                    lastStartTs = ts;

                    startId = id;

                    startFile = file;
                }
                else if (type == CheckpointEntryType.END && ts > lastEndTs) {
                    lastEndTs = ts;

                    endId = id;

                    endFile = file;
                }
            }
        }

        ByteBuffer buf = ByteBuffer.allocate(FileWALPointer.POINTER_SIZE);
        buf.order(ByteOrder.nativeOrder());

        if (startFile != null)
            startPtr = readPointer(startFile, buf);

        if (endFile != null)
            endPtr = readPointer(endFile, buf);

        if (log.isInfoEnabled())
            log.info("Read checkpoint status [startMarker=" + startFile + ", endMarker=" + endFile + ']');

        return new CheckpointStatus(lastStartTs, startId, startPtr, endId, endPtr);
    }

    /**
     * Loads WAL pointer from a checkpoint marker file.
     *
     * @param cpMarkerFile Checkpoint mark file.
     * @return WAL pointer.
     * @throws IgniteCheckedException If failed to read mark file.
*/ private WALPointer readPointer(File cpMarkerFile, ByteBuffer buf) throws IgniteCheckedException { buf.position(0); try (FileIO io = ioFactory.create(cpMarkerFile, READ)) { io.readFully(buf); buf.flip(); return new FileWALPointer(buf.getLong(), buf.getInt(), buf.getInt()); } catch (IOException e) { throw new IgniteCheckedException( "Failed to read checkpoint pointer from marker file: " + cpMarkerFile.getAbsolutePath(), e); } } /** {@inheritDoc} */ @Override public void startMemoryRestore(GridKernalContext kctx) throws IgniteCheckedException { if (kctx.clientNode()) return; checkpointReadLock(); try { // Preform early regions startup before restoring state. initAndStartRegions(kctx.config().getDataStorageConfiguration()); // Restore binary memory for all not WAL disabled cache groups. restoreBinaryMemory( groupsWithEnabledWal(), physicalRecords() ); if (recoveryVerboseLogging && log.isInfoEnabled()) { log.info("Partition states information after BINARY RECOVERY phase:"); dumpPartitionsInfo(cctx, log); } CheckpointStatus status = readCheckpointStatus(); RestoreLogicalState logicalState = applyLogicalUpdates( status, groupsWithEnabledWal(), logicalRecords(), true ); if (recoveryVerboseLogging && log.isInfoEnabled()) { log.info("Partition states information after LOGICAL RECOVERY phase:"); dumpPartitionsInfo(cctx, log); } walTail = tailPointer(logicalState.lastReadRecordPointer().orElse(null)); cctx.wal().onDeActivate(kctx); } catch (IgniteCheckedException e) { releaseFileLock(); throw e; } finally { checkpointReadUnlock(); } } /** * @param f Consumer. * @return Accumulated result for all page stores. */ public long forAllPageStores(ToLongFunction<PageStore> f) { long res = 0; for (CacheGroupContext gctx : cctx.cache().cacheGroups()) res += forGroupPageStores(gctx, f); return res; } /** * @param grpId Cache group id. * @param partId Partition ID. * @return Page store. * @throws IgniteCheckedException If failed. 
*/
public PageStore getPageStore(int grpId, int partId) throws IgniteCheckedException {
    return storeMgr.getStore(grpId, partId);
}

/**
 * @param gctx Group context.
 * @param f Consumer.
 * @return Accumulated result for all page stores.
 */
public long forGroupPageStores(CacheGroupContext gctx, ToLongFunction<PageStore> f) {
    int groupId = gctx.groupId();

    long res = 0;

    try {
        Collection<PageStore> stores = storeMgr.getStores(groupId);

        if (stores != null) {
            for (PageStore store : stores)
                res += f.applyAsLong(store);
        }
    }
    catch (IgniteCheckedException e) {
        throw new IgniteException(e);
    }

    return res;
}

/**
 * Calculates tail pointer for WAL at the end of logical recovery.
 *
 * @param from Start replay WAL from.
 * @return Tail pointer.
 * @throws IgniteCheckedException If failed.
 */
private WALPointer tailPointer(WALPointer from) throws IgniteCheckedException {
    WALIterator it = cctx.wal().replay(from);

    try {
        // Drain the iterator so that lastRead() below reflects the last record present in the WAL.
        while (it.hasNextX()) {
            IgniteBiTuple<WALPointer, WALRecord> rec = it.nextX();

            if (rec == null)
                break;
        }
    }
    finally {
        it.close();
    }

    // Tail is one position past the last record read, or null if nothing was read.
    return it.lastRead().map(WALPointer::next).orElse(null);
}

/**
 * Called when all partitions have been fully restored and pre-created on node start.
 *
 * Starts checkpointing process and initiates first checkpoint.
 *
 * @throws IgniteCheckedException If first checkpoint has failed.
 */
@Override public void onStateRestored(AffinityTopologyVersion topVer) throws IgniteCheckedException {
    IgniteThread cpThread = new IgniteThread(cctx.igniteInstanceName(), "db-checkpoint-thread", checkpointer);

    cpThread.start();

    checkpointerThread = cpThread;

    // Trigger an immediate checkpoint and wait until it actually begins.
    CheckpointProgressSnapshot chp = checkpointer.wakeupForCheckpoint(0, "node started");

    if (chp != null)
        chp.cpBeginFut.get();
}

/**
 * @param status Checkpoint status.
 * @param cacheGroupsPredicate Cache groups to restore.
 * @throws IgniteCheckedException If failed.
 * @throws StorageException In case I/O error occurred during operations with storage.
*/
private RestoreBinaryState performBinaryMemoryRestore(
    CheckpointStatus status,
    IgnitePredicate<Integer> cacheGroupsPredicate,
    IgniteBiPredicate<WALRecord.RecordType, WALPointer> recordTypePredicate,
    boolean finalizeState
) throws IgniteCheckedException {
    if (log.isInfoEnabled())
        log.info("Checking memory state [lastValidPos=" + status.endPtr + ", lastMarked="
            + status.startPtr + ", lastCheckpointId=" + status.cpStartId + ']');

    WALPointer recPtr = status.endPtr;

    boolean apply = status.needRestoreMemory();

    if (apply) {
        // Node stopped mid-checkpoint: physical records must be re-applied.
        if (finalizeState)
            U.quietAndWarn(log, "Ignite node stopped in the middle of checkpoint. Will restore memory state and " +
                "finish checkpoint on node start.");

        cctx.pageStore().beginRecover();

        WALRecord rec = cctx.wal().read(status.startPtr);

        if (!(rec instanceof CheckpointRecord))
            throw new StorageException("Checkpoint marker doesn't point to checkpoint record " +
                "[ptr=" + status.startPtr + ", rec=" + rec + "]");

        WALPointer cpMark = ((CheckpointRecord)rec).checkpointMark();

        if (cpMark != null) {
            log.info("Restoring checkpoint after logical recovery, will start physical recovery from " +
                "back pointer: " + cpMark);

            recPtr = cpMark;
        }
    }
    else
        cctx.wal().notchLastCheckpointPtr(status.startPtr);

    AtomicReference<IgniteCheckedException> applyError = new AtomicReference<>();

    StripedExecutor exec = cctx.kernalContext().getStripedExecutorService();

    Semaphore semaphore = new Semaphore(semaphorePertmits(exec));

    long start = U.currentTimeMillis();

    long lastArchivedSegment = cctx.wal().lastArchivedSegment();

    WALIterator it = cctx.wal().replay(recPtr, recordTypePredicate);

    RestoreBinaryState restoreBinaryState = new RestoreBinaryState(status, it, lastArchivedSegment, cacheGroupsPredicate);

    AtomicLong applied = new AtomicLong();

    try {
        while (it.hasNextX()) {
            // Fail fast if any striped task already failed.
            if (applyError.get() != null)
                throw applyError.get();

            WALRecord rec = restoreBinaryState.next();

            if (rec == null)
                break;

            switch (rec.type()) {
                case PAGE_RECORD:
                    if (restoreBinaryState.needApplyBinaryUpdate()) {
                        PageSnapshot pageSnapshot = (PageSnapshot)rec;

                        // Here we do not require tag check because we may be applying memory changes after
                        // several repetitive restarts and the same pages may have changed several times.
                        int groupId = pageSnapshot.fullPageId().groupId();
                        int partId = partId(pageSnapshot.fullPageId().pageId());

                        stripedApplyPage((pageMem) -> {
                            try {
                                applyPageSnapshot(pageMem, pageSnapshot);

                                applied.incrementAndGet();
                            }
                            catch (IgniteCheckedException e) {
                                U.error(log, "Failed to apply page snapshot, " + pageSnapshot);

                                applyError.compareAndSet(null, e);
                            }
                        }, groupId, partId, exec, semaphore
                        );
                    }

                    break;

                case PART_META_UPDATE_STATE:
                    PartitionMetaStateRecord metaStateRecord = (PartitionMetaStateRecord)rec;

                    {
                        int groupId = metaStateRecord.groupId();
                        int partId = metaStateRecord.partitionId();

                        stripedApplyPage((pageMem) -> {
                            GridDhtPartitionState state = fromOrdinal(metaStateRecord.state());

                            if (state == null || state == GridDhtPartitionState.EVICTED)
                                schedulePartitionDestroy(groupId, partId);
                            else {
                                try {
                                    cancelOrWaitPartitionDestroy(groupId, partId);
                                }
                                catch (IgniteCheckedException e) {
                                    U.error(log, "Failed to cancel or wait partition destroy, " + metaStateRecord);

                                    applyError.compareAndSet(null, e);
                                }
                            }
                        }, groupId, partId, exec, semaphore);
                    }

                    break;

                case PARTITION_DESTROY:
                    PartitionDestroyRecord destroyRecord = (PartitionDestroyRecord)rec;

                    {
                        int groupId = destroyRecord.groupId();
                        int partId = destroyRecord.partitionId();

                        stripedApplyPage((pageMem) -> {
                            pageMem.invalidate(groupId, partId);

                            schedulePartitionDestroy(groupId, partId);
                        }, groupId, partId, exec, semaphore);
                    }

                    break;

                default:
                    if (restoreBinaryState.needApplyBinaryUpdate() && rec instanceof PageDeltaRecord) {
                        PageDeltaRecord pageDelta = (PageDeltaRecord)rec;

                        int groupId = pageDelta.groupId();
                        int partId = partId(pageDelta.pageId());

                        stripedApplyPage((pageMem) -> {
                            try {
                                applyPageDelta(pageMem, pageDelta);

                                applied.incrementAndGet();
                            }
                            catch (IgniteCheckedException e) {
                                U.error(log, "Failed to apply page delta, " + pageDelta);

                                applyError.compareAndSet(null, e);
                            }
                        }, groupId, partId, exec, semaphore);
                    }
            }
        }
    }
    finally {
        it.close();
    }

    awaitApplyComplete(exec, applyError);

    if (!finalizeState)
        return null;

    FileWALPointer lastReadPtr = restoreBinaryState.lastReadRecordPointer().orElse(null);

    if (status.needRestoreMemory()) {
        if (restoreBinaryState.needApplyBinaryUpdate())
            throw new StorageException("Failed to restore memory state (checkpoint marker is present " +
                "on disk, but checkpoint record is missed in WAL) " +
                "[cpStatus=" + status + ", lastRead=" + lastReadPtr + "]");

        log.info("Finished applying memory changes [changesApplied=" + applied +
            ", time=" + (U.currentTimeMillis() - start) + " ms]");

        assert applied.get() > 0;

        finalizeCheckpointOnRecovery(status.cpStartTs, status.cpStartId, status.startPtr, exec);
    }

    cpHistory.initialize(retreiveHistory());

    return restoreBinaryState;
}

/**
 * Calculate the maximum number of concurrent tasks for apply through the striped executor.
 *
 * NOTE(review): method name is misspelled ("Pertmits"); renaming would touch all call sites.
 *
 * @param exec Striped executor.
 * @return Number of permits.
 */
private int semaphorePertmits(StripedExecutor exec) {
    // 4 task per-stripe by default.
    int permits = exec.stripes() * 4;

    long maxMemory = Runtime.getRuntime().maxMemory();

    // Heuristic calculation part of heap size as a maximum number of concurrent tasks.
    int permits0 = (int)((maxMemory * 0.2) / (4096 * 2));

    // May be for small heap. Get a low number of permits.
    if (permits0 < permits)
        permits = permits0;

    // Property for override any calculation.
    return getInteger(IGNITE_RECOVERY_SEMAPHORE_PERMITS, permits);
}

/**
 * @param exec Striped executor.
 * @param applyError Check error reference.
 */
private void awaitApplyComplete(
    StripedExecutor exec,
    AtomicReference<IgniteCheckedException> applyError
) throws IgniteCheckedException {
    if (applyError.get() != null)
        throw applyError.get(); // Fail-fast check.
    else {
        try {
            // Await completion apply tasks in all stripes.
            exec.awaitComplete();
        }
        catch (InterruptedException e) {
            throw new IgniteInterruptedException(e);
        }

        // Checking error after all task applied.
        if (applyError.get() != null)
            throw applyError.get();
    }
}

/**
 * @param consumer Runnable task.
 * @param grpId Group Id.
 * @param partId Partition Id.
 * @param exec Striped executor.
 * @param semaphore Semaphore limiting the number of in-flight tasks.
 */
public void stripedApplyPage(
    Consumer<PageMemoryEx> consumer,
    int grpId,
    int partId,
    StripedExecutor exec,
    Semaphore semaphore
) throws IgniteCheckedException {
    assert consumer != null;
    assert exec != null;
    assert semaphore != null;

    PageMemoryEx pageMem = getPageMemoryForCacheGroup(grpId);

    // No page memory for the group (e.g. group descriptor is gone) - nothing to apply.
    if (pageMem == null)
        return;

    stripedApply(() -> consumer.accept(pageMem), grpId, partId, exec, semaphore);
}

/**
 * @param run Runnable task.
 * @param grpId Group Id.
 * @param partId Partition Id.
 * @param exec Striped executor.
 * @param semaphore Semaphore limiting the number of in-flight tasks.
 */
public void stripedApply(
    Runnable run,
    int grpId,
    int partId,
    StripedExecutor exec,
    Semaphore semaphore
) {
    assert run != null;
    assert exec != null;
    assert semaphore != null;

    int stripes = exec.stripes();

    int stripe = U.stripeIdx(stripes, grpId, partId);

    // NOTE(review): valid indexes are [0, stripes); the upper bound here should likely be "stripe < stripes".
    assert stripe >= 0 && stripe <= stripes : "idx=" + stripe + ", stripes=" + stripes;

    try {
        semaphore.acquire();
    }
    catch (InterruptedException e) {
        throw new IgniteInterruptedException(e);
    }

    exec.execute(stripe, () -> {
        // WA for avoid assert check in PageMemory, that current thread hold chpLock.
        CHECKPOINT_LOCK_HOLD_COUNT.set(1);

        try {
            run.run();
        }
        finally {
            CHECKPOINT_LOCK_HOLD_COUNT.set(0);

            semaphore.release();
        }
    });
}

/**
 * @param pageMem Page memory.
 * @param pageSnapshotRecord Page snapshot record.
 * @throws IgniteCheckedException If failed.
*/
public void applyPageSnapshot(PageMemoryEx pageMem, PageSnapshot pageSnapshotRecord) throws IgniteCheckedException {
    int grpId = pageSnapshotRecord.fullPageId().groupId();
    long pageId = pageSnapshotRecord.fullPageId().pageId();

    long page = pageMem.acquirePage(grpId, pageId, IoStatisticsHolderNoOp.INSTANCE, true);

    try {
        long pageAddr = pageMem.writeLock(grpId, pageId, page, true);

        try {
            // Overwrite the whole page with the snapshot content.
            PageUtils.putBytes(pageAddr, 0, pageSnapshotRecord.pageData());
        }
        finally {
            pageMem.writeUnlock(grpId, pageId, page, null, true, true);
        }
    }
    finally {
        pageMem.releasePage(grpId, pageId, page);
    }
}

/**
 * @param pageMem Page memory.
 * @param pageDeltaRecord Page delta record.
 * @throws IgniteCheckedException If failed.
 */
private void applyPageDelta(PageMemoryEx pageMem, PageDeltaRecord pageDeltaRecord) throws IgniteCheckedException {
    int grpId = pageDeltaRecord.groupId();
    long pageId = pageDeltaRecord.pageId();

    // Here we do not require tag check because we may be applying memory changes after
    // several repetitive restarts and the same pages may have changed several times.
    long page = pageMem.acquirePage(grpId, pageId, IoStatisticsHolderNoOp.INSTANCE, true);

    try {
        long pageAddr = pageMem.writeLock(grpId, pageId, page, true);

        try {
            pageDeltaRecord.applyDelta(pageMem, pageAddr);
        }
        finally {
            pageMem.writeUnlock(grpId, pageId, page, null, true, true);
        }
    }
    finally {
        pageMem.releasePage(grpId, pageId, page);
    }
}

/**
 * Obtains PageMemory reference from cache descriptor instead of cache context.
 *
 * @param grpId Cache group id.
 * @return PageMemoryEx instance.
 * @throws IgniteCheckedException if no DataRegion is configured for a name obtained from cache descriptor.
 */
private PageMemoryEx getPageMemoryForCacheGroup(int grpId) throws IgniteCheckedException {
    // Metastorage lives in its own dedicated data region.
    if (grpId == MetaStorage.METASTORAGE_CACHE_ID)
        return (PageMemoryEx)dataRegion(METASTORE_DATA_REGION_NAME).pageMemory();

    // TODO IGNITE-7792 add generic mapping.
    if (grpId == TxLog.TX_LOG_CACHE_ID)
        return (PageMemoryEx)dataRegion(TxLog.TX_LOG_CACHE_NAME).pageMemory();

    // TODO IGNITE-5075: cache descriptor can be removed.
    GridCacheSharedContext sharedCtx = context();

    CacheGroupDescriptor desc = sharedCtx.cache().cacheGroupDescriptors().get(grpId);

    if (desc == null)
        return null;

    String memPlcName = desc.config().getDataRegionName();

    return (PageMemoryEx)sharedCtx.database().dataRegion(memPlcName).pageMemory();
}

/**
 * Apply update from some iterator and with specific filters.
 *
 * @param it WalIterator.
 * @param recPredicate Wal record filter.
 * @param entryPredicate Entry filter.
 */
public void applyUpdatesOnRecovery(
    @Nullable WALIterator it,
    IgniteBiPredicate<WALPointer, WALRecord> recPredicate,
    IgnitePredicate<DataEntry> entryPredicate
) throws IgniteCheckedException {
    if (it == null)
        return;

    // Updates are re-applied without logging them back to the WAL.
    cctx.walState().runWithOutWAL(() -> {
        while (it.hasNext()) {
            IgniteBiTuple<WALPointer, WALRecord> next = it.next();

            WALRecord rec = next.get2();

            if (!recPredicate.apply(next.get1(), rec))
                break;

            switch (rec.type()) {
                case MVCC_DATA_RECORD:
                case DATA_RECORD:
                    checkpointReadLock();

                    try {
                        DataRecord dataRec = (DataRecord)rec;

                        for (DataEntry dataEntry : dataRec.writeEntries()) {
                            if (entryPredicate.apply(dataEntry)) {
                                checkpointReadLock();

                                try {
                                    int cacheId = dataEntry.cacheId();

                                    GridCacheContext cacheCtx = cctx.cacheContext(cacheId);

                                    if (cacheCtx != null)
                                        applyUpdate(cacheCtx, dataEntry);
                                    else if (log != null)
                                        log.warning("Cache is not started. Updates cannot be applied " +
                                            "[cacheId=" + cacheId + ']');
                                }
                                finally {
                                    checkpointReadUnlock();
                                }
                            }
                        }
                    }
                    catch (IgniteCheckedException e) {
                        throw new IgniteException(e);
                    }
                    finally {
                        checkpointReadUnlock();
                    }

                    break;

                case MVCC_TX_RECORD:
                    checkpointReadLock();

                    try {
                        MvccTxRecord txRecord = (MvccTxRecord)rec;

                        byte txState = convertToTxState(txRecord.state());

                        cctx.coordinators().updateState(txRecord.mvccVersion(), txState, true);
                    }
                    finally {
                        checkpointReadUnlock();
                    }

                    break;

                default:
                    // Skip other records.
            }
        }
    });
}

/**
 * @param status Last registered checkpoint status.
 * @throws IgniteCheckedException If failed to apply updates.
 * @throws StorageException If IO exception occurred while reading write-ahead log.
 */
private RestoreLogicalState applyLogicalUpdates(
    CheckpointStatus status,
    IgnitePredicate<Integer> cacheGroupsPredicate,
    IgniteBiPredicate<WALRecord.RecordType, WALPointer> recordTypePredicate,
    boolean skipFieldLookup
) throws IgniteCheckedException {
    if (log.isInfoEnabled())
        log.info("Applying lost cache updates since last checkpoint record [lastMarked="
            + status.startPtr + ", lastCheckpointId=" + status.cpStartId + ']');

    if (skipFieldLookup)
        cctx.kernalContext().query().skipFieldLookup(true);

    long start = U.currentTimeMillis();

    AtomicReference<IgniteCheckedException> applyError = new AtomicReference<>();

    AtomicLong applied = new AtomicLong();

    long lastArchivedSegment = cctx.wal().lastArchivedSegment();

    StripedExecutor exec = cctx.kernalContext().getStripedExecutorService();

    Semaphore semaphore = new Semaphore(semaphorePertmits(exec));

    WALIterator it = cctx.wal().replay(status.startPtr, recordTypePredicate);

    RestoreLogicalState restoreLogicalState = new RestoreLogicalState(it, lastArchivedSegment, cacheGroupsPredicate);

    try {
        while (it.hasNextX()) {
            WALRecord rec = restoreLogicalState.next();

            if (rec == null)
                break;

            switch (rec.type()) {
                case MVCC_DATA_RECORD:
                case DATA_RECORD:
                case ENCRYPTED_DATA_RECORD:
                    DataRecord dataRec = (DataRecord)rec;

                    for (DataEntry dataEntry : dataRec.writeEntries()) {
                        int cacheId = dataEntry.cacheId();

                        DynamicCacheDescriptor cacheDesc = cctx.cache().cacheDescriptor(cacheId);

                        // Can empty in case recovery node on blt changed.
                        if (cacheDesc == null)
                            continue;

                        stripedApply(() -> {
                            GridCacheContext cacheCtx = cctx.cacheContext(cacheId);

                            try {
                                applyUpdate(cacheCtx, dataEntry);
                            }
                            catch (IgniteCheckedException e) {
                                U.error(log, "Failed to apply data entry, dataEntry=" + dataEntry +
                                    ", ptr=" + dataRec.position());

                                applyError.compareAndSet(null, e);
                            }

                            applied.incrementAndGet();
                        }, cacheId, dataEntry.partitionId(), exec, semaphore);
                    }

                    break;

                case MVCC_TX_RECORD:
                    MvccTxRecord txRecord = (MvccTxRecord)rec;

                    byte txState = convertToTxState(txRecord.state());

                    cctx.coordinators().updateState(txRecord.mvccVersion(), txState, true);

                    break;

                case PART_META_UPDATE_STATE:
                    PartitionMetaStateRecord metaStateRecord = (PartitionMetaStateRecord)rec;

                    GroupPartitionId groupPartitionId = new GroupPartitionId(
                        metaStateRecord.groupId(), metaStateRecord.partitionId()
                    );

                    PartitionRecoverState state = new PartitionRecoverState(
                        (int)metaStateRecord.state(), metaStateRecord.updateCounter()
                    );

                    restoreLogicalState.partitionRecoveryStates.put(groupPartitionId, state);

                    break;

                case METASTORE_DATA_RECORD:
                    MetastoreDataRecord metastoreDataRecord = (MetastoreDataRecord)rec;

                    metaStorage.applyUpdate(metastoreDataRecord.key(), metastoreDataRecord.value());

                    break;

                case META_PAGE_UPDATE_NEXT_SNAPSHOT_ID:
                case META_PAGE_UPDATE_LAST_SUCCESSFUL_SNAPSHOT_ID:
                case META_PAGE_UPDATE_LAST_SUCCESSFUL_FULL_SNAPSHOT_ID:
                case META_PAGE_UPDATE_LAST_ALLOCATED_INDEX:
                    PageDeltaRecord pageDelta = (PageDeltaRecord)rec;

                    stripedApplyPage((pageMem) -> {
                        try {
                            applyPageDelta(pageMem, pageDelta);
                        }
                        catch (IgniteCheckedException e) {
                            U.error(log, "Failed to apply page delta, " + pageDelta);

                            applyError.compareAndSet(null, e);
                        }
                    }, pageDelta.groupId(), partId(pageDelta.pageId()), exec, semaphore);

                    break;

                default:
                    // Skip other records.
            }
        }
    }
    finally {
        it.close();

        if (skipFieldLookup)
            cctx.kernalContext().query().skipFieldLookup(false);
    }

    awaitApplyComplete(exec, applyError);

    if (log.isInfoEnabled())
        log.info("Finished applying WAL changes [updatesApplied=" + applied +
            ", time=" + (U.currentTimeMillis() - start) + " ms]");

    for (DatabaseLifecycleListener lsnr : getDatabaseListeners(cctx.kernalContext()))
        lsnr.afterLogicalUpdatesApplied(this, restoreLogicalState);

    return restoreLogicalState;
}

/**
 * Convert {@link TransactionState} to Mvcc {@link TxState}.
 *
 * @param state TransactionState.
 * @return TxState.
 */
private byte convertToTxState(TransactionState state) {
    switch (state) {
        case PREPARED:
            return TxState.PREPARED;

        case COMMITTED:
            return TxState.COMMITTED;

        case ROLLED_BACK:
            return TxState.ABORTED;

        default:
            throw new IllegalStateException("Unsupported TxState.");
    }
}

/**
 * Wal truncate callBack.
 *
 * @param highBound WALPointer.
 */
public void onWalTruncated(WALPointer highBound) throws IgniteCheckedException {
    List<CheckpointEntry> removedFromHistory = cpHistory.onWalTruncated(highBound);

    for (CheckpointEntry cp : removedFromHistory)
        removeCheckpointFiles(cp);
}

/**
 * @param cacheCtx Cache context to apply an update.
 * @param dataEntry Data entry to apply.
 * @throws IgniteCheckedException If failed to restore.
 */
private void applyUpdate(GridCacheContext cacheCtx, DataEntry dataEntry) throws IgniteCheckedException {
    int partId = dataEntry.partitionId();

    if (partId == -1)
        partId = cacheCtx.affinity().partition(dataEntry.key());

    GridDhtLocalPartition locPart = cacheCtx.isLocal() ?
null : cacheCtx.topology().forceCreatePartition(partId);

    switch (dataEntry.op()) {
        case CREATE:
        case UPDATE:
            if (dataEntry instanceof MvccDataEntry) {
                cacheCtx.offheap().mvccApplyUpdate(
                    cacheCtx,
                    dataEntry.key(),
                    dataEntry.value(),
                    dataEntry.writeVersion(),
                    dataEntry.expireTime(),
                    locPart,
                    ((MvccDataEntry)dataEntry).mvccVer());
            }
            else {
                cacheCtx.offheap().update(
                    cacheCtx,
                    dataEntry.key(),
                    dataEntry.value(),
                    dataEntry.writeVersion(),
                    dataEntry.expireTime(),
                    locPart,
                    null);
            }

            if (dataEntry.partitionCounter() != 0)
                cacheCtx.offheap().onPartitionInitialCounterUpdated(partId, dataEntry.partitionCounter());

            break;

        case DELETE:
            if (dataEntry instanceof MvccDataEntry) {
                cacheCtx.offheap().mvccApplyUpdate(
                    cacheCtx,
                    dataEntry.key(),
                    null,
                    dataEntry.writeVersion(),
                    0L,
                    locPart,
                    ((MvccDataEntry)dataEntry).mvccVer());
            }
            else
                cacheCtx.offheap().remove(cacheCtx, dataEntry.key(), partId, locPart);

            if (dataEntry.partitionCounter() != 0)
                cacheCtx.offheap().onPartitionInitialCounterUpdated(partId, dataEntry.partitionCounter());

            break;

        case READ:
            // do nothing
            break;

        default:
            throw new IgniteCheckedException("Invalid operation for WAL entry update: " + dataEntry.op());
    }
}

/**
 * Finishes an interrupted checkpoint by writing all collected dirty pages to the page stores
 * and recording the END checkpoint marker.
 *
 * @throws IgniteCheckedException If failed.
 */
private void finalizeCheckpointOnRecovery(
    long cpTs,
    UUID cpId,
    WALPointer walPtr,
    StripedExecutor exec
) throws IgniteCheckedException {
    assert cpTs != 0;

    // NOTE(review): start uses System.currentTimeMillis() while written/fsync below use
    // U.currentTimeMillis(); confirm these clocks are comparable for the logged durations.
    long start = System.currentTimeMillis();

    Collection<DataRegion> regions = dataRegions();

    Collection<GridMultiCollectionWrapper<FullPageId>> res = new ArrayList(regions.size());

    int pagesNum = 0;

    // Collect collection of dirty pages from all regions.
    for (DataRegion memPlc : regions) {
        if (memPlc.config().isPersistenceEnabled()){
            GridMultiCollectionWrapper<FullPageId> nextCpPagesCol =
                ((PageMemoryEx)memPlc.pageMemory()).beginCheckpoint();

            pagesNum += nextCpPagesCol.size();

            res.add(nextCpPagesCol);
        }
    }

    // Sort and split all dirty pages set to several stripes.
    GridMultiCollectionWrapper<FullPageId> pages = splitAndSortCpPagesIfNeeded(
        new IgniteBiTuple<>(res, pagesNum), exec.stripes());

    // Identity stores set for future fsync.
    Collection<PageStore> updStores = new GridConcurrentHashSet<>();

    AtomicInteger cpPagesCnt = new AtomicInteger();

    // Shared reference for tracking exception during write pages.
    AtomicReference<IgniteCheckedException> writePagesError = new AtomicReference<>();

    for (int i = 0; i < pages.collectionsSize(); i++) {
        // Calculate stripe index.
        int stripeIdx = i % exec.stripes();

        // Inner collection index.
        int innerIdx = i;

        exec.execute(stripeIdx, () -> {
            // Local buffer for write pages.
            ByteBuffer writePageBuf = ByteBuffer.allocateDirect(pageSize());

            writePageBuf.order(ByteOrder.nativeOrder());

            Collection<FullPageId> pages0 = pages.innerCollection(innerIdx);

            FullPageId pageId = null;

            try {
                for (FullPageId fullId : pages0) {
                    // Fail-fast break if some exception occurred.
                    if (writePagesError.get() != null)
                        break;

                    writePageBuf.rewind();

                    PageMemoryEx pageMem = getPageMemoryForCacheGroup(fullId.groupId());

                    // Write page content to writePageBuf.
                    Integer tag = pageMem.getForCheckpoint(fullId, writePageBuf, null);

                    assert tag == null || tag != PageMemoryImpl.TRY_AGAIN_TAG :
                        "Lock is held by other thread for page " + fullId;

                    if (tag != null) {
                        writePageBuf.rewind();

                        // Save pageId to local variable for future using if exception occurred.
                        pageId = fullId;

                        // Write writePageBuf to page store.
                        PageStore store = storeMgr.writeInternal(
                            fullId.groupId(), fullId.pageId(), writePageBuf, tag, true);

                        writePageBuf.rewind();

                        // Save store for future fsync.
                        updStores.add(store);
                    }
                }

                // Add number of handled pages.
                cpPagesCnt.addAndGet(pages0.size());
            }
            catch (IgniteCheckedException e) {
                U.error(log, "Failed to write page to pageStore, pageId=" + pageId);

                writePagesError.compareAndSet(null, e);
            }
        });
    }

    // Await completion all write tasks.
    awaitApplyComplete(exec, writePagesError);

    long written = U.currentTimeMillis();

    // Fsync all touched stores.
    for (PageStore updStore : updStores)
        updStore.sync();

    long fsync = U.currentTimeMillis();

    for (DataRegion memPlc : regions) {
        if (memPlc.config().isPersistenceEnabled())
            ((PageMemoryEx)memPlc.pageMemory()).finishCheckpoint();
    }

    ByteBuffer tmpWriteBuf = ByteBuffer.allocateDirect(pageSize());

    tmpWriteBuf.order(ByteOrder.nativeOrder());

    // Record the END marker so this checkpoint is considered complete on next restart.
    CheckpointEntry cp = prepareCheckpointEntry(
        tmpWriteBuf,
        cpTs,
        cpId,
        walPtr,
        null,
        CheckpointEntryType.END);

    writeCheckpointEntry(tmpWriteBuf, cp, CheckpointEntryType.END);

    cctx.pageStore().finishRecover();

    if (log.isInfoEnabled())
        log.info(String.format("Checkpoint finished [cpId=%s, pages=%d, markPos=%s, " +
            "pagesWrite=%dms, fsync=%dms, total=%dms]",
            cpId, cpPagesCnt.get(), walPtr, written - start, fsync - written, fsync - start));
}

/**
 * Prepares checkpoint entry containing WAL pointer to checkpoint record.
 * Writes into given {@code ptrBuf} WAL pointer content.
 *
 * @param entryBuf Buffer to fill
 * @param cpTs Checkpoint timestamp.
 * @param cpId Checkpoint id.
 * @param ptr WAL pointer containing record.
 * @param rec Checkpoint WAL record.
 * @param type Checkpoint type.
 * @return Checkpoint entry.
 */
private CheckpointEntry prepareCheckpointEntry(
    ByteBuffer entryBuf,
    long cpTs,
    UUID cpId,
    WALPointer ptr,
    @Nullable CheckpointRecord rec,
    CheckpointEntryType type
) {
    assert ptr instanceof FileWALPointer;

    FileWALPointer filePtr = (FileWALPointer)ptr;

    entryBuf.rewind();

    // Marker layout must match readPointer(): index (long), offset (int), length (int).
    entryBuf.putLong(filePtr.index());

    entryBuf.putInt(filePtr.fileOffset());

    entryBuf.putInt(filePtr.length());

    entryBuf.flip();

    return createCheckPointEntry(cpTs, ptr, cpId, rec, type);
}

/**
 * Writes checkpoint entry buffer {@code entryBuf} to specified checkpoint file with 2-phase protocol.
 *
 * @param entryBuf Checkpoint entry buffer to write.
 * @param cp Checkpoint entry.
 * @param type Checkpoint entry type.
* @throws StorageException If failed to write checkpoint entry.
*/
public void writeCheckpointEntry(ByteBuffer entryBuf, CheckpointEntry cp, CheckpointEntryType type) throws StorageException {
    String fileName = checkpointFileName(cp, type);
    String tmpFileName = fileName + FilePageStoreManager.TMP_SUFFIX;

    try {
        // Phase 1: write (and optionally fsync) to a temporary file; phase 2: atomically move
        // to the final name. With skipSync the final name is written directly.
        try (FileIO io = ioFactory.create(Paths.get(cpDir.getAbsolutePath(), skipSync ? fileName : tmpFileName).toFile(),
            StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)) {
            io.writeFully(entryBuf);

            entryBuf.clear();

            if (!skipSync)
                io.force(true);
        }

        if (!skipSync)
            Files.move(Paths.get(cpDir.getAbsolutePath(), tmpFileName), Paths.get(cpDir.getAbsolutePath(), fileName));
    }
    catch (IOException e) {
        throw new StorageException("Failed to write checkpoint entry [ptr=" + cp.checkpointMark()
            + ", cpTs=" + cp.timestamp()
            + ", cpId=" + cp.checkpointId()
            + ", type=" + type + "]", e);
    }
}

/** {@inheritDoc} */
@Override public AtomicInteger writtenPagesCounter() {
    return writtenPagesCntr;
}

/** {@inheritDoc} */
@Override public AtomicInteger syncedPagesCounter() {
    return syncedPagesCntr;
}

/** {@inheritDoc} */
@Override public AtomicInteger evictedPagesCntr() {
    return evictedPagesCntr;
}

/** {@inheritDoc} */
@Override public int currentCheckpointPagesCount() {
    return currCheckpointPagesCnt;
}

/**
 * @param cpTs Checkpoint timestamp.
 * @param cpId Checkpoint ID.
 * @param type Checkpoint type.
 * @return Checkpoint file name.
 */
private static String checkpointFileName(long cpTs, UUID cpId, CheckpointEntryType type) {
    // Must stay parseable by CP_FILE_NAME_PATTERN used in readCheckpointStatus().
    return cpTs + "-" + cpId + "-" + type + ".bin";
}

/**
 * @param cp Checkpoint entry.
 * @param type Checkpoint type.
 * @return Checkpoint file name.
 */
public static String checkpointFileName(CheckpointEntry cp, CheckpointEntryType type) {
    return checkpointFileName(cp.timestamp(), cp.checkpointId(), type);
}

/**
 * Replace thread local with buffers. Thread local should provide direct buffer with one page in length.
*
* @param threadBuf new thread-local with buffers for the checkpoint threads.
*/
public void setThreadBuf(final ThreadLocal<ByteBuffer> threadBuf) {
    this.threadBuf = threadBuf;
}

/**
 * @param cpTs Checkpoint timestamp.
 * @param ptr Wal pointer of checkpoint.
 * @param cpId Checkpoint ID.
 * @param rec Checkpoint record.
 * @param type Checkpoint type.
 *
 * @return Checkpoint entry.
 */
public CheckpointEntry createCheckPointEntry(
    long cpTs,
    WALPointer ptr,
    UUID cpId,
    @Nullable CheckpointRecord rec,
    CheckpointEntryType type
) {
    assert cpTs > 0;
    assert ptr != null;
    assert cpId != null;
    assert type != null;

    Map<Integer, CacheState> cacheGrpStates = null;

    // Do not hold groups state in-memory if there is no space in the checkpoint history to prevent possible OOM.
    // In this case the actual group states will be read from WAL on demand.
    if (rec != null && cpHistory.hasSpace())
        cacheGrpStates = rec.cacheGroupStates();

    return new CheckpointEntry(cpTs, ptr, cpId, cacheGrpStates);
}

/**
 * @return Checkpoint history.
 */
@Nullable public CheckpointHistory checkpointHistory() {
    return cpHistory;
}

/**
 * Adds given partition to checkpointer destroy queue.
 *
 * @param grpId Group ID.
 * @param partId Partition ID.
 */
public void schedulePartitionDestroy(int grpId, int partId) {
    // Snapshot the volatile reference once; checkpointer may be null during node stop.
    Checkpointer cp = checkpointer;

    if (cp != null)
        cp.schedulePartitionDestroy(cctx.cache().cacheGroup(grpId), grpId, partId);
}

/**
 * Cancels or wait for partition destroy.
 *
 * @param grpId Group ID.
 * @param partId Partition ID.
 * @throws IgniteCheckedException If failed.
 */
public void cancelOrWaitPartitionDestroy(int grpId, int partId) throws IgniteCheckedException {
    Checkpointer cp = checkpointer;

    if (cp != null)
        cp.cancelOrWaitPartitionDestroy(grpId, partId);
}

/**
 * Timeout for checkpoint read lock acquisition.
 *
 * @return Timeout for checkpoint read lock acquisition in milliseconds.
*/
@Override public long checkpointReadLockTimeout() {
    return checkpointReadLockTimeout;
}

/**
 * Sets timeout for checkpoint read lock acquisition.
 *
 * @param val New timeout in milliseconds, non-positive value denotes infinite timeout.
 */
@Override public void checkpointReadLockTimeout(long val) {
    checkpointReadLockTimeout = val;
}

/**
 * Partition destroy queue.
 */
private static class PartitionDestroyQueue {
    /** Pending destroy requests keyed by (group ID, partition ID). */
    private final ConcurrentMap<T2<Integer, Integer>, PartitionDestroyRequest> pendingReqs =
        new ConcurrentHashMap<>();

    /**
     * @param grpCtx Group context.
     * @param partId Partition ID to destroy.
     */
    private void addDestroyRequest(@Nullable CacheGroupContext grpCtx, int grpId, int partId) {
        PartitionDestroyRequest req = new PartitionDestroyRequest(grpId, partId);

        PartitionDestroyRequest old = pendingReqs.putIfAbsent(new T2<>(grpId, partId), req);

        assert old == null || grpCtx == null : "Must wait for old destroy request to finish before adding a new one " +
            "[grpId=" + grpId + ", grpName=" + grpCtx.cacheOrGroupName() + ", partId=" + partId + ']';
    }

    /**
     * @param destroyId Destroy ID.
     * @return Destroy request to complete if was not concurrently cancelled.
     */
    private PartitionDestroyRequest beginDestroy(T2<Integer, Integer> destroyId) {
        PartitionDestroyRequest rmvd = pendingReqs.remove(destroyId);

        return rmvd == null ? null : rmvd.beginDestroy() ? rmvd : null;
    }

    /**
     * @param grpId Group ID.
     * @param partId Partition ID.
     * @return Destroy request to wait for if destroy has begun.
     */
    private PartitionDestroyRequest cancelDestroy(int grpId, int partId) {
        PartitionDestroyRequest rmvd = pendingReqs.remove(new T2<>(grpId, partId));

        return rmvd == null ? null : !rmvd.cancel() ? rmvd : null;
    }
}

/**
 * Partition destroy request.
 */
private static class PartitionDestroyRequest {
    /** Group ID. */
    private final int grpId;

    /** Partition ID. */
    private final int partId;

    /** Destroy cancelled flag. */
    private boolean cancelled;

    /** Destroy future. Not null if partition destroy has begun.
*/
private GridFutureAdapter<Void> destroyFut;

/**
 * @param grpId Group ID.
 * @param partId Partition ID.
 */
private PartitionDestroyRequest(int grpId, int partId) {
    this.grpId = grpId;
    this.partId = partId;
}

/**
 * Cancels partition destroy request.
 *
 * @return {@code False} if this request needs to be waited for.
 */
private synchronized boolean cancel() {
    if (destroyFut != null) {
        // Destroy already started - cannot be cancelled any more.
        assert !cancelled;

        return false;
    }

    cancelled = true;

    return true;
}

/**
 * Initiates partition destroy.
 *
 * @return {@code True} if destroy request should be executed, {@code false} otherwise.
 */
private synchronized boolean beginDestroy() {
    if (cancelled) {
        assert destroyFut == null;

        return false;
    }

    // Already begun by another caller.
    if (destroyFut != null)
        return false;

    destroyFut = new GridFutureAdapter<>();

    return true;
}

/** Completes the destroy future, optionally with an error. */
private synchronized void onDone(Throwable err) {
    assert destroyFut != null;

    destroyFut.onDone(err);
}

/** Blocks until the in-progress destroy completes. */
private void waitCompleted() throws IgniteCheckedException {
    GridFutureAdapter<Void> fut;

    synchronized (this) {
        assert destroyFut != null;

        fut = destroyFut;
    }

    fut.get();
}

/** {@inheritDoc} */
@Override public String toString() {
    return "PartitionDestroyRequest [grpId=" + grpId + ", partId=" + partId + ']';
}
}

/**
 * Checkpointer object is used for notification on checkpoint begin, predicate is {@link #scheduledCp}<code>.nextCpTs - now
 * > 0 </code>. Method {@link #wakeupForCheckpoint} uses notify, {@link #waitCheckpointEvent} uses wait
 */
@SuppressWarnings("NakedNotify")
public class Checkpointer extends GridWorker {
    /** Checkpoint started log message format. */
    private static final String CHECKPOINT_STARTED_LOG_FORMAT = "Checkpoint started [checkpointId=%s, startPtr=%s," +
        " checkpointBeforeLockTime=%dms, checkpointLockWait=%dms, checkpointListenersExecuteTime=%dms, " +
        "checkpointLockHoldTime=%dms, walCpRecordFsyncDuration=%dms, %s pages=%d, reason='%s']";

    /** Temporary write buffer. */
    private final ByteBuffer tmpWriteBuf;

    /** Next scheduled checkpoint progress.
*/
private volatile CheckpointProgress scheduledCp;

/** Current checkpoint. This field is updated only by checkpoint thread. */
@Nullable private volatile CheckpointProgress curCpProgress;

/** Shutdown now. */
private volatile boolean shutdownNow;

/** Timestamp of the last started checkpoint; nudged forward by 1 ms when two checkpoints
 * collide within the same millisecond (see {@code updateLastCheckpointTime()}). */
private long lastCpTs;

/** Pause detector. */
private final LongJVMPauseDetector pauseDetector;

/** Long JVM pause threshold. */
private final int longJvmPauseThreshold =
    getInteger(IGNITE_JVM_PAUSE_DETECTOR_THRESHOLD, DEFAULT_JVM_PAUSE_DETECTOR_THRESHOLD);

/**
 * Creates the checkpointer worker and schedules the first checkpoint one
 * {@code checkpointFreq} interval from now.
 *
 * @param gridName Grid name.
 * @param name Thread name.
 * @param log Logger.
 */
protected Checkpointer(@Nullable String gridName, String name, IgniteLogger log) {
    super(gridName, name, log, cctx.kernalContext().workersRegistry());

    scheduledCp = new CheckpointProgress(U.currentTimeMillis() + checkpointFreq);

    // Direct buffer in native byte order: reused for writing checkpoint entries and pages.
    tmpWriteBuf = ByteBuffer.allocateDirect(pageSize());

    tmpWriteBuf.order(ByteOrder.nativeOrder());

    pauseDetector = cctx.kernalContext().longJvmPauseDetector();
}

/**
 * @return Progress of the current checkpoint, or {@code null} if no checkpoint is in progress at the moment.
*/ public @Nullable CheckpointProgress currentProgress(){ return curCpProgress; } /** {@inheritDoc} */ @Override protected void body() { Throwable err = null; try { while (!isCancelled()) { waitCheckpointEvent(); if (skipCheckpointOnNodeStop && (isCancelled() || shutdownNow)) { if (log.isInfoEnabled()) log.warning("Skipping last checkpoint because node is stopping."); return; } GridFutureAdapter<Void> enableChangeApplied = GridCacheDatabaseSharedManager.this.enableChangeApplied; if (enableChangeApplied != null) { enableChangeApplied.onDone(); GridCacheDatabaseSharedManager.this.enableChangeApplied = null; } if (checkpointsEnabled) doCheckpoint(); else { synchronized (this) { scheduledCp.nextCpTs = U.currentTimeMillis() + checkpointFreq; } } } } catch (Throwable t) { err = t; scheduledCp.cpFinishFut.onDone(t); throw t; } finally { if (err == null && !(stopping && isCancelled)) err = new IllegalStateException("Thread is terminated unexpectedly: " + name()); if (err instanceof OutOfMemoryError) cctx.kernalContext().failure().process(new FailureContext(CRITICAL_ERROR, err)); else if (err != null) cctx.kernalContext().failure().process(new FailureContext(SYSTEM_WORKER_TERMINATION, err)); } // Final run after the cancellation. if (checkpointsEnabled && !shutdownNow) { try { doCheckpoint(); scheduledCp.cpFinishFut.onDone(new NodeStoppingException("Node is stopping.")); } catch (Throwable e) { scheduledCp.cpFinishFut.onDone(e); } } } /** * */ private CheckpointProgressSnapshot wakeupForCheckpoint(long delayFromNow, String reason) { CheckpointProgress sched = scheduledCp; long next = U.currentTimeMillis() + delayFromNow; if (sched.nextCpTs <= next) return new CheckpointProgressSnapshot(sched); CheckpointProgressSnapshot ret; synchronized (this) { sched = scheduledCp; if (sched.nextCpTs > next) { sched.reason = reason; sched.nextCpTs = next; } ret = new CheckpointProgressSnapshot(sched); notifyAll(); } return ret; } /** * @param snapshotOperation Snapshot operation. 
*/ public IgniteInternalFuture wakeupForSnapshotCreation(SnapshotOperation snapshotOperation) { GridFutureAdapter<Object> ret; synchronized (this) { scheduledCp.nextCpTs = U.currentTimeMillis(); scheduledCp.reason = "snapshot"; scheduledCp.nextSnapshot = true; scheduledCp.snapshotOperation = snapshotOperation; ret = scheduledCp.cpBeginFut; notifyAll(); } return ret; } /** * */ private void doCheckpoint() { Checkpoint chp = null; try { CheckpointMetricsTracker tracker = new CheckpointMetricsTracker(); try { chp = markCheckpointBegin(tracker); } catch (Exception e) { if (curCpProgress != null) curCpProgress.cpFinishFut.onDone(e); // In case of checkpoint initialization error node should be invalidated and stopped. cctx.kernalContext().failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e)); throw new IgniteException(e); // Re-throw as unchecked exception to force stopping checkpoint thread. } updateHeartbeat(); currCheckpointPagesCnt = chp.pagesSize; writtenPagesCntr = new AtomicInteger(); syncedPagesCntr = new AtomicInteger(); evictedPagesCntr = new AtomicInteger(); boolean success = false; int destroyedPartitionsCnt; try { if (chp.hasDelta()) { // Identity stores set. ConcurrentLinkedHashMap<PageStore, LongAdder> updStores = new ConcurrentLinkedHashMap<>(); CountDownFuture doneWriteFut = new CountDownFuture( asyncRunner == null ? 1 : chp.cpPages.collectionsSize()); tracker.onPagesWriteStart(); final int totalPagesToWriteCnt = chp.cpPages.size(); if (asyncRunner != null) { for (int i = 0; i < chp.cpPages.collectionsSize(); i++) { Runnable write = new WriteCheckpointPages( tracker, chp.cpPages.innerCollection(i), updStores, doneWriteFut, totalPagesToWriteCnt, new Runnable() { @Override public void run() { updateHeartbeat(); } }, asyncRunner ); try { asyncRunner.execute(write); } catch (RejectedExecutionException ignore) { // Run the task synchronously. updateHeartbeat(); write.run(); } } } else { // Single-threaded checkpoint. 
updateHeartbeat(); Runnable write = new WriteCheckpointPages( tracker, chp.cpPages, updStores, doneWriteFut, totalPagesToWriteCnt, new Runnable() { @Override public void run() { updateHeartbeat(); } }, null); write.run(); } updateHeartbeat(); // Wait and check for errors. doneWriteFut.get(); // Must re-check shutdown flag here because threads may have skipped some pages. // If so, we should not put finish checkpoint mark. if (shutdownNow) { chp.progress.cpFinishFut.onDone(new NodeStoppingException("Node is stopping.")); return; } tracker.onFsyncStart(); if (!skipSync) { for (Map.Entry<PageStore, LongAdder> updStoreEntry : updStores.entrySet()) { if (shutdownNow) { chp.progress.cpFinishFut.onDone(new NodeStoppingException("Node is stopping.")); return; } blockingSectionBegin(); try { updStoreEntry.getKey().sync(); } finally { blockingSectionEnd(); } syncedPagesCntr.addAndGet(updStoreEntry.getValue().intValue()); } } } else { tracker.onPagesWriteStart(); tracker.onFsyncStart(); } snapshotMgr.afterCheckpointPageWritten(); destroyedPartitionsCnt = destroyEvictedPartitions(); // Must mark successful checkpoint only if there are no exceptions or interrupts. success = true; } finally { if (success) markCheckpointEnd(chp); } tracker.onEnd(); if (chp.hasDelta() || destroyedPartitionsCnt > 0) { if (printCheckpointStats) { if (log.isInfoEnabled()) { String walSegsCoveredMsg = prepareWalSegsCoveredMsg(chp.walSegsCoveredRange); log.info(String.format("Checkpoint finished [cpId=%s, pages=%d, markPos=%s, " + "walSegmentsCleared=%d, walSegmentsCovered=%s, markDuration=%dms, pagesWrite=%dms, fsync=%dms, " + "total=%dms]", chp.cpEntry != null ? chp.cpEntry.checkpointId() : "", chp.pagesSize, chp.cpEntry != null ? 
chp.cpEntry.checkpointMark() : "", chp.walFilesDeleted, walSegsCoveredMsg, tracker.markDuration(), tracker.pagesWriteDuration(), tracker.fsyncDuration(), tracker.totalDuration())); } } } updateMetrics(chp, tracker); } catch (IgniteCheckedException e) { if (chp != null) chp.progress.cpFinishFut.onDone(e); cctx.kernalContext().failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e)); } } /** * @param chp Checkpoint. * @param tracker Tracker. */ private void updateMetrics(Checkpoint chp, CheckpointMetricsTracker tracker) { if (persStoreMetrics.metricsEnabled()) { persStoreMetrics.onCheckpoint( tracker.lockWaitDuration(), tracker.markDuration(), tracker.pagesWriteDuration(), tracker.fsyncDuration(), tracker.totalDuration(), chp.pagesSize, tracker.dataPagesWritten(), tracker.cowPagesWritten(), forAllPageStores(PageStore::size), forAllPageStores(PageStore::getSparseSize)); } } /** */ private String prepareWalSegsCoveredMsg(IgniteBiTuple<Long, Long> walRange) { String res; long startIdx = walRange.get1(); long endIdx = walRange.get2(); if (endIdx < 0 || endIdx < startIdx) res = "[]"; else if (endIdx == startIdx) res = "[" + endIdx + "]"; else res = "[" + startIdx + " - " + endIdx + "]"; return res; } /** * Processes all evicted partitions scheduled for destroy. * * @throws IgniteCheckedException If failed. * * @return The number of destroyed partition files. 
*/
private int destroyEvictedPartitions() throws IgniteCheckedException {
    PartitionDestroyQueue destroyQueue = curCpProgress.destroyQueue;

    if (destroyQueue.pendingReqs.isEmpty())
        return 0;

    List<PartitionDestroyRequest> reqs = null;

    for (final PartitionDestroyRequest req : destroyQueue.pendingReqs.values()) {
        // Skip requests that were concurrently cancelled (beginDestroy() lost the race).
        if (!req.beginDestroy())
            continue;

        final int grpId = req.grpId;
        final int partId = req.partId;

        CacheGroupContext grp = cctx.cache().cacheGroup(grpId);

        assert grp != null
            : "Cache group is not initialized [grpId=" + grpId + "]";
        assert grp.offheap() instanceof GridCacheOffheapManager
            : "Destroying partition files when persistence is off " + grp.offheap();

        final GridCacheOffheapManager offheap = (GridCacheOffheapManager) grp.offheap();

        Runnable destroyPartTask = () -> {
            try {
                offheap.destroyPartitionStore(grpId, partId);

                // Completes the request future; waiters in waitCompleted() are released.
                req.onDone(null);

                if (log.isDebugEnabled())
                    log.debug("Partition file has destroyed [grpId=" + grpId + ", partId=" + partId + "]");
            }
            catch (Exception e) {
                req.onDone(new IgniteCheckedException(
                    "Partition file destroy has failed [grpId=" + grpId + ", partId=" + partId + "]", e));
            }
        };

        if (asyncRunner != null) {
            try {
                asyncRunner.execute(destroyPartTask);
            }
            catch (RejectedExecutionException ignore) {
                // Run the task synchronously.
                destroyPartTask.run();
            }
        }
        else
            destroyPartTask.run();

        // Collect started requests so completion is awaited only after ALL tasks are submitted.
        if (reqs == null)
            reqs = new ArrayList<>();

        reqs.add(req);
    }

    if (reqs != null)
        for (PartitionDestroyRequest req : reqs)
            req.waitCompleted();

    destroyQueue.pendingReqs.clear();

    return reqs != null ? reqs.size() : 0;
}

/**
 * @param grpCtx Group context. Can be {@code null} in case of crash recovery.
 * @param grpId Group ID.
 * @param partId Partition ID.
*/ private void schedulePartitionDestroy(@Nullable CacheGroupContext grpCtx, int grpId, int partId) { synchronized (this) { scheduledCp.destroyQueue.addDestroyRequest(grpCtx, grpId, partId); } if (log.isDebugEnabled()) log.debug("Partition file has been scheduled to destroy [grpId=" + grpId + ", partId=" + partId + "]"); if (grpCtx != null) wakeupForCheckpoint(PARTITION_DESTROY_CHECKPOINT_TIMEOUT, "partition destroy"); } /** * @param grpId Group ID. * @param partId Partition ID. */ private void cancelOrWaitPartitionDestroy(int grpId, int partId) throws IgniteCheckedException { PartitionDestroyRequest req; synchronized (this) { req = scheduledCp.destroyQueue.cancelDestroy(grpId, partId); } if (req != null) req.waitCompleted(); CheckpointProgress cur; synchronized (this) { cur = curCpProgress; if (cur != null) req = cur.destroyQueue.cancelDestroy(grpId, partId); } if (req != null) req.waitCompleted(); if (req != null && log.isDebugEnabled()) log.debug("Partition file destroy has cancelled [grpId=" + grpId + ", partId=" + partId + "]"); } /** * */ private void waitCheckpointEvent() { boolean cancel = false; try { synchronized (this) { long remaining; while ((remaining = scheduledCp.nextCpTs - U.currentTimeMillis()) > 0 && !isCancelled()) { blockingSectionBegin(); try { wait(remaining); } finally { blockingSectionEnd(); } } } } catch (InterruptedException ignored) { Thread.currentThread().interrupt(); cancel = true; } if (cancel) isCancelled = true; } /** * */ @SuppressWarnings("TooBroadScope") private Checkpoint markCheckpointBegin(CheckpointMetricsTracker tracker) throws IgniteCheckedException { long cpTs = updateLastCheckpointTime(); CheckpointProgress curr = updateCurrentCheckpointProgress(); CheckpointRecord cpRec = new CheckpointRecord(memoryRecoveryRecordPtr); memoryRecoveryRecordPtr = null; CheckpointEntry cp = null; IgniteFuture snapFut = null; IgniteBiTuple<Collection<GridMultiCollectionWrapper<FullPageId>>, Integer> cpPagesTuple; boolean hasPages, 
hasPartitionsToDestroy; DbCheckpointContextImpl ctx0 = new DbCheckpointContextImpl(curr, new PartitionAllocationMap()); internalReadLock(); try { for (DbCheckpointListener lsnr : lsnrs) lsnr.beforeCheckpointBegin(ctx0); ctx0.awaitPendingTasksFinished(); } finally { internalReadUnlock(); } tracker.onLockWaitStart(); checkpointLock.writeLock().lock(); try { assert curCpProgress == curr : "Concurrent checkpoint begin should not be happened"; tracker.onMarkStart(); // Listeners must be invoked before we write checkpoint record to WAL. for (DbCheckpointListener lsnr : lsnrs) lsnr.onMarkCheckpointBegin(ctx0); ctx0.awaitPendingTasksFinished(); tracker.onListenersExecuteEnd(); if (curr.nextSnapshot) snapFut = snapshotMgr.onMarkCheckPointBegin(curr.snapshotOperation, ctx0.partitionStatMap()); fillCacheGroupState(cpRec); cpPagesTuple = beginAllCheckpoints(); hasPages = hasPageForWrite(cpPagesTuple.get1()); hasPartitionsToDestroy = !curr.destroyQueue.pendingReqs.isEmpty(); WALPointer cpPtr = null; if (hasPages || curr.nextSnapshot || hasPartitionsToDestroy) { // No page updates for this checkpoint are allowed from now on. 
cpPtr = cctx.wal().log(cpRec); if (cpPtr == null) cpPtr = CheckpointStatus.NULL_PTR; } if (hasPages || hasPartitionsToDestroy) { cp = prepareCheckpointEntry( tmpWriteBuf, cpTs, cpRec.checkpointId(), cpPtr, cpRec, CheckpointEntryType.START); cpHistory.addCheckpoint(cp); } } finally { checkpointLock.writeLock().unlock(); tracker.onLockRelease(); } DbCheckpointListener.Context ctx = createOnCheckpointBeginContext(ctx0, hasPages); curr.cpBeginFut.onDone(); for (DbCheckpointListener lsnr : lsnrs) lsnr.onCheckpointBegin(ctx); if (snapFut != null) { try { snapFut.get(); } catch (IgniteException e) { U.error(log, "Failed to wait for snapshot operation initialization: " + curr.snapshotOperation, e); } } if (hasPages || hasPartitionsToDestroy) { assert cp != null; assert cp.checkpointMark() != null; tracker.onWalCpRecordFsyncStart(); // Sync log outside the checkpoint write lock. cctx.wal().flush(cp.checkpointMark(), true); tracker.onWalCpRecordFsyncEnd(); writeCheckpointEntry(tmpWriteBuf, cp, CheckpointEntryType.START); GridMultiCollectionWrapper<FullPageId> cpPages = splitAndSortCpPagesIfNeeded( cpPagesTuple, persistenceCfg.getCheckpointThreads()); if (printCheckpointStats && log.isInfoEnabled()) { long possibleJvmPauseDur = possibleLongJvmPauseDuration(tracker); log.info( String.format( CHECKPOINT_STARTED_LOG_FORMAT, cpRec.checkpointId(), cp.checkpointMark(), tracker.beforeLockDuration(), tracker.lockWaitDuration(), tracker.listenersExecuteDuration(), tracker.lockHoldDuration(), tracker.walCpRecordFsyncDuration(), possibleJvmPauseDur > 0 ? 
                        "possibleJvmPauseDuration=" + possibleJvmPauseDur + "ms," : "",
                    cpPages.size(),
                    curr.reason
                )
            );
        }

        return new Checkpoint(cp, cpPages, curr);
    }
    else {
        if (curr.nextSnapshot)
            cctx.wal().flush(null, true);

        if (printCheckpointStats) {
            if (log.isInfoEnabled())
                LT.info(log, String.format("Skipping checkpoint (no pages were modified) [" +
                    "checkpointBeforeLockTime=%dms, checkpointLockWait=%dms, " +
                    "checkpointListenersExecuteTime=%dms, checkpointLockHoldTime=%dms, reason='%s']",
                    tracker.beforeLockDuration(),
                    tracker.lockWaitDuration(),
                    tracker.listenersExecuteDuration(),
                    tracker.lockHoldDuration(),
                    curr.reason));
        }

        // Empty checkpoint: no entry, no pages to write.
        return new Checkpoint(null, new GridMultiCollectionWrapper<>(new Collection[0]), curr);
    }
}

/**
 * @param tracker Checkpoint metrics tracker.
 * @return Duration of possible JVM pause, if it was detected, or {@code -1} otherwise.
 */
private long possibleLongJvmPauseDuration(CheckpointMetricsTracker tracker) {
    if (LongJVMPauseDetector.enabled()) {
        if (tracker.lockWaitDuration() + tracker.lockHoldDuration() > longJvmPauseThreshold) {
            long now = System.currentTimeMillis();

            // We must get last wake up time before search possible pause in events map.
            long wakeUpTime = pauseDetector.getLastWakeUpTime();

            IgniteBiTuple<Long, Long> lastLongPause = pauseDetector.getLastLongPause();

            if (lastLongPause != null && tracker.checkpointStartTime() < lastLongPause.get1())
                return lastLongPause.get2();

            if (now - wakeUpTime > longJvmPauseThreshold)
                return now - wakeUpTime;
        }
    }

    return -1L;
}

/**
 * Releases the internal checkpoint read lock and decrements the assertion hold counter.
 * NOTE(review): the original javadoc here ("Take read lock for internal use.") was swapped
 * with the one on {@code internalReadLock()} below; both are fixed to match the code.
 */
private void internalReadUnlock() {
    checkpointLock.readLock().unlock();

    if (ASSERTION_ENABLED)
        CHECKPOINT_LOCK_HOLD_COUNT.set(CHECKPOINT_LOCK_HOLD_COUNT.get() - 1);
}

/**
 * Takes the checkpoint read lock for internal use and increments the assertion hold counter.
 */
private void internalReadLock() {
    checkpointLock.readLock().lock();

    if (ASSERTION_ENABLED)
        CHECKPOINT_LOCK_HOLD_COUNT.set(CHECKPOINT_LOCK_HOLD_COUNT.get() + 1);
}

/**
 * Fill cache group state in checkpoint record.
 *
 * @param cpRec Checkpoint record for filling.
* @throws IgniteCheckedException if fail. */ private void fillCacheGroupState(CheckpointRecord cpRec) throws IgniteCheckedException { GridCompoundFuture grpHandleFut = asyncRunner == null ? null : new GridCompoundFuture(); for (CacheGroupContext grp : cctx.cache().cacheGroups()) { if (grp.isLocal() || !grp.walEnabled()) continue; Runnable r = () -> { ArrayList<GridDhtLocalPartition> parts = new ArrayList<>(grp.topology().localPartitions().size()); for (GridDhtLocalPartition part : grp.topology().currentLocalPartitions()) parts.add(part); CacheState state = new CacheState(parts.size()); for (GridDhtLocalPartition part : parts) { state.addPartitionState( part.id(), part.dataStore().fullSize(), part.updateCounter(), (byte)part.state().ordinal() ); } synchronized (cpRec) { cpRec.addCacheGroupState(grp.groupId(), state); } }; if (asyncRunner == null) r.run(); else try { GridFutureAdapter<?> res = new GridFutureAdapter<>(); asyncRunner.execute(U.wrapIgniteFuture(r, res)); grpHandleFut.add(res); } catch (RejectedExecutionException e) { assert false : "Task should never be rejected by async runner"; throw new IgniteException(e); //to protect from disabled asserts and call to failure handler } } if (grpHandleFut != null) { grpHandleFut.markInitialized(); grpHandleFut.get(); } } /** * @return Last checkpoint time. */ private long updateLastCheckpointTime() { long cpTs = System.currentTimeMillis(); // This can happen in an unlikely event of two checkpoints happening // within a currentTimeMillis() granularity window. if (cpTs == lastCpTs) cpTs++; lastCpTs = cpTs; return cpTs; } /** * Update current checkpoint progress by scheduled. * * @return Current checkpoint progress. 
*/ @NotNull private GridCacheDatabaseSharedManager.CheckpointProgress updateCurrentCheckpointProgress() { final CheckpointProgress curr; synchronized (this) { curr = scheduledCp; curr.started = true; if (curr.reason == null) curr.reason = "timeout"; // It is important that we assign a new progress object before checkpoint mark in page memory. scheduledCp = new CheckpointProgress(U.currentTimeMillis() + checkpointFreq); curCpProgress = curr; } return curr; } /** */ private DbCheckpointListener.Context createOnCheckpointBeginContext( DbCheckpointListener.Context delegate, boolean hasPages ) { return new DbCheckpointListener.Context() { /** {@inheritDoc} */ @Override public boolean nextSnapshot() { return delegate.nextSnapshot(); } /** {@inheritDoc} */ @Override public PartitionAllocationMap partitionStatMap() { return delegate.partitionStatMap(); } /** {@inheritDoc} */ @Override public boolean needToSnapshot(String cacheOrGrpName) { return delegate.needToSnapshot(cacheOrGrpName); } /** {@inheritDoc} */ @Override public @Nullable Executor executor() { return delegate.executor(); } /** {@inheritDoc} */ @Override public boolean hasPages() { return hasPages; } }; } /** * Check that at least one collection is not empty. * * @param cpPagesCollWrapper Collection of {@link GridMultiCollectionWrapper} checkpoint pages. */ private boolean hasPageForWrite(Collection<GridMultiCollectionWrapper<FullPageId>> cpPagesCollWrapper) { boolean hasPages = false; for (Collection c : cpPagesCollWrapper) if (!c.isEmpty()) { hasPages = true; break; } return hasPages; } /** * @return tuple with collections of FullPageIds obtained from each PageMemory and overall number of dirty * pages. 
*/ private IgniteBiTuple<Collection<GridMultiCollectionWrapper<FullPageId>>, Integer> beginAllCheckpoints() { Collection<GridMultiCollectionWrapper<FullPageId>> res = new ArrayList(dataRegions().size()); int pagesNum = 0; for (DataRegion memPlc : dataRegions()) { if (!memPlc.config().isPersistenceEnabled()) continue; GridMultiCollectionWrapper<FullPageId> nextCpPagesCol = ((PageMemoryEx)memPlc.pageMemory()).beginCheckpoint(); pagesNum += nextCpPagesCol.size(); res.add(nextCpPagesCol); } currCheckpointPagesCnt = pagesNum; return new IgniteBiTuple<>(res, pagesNum); } /** * @param chp Checkpoint snapshot. */ private void markCheckpointEnd(Checkpoint chp) throws IgniteCheckedException { synchronized (this) { writtenPagesCntr = null; syncedPagesCntr = null; evictedPagesCntr = null; for (DataRegion memPlc : dataRegions()) { if (!memPlc.config().isPersistenceEnabled()) continue; ((PageMemoryEx)memPlc.pageMemory()).finishCheckpoint(); } currCheckpointPagesCnt = 0; } if (chp.hasDelta()) { CheckpointEntry cp = prepareCheckpointEntry( tmpWriteBuf, chp.cpEntry.timestamp(), chp.cpEntry.checkpointId(), chp.cpEntry.checkpointMark(), null, CheckpointEntryType.END); writeCheckpointEntry(tmpWriteBuf, cp, CheckpointEntryType.END); cctx.wal().notchLastCheckpointPtr(chp.cpEntry.checkpointMark()); } List<CheckpointEntry> removedFromHistory = cpHistory.onCheckpointFinished(chp, truncateWalOnCpFinish); for (CheckpointEntry cp : removedFromHistory) removeCheckpointFiles(cp); if (chp.progress != null) chp.progress.cpFinishFut.onDone(); } /** {@inheritDoc} */ @Override public void cancel() { if (log.isDebugEnabled()) log.debug("Cancelling grid runnable: " + this); // Do not interrupt runner thread. isCancelled = true; synchronized (this) { notifyAll(); } } /** * */ public void shutdownNow() { shutdownNow = true; if (!isCancelled) cancel(); } /** * Context with information about current snapshots. 
*/ private class DbCheckpointContextImpl implements DbCheckpointListener.Context { /** Current checkpoint progress. */ private final CheckpointProgress curr; /** Partition map. */ private final PartitionAllocationMap map; /** Pending tasks from executor. */ private GridCompoundFuture pendingTaskFuture; /** * @param curr Current checkpoint progress. * @param map Partition map. */ private DbCheckpointContextImpl(CheckpointProgress curr, PartitionAllocationMap map) { this.curr = curr; this.map = map; this.pendingTaskFuture = asyncRunner == null ? null : new GridCompoundFuture(); } /** {@inheritDoc} */ @Override public boolean nextSnapshot() { return curr.nextSnapshot; } /** {@inheritDoc} */ @Override public PartitionAllocationMap partitionStatMap() { return map; } /** {@inheritDoc} */ @Override public boolean needToSnapshot(String cacheOrGrpName) { return curr.snapshotOperation.cacheGroupIds().contains(CU.cacheId(cacheOrGrpName)); } /** {@inheritDoc} */ @Override public Executor executor() { return asyncRunner == null ? null : cmd -> { try { GridFutureAdapter<?> res = new GridFutureAdapter<>(); asyncRunner.execute(U.wrapIgniteFuture(cmd, res)); pendingTaskFuture.add(res); } catch (RejectedExecutionException e) { assert false : "A task should never be rejected by async runner"; } }; } /** {@inheritDoc} */ @Override public boolean hasPages() { throw new IllegalStateException( "Property is unknown at this moment. You should use onCheckpointBegin() method." ); } /** * Await all async tasks from executor was finished. * * @throws IgniteCheckedException if fail. 
*/ public void awaitPendingTasksFinished() throws IgniteCheckedException { GridCompoundFuture pendingFut = this.pendingTaskFuture; this.pendingTaskFuture = new GridCompoundFuture(); if (pendingFut != null) { pendingFut.markInitialized(); pendingFut.get(); } } } } /** * Reorders list of checkpoint pages and splits them into needed number of sublists according to * {@link DataStorageConfiguration#getCheckpointThreads()} and * {@link DataStorageConfiguration#getCheckpointWriteOrder()}. * * @param cpPagesTuple Checkpoint pages tuple. * @param threads Checkpoint runner threads. */ private GridMultiCollectionWrapper<FullPageId> splitAndSortCpPagesIfNeeded( IgniteBiTuple<Collection<GridMultiCollectionWrapper<FullPageId>>, Integer> cpPagesTuple, int threads ) throws IgniteCheckedException { FullPageId[] pagesArr = new FullPageId[cpPagesTuple.get2()]; int realPagesArrSize = 0; for (GridMultiCollectionWrapper<FullPageId> colWrapper : cpPagesTuple.get1()) { for (int i = 0; i < colWrapper.collectionsSize(); i++) for (FullPageId page : colWrapper.innerCollection(i)) { if (realPagesArrSize == pagesArr.length) throw new AssertionError("Incorrect estimated dirty pages number: " + pagesArr.length); pagesArr[realPagesArrSize++] = page; } } FullPageId fakeMaxFullPageId = new FullPageId(Long.MAX_VALUE, Integer.MAX_VALUE); // Some pages may have been replaced, need to fill end of array with fake ones to prevent NPE during sort. 
for (int i = realPagesArrSize; i < pagesArr.length; i++) pagesArr[i] = fakeMaxFullPageId; if (persistenceCfg.getCheckpointWriteOrder() == CheckpointWriteOrder.SEQUENTIAL) { Comparator<FullPageId> cmp = new Comparator<FullPageId>() { @Override public int compare(FullPageId o1, FullPageId o2) { int cmp = Long.compare(o1.groupId(), o2.groupId()); if (cmp != 0) return cmp; return Long.compare(PageIdUtils.effectivePageId(o1.pageId()), PageIdUtils.effectivePageId(o2.pageId())); } }; if (pagesArr.length >= parallelSortThreshold) parallelSortInIsolatedPool(pagesArr, cmp); else Arrays.sort(pagesArr, cmp); } int pagesSubLists = threads == 1 ? 1 : threads * 4; // Splitting pages to (threads * 4) subtasks. If any thread will be faster, it will help slower threads. Collection[] pagesSubListArr = new Collection[pagesSubLists]; for (int i = 0; i < pagesSubLists; i++) { int from = (int)((long)realPagesArrSize * i / pagesSubLists); int to = (int)((long)realPagesArrSize * (i + 1) / pagesSubLists); pagesSubListArr[i] = new GridReadOnlyArrayView(pagesArr, from, to); } return new GridMultiCollectionWrapper<FullPageId>(pagesSubListArr); } /** * Performs parallel sort in isolated fork join pool. * * @param pagesArr Pages array. * @param cmp Cmp. 
*/ private static void parallelSortInIsolatedPool( FullPageId[] pagesArr, Comparator<FullPageId> cmp ) throws IgniteCheckedException { ForkJoinPool.ForkJoinWorkerThreadFactory factory = new ForkJoinPool.ForkJoinWorkerThreadFactory() { @Override public ForkJoinWorkerThread newThread(ForkJoinPool pool) { ForkJoinWorkerThread worker = ForkJoinPool.defaultForkJoinWorkerThreadFactory.newThread(pool); worker.setName("checkpoint-pages-sorter-" + worker.getPoolIndex()); return worker; } }; ForkJoinPool forkJoinPool = new ForkJoinPool(PARALLEL_SORT_THREADS + 1, factory, null, false); ForkJoinTask sortTask = forkJoinPool.submit(() -> Arrays.parallelSort(pagesArr, cmp)); try { sortTask.get(); } catch (InterruptedException e) { throw new IgniteInterruptedCheckedException(e); } catch (ExecutionException e) { throw new IgniteCheckedException("Failed to perform pages array parallel sort", e.getCause()); } forkJoinPool.shutdown(); } /** Pages write task */ private class WriteCheckpointPages implements Runnable { /** */ private final CheckpointMetricsTracker tracker; /** Collection of page IDs to write under this task. Overall pages to write may be greater than this collection */ private final Collection<FullPageId> writePageIds; /** */ private final ConcurrentLinkedHashMap<PageStore, LongAdder> updStores; /** */ private final CountDownFuture doneFut; /** Total pages to write, counter may be greater than {@link #writePageIds} size */ private final int totalPagesToWrite; /** */ private final Runnable beforePageWrite; /** If any pages were skipped, new task with remaining pages will be submitted here. */ private final ExecutorService retryWriteExecutor; /** * Creates task for write pages * * @param tracker * @param writePageIds Collection of page IDs to write. * @param updStores * @param doneFut * @param totalPagesToWrite total pages to be written under this checkpoint * @param beforePageWrite Action to be performed before every page write. 
* @param retryWriteExecutor Retry write executor. */ private WriteCheckpointPages( final CheckpointMetricsTracker tracker, final Collection<FullPageId> writePageIds, final ConcurrentLinkedHashMap<PageStore, LongAdder> updStores, final CountDownFuture doneFut, final int totalPagesToWrite, final Runnable beforePageWrite, final ExecutorService retryWriteExecutor ) { this.tracker = tracker; this.writePageIds = writePageIds; this.updStores = updStores; this.doneFut = doneFut; this.totalPagesToWrite = totalPagesToWrite; this.beforePageWrite = beforePageWrite; this.retryWriteExecutor = retryWriteExecutor; } /** {@inheritDoc} */ @Override public void run() { snapshotMgr.beforeCheckpointPageWritten(); Collection<FullPageId> writePageIds = this.writePageIds; try { List<FullPageId> pagesToRetry = writePages(writePageIds); if (pagesToRetry.isEmpty()) doneFut.onDone((Void)null); else { if (retryWriteExecutor == null) { while (!pagesToRetry.isEmpty()) pagesToRetry = writePages(pagesToRetry); doneFut.onDone((Void)null); } else { // Submit current retry pages to the end of the queue to avoid starvation. WriteCheckpointPages retryWritesTask = new WriteCheckpointPages( tracker, pagesToRetry, updStores, doneFut, totalPagesToWrite, beforePageWrite, retryWriteExecutor); retryWriteExecutor.submit(retryWritesTask); } } } catch (Throwable e) { doneFut.onDone(e); } } /** * @param writePageIds Collections of pages to write. * @return pagesToRetry Pages which should be retried. */ private List<FullPageId> writePages(Collection<FullPageId> writePageIds) throws IgniteCheckedException { ByteBuffer tmpWriteBuf = threadBuf.get(); List<FullPageId> pagesToRetry = new ArrayList<>(); for (FullPageId fullId : writePageIds) { if (checkpointer.shutdownNow) break; tmpWriteBuf.rewind(); beforePageWrite.run(); snapshotMgr.beforePageWrite(fullId); int grpId = fullId.groupId(); PageMemoryEx pageMem; // TODO IGNITE-7792 add generic mapping. 
if (grpId == MetaStorage.METASTORAGE_CACHE_ID) pageMem = (PageMemoryEx)metaStorage.pageMemory(); else if (grpId == TxLog.TX_LOG_CACHE_ID) pageMem = (PageMemoryEx)dataRegion(TxLog.TX_LOG_CACHE_NAME).pageMemory(); else { CacheGroupContext grp = context().cache().cacheGroup(grpId); DataRegion region = grp != null ? grp.dataRegion() : null; if (region == null || !region.config().isPersistenceEnabled()) continue; pageMem = (PageMemoryEx)region.pageMemory(); } Integer tag = pageMem.getForCheckpoint( fullId, tmpWriteBuf, persStoreMetrics.metricsEnabled() ? tracker : null); if (tag != null) { if (tag == PageMemoryImpl.TRY_AGAIN_TAG) { pagesToRetry.add(fullId); continue; } assert PageIO.getType(tmpWriteBuf) != 0 : "Invalid state. Type is 0! pageId = " + U.hexLong(fullId.pageId()); assert PageIO.getVersion(tmpWriteBuf) != 0 : "Invalid state. Version is 0! pageId = " + U.hexLong(fullId.pageId()); tmpWriteBuf.rewind(); if (persStoreMetrics.metricsEnabled()) { int pageType = PageIO.getType(tmpWriteBuf); if (PageIO.isDataPageType(pageType)) tracker.onDataPageWritten(); } writtenPagesCntr.incrementAndGet(); PageStore store = storeMgr.writeInternal(grpId, fullId.pageId(), tmpWriteBuf, tag, true); updStores.computeIfAbsent(store, k -> new LongAdder()).increment(); } } return pagesToRetry; } } /** * */ public static class Checkpoint { /** Checkpoint entry. */ @Nullable private final CheckpointEntry cpEntry; /** Checkpoint pages. */ private final GridMultiCollectionWrapper<FullPageId> cpPages; /** */ private final CheckpointProgress progress; /** Number of deleted WAL files. */ private int walFilesDeleted; /** WAL segments fully covered by this checkpoint. */ private IgniteBiTuple<Long, Long> walSegsCoveredRange; /** */ private final int pagesSize; /** * @param cpEntry Checkpoint entry. * @param cpPages Pages to write to the page store. * @param progress Checkpoint progress status. 
*/ private Checkpoint( @Nullable CheckpointEntry cpEntry, @NotNull GridMultiCollectionWrapper<FullPageId> cpPages, CheckpointProgress progress ) { this.cpEntry = cpEntry; this.cpPages = cpPages; this.progress = progress; pagesSize = cpPages.size(); } /** * @return {@code true} if this checkpoint contains at least one dirty page. */ public boolean hasDelta() { return pagesSize != 0; } /** * @param walFilesDeleted Wal files deleted. */ public void walFilesDeleted(int walFilesDeleted) { this.walFilesDeleted = walFilesDeleted; } /** * @param walSegsCoveredRange WAL segments fully covered by this checkpoint. */ public void walSegsCoveredRange(final IgniteBiTuple<Long, Long> walSegsCoveredRange) { this.walSegsCoveredRange = walSegsCoveredRange; } } /** * */ public static class CheckpointStatus { /** Null checkpoint UUID. */ private static final UUID NULL_UUID = new UUID(0L, 0L); /** Null WAL pointer. */ public static final WALPointer NULL_PTR = new FileWALPointer(0, 0, 0); /** */ private long cpStartTs; /** */ private UUID cpStartId; /** */ @GridToStringInclude private WALPointer startPtr; /** */ private UUID cpEndId; /** */ @GridToStringInclude private WALPointer endPtr; /** * @param cpStartId Checkpoint start ID. * @param startPtr Checkpoint start pointer. * @param cpEndId Checkpoint end ID. * @param endPtr Checkpoint end pointer. */ private CheckpointStatus(long cpStartTs, UUID cpStartId, WALPointer startPtr, UUID cpEndId, WALPointer endPtr) { this.cpStartTs = cpStartTs; this.cpStartId = cpStartId; this.startPtr = startPtr; this.cpEndId = cpEndId; this.endPtr = endPtr; } /** * @return {@code True} if need perform binary memory recovery. Only records {@link PageDeltaRecord} * and {@link PageSnapshot} needs to be applyed from {@link #cpStartId}. 
*/
public boolean needRestoreMemory() {
    // Binary memory recovery is required when a checkpoint start marker exists
    // but does not match the end marker, i.e. the node stopped mid-checkpoint.
    return !F.eq(cpStartId, cpEndId) && !F.eq(NULL_UUID, cpStartId);
}

/** {@inheritDoc} */
@Override public String toString() {
    return S.toString(CheckpointStatus.class, this);
}
}

/**
 * Data class representing the state of running/scheduled checkpoint.
 */
public static class CheckpointProgress {
    /** Scheduled time of checkpoint. */
    private volatile long nextCpTs;

    /** Checkpoint begin phase future. */
    private GridFutureAdapter cpBeginFut = new GridFutureAdapter<>();

    /** Checkpoint finish phase future. A failure here is also propagated to the begin future. */
    private GridFutureAdapter cpFinishFut = new GridFutureAdapter<Void>() {
        @Override protected boolean onDone(@Nullable Void res, @Nullable Throwable err, boolean cancel) {
            // If the checkpoint failed before the begin phase completed, fail the
            // begin future too so its waiters are not blocked forever.
            if (err != null && !cpBeginFut.isDone())
                cpBeginFut.onDone(err);

            return super.onDone(res, err, cancel);
        }
    };

    /** Flag indicates that snapshot operation will be performed after checkpoint. */
    private volatile boolean nextSnapshot;

    /** Flag indicates that checkpoint is started. */
    private volatile boolean started;

    /** Snapshot operation that should be performed if {@link #nextSnapshot} set to true. */
    private volatile SnapshotOperation snapshotOperation;

    /** Partitions destroy queue. */
    private final PartitionDestroyQueue destroyQueue = new PartitionDestroyQueue();

    /** Wakeup reason. */
    private String reason;

    /**
     * @param nextCpTs Next checkpoint timestamp.
*/ private CheckpointProgress(long nextCpTs) { this.nextCpTs = nextCpTs; } /** */ public boolean started() { return cpBeginFut.isDone(); } /** */ public boolean finished() { return cpFinishFut.isDone(); } } /** * */ private static class CheckpointProgressSnapshot implements CheckpointFuture { /** */ private final boolean started; /** */ private final GridFutureAdapter<Object> cpBeginFut; /** */ private final GridFutureAdapter<Object> cpFinishFut; /** */ CheckpointProgressSnapshot(CheckpointProgress cpProgress) { started = cpProgress.started; cpBeginFut = cpProgress.cpBeginFut; cpFinishFut = cpProgress.cpFinishFut; } /** {@inheritDoc} */ @Override public GridFutureAdapter beginFuture() { return cpBeginFut; } /** {@inheritDoc} */ @Override public GridFutureAdapter<Object> finishFuture() { return cpFinishFut; } /** {@inheritDoc} */ @Override public boolean started() { return started; } } /** * */ public static class FileLockHolder implements AutoCloseable { /** Lock file name. */ private static final String lockFileName = "lock"; /** File. */ private File file; /** Channel. */ private RandomAccessFile lockFile; /** Lock. */ private volatile FileLock lock; /** Kernal context to generate Id of locked node in file. */ @NotNull private GridKernalContext ctx; /** Logger. */ private IgniteLogger log; /** * @param path Path. */ public FileLockHolder(String path, @NotNull GridKernalContext ctx, IgniteLogger log) { try { file = Paths.get(path, lockFileName).toFile(); lockFile = new RandomAccessFile(file, "rw"); this.ctx = ctx; this.log = log; } catch (IOException e) { throw new IgniteException(e); } } /** * @param lockWaitTimeMillis During which time thread will try capture file lock. * @throws IgniteCheckedException If failed to capture file lock. 
*/ public void tryLock(long lockWaitTimeMillis) throws IgniteCheckedException { assert lockFile != null; FileChannel ch = lockFile.getChannel(); SB sb = new SB(); //write node id sb.a("[").a(ctx.localNodeId().toString()).a("]"); //write ip addresses final GridDiscoveryManager discovery = ctx.discovery(); if (discovery != null) { //discovery may be not up and running final ClusterNode node = discovery.localNode(); if (node != null) sb.a(node.addresses()); } //write ports sb.a("["); Iterator<GridPortRecord> it = ctx.ports().records().iterator(); while (it.hasNext()) { GridPortRecord rec = it.next(); sb.a(rec.protocol()).a(":").a(rec.port()); if (it.hasNext()) sb.a(", "); } sb.a("]"); String failMsg; try { String content = null; // Try to get lock, if not available wait 1 sec and re-try. for (int i = 0; i < lockWaitTimeMillis; i += 1000) { try { lock = ch.tryLock(0, 1, false); if (lock != null && lock.isValid()) { writeContent(sb.toString()); return; } } catch (OverlappingFileLockException ignore) { if (content == null) content = readContent(); log.warning("Failed to acquire file lock. Will try again in 1s " + "[nodeId=" + ctx.localNodeId() + ", holder=" + content + ", path=" + file.getAbsolutePath() + ']'); } U.sleep(1000); } if (content == null) content = readContent(); failMsg = "Failed to acquire file lock [holder=" + content + ", time=" + (lockWaitTimeMillis / 1000) + " sec, path=" + file.getAbsolutePath() + ']'; } catch (Exception e) { throw new IgniteCheckedException(e); } if (failMsg != null) throw new IgniteCheckedException(failMsg); } /** * Write node id (who captured lock) into lock file. * * @param content Node id. * @throws IOException if some fail while write node it. 
*/ private void writeContent(String content) throws IOException { FileChannel ch = lockFile.getChannel(); byte[] bytes = content.getBytes(); ByteBuffer buf = ByteBuffer.allocate(bytes.length); buf.put(bytes); buf.flip(); ch.write(buf, 1); ch.force(false); } /** * */ private String readContent() throws IOException { FileChannel ch = lockFile.getChannel(); ByteBuffer buf = ByteBuffer.allocate((int)(ch.size() - 1)); ch.read(buf, 1); String content = new String(buf.array()); buf.clear(); return content; } /** Locked or not. */ public boolean isLocked() { return lock != null && lock.isValid(); } /** Releases file lock */ public void release() { U.releaseQuiet(lock); } /** Closes file channel */ @Override public void close() { release(); U.closeQuiet(lockFile); } /** * @return Absolute path to lock file. */ private String lockPath() { return file.getAbsolutePath(); } } /** {@inheritDoc} */ @Override public DataStorageMetrics persistentStoreMetrics() { return new DataStorageMetricsSnapshot(persStoreMetrics); } /** * */ public DataStorageMetricsImpl persistentStoreMetricsImpl() { return persStoreMetrics; } /** {@inheritDoc} */ @Override public MetaStorage metaStorage() { return metaStorage; } /** {@inheritDoc} */ @Override public void notifyMetaStorageSubscribersOnReadyForRead() throws IgniteCheckedException { metastorageLifecycleLsnrs = cctx.kernalContext().internalSubscriptionProcessor().getMetastorageSubscribers(); readMetastore(); } /** {@inheritDoc} */ @Override public boolean walEnabled(int grpId, boolean local) { if (local) return !initiallyLocalWalDisabledGrps.contains(grpId); else return !initiallyGlobalWalDisabledGrps.contains(grpId); } /** {@inheritDoc} */ @Override public void walEnabled(int grpId, boolean enabled, boolean local) { String key = walGroupIdToKey(grpId, local); checkpointReadLock(); try { if (enabled) metaStorage.remove(key); else { metaStorage.write(key, true); lastCheckpointInapplicableForWalRebalance(grpId); } } catch (IgniteCheckedException e) 
{ throw new IgniteException("Failed to write cache group WAL state [grpId=" + grpId + ", enabled=" + enabled + ']', e); } finally { checkpointReadUnlock(); } } /** * Checks that checkpoint with timestamp {@code cpTs} is inapplicable as start point for WAL rebalance for given group {@code grpId}. * * @param cpTs Checkpoint timestamp. * @param grpId Group ID. * @return {@code true} if checkpoint {@code cpTs} is inapplicable as start point for WAL rebalance for {@code grpId}. * @throws IgniteCheckedException If failed to check. */ public boolean isCheckpointInapplicableForWalRebalance(Long cpTs, int grpId) throws IgniteCheckedException { return metaStorage.read(checkpointInapplicableCpAndGroupIdToKey(cpTs, grpId)) != null; } /** * Set last checkpoint as inapplicable for WAL rebalance for given group {@code grpId}. * * @param grpId Group ID. */ @Override public void lastCheckpointInapplicableForWalRebalance(int grpId) { checkpointReadLock(); try { CheckpointEntry lastCp = cpHistory.lastCheckpoint(); long lastCpTs = lastCp != null ? lastCp.timestamp() : 0; if (lastCpTs != 0) metaStorage.write(checkpointInapplicableCpAndGroupIdToKey(lastCpTs, grpId), true); } catch (IgniteCheckedException e) { log.error("Failed to mark last checkpoint as inapplicable for WAL rebalance for group: " + grpId, e); } finally { checkpointReadUnlock(); } } /** * */ private void fillWalDisabledGroups() { assert metaStorage != null; try { metaStorage.iterate(WAL_KEY_PREFIX, (key, val) -> { T2<Integer, Boolean> t2 = walKeyToGroupIdAndLocalFlag(key); if (t2 != null) { if (t2.get2()) initiallyLocalWalDisabledGrps.add(t2.get1()); else initiallyGlobalWalDisabledGrps.add(t2.get1()); } }, false); } catch (IgniteCheckedException e) { throw new IgniteException("Failed to read cache groups WAL state.", e); } } /** * Convert cache group ID to WAL state key. * * @param grpId Group ID. * @return Key. 
*/
private static String walGroupIdToKey(int grpId, boolean local) {
    // Local and global WAL-disabled markers use distinct metastorage key prefixes.
    if (local)
        return WAL_LOCAL_KEY_PREFIX + grpId;
    else
        return WAL_GLOBAL_KEY_PREFIX + grpId;
}

/**
 * Convert checkpoint timestamp and cache group ID to key for {@link #CHECKPOINT_INAPPLICABLE_FOR_REBALANCE} metastorage records.
 *
 * @param cpTs Checkpoint timestamp.
 * @param grpId Group ID.
 * @return Key.
 */
private static String checkpointInapplicableCpAndGroupIdToKey(long cpTs, int grpId) {
    return CHECKPOINT_INAPPLICABLE_FOR_REBALANCE + cpTs + "-" + grpId;
}

/**
 * Convert WAL state key to cache group ID and local flag.
 *
 * @param key Key.
 * @return Tuple of group ID and local flag, or {@code null} if the key has an unknown prefix.
 */
private static T2<Integer, Boolean> walKeyToGroupIdAndLocalFlag(String key) {
    if (key.startsWith(WAL_LOCAL_KEY_PREFIX))
        return new T2<>(Integer.parseInt(key.substring(WAL_LOCAL_KEY_PREFIX.length())), true);
    else if (key.startsWith(WAL_GLOBAL_KEY_PREFIX))
        return new T2<>(Integer.parseInt(key.substring(WAL_GLOBAL_KEY_PREFIX.length())), false);
    else
        return null;
}

/**
 * Dumps partitions info (see {@link #dumpPartitionsInfo(CacheGroupContext, IgniteLogger)})
 * for all non-local persistent cache groups.
 *
 * @param cctx Shared context.
 * @param log Logger.
 * @throws IgniteCheckedException If failed.
 */
private static void dumpPartitionsInfo(GridCacheSharedContext cctx, IgniteLogger log) throws IgniteCheckedException {
    for (CacheGroupContext grp : cctx.cache().cacheGroups()) {
        // Only distributed, persistence-enabled groups are of interest here.
        if (grp.isLocal() || !grp.persistenceEnabled())
            continue;

        dumpPartitionsInfo(grp, log);
    }
}

/**
 * Retrieves from page memory meta information about given {@code grp} group partitions
 * and dumps this information to log INFO level.
 *
 * @param grp Cache group.
 * @param log Logger.
 * @throws IgniteCheckedException If failed.
*/
private static void dumpPartitionsInfo(CacheGroupContext grp, IgniteLogger log) throws IgniteCheckedException {
    PageMemoryEx pageMem = (PageMemoryEx)grp.dataRegion().pageMemory();

    IgnitePageStoreManager pageStore = grp.shared().pageStore();

    assert pageStore != null : "Persistent cache should have initialize page store manager.";

    for (int p = 0; p < grp.affinity().partitions(); p++) {
        // If the partition is already materialized locally, report its in-memory state.
        if (grp.topology().localPartition(p) != null) {
            GridDhtLocalPartition part = grp.topology().localPartition(p);

            log.info("Partition [grp=" + grp.cacheOrGroupName()
                + ", id=" + p
                + ", state=" + part.state()
                + ", counter=" + part.updateCounter()
                + ", size=" + part.fullSize() + "]");

            continue;
        }

        // Otherwise try to read the state from the partition meta page on disk.
        if (!pageStore.exists(grp.groupId(), p))
            continue;

        pageStore.ensure(grp.groupId(), p);

        // A store with at most one page holds only the file header — no partition meta yet.
        if (pageStore.pages(grp.groupId(), p) <= 1) {
            log.info("Partition [grp=" + grp.cacheOrGroupName() + ", id=" + p + ", state=N/A (only file header) ]");

            continue;
        }

        long partMetaId = pageMem.partitionMetaPageId(grp.groupId(), p);
        long partMetaPage = pageMem.acquirePage(grp.groupId(), partMetaId);

        try {
            long pageAddr = pageMem.readLock(grp.groupId(), partMetaId, partMetaPage);

            try {
                PagePartitionMetaIO io = PagePartitionMetaIO.VERSIONS.forPage(pageAddr);

                GridDhtPartitionState partitionState = fromOrdinal(io.getPartitionState(pageAddr));

                String state = partitionState != null ? partitionState.toString() : "N/A";

                long updateCounter = io.getUpdateCounter(pageAddr);
                long size = io.getSize(pageAddr);

                log.info("Partition [grp=" + grp.cacheOrGroupName()
                    + ", id=" + p
                    + ", state=" + state
                    + ", counter=" + updateCounter
                    + ", size=" + size + "]");
            }
            finally {
                // Release the read lock before releasing the page itself.
                pageMem.readUnlock(grp.groupId(), partMetaId, partMetaPage);
            }
        }
        finally {
            pageMem.releasePage(grp.groupId(), partMetaId, partMetaPage);
        }
    }
}

/**
 * Recovery lifecycle for read-write metastorage.
*/
private class MetastorageRecoveryLifecycle implements DatabaseLifecycleListener {
    /** {@inheritDoc} */
    @Override public void beforeBinaryMemoryRestore(IgniteCacheDatabaseSharedManager mgr) throws IgniteCheckedException {
        // The metastorage page store must exist before binary memory is restored.
        cctx.pageStore().initializeForMetastorage();
    }

    /** {@inheritDoc} */
    @Override public void afterBinaryMemoryRestore(
        IgniteCacheDatabaseSharedManager mgr,
        RestoreBinaryState restoreState
    ) throws IgniteCheckedException {
        assert metaStorage == null;

        // Create the read-write metastorage once binary memory state is consistent.
        metaStorage = createMetastorage(false);
    }
}

/**
 * @return Cache group predicate that passes only Metastorage cache group id.
 */
private IgnitePredicate<Integer> onlyMetastorageGroup() {
    return groupId -> MetaStorage.METASTORAGE_CACHE_ID == groupId;
}

/**
 * @return Cache group predicate that passes only cache groups with enabled WAL.
 */
private IgnitePredicate<Integer> groupsWithEnabledWal() {
    // A group's WAL counts as enabled only if it is disabled neither globally nor locally.
    return groupId -> !initiallyGlobalWalDisabledGrps.contains(groupId)
        && !initiallyLocalWalDisabledGrps.contains(groupId);
}

/**
 * @return WAL records predicate that passes only Metastorage data records.
 */
private IgniteBiPredicate<WALRecord.RecordType, WALPointer> onlyMetastorageRecords() {
    return (type, ptr) -> type == METASTORE_DATA_RECORD;
}

/**
 * @return WAL records predicate that passes only physical and mixed WAL records.
 */
private IgniteBiPredicate<WALRecord.RecordType, WALPointer> physicalRecords() {
    return (type, ptr) -> type.purpose() == WALRecord.RecordPurpose.PHYSICAL
        || type.purpose() == WALRecord.RecordPurpose.MIXED;
}

/**
 * @return WAL records predicate that passes only logical and mixed WAL records.
 */
private IgniteBiPredicate<WALRecord.RecordType, WALPointer> logicalRecords() {
    return (type, ptr) -> type.purpose() == WALRecord.RecordPurpose.LOGICAL
        || type.purpose() == WALRecord.RecordPurpose.MIXED;
}

/**
 * Abstract class to create restore context.
 */
private abstract class RestoreStateContext {
    /** Last archived segment. */
    protected final long lastArchivedSegment;

    /** WAL iterator.
*/ private final WALIterator iterator; /** Only {@link WalRecordCacheGroupAware} records satisfied this predicate will be applied. */ private final IgnitePredicate<Integer> cacheGroupPredicate; /** * @param iterator WAL iterator. * @param lastArchivedSegment Last archived segment index. * @param cacheGroupPredicate Cache groups predicate. */ protected RestoreStateContext( WALIterator iterator, long lastArchivedSegment, IgnitePredicate<Integer> cacheGroupPredicate ) { this.iterator = iterator; this.lastArchivedSegment = lastArchivedSegment; this.cacheGroupPredicate = cacheGroupPredicate; } /** * Advance iterator to the next record. * * @return WALRecord entry. * @throws IgniteCheckedException If CRC check fail during binary recovery state or another exception occurring. */ public WALRecord next() throws IgniteCheckedException { try { for (;;) { if (!iterator.hasNextX()) return null; IgniteBiTuple<WALPointer, WALRecord> tup = iterator.nextX(); if (tup == null) return null; WALRecord rec = tup.get2(); WALPointer ptr = tup.get1(); rec.position(ptr); // Filter out records by group id. if (rec instanceof WalRecordCacheGroupAware) { WalRecordCacheGroupAware grpAwareRecord = (WalRecordCacheGroupAware) rec; if (!cacheGroupPredicate.apply(grpAwareRecord.groupId())) continue; } // Filter out data entries by group id. if (rec instanceof DataRecord) rec = filterEntriesByGroupId((DataRecord) rec); return rec; } } catch (IgniteCheckedException e) { boolean throwsCRCError = throwsCRCError(); if (X.hasCause(e, IgniteDataIntegrityViolationException.class)) { if (throwsCRCError) throw e; else return null; } log.error("There is an error during restore state [throwsCRCError=" + throwsCRCError + ']', e); throw e; } } /** * Filter outs data entries from given data record that not satisfy {@link #cacheGroupPredicate}. * * @param record Original data record. * @return Data record with filtered data entries. 
*/ private DataRecord filterEntriesByGroupId(DataRecord record) { List<DataEntry> filteredEntries = record.writeEntries().stream() .filter(entry -> { int cacheId = entry.cacheId(); return cctx.cacheContext(cacheId) != null && cacheGroupPredicate.apply(cctx.cacheContext(cacheId).groupId()); }) .collect(Collectors.toList()); return record.setWriteEntries(filteredEntries); } /** * * @return Last read WAL record pointer. */ public Optional<FileWALPointer> lastReadRecordPointer() { return iterator.lastRead().map(ptr -> (FileWALPointer)ptr); } /** * * @return Flag indicates need throws CRC exception or not. */ public boolean throwsCRCError() { return lastReadRecordPointer().filter(ptr -> ptr.index() <= lastArchivedSegment).isPresent(); } } /** * Restore memory context. Tracks the safety of binary recovery. */ public class RestoreBinaryState extends RestoreStateContext { /** Checkpoint status. */ private final CheckpointStatus status; /** The flag indicates need to apply the binary update or no needed. */ private boolean needApplyBinaryUpdates; /** * @param status Checkpoint status. * @param iterator WAL iterator. * @param lastArchivedSegment Last archived segment index. * @param cacheGroupsPredicate Cache groups predicate. */ public RestoreBinaryState( CheckpointStatus status, WALIterator iterator, long lastArchivedSegment, IgnitePredicate<Integer> cacheGroupsPredicate ) { super(iterator, lastArchivedSegment, cacheGroupsPredicate); this.status = status; this.needApplyBinaryUpdates = status.needRestoreMemory(); } /** * Advance iterator to the next record. * * @return WALRecord entry. * @throws IgniteCheckedException If CRC check fail during binary recovery state or another exception occurring. 
*/
@Override public WALRecord next() throws IgniteCheckedException {
    WALRecord rec = super.next();

    if (rec == null)
        return null;

    if (rec.type() == CHECKPOINT_RECORD) {
        CheckpointRecord cpRec = (CheckpointRecord)rec;

        // We roll memory up until we find a checkpoint start record registered in the status.
        if (F.eq(cpRec.checkpointId(), status.cpStartId)) {
            log.info("Found last checkpoint marker [cpId=" + cpRec.checkpointId() +
                ", pos=" + rec.position() + ']');

            // From this point on, only logical records need to be applied.
            needApplyBinaryUpdates = false;
        }
        else if (!F.eq(cpRec.checkpointId(), status.cpEndId))
            U.warn(log, "Found unexpected checkpoint marker, skipping [cpId=" + cpRec.checkpointId() +
                ", expCpId=" + status.cpStartId + ", pos=" + rec.position() + ']');
    }

    return rec;
}

/**
 * @return {@code true} if binary updates still need to be applied (checkpoint start marker not reached yet).
 */
public boolean needApplyBinaryUpdate() {
    return needApplyBinaryUpdates;
}

/**
 * @return {@code true} if a CRC error must be propagated to the caller instead of being tolerated.
 */
@Override public boolean throwsCRCError() {
    log.info("Throws CRC error check [needApplyBinaryUpdates=" + needApplyBinaryUpdates +
        ", lastArchivedSegment=" + lastArchivedSegment + ", lastRead=" + lastReadRecordPointer() + ']');

    // While binary updates are still being applied, a corrupted record is always fatal.
    if (needApplyBinaryUpdates)
        return true;

    return super.throwsCRCError();
}
}

/**
 * Restore logical state context. Tracks the safety of logical recovery.
 */
public class RestoreLogicalState extends RestoreStateContext {
    /** States of partitions recovered during applying logical updates. */
    private final Map<GroupPartitionId, PartitionRecoverState> partitionRecoveryStates = new HashMap<>();

    /**
     * @param iterator WAL iterator.
     * @param lastArchivedSegment Last archived segment index.
     * @param cacheGroupsPredicate Cache groups predicate.
     */
    public RestoreLogicalState(WALIterator iterator, long lastArchivedSegment, IgnitePredicate<Integer> cacheGroupsPredicate) {
        super(iterator, lastArchivedSegment, cacheGroupsPredicate);
    }

    /**
     * @return Map of restored partition states for cache groups.
*/
public Map<GroupPartitionId, PartitionRecoverState> partitionRecoveryStates() {
    // Expose a read-only view: callers must not mutate the recovery state map.
    return Collections.unmodifiableMap(partitionRecoveryStates);
}
}

/** Indicates checkpoint read lock acquisition failure which did not lead to node invalidation. */
private static class CheckpointReadLockTimeoutException extends IgniteCheckedException {
    /** Serialization version. */
    private static final long serialVersionUID = 0L;

    /**
     * @param msg Error message describing the timed-out lock acquisition.
     */
    private CheckpointReadLockTimeoutException(String msg) {
        super(msg);
    }
}
}
modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/GridCacheDatabaseSharedManager.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.FileChannel; import java.nio.channels.FileLock; import java.nio.channels.OverlappingFileLockException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.ForkJoinTask; import 
java.util.concurrent.ForkJoinWorkerThread; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.function.ToLongFunction; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.ignite.DataStorageMetrics; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteInterruptedException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.IgniteSystemProperties; import org.apache.ignite.DataRegionMetricsProvider; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.CheckpointWriteOrder; import org.apache.ignite.configuration.DataPageEvictionMode; import org.apache.ignite.configuration.DataRegionConfiguration; import org.apache.ignite.configuration.DataStorageConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.configuration.NearCacheConfiguration; import org.apache.ignite.failure.FailureContext; import org.apache.ignite.failure.FailureType; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.IgniteFutureTimeoutCheckedException; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.LongJVMPauseDetector; import 
org.apache.ignite.internal.NodeStoppingException; import org.apache.ignite.internal.managers.discovery.GridDiscoveryManager; import org.apache.ignite.internal.mem.DirectMemoryProvider; import org.apache.ignite.internal.mem.DirectMemoryRegion; import org.apache.ignite.internal.pagemem.FullPageId; import org.apache.ignite.internal.pagemem.PageIdAllocator; import org.apache.ignite.internal.pagemem.PageIdUtils; import org.apache.ignite.internal.pagemem.PageMemory; import org.apache.ignite.internal.pagemem.PageUtils; import org.apache.ignite.internal.pagemem.store.IgnitePageStoreManager; import org.apache.ignite.internal.pagemem.store.PageStore; import org.apache.ignite.internal.pagemem.wal.WALIterator; import org.apache.ignite.internal.pagemem.wal.WALPointer; import org.apache.ignite.internal.pagemem.wal.record.CacheState; import org.apache.ignite.internal.pagemem.wal.record.CheckpointRecord; import org.apache.ignite.internal.pagemem.wal.record.DataEntry; import org.apache.ignite.internal.pagemem.wal.record.DataRecord; import org.apache.ignite.internal.pagemem.wal.record.MemoryRecoveryRecord; import org.apache.ignite.internal.pagemem.wal.record.MetastoreDataRecord; import org.apache.ignite.internal.pagemem.wal.record.MvccDataEntry; import org.apache.ignite.internal.pagemem.wal.record.MvccTxRecord; import org.apache.ignite.internal.pagemem.wal.record.PageSnapshot; import org.apache.ignite.internal.pagemem.wal.record.WALRecord; import org.apache.ignite.internal.pagemem.wal.record.WalRecordCacheGroupAware; import org.apache.ignite.internal.pagemem.wal.record.delta.PageDeltaRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.PartitionDestroyRecord; import org.apache.ignite.internal.pagemem.wal.record.delta.PartitionMetaStateRecord; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.CacheGroupContext; import org.apache.ignite.internal.processors.cache.CacheGroupDescriptor; import 
org.apache.ignite.internal.processors.cache.DynamicCacheDescriptor; import org.apache.ignite.internal.processors.cache.ExchangeActions; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionsExchangeFuture; import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition; import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState; import org.apache.ignite.internal.processors.cache.mvcc.txlog.TxLog; import org.apache.ignite.internal.processors.cache.mvcc.txlog.TxState; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointEntry; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointEntryType; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointHistory; import org.apache.ignite.internal.processors.cache.persistence.file.FileIO; import org.apache.ignite.internal.processors.cache.persistence.file.FileIOFactory; import org.apache.ignite.internal.processors.cache.persistence.file.FilePageStore; import org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager; import org.apache.ignite.internal.processors.cache.persistence.metastorage.MetaStorage; import org.apache.ignite.internal.processors.cache.persistence.metastorage.MetastorageLifecycleListener; import org.apache.ignite.internal.processors.cache.persistence.pagemem.CheckpointMetricsTracker; import org.apache.ignite.internal.processors.cache.persistence.pagemem.PageMemoryEx; import org.apache.ignite.internal.processors.cache.persistence.pagemem.PageMemoryImpl; import org.apache.ignite.internal.processors.cache.persistence.partstate.GroupPartitionId; import org.apache.ignite.internal.processors.cache.persistence.partstate.PartitionAllocationMap; import 
org.apache.ignite.internal.processors.cache.persistence.partstate.PartitionRecoverState; import org.apache.ignite.internal.processors.cache.persistence.snapshot.IgniteCacheSnapshotManager; import org.apache.ignite.internal.processors.cache.persistence.snapshot.SnapshotOperation; import org.apache.ignite.internal.processors.cache.persistence.tree.io.PageIO; import org.apache.ignite.internal.processors.cache.persistence.tree.io.PagePartitionMetaIO; import org.apache.ignite.internal.processors.cache.persistence.wal.FileWALPointer; import org.apache.ignite.internal.processors.cache.persistence.wal.crc.IgniteDataIntegrityViolationException; import org.apache.ignite.internal.processors.port.GridPortRecord; import org.apache.ignite.internal.processors.query.GridQueryProcessor; import org.apache.ignite.internal.stat.IoStatisticsHolderNoOp; import org.apache.ignite.internal.util.GridConcurrentHashSet; import org.apache.ignite.internal.util.GridMultiCollectionWrapper; import org.apache.ignite.internal.util.GridReadOnlyArrayView; import org.apache.ignite.internal.util.IgniteUtils; import org.apache.ignite.internal.util.StripedExecutor; import org.apache.ignite.internal.util.future.CountDownFuture; import org.apache.ignite.internal.util.future.GridCompoundFuture; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.lang.GridInClosure3X; import org.apache.ignite.internal.util.tostring.GridToStringInclude; import org.apache.ignite.internal.util.typedef.CI1; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.T2; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.LT; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.SB; import org.apache.ignite.internal.util.typedef.internal.U; import 
org.apache.ignite.internal.util.worker.GridWorker; import org.apache.ignite.lang.IgniteBiPredicate; import org.apache.ignite.lang.IgniteBiTuple; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.lang.IgniteOutClosure; import org.apache.ignite.lang.IgnitePredicate; import org.apache.ignite.mxbean.DataStorageMetricsMXBean; import org.apache.ignite.thread.IgniteThread; import org.apache.ignite.thread.IgniteThreadPoolExecutor; import org.apache.ignite.transactions.TransactionState; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jsr166.ConcurrentLinkedHashMap; import static java.nio.file.StandardOpenOption.READ; import static org.apache.ignite.IgniteSystemProperties.IGNITE_CHECKPOINT_READ_LOCK_TIMEOUT; import static org.apache.ignite.IgniteSystemProperties.IGNITE_JVM_PAUSE_DETECTOR_THRESHOLD; import static org.apache.ignite.IgniteSystemProperties.IGNITE_PDS_WAL_REBALANCE_THRESHOLD; import static org.apache.ignite.IgniteSystemProperties.IGNITE_RECOVERY_SEMAPHORE_PERMITS; import static org.apache.ignite.IgniteSystemProperties.getBoolean; import static org.apache.ignite.IgniteSystemProperties.getInteger; import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL_SNAPSHOT; import static org.apache.ignite.failure.FailureType.CRITICAL_ERROR; import static org.apache.ignite.failure.FailureType.SYSTEM_CRITICAL_OPERATION_TIMEOUT; import static org.apache.ignite.failure.FailureType.SYSTEM_WORKER_TERMINATION; import static org.apache.ignite.internal.IgnitionEx.grid; import static org.apache.ignite.internal.LongJVMPauseDetector.DEFAULT_JVM_PAUSE_DETECTOR_THRESHOLD; import static org.apache.ignite.internal.pagemem.PageIdUtils.partId; import static org.apache.ignite.internal.pagemem.wal.record.WALRecord.RecordType.CHECKPOINT_RECORD; import static org.apache.ignite.internal.pagemem.wal.record.WALRecord.RecordType.METASTORE_DATA_RECORD; import static 
org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState.fromOrdinal; import static org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager.TMP_FILE_MATCHER; import static org.apache.ignite.internal.util.IgniteUtils.checkpointBufferSize; /** * */ @SuppressWarnings({"unchecked", "NonPrivateFieldAccessedInSynchronizedContext"}) public class GridCacheDatabaseSharedManager extends IgniteCacheDatabaseSharedManager implements CheckpointWriteProgressSupplier { /** */ public static final String IGNITE_PDS_CHECKPOINT_TEST_SKIP_SYNC = "IGNITE_PDS_CHECKPOINT_TEST_SKIP_SYNC"; /** */ public static final String IGNITE_PDS_SKIP_CHECKPOINT_ON_NODE_STOP = "IGNITE_PDS_SKIP_CHECKPOINT_ON_NODE_STOP"; /** MemoryPolicyConfiguration name reserved for meta store. */ public static final String METASTORE_DATA_REGION_NAME = "metastoreMemPlc"; /** Skip sync. */ private final boolean skipSync = getBoolean(IGNITE_PDS_CHECKPOINT_TEST_SKIP_SYNC); /** */ private final int walRebalanceThreshold = getInteger(IGNITE_PDS_WAL_REBALANCE_THRESHOLD, 500_000); /** Value of property for throttling policy override. */ private final String throttlingPolicyOverride = IgniteSystemProperties.getString( IgniteSystemProperties.IGNITE_OVERRIDE_WRITE_THROTTLING_ENABLED); /** */ private final boolean skipCheckpointOnNodeStop = getBoolean(IGNITE_PDS_SKIP_CHECKPOINT_ON_NODE_STOP, false); /** * Starting from this number of dirty pages in checkpoint, array will be sorted with * {@link Arrays#parallelSort(Comparable[])} in case of {@link CheckpointWriteOrder#SEQUENTIAL}. */ private final int parallelSortThreshold = IgniteSystemProperties.getInteger( IgniteSystemProperties.CHECKPOINT_PARALLEL_SORT_THRESHOLD, 512 * 1024); /** Checkpoint lock hold count. */ private static final ThreadLocal<Integer> CHECKPOINT_LOCK_HOLD_COUNT = ThreadLocal.withInitial(() -> 0); /** Assertion enabled. 
*/ private static final boolean ASSERTION_ENABLED = GridCacheDatabaseSharedManager.class.desiredAssertionStatus(); /** Checkpoint file name pattern. */ public static final Pattern CP_FILE_NAME_PATTERN = Pattern.compile("(\\d+)-(.*)-(START|END)\\.bin"); /** */ private static final String MBEAN_NAME = "DataStorageMetrics"; /** */ private static final String MBEAN_GROUP = "Persistent Store"; /** WAL marker prefix for meta store. */ private static final String WAL_KEY_PREFIX = "grp-wal-"; /** Prefix for meta store records which means that WAL was disabled globally for some group. */ private static final String WAL_GLOBAL_KEY_PREFIX = WAL_KEY_PREFIX + "disabled-"; /** Prefix for meta store records which means that WAL was disabled locally for some group. */ private static final String WAL_LOCAL_KEY_PREFIX = WAL_KEY_PREFIX + "local-disabled-"; /** Prefix for meta store records which means that checkpoint entry for some group is not applicable for WAL rebalance. */ private static final String CHECKPOINT_INAPPLICABLE_FOR_REBALANCE = "cp-wal-rebalance-inapplicable-"; /** Timeout between partition file destroy and checkpoint to handle it. */ private static final long PARTITION_DESTROY_CHECKPOINT_TIMEOUT = 30 * 1000; // 30 Seconds. /** */ private static final String CHECKPOINT_RUNNER_THREAD_PREFIX = "checkpoint-runner"; /** This number of threads will be created and used for parallel sorting. */ private static final int PARALLEL_SORT_THREADS = Math.min(Runtime.getRuntime().availableProcessors(), 8); /** Checkpoint thread. Needs to be volatile because it is created in exchange worker. */ private volatile Checkpointer checkpointer; /** Checkpointer thread instance. */ private volatile IgniteThread checkpointerThread; /** For testing only. */ private volatile boolean checkpointsEnabled = true; /** For testing only. 
*/
    private volatile GridFutureAdapter<Void> enableChangeApplied;

    /** Checkpoint read-write lock (see {@code checkpointReadLock()}/{@code checkpointReadUnlock()}); write lock is taken on stop. */
    ReentrantReadWriteLock checkpointLock = new ReentrantReadWriteLock();

    /** Checkpoint frequency in milliseconds, taken from {@code DataStorageConfiguration#getCheckpointFrequency()}. */
    private long checkpointFreq;

    /** In-memory history of checkpoint entries, backed by the checkpoint ("cp") directory. */
    private CheckpointHistory cpHistory;

    /** Page store manager (cast of {@code cctx.pageStore()}, asserted in {@code start0()}). */
    private FilePageStoreManager storeMgr;

    /** Checkpoint metadata directory ("cp"), contains files with checkpoint start and end */
    private File cpDir;

    /** Whether checkpoint statistics are printed to the log. */
    private volatile boolean printCheckpointStats = true;

    /** Database configuration. */
    private final DataStorageConfiguration persistenceCfg;

    /** Registered checkpoint listeners. */
    private final Collection<DbCheckpointListener> lsnrs = new CopyOnWriteArrayList<>();

    /** Set under the {@code checkpointLock} write lock when the manager is stopping. */
    private boolean stopping;

    /**
     * The position of last seen WAL pointer. Used for resuming logging from this pointer.
     *
     * If binary memory recovery performed on node start, the checkpoint END pointer will store
     * not the last WAL pointer and can't be used for resuming logging.
     */
    private volatile WALPointer walTail;

    /** Checkpoint runner thread pool. If null, tasks are to be run in single thread. */
    @Nullable private IgniteThreadPoolExecutor asyncRunner;

    /** Thread local with buffers for the checkpoint threads. Each buffer represents one page for durable memory. */
    private ThreadLocal<ByteBuffer> threadBuf;

    /** Map from a cacheId to a future indicating that there is an in-progress index rebuild for the given cache. */
    private final ConcurrentMap<Integer, GridFutureAdapter<Void>> idxRebuildFuts = new ConcurrentHashMap<>();

    /**
     * Lock holder for compatible folders mode. Null if lock holder was created at start node. <br>
     * In this case lock is held on PDS resolver manager and it is not required to manage locking here.
     */
    @Nullable private FileLockHolder fileLockHolder;

    /** Lock wait time used when acquiring the PDS work directory file lock (see {@code acquireFileLock}). */
    private final long lockWaitTime;

    /** Whether WAL history is truncated when a checkpoint finishes; derived from history/archive size config in the constructor. */
    private final boolean truncateWalOnCpFinish;

    /** History reservations made for partition map exchange: grpId -> partId -> (update counter, WAL pointer). */
    private Map</*grpId*/Integer, Map</*partId*/Integer, T2</*updCntr*/Long, WALPointer>>> reservedForExchange;

    /** History reservations made for preloading: (grpId, partId) -> (update counter, WAL pointer). */
    private final ConcurrentMap<T2</*grpId*/Integer, /*partId*/Integer>, T2</*updCntr*/Long, WALPointer>> reservedForPreloading = new ConcurrentHashMap<>();

    /** Snapshot manager. */
    private IgniteCacheSnapshotManager snapshotMgr;

    /** Data storage metrics, registered as {@code DataStorageMetricsMXBean}. */
    private DataStorageMetricsImpl persStoreMetrics;

    /** Counter for written checkpoint pages. Not null only if checkpoint is running. */
    private volatile AtomicInteger writtenPagesCntr = null;

    /** Counter for fsynced checkpoint pages. Not null only if checkpoint is running. */
    private volatile AtomicInteger syncedPagesCntr = null;

    /** Counter for evicted checkpoint pages. Not null only if checkpoint is running. */
    private volatile AtomicInteger evictedPagesCntr = null;

    /** Number of pages in current checkpoint at the beginning of checkpoint. */
    private volatile int currCheckpointPagesCnt;

    /**
     * MetaStorage instance. Value {@code null} means storage not initialized yet.
     * Guarded by {@link GridCacheDatabaseSharedManager#checkpointReadLock()}
     */
    private MetaStorage metaStorage;

    /** Listeners notified when the metastorage becomes ready for read / read-write. */
    private List<MetastorageLifecycleListener> metastorageLifecycleLsnrs;

    /** Initially disabled cache groups. */
    private Collection<Integer> initiallyGlobalWalDisabledGrps = new HashSet<>();

    /** Initially local wal disabled groups. */
    private Collection<Integer> initiallyLocalWalDisabledGrps = new HashSet<>();

    /** File I/O factory for writing checkpoint markers. */
    private final FileIOFactory ioFactory;

    /** Timeout for checkpoint read lock acquisition in milliseconds. */
    private volatile long checkpointReadLockTimeout;

    /** Flag allows to log additional information about partitions during recovery phases.
*/ private final boolean recoveryVerboseLogging = getBoolean(IgniteSystemProperties.IGNITE_RECOVERY_VERBOSE_LOGGING, true); /** Pointer to a memory recovery record that should be included into the next checkpoint record. */ private volatile WALPointer memoryRecoveryRecordPtr; /** * @param ctx Kernal context. */ public GridCacheDatabaseSharedManager(GridKernalContext ctx) { IgniteConfiguration cfg = ctx.config(); persistenceCfg = cfg.getDataStorageConfiguration(); assert persistenceCfg != null; checkpointFreq = persistenceCfg.getCheckpointFrequency(); truncateWalOnCpFinish = persistenceCfg.isWalHistorySizeParameterUsed() ? persistenceCfg.getWalHistorySize() != Integer.MAX_VALUE : persistenceCfg.getMaxWalArchiveSize() != Long.MAX_VALUE; lockWaitTime = persistenceCfg.getLockWaitTime(); persStoreMetrics = new DataStorageMetricsImpl( persistenceCfg.isMetricsEnabled(), persistenceCfg.getMetricsRateTimeInterval(), persistenceCfg.getMetricsSubIntervalCount() ); ioFactory = persistenceCfg.getFileIOFactory(); Long cfgCheckpointReadLockTimeout = ctx.config().getDataStorageConfiguration() != null ? ctx.config().getDataStorageConfiguration().getCheckpointReadLockTimeout() : null; checkpointReadLockTimeout = IgniteSystemProperties.getLong(IGNITE_CHECKPOINT_READ_LOCK_TIMEOUT, cfgCheckpointReadLockTimeout != null ? cfgCheckpointReadLockTimeout : (ctx.workersRegistry() != null ? ctx.workersRegistry().getSystemWorkerBlockedTimeout() : ctx.config().getFailureDetectionTimeout())); } /** * @return File store manager. 
*/
    public FilePageStoreManager getFileStoreManager() {
        return storeMgr;
    }

    /** Notifies registered lifecycle listeners that the metastorage is ready for reading. */
    private void notifyMetastorageReadyForRead() throws IgniteCheckedException {
        for (MetastorageLifecycleListener lsnr : metastorageLifecycleLsnrs)
            lsnr.onReadyForRead(metaStorage);
    }

    /** Notifies registered lifecycle listeners that the metastorage is ready for reading and writing. */
    private void notifyMetastorageReadyForReadWrite() throws IgniteCheckedException {
        for (MetastorageLifecycleListener lsnr : metastorageLifecycleLsnrs)
            lsnr.onReadyForReadWrite(metaStorage);
    }

    /**
     * @return Checkpointer instance (created in {@code start0()}/{@code onActivate()} on non-client nodes).
     */
    public Checkpointer getCheckpointer() {
        return checkpointer;
    }

    /**
     * For test use only.
     *
     * @return Checkpointer thread instance.
     */
    public IgniteThread checkpointerThread() {
        return checkpointerThread;
    }

    /**
     * For test use only.
     *
     * @param enable Whether checkpoints should be enabled.
     * @return Future completed once the checkpointer has applied the change.
     */
    public IgniteInternalFuture<Void> enableCheckpoints(boolean enable) {
        GridFutureAdapter<Void> fut = new GridFutureAdapter<>();

        enableChangeApplied = fut;

        checkpointsEnabled = enable;

        // Wake the checkpointer so the new flag value is observed promptly.
        wakeupForCheckpoint("enableCheckpoints()");

        return fut;
    }

    /** {@inheritDoc} */
    @Override protected void initDataRegions0(DataStorageConfiguration memCfg) throws IgniteCheckedException {
        super.initDataRegions0(memCfg);

        // Add the dedicated persistent data region used by the metastorage.
        addDataRegion(
            memCfg,
            createMetastoreDataRegionConfig(memCfg),
            false
        );

        persStoreMetrics.regionMetrics(memMetricsMap.values());
    }

    /**
     * Create metastorage data region configuration with enabled persistence by default.
     *
     * @param storageCfg Data storage configuration.
     * @return Data region configuration.
*/
    private DataRegionConfiguration createMetastoreDataRegionConfig(DataStorageConfiguration storageCfg) {
        DataRegionConfiguration cfg = new DataRegionConfiguration();

        cfg.setName(METASTORE_DATA_REGION_NAME);
        // The metastorage region is sized from the system region settings.
        cfg.setInitialSize(storageCfg.getSystemRegionInitialSize());
        cfg.setMaxSize(storageCfg.getSystemRegionMaxSize());
        cfg.setPersistenceEnabled(true);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void start0() throws IgniteCheckedException {
        super.start0();

        // Per-thread direct buffer, one page in size, used when writing pages.
        threadBuf = new ThreadLocal<ByteBuffer>() {
            /** {@inheritDoc} */
            @Override protected ByteBuffer initialValue() {
                ByteBuffer tmpWriteBuf = ByteBuffer.allocateDirect(pageSize());

                tmpWriteBuf.order(ByteOrder.nativeOrder());

                return tmpWriteBuf;
            }
        };

        snapshotMgr = cctx.snapshot();

        final GridKernalContext kernalCtx = cctx.kernalContext();

        if (!kernalCtx.clientNode()) {
            kernalCtx.internalSubscriptionProcessor().registerDatabaseListener(new MetastorageRecoveryLifecycle());

            checkpointer = new Checkpointer(cctx.igniteInstanceName(), "db-checkpoint-thread", log);

            cpHistory = new CheckpointHistory(kernalCtx);

            IgnitePageStoreManager store = cctx.pageStore();

            assert store instanceof FilePageStoreManager : "Invalid page store manager was created: " + store;

            storeMgr = (FilePageStoreManager)store;

            // Checkpoint start/end markers live in the "cp" subdirectory of the store work dir.
            cpDir = Paths.get(storeMgr.workDir().getAbsolutePath(), "cp").toFile();

            if (!U.mkdirs(cpDir))
                throw new IgniteCheckedException("Could not create directory for checkpoint metadata: " + cpDir);

            // Reuse the file lock acquired during PDS folder resolution, if any.
            final FileLockHolder preLocked = kernalCtx.pdsFolderResolver()
                .resolveFolders()
                .getLockedFileLockHolder();

            acquireFileLock(preLocked);

            cleanupTempCheckpointDirectory();

            persStoreMetrics.wal(cctx.wal());
        }
    }

    /**
     * Cleanup checkpoint directory from all temporary files.
*/ @Override public void cleanupTempCheckpointDirectory() throws IgniteCheckedException { try { try (DirectoryStream<Path> files = Files.newDirectoryStream(cpDir.toPath(), TMP_FILE_MATCHER::matches)) { for (Path path : files) Files.delete(path); } } catch (IOException e) { throw new IgniteCheckedException("Failed to cleanup checkpoint directory from temporary files: " + cpDir, e); } } /** {@inheritDoc} */ @Override public void cleanupRestoredCaches() { if (dataRegionMap.isEmpty()) return; boolean hasMvccCache = false; for (CacheGroupDescriptor grpDesc : cctx.cache().cacheGroupDescriptors().values()) { hasMvccCache |= grpDesc.config().getAtomicityMode() == TRANSACTIONAL_SNAPSHOT; String regionName = grpDesc.config().getDataRegionName(); DataRegion region = regionName != null ? dataRegionMap.get(regionName) : dfltDataRegion; if (region == null) continue; if (log.isInfoEnabled()) log.info("Page memory " + region.config().getName() + " for " + grpDesc + " has invalidated."); int partitions = grpDesc.config().getAffinity().partitions(); if (region.pageMemory() instanceof PageMemoryEx) { PageMemoryEx memEx = (PageMemoryEx)region.pageMemory(); for (int partId = 0; partId < partitions; partId++) memEx.invalidate(grpDesc.groupId(), partId); memEx.invalidate(grpDesc.groupId(), PageIdAllocator.INDEX_PARTITION); } } if (!hasMvccCache && dataRegionMap.containsKey(TxLog.TX_LOG_CACHE_NAME)) { PageMemory memory = dataRegionMap.get(TxLog.TX_LOG_CACHE_NAME).pageMemory(); if (memory instanceof PageMemoryEx) ((PageMemoryEx)memory).invalidate(TxLog.TX_LOG_CACHE_ID, PageIdAllocator.INDEX_PARTITION); } final boolean hasMvccCache0 = hasMvccCache; storeMgr.cleanupPageStoreIfMatch( new Predicate<Integer>() { @Override public boolean test(Integer grpId) { return MetaStorage.METASTORAGE_CACHE_ID != grpId && (TxLog.TX_LOG_CACHE_ID != grpId || !hasMvccCache0); } }, true); } /** {@inheritDoc} */ @Override public void cleanupCheckpointDirectory() throws IgniteCheckedException { if (cpHistory != 
null)
            cpHistory = new CheckpointHistory(cctx.kernalContext());

        try {
            try (DirectoryStream<Path> files = Files.newDirectoryStream(cpDir.toPath())) {
                for (Path path : files)
                    Files.delete(path);
            }
        }
        catch (IOException e) {
            throw new IgniteCheckedException("Failed to cleanup checkpoint directory: " + cpDir, e);
        }
    }

    /**
     * Acquires an exclusive file lock on the PDS work directory, unless one was pre-acquired.
     * No-op on client nodes.
     *
     * @param preLocked Pre-locked file lock holder.
     */
    private void acquireFileLock(FileLockHolder preLocked) throws IgniteCheckedException {
        if (cctx.kernalContext().clientNode())
            return;

        fileLockHolder = preLocked == null ?
            new FileLockHolder(storeMgr.workDir().getPath(), cctx.kernalContext(), log) : preLocked;

        if (!fileLockHolder.isLocked()) {
            if (log.isDebugEnabled())
                log.debug("Try to capture file lock [nodeId=" + cctx.localNodeId() + " path=" + fileLockHolder.lockPath() + "]");

            fileLockHolder.tryLock(lockWaitTime);
        }
    }

    /**
     * Releases the PDS work directory file lock. No-op on client nodes or when no lock is held.
     */
    private void releaseFileLock() {
        if (cctx.kernalContext().clientNode() || fileLockHolder == null)
            return;

        if (log.isDebugEnabled())
            log.debug("Release file lock [nodeId=" + cctx.localNodeId() + " path=" + fileLockHolder.lockPath() + "]");

        fileLockHolder.close();
    }

    /**
     * Retrieves checkpoint history from the checkpoint directory.
     *
     * NOTE(review): method name contains a typo ("retreive"); kept as-is for compatibility with callers.
     *
     * @return List of checkpoints.
     */
    private List<CheckpointEntry> retreiveHistory() throws IgniteCheckedException {
        if (!cpDir.exists())
            return Collections.emptyList();

        try (DirectoryStream<Path> cpFiles = Files.newDirectoryStream(
            cpDir.toPath(),
            path -> CP_FILE_NAME_PATTERN.matcher(path.toFile().getName()).matches())
        ) {
            List<CheckpointEntry> checkpoints = new ArrayList<>();

            // Scratch buffer for reading WAL pointers from checkpoint marker files.
            ByteBuffer buf = ByteBuffer.allocate(FileWALPointer.POINTER_SIZE);
            buf.order(ByteOrder.nativeOrder());

            for (Path cpFile : cpFiles) {
                CheckpointEntry cp = parseFromFile(buf, cpFile.toFile());

                if (cp != null)
                    checkpoints.add(cp);
            }

            return checkpoints;
        }
        catch (IOException e) {
            throw new IgniteCheckedException("Failed to load checkpoint history.", e);
        }
    }

    /**
     * Parses checkpoint entry from given file.
* * @param buf Temporary byte buffer. * @param file Checkpoint file. */ @Nullable private CheckpointEntry parseFromFile(ByteBuffer buf, File file) throws IgniteCheckedException { Matcher matcher = CP_FILE_NAME_PATTERN.matcher(file.getName()); if (!matcher.matches()) return null; CheckpointEntryType type = CheckpointEntryType.valueOf(matcher.group(3)); if (type != CheckpointEntryType.START) return null; long cpTs = Long.parseLong(matcher.group(1)); UUID cpId = UUID.fromString(matcher.group(2)); WALPointer ptr = readPointer(file, buf); return createCheckPointEntry(cpTs, ptr, cpId, null, CheckpointEntryType.START); } /** * Removes checkpoint start/end files belongs to given {@code cpEntry}. * * @param cpEntry Checkpoint entry. * * @throws IgniteCheckedException If failed to delete. */ private void removeCheckpointFiles(CheckpointEntry cpEntry) throws IgniteCheckedException { Path startFile = new File(cpDir.getAbsolutePath(), checkpointFileName(cpEntry, CheckpointEntryType.START)).toPath(); Path endFile = new File(cpDir.getAbsolutePath(), checkpointFileName(cpEntry, CheckpointEntryType.END)).toPath(); try { if (Files.exists(startFile)) Files.delete(startFile); if (Files.exists(endFile)) Files.delete(endFile); } catch (IOException e) { throw new StorageException("Failed to delete stale checkpoint files: " + cpEntry, e); } } /** */ private void readMetastore() throws IgniteCheckedException { try { CheckpointStatus status = readCheckpointStatus(); checkpointReadLock(); try { dataRegion(METASTORE_DATA_REGION_NAME).pageMemory().start(); performBinaryMemoryRestore(status, onlyMetastorageGroup(), physicalRecords(), false); metaStorage = createMetastorage(true); applyLogicalUpdates(status, onlyMetastorageGroup(), onlyMetastorageRecords(), false); fillWalDisabledGroups(); notifyMetastorageReadyForRead(); } finally { metaStorage = null; dataRegion(METASTORE_DATA_REGION_NAME).pageMemory().stop(false); cctx.pageStore().cleanupPageStoreIfMatch(new Predicate<Integer>() { @Override 
public boolean test(Integer grpId) { return MetaStorage.METASTORAGE_CACHE_ID == grpId; } }, false); checkpointReadUnlock(); } } catch (StorageException e) { cctx.kernalContext().failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e)); throw new IgniteCheckedException(e); } } /** {@inheritDoc} */ @Override public void onActivate(GridKernalContext ctx) throws IgniteCheckedException { if (log.isDebugEnabled()) log.debug("Activate database manager [id=" + cctx.localNodeId() + " topVer=" + cctx.discovery().topologyVersionEx() + " ]"); snapshotMgr = cctx.snapshot(); if (!cctx.kernalContext().clientNode() && checkpointer == null) checkpointer = new Checkpointer(cctx.igniteInstanceName(), "db-checkpoint-thread", log); super.onActivate(ctx); if (!cctx.kernalContext().clientNode()) { initializeCheckpointPool(); finishRecovery(); } } /** {@inheritDoc} */ @Override public void onDeActivate(GridKernalContext kctx) { if (log.isDebugEnabled()) log.debug("DeActivate database manager [id=" + cctx.localNodeId() + " topVer=" + cctx.discovery().topologyVersionEx() + " ]"); onKernalStop0(false); super.onDeActivate(kctx); /* Must be here, because after deactivate we can invoke activate and file lock must be already configured */ stopping = false; } /** * */ private void initializeCheckpointPool() { if (persistenceCfg.getCheckpointThreads() > 1) asyncRunner = new IgniteThreadPoolExecutor( CHECKPOINT_RUNNER_THREAD_PREFIX, cctx.igniteInstanceName(), persistenceCfg.getCheckpointThreads(), persistenceCfg.getCheckpointThreads(), 30_000, new LinkedBlockingQueue<Runnable>() ); } /** {@inheritDoc} */ @Override protected void registerMetricsMBeans(IgniteConfiguration cfg) { super.registerMetricsMBeans(cfg); registerMetricsMBean( cctx.kernalContext().config(), MBEAN_GROUP, MBEAN_NAME, persStoreMetrics, DataStorageMetricsMXBean.class ); } /** {@inheritDoc} */ @Deprecated @Override protected IgniteOutClosure<Long> freeSpaceProvider(final DataRegionConfiguration dataRegCfg) { if 
(!dataRegCfg.isPersistenceEnabled()) return super.freeSpaceProvider(dataRegCfg); final String dataRegName = dataRegCfg.getName(); return new IgniteOutClosure<Long>() { @Override public Long apply() { long freeSpace = 0L; for (CacheGroupContext grpCtx : cctx.cache().cacheGroups()) { if (!grpCtx.dataRegion().config().getName().equals(dataRegName)) continue; assert grpCtx.offheap() instanceof GridCacheOffheapManager; freeSpace += ((GridCacheOffheapManager)grpCtx.offheap()).freeSpace(); } return freeSpace; } }; } /** {@inheritDoc} */ @Override protected DataRegionMetricsProvider dataRegionMetricsProvider(final DataRegionConfiguration dataRegCfg) { if (!dataRegCfg.isPersistenceEnabled()) return super.dataRegionMetricsProvider(dataRegCfg); final String dataRegName = dataRegCfg.getName(); return new DataRegionMetricsProvider() { @Override public long partiallyFilledPagesFreeSpace() { long freeSpace = 0L; for (CacheGroupContext grpCtx : cctx.cache().cacheGroups()) { if (!grpCtx.dataRegion().config().getName().equals(dataRegName)) continue; assert grpCtx.offheap() instanceof GridCacheOffheapManager; freeSpace += ((GridCacheOffheapManager)grpCtx.offheap()).freeSpace(); } return freeSpace; } @Override public long emptyDataPages() { long emptyDataPages = 0L; for (CacheGroupContext grpCtx : cctx.cache().cacheGroups()) { if (!grpCtx.dataRegion().config().getName().equals(dataRegName)) continue; assert grpCtx.offheap() instanceof GridCacheOffheapManager; emptyDataPages += ((GridCacheOffheapManager)grpCtx.offheap()).emptyDataPages(); } return emptyDataPages; } }; } /** * Restores last valid WAL pointer and resumes logging from that pointer. * Re-creates metastorage if needed. * * @throws IgniteCheckedException If failed. 
*/ private void finishRecovery() throws IgniteCheckedException { assert !cctx.kernalContext().clientNode(); long time = System.currentTimeMillis(); checkpointReadLock(); try { for (DatabaseLifecycleListener lsnr : getDatabaseListeners(cctx.kernalContext())) lsnr.beforeResumeWalLogging(this); // Try to resume logging since last finished checkpoint if possible. if (walTail == null) { CheckpointStatus status = readCheckpointStatus(); walTail = CheckpointStatus.NULL_PTR.equals(status.endPtr) ? null : status.endPtr; } cctx.wal().resumeLogging(walTail); walTail = null; // Recreate metastorage to refresh page memory state after deactivation. if (metaStorage == null) metaStorage = createMetastorage(false); notifyMetastorageReadyForReadWrite(); U.log(log, "Finish recovery performed in " + (System.currentTimeMillis() - time) + " ms."); } catch (IgniteCheckedException e) { if (X.hasCause(e, StorageException.class, IOException.class)) cctx.kernalContext().failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e)); throw e; } finally { checkpointReadUnlock(); } } /** * @param readOnly Metastorage read-only mode. * @return Instance of Metastorage. * @throws IgniteCheckedException If failed to create metastorage. */ private MetaStorage createMetastorage(boolean readOnly) throws IgniteCheckedException { cctx.pageStore().initializeForMetastorage(); MetaStorage storage = new MetaStorage( cctx, dataRegion(METASTORE_DATA_REGION_NAME), (DataRegionMetricsImpl) memMetricsMap.get(METASTORE_DATA_REGION_NAME), readOnly ); storage.init(this); return storage; } /** * @param cacheGroupsPredicate Cache groups to restore. * @param recordTypePredicate Filter records by type. * @return Last seen WAL pointer during binary memory recovery. * @throws IgniteCheckedException If failed. 
*/ private RestoreBinaryState restoreBinaryMemory( IgnitePredicate<Integer> cacheGroupsPredicate, IgniteBiPredicate<WALRecord.RecordType, WALPointer> recordTypePredicate ) throws IgniteCheckedException { long time = System.currentTimeMillis(); try { log.info("Starting binary memory restore for: " + cctx.cache().cacheGroupDescriptors().keySet()); for (DatabaseLifecycleListener lsnr : getDatabaseListeners(cctx.kernalContext())) lsnr.beforeBinaryMemoryRestore(this); CheckpointStatus status = readCheckpointStatus(); // First, bring memory to the last consistent checkpoint state if needed. // This method should return a pointer to the last valid record in the WAL. RestoreBinaryState binaryState = performBinaryMemoryRestore( status, cacheGroupsPredicate, recordTypePredicate, true ); WALPointer restored = binaryState.lastReadRecordPointer().map(FileWALPointer::next).orElse(null); if (restored == null && !status.endPtr.equals(CheckpointStatus.NULL_PTR)) { throw new StorageException("The memory cannot be restored. The critical part of WAL archive is missing " + "[tailWalPtr=" + restored + ", endPtr=" + status.endPtr + ']'); } else if (restored != null) U.log(log, "Binary memory state restored at node startup [restoredPtr=" + restored + ']'); // Wal logging is now available. cctx.wal().resumeLogging(restored); // Log MemoryRecoveryRecord to make sure that old physical records are not replayed during // next physical recovery. 
memoryRecoveryRecordPtr = cctx.wal().log(new MemoryRecoveryRecord(U.currentTimeMillis())); for (DatabaseLifecycleListener lsnr : getDatabaseListeners(cctx.kernalContext())) lsnr.afterBinaryMemoryRestore(this, binaryState); if (log.isInfoEnabled()) log.info("Binary recovery performed in " + (System.currentTimeMillis() - time) + " ms."); return binaryState; } catch (IgniteCheckedException e) { if (X.hasCause(e, StorageException.class, IOException.class)) cctx.kernalContext().failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e)); throw e; } } /** {@inheritDoc} */ @Override protected void onKernalStop0(boolean cancel) { checkpointLock.writeLock().lock(); try { stopping = true; } finally { checkpointLock.writeLock().unlock(); } shutdownCheckpointer(cancel); lsnrs.clear(); super.onKernalStop0(cancel); unregisterMetricsMBean( cctx.gridConfig(), MBEAN_GROUP, MBEAN_NAME ); metaStorage = null; } /** {@inheritDoc} */ @Override protected void stop0(boolean cancel) { super.stop0(cancel); releaseFileLock(); } /** */ private long[] calculateFragmentSizes(int concLvl, long cacheSize, long chpBufSize) { if (concLvl < 2) concLvl = Runtime.getRuntime().availableProcessors(); long fragmentSize = cacheSize / concLvl; if (fragmentSize < 1024 * 1024) fragmentSize = 1024 * 1024; long[] sizes = new long[concLvl + 1]; for (int i = 0; i < concLvl; i++) sizes[i] = fragmentSize; sizes[concLvl] = chpBufSize; return sizes; } /** {@inheritDoc} */ @Override protected PageMemory createPageMemory( DirectMemoryProvider memProvider, DataStorageConfiguration memCfg, DataRegionConfiguration plcCfg, DataRegionMetricsImpl memMetrics, final boolean trackable ) { if (!plcCfg.isPersistenceEnabled()) return super.createPageMemory(memProvider, memCfg, plcCfg, memMetrics, trackable); memMetrics.persistenceEnabled(true); long cacheSize = plcCfg.getMaxSize(); // Checkpoint buffer size can not be greater than cache size, it does not make sense. 
long chpBufSize = checkpointBufferSize(plcCfg); if (chpBufSize > cacheSize) { U.quietAndInfo(log, "Configured checkpoint page buffer size is too big, setting to the max region size [size=" + U.readableSize(cacheSize, false) + ", memPlc=" + plcCfg.getName() + ']'); chpBufSize = cacheSize; } GridInClosure3X<Long, FullPageId, PageMemoryEx> changeTracker; if (trackable) changeTracker = new GridInClosure3X<Long, FullPageId, PageMemoryEx>() { @Override public void applyx( Long page, FullPageId fullId, PageMemoryEx pageMem ) throws IgniteCheckedException { if (trackable) snapshotMgr.onChangeTrackerPage(page, fullId, pageMem); } }; else changeTracker = null; PageMemoryImpl pageMem = new PageMemoryImpl( wrapMetricsMemoryProvider(memProvider, memMetrics), calculateFragmentSizes( memCfg.getConcurrencyLevel(), cacheSize, chpBufSize ), cctx, memCfg.getPageSize(), (fullId, pageBuf, tag) -> { memMetrics.onPageWritten(); // We can write only page from disk into snapshot. snapshotMgr.beforePageWrite(fullId); // Write page to disk. storeMgr.write(fullId.groupId(), fullId.pageId(), pageBuf, tag); AtomicInteger cntr = evictedPagesCntr; if (cntr != null) cntr.incrementAndGet(); }, changeTracker, this, memMetrics, resolveThrottlingPolicy(), this ); memMetrics.pageMemory(pageMem); return pageMem; } /** * @param memoryProvider0 Memory provider. * @param memMetrics Memory metrics. * @return Wrapped memory provider. 
*/ @Override protected DirectMemoryProvider wrapMetricsMemoryProvider( final DirectMemoryProvider memoryProvider0, final DataRegionMetricsImpl memMetrics ) { return new DirectMemoryProvider() { private AtomicInteger checkPointBufferIdxCnt = new AtomicInteger(); private final DirectMemoryProvider memProvider = memoryProvider0; @Override public void initialize(long[] chunkSizes) { memProvider.initialize(chunkSizes); checkPointBufferIdxCnt.set(chunkSizes.length); } @Override public void shutdown(boolean deallocate) { memProvider.shutdown(deallocate); } @Override public DirectMemoryRegion nextRegion() { DirectMemoryRegion nextMemoryRegion = memProvider.nextRegion(); if (nextMemoryRegion == null) return null; int idx = checkPointBufferIdxCnt.decrementAndGet(); long chunkSize = nextMemoryRegion.size(); // Checkpoint chunk last in the long[] chunkSizes. if (idx != 0) memMetrics.updateOffHeapSize(chunkSize); else memMetrics.updateCheckpointBufferSize(chunkSize); return nextMemoryRegion; } }; } /** * Resolves throttling policy according to the settings. */ @NotNull private PageMemoryImpl.ThrottlingPolicy resolveThrottlingPolicy() { PageMemoryImpl.ThrottlingPolicy plc = persistenceCfg.isWriteThrottlingEnabled() ? PageMemoryImpl.ThrottlingPolicy.SPEED_BASED : PageMemoryImpl.ThrottlingPolicy.CHECKPOINT_BUFFER_ONLY; if (throttlingPolicyOverride != null) { try { plc = PageMemoryImpl.ThrottlingPolicy.valueOf(throttlingPolicyOverride.toUpperCase()); } catch (IllegalArgumentException e) { log.error("Incorrect value of IGNITE_OVERRIDE_WRITE_THROTTLING_ENABLED property. 
" + "The default throttling policy will be used [plc=" + throttlingPolicyOverride + ", defaultPlc=" + plc + ']'); } } return plc; } /** {@inheritDoc} */ @Override protected void checkRegionEvictionProperties(DataRegionConfiguration regCfg, DataStorageConfiguration dbCfg) throws IgniteCheckedException { if (!regCfg.isPersistenceEnabled()) super.checkRegionEvictionProperties(regCfg, dbCfg); else if (regCfg.getPageEvictionMode() != DataPageEvictionMode.DISABLED) { U.warn(log, "Page eviction mode will have no effect because the oldest pages are evicted automatically " + "if Ignite persistence is enabled: " + regCfg.getName()); } } /** {@inheritDoc} */ @Override protected void checkPageSize(DataStorageConfiguration memCfg) { if (memCfg.getPageSize() == 0) { try { assert cctx.pageStore() instanceof FilePageStoreManager : "Invalid page store manager was created: " + cctx.pageStore(); Path anyIdxPartFile = IgniteUtils.searchFileRecursively( ((FilePageStoreManager)cctx.pageStore()).workDir().toPath(), FilePageStoreManager.INDEX_FILE_NAME); if (anyIdxPartFile != null) { memCfg.setPageSize(resolvePageSizeFromPartitionFile(anyIdxPartFile)); return; } } catch (IgniteCheckedException | IOException | IllegalArgumentException e) { U.quietAndWarn(log, "Attempt to resolve pageSize from store files failed: " + e.getMessage()); U.quietAndWarn(log, "Default page size will be used: " + DataStorageConfiguration.DFLT_PAGE_SIZE + " bytes"); } memCfg.setPageSize(DataStorageConfiguration.DFLT_PAGE_SIZE); } } /** * @param partFile Partition file. 
*/
    private int resolvePageSizeFromPartitionFile(Path partFile) throws IOException, IgniteCheckedException {
        try (FileIO fileIO = ioFactory.create(partFile.toFile())) {
            int minimalHdr = FilePageStore.HEADER_SIZE;

            if (fileIO.size() < minimalHdr)
                throw new IgniteCheckedException("Partition file is too small: " + partFile);

            // The store header is written in little-endian byte order.
            ByteBuffer hdr = ByteBuffer.allocate(minimalHdr).order(ByteOrder.LITTLE_ENDIAN);

            fileIO.readFully(hdr);

            hdr.rewind();

            hdr.getLong(); // Read signature.

            hdr.getInt(); // Read version.

            hdr.get(); // Read type.

            int pageSize = hdr.getInt();

            if (pageSize == 2048) {
                U.quietAndWarn(log, "You are currently using persistent store with 2K pages (DataStorageConfiguration#" +
                    "pageSize). If you use SSD disk, consider migrating to 4K pages for better IO performance.");
            }

            return pageSize;
        }
    }

    /**
     * Stops the checkpointer thread and the async runner pool (if any), waiting for their termination.
     *
     * @param cancel Cancel flag. If {@code true}, the checkpointer is shut down immediately instead of cancelled.
     */
    @SuppressWarnings("unused")
    private void shutdownCheckpointer(boolean cancel) {
        Checkpointer cp = checkpointer;

        if (cp != null) {
            if (cancel)
                cp.shutdownNow();
            else
                cp.cancel();

            try {
                U.join(cp);

                checkpointer = null;
            }
            catch (IgniteInterruptedCheckedException ignore) {
                U.warn(log, "Was interrupted while waiting for checkpointer shutdown, " +
                    "will not wait for checkpoint to finish.");

                cp.shutdownNow();

                // Retry the join uninterruptibly; the interrupt status is restored below.
                while (true) {
                    try {
                        U.join(cp);

                        checkpointer = null;

                        cp.scheduledCp.cpFinishFut.onDone(
                            new NodeStoppingException("Checkpointer is stopped during node stop."));

                        break;
                    }
                    catch (IgniteInterruptedCheckedException ignored) {
                        //Ignore
                    }
                }

                Thread.currentThread().interrupt();
            }
        }

        if (asyncRunner != null) {
            asyncRunner.shutdownNow();

            try {
                asyncRunner.awaitTermination(2, TimeUnit.MINUTES);
            }
            catch (InterruptedException ignore) {
                Thread.currentThread().interrupt();
            }
        }
    }

    /** {@inheritDoc} */
    @Override public void beforeExchange(GridDhtPartitionsExchangeFuture fut) throws IgniteCheckedException {
        // Try to restore partition states.
        if (fut.localJoinExchange() || fut.activateCluster()
            || (fut.exchangeActions() != null && !F.isEmpty(fut.exchangeActions().cacheGroupsToStart()))) {
            // Restore states of all non-local cache groups in parallel under the checkpoint read lock.
            U.doInParallel(
                cctx.kernalContext().getSystemExecutorService(),
                cctx.cache().cacheGroups(),
                cacheGroup -> {
                    if (cacheGroup.isLocal())
                        return null;

                    cctx.database().checkpointReadLock();

                    try {
                        cacheGroup.offheap().restorePartitionStates(Collections.emptyMap());

                        if (cacheGroup.localStartVersion().equals(fut.initialVersion()))
                            cacheGroup.topology().afterStateRestored(fut.initialVersion());

                        fut.timeBag().finishLocalStage("Restore partition states " +
                            "[grp=" + cacheGroup.cacheOrGroupName() + "]");
                    }
                    finally {
                        cctx.database().checkpointReadUnlock();
                    }

                    return null;
                }
            );

            fut.timeBag().finishGlobalStage("Restore partition states");
        }

        if (cctx.kernalContext().query().moduleEnabled()) {
            ExchangeActions acts = fut.exchangeActions();

            if (acts != null) {
                // Pre-create index rebuild futures for every cache started on this exchange.
                if (!F.isEmpty(acts.cacheStartRequests())) {
                    for (ExchangeActions.CacheActionData actionData : acts.cacheStartRequests())
                        prepareIndexRebuildFuture(CU.cacheId(actionData.request().cacheName()));
                }
                else if (acts.localJoinContext() != null && !F.isEmpty(acts.localJoinContext().caches())) {
                    for (T2<DynamicCacheDescriptor, NearCacheConfiguration> tup : acts.localJoinContext().caches())
                        prepareIndexRebuildFuture(tup.get1().cacheId());
                }
            }
        }
    }

    /**
     * Creates a new index rebuild future that should be completed later after exchange is done. The future
     * has to be created before exchange is initialized to guarantee that we will capture a correct future
     * after activation or restore completes.
     * If there was an old future for the given ID, it will be completed.
     *
     * @param cacheId Cache ID.
     */
    private void prepareIndexRebuildFuture(int cacheId) {
        GridFutureAdapter<Void> old = idxRebuildFuts.put(cacheId, new GridFutureAdapter<>());

        // Complete the superseded future so that nobody waits on it forever.
        if (old != null)
            old.onDone();
    }

    /** {@inheritDoc} */
    @Override public void rebuildIndexesIfNeeded(GridDhtPartitionsExchangeFuture fut) {
        GridQueryProcessor qryProc = cctx.kernalContext().query();

        if (qryProc.moduleEnabled()) {
            for (final GridCacheContext cacheCtx : (Collection<GridCacheContext>)cctx.cacheContexts()) {
                // Only caches started on this very exchange may need an index rebuild.
                if (cacheCtx.startTopologyVersion().equals(fut.initialVersion())) {
                    final int cacheId = cacheCtx.cacheId();
                    final GridFutureAdapter<Void> usrFut = idxRebuildFuts.get(cacheId);

                    IgniteInternalFuture<?> rebuildFut = qryProc.rebuildIndexesFromHash(cacheCtx);

                    if (rebuildFut != null) {
                        log().info("Started indexes rebuilding for cache [name=" + cacheCtx.name()
                            + ", grpName=" + cacheCtx.group().name() + ']');

                        assert usrFut != null : "Missing user future for cache: " + cacheCtx.name();

                        rebuildFut.listen(new CI1<IgniteInternalFuture>() {
                            @Override public void apply(IgniteInternalFuture fut) {
                                idxRebuildFuts.remove(cacheId, usrFut);

                                Throwable err = fut.error();

                                usrFut.onDone(err);

                                CacheConfiguration ccfg = cacheCtx.config();

                                if (ccfg != null) {
                                    if (err == null)
                                        log().info("Finished indexes rebuilding for cache [name=" + ccfg.getName()
                                            + ", grpName=" + ccfg.getGroupName() + ']');
                                    else {
                                        if (!(err instanceof NodeStoppingException))
                                            log().error("Failed to rebuild indexes for cache [name=" + ccfg.getName()
                                                + ", grpName=" + ccfg.getGroupName() + ']', err);
                                    }
                                }
                            }
                        });
                    }
                    else {
                        // No rebuild required - complete the pre-created user future right away.
                        if (usrFut != null) {
                            idxRebuildFuts.remove(cacheId, usrFut);

                            usrFut.onDone();
                        }
                    }
                }
            }
        }
    }

    /** {@inheritDoc} */
    @Nullable @Override public IgniteInternalFuture indexRebuildFuture(int cacheId) {
        return idxRebuildFuts.get(cacheId);
    }

    /** {@inheritDoc} */
    @Override public void onCacheGroupsStopped(
        Collection<IgniteBiTuple<CacheGroupContext, Boolean>> stoppedGrps
    ) {
        Map<PageMemoryEx, Collection<Integer>> destroyed = new HashMap<>();

        for
        (IgniteBiTuple<CacheGroupContext, Boolean> tup : stoppedGrps) {
            CacheGroupContext gctx = tup.get1();

            if (!gctx.persistenceEnabled())
                continue;

            snapshotMgr.onCacheGroupStop(gctx, tup.get2());

            PageMemoryEx pageMem = (PageMemoryEx)gctx.dataRegion().pageMemory();

            Collection<Integer> grpIds = destroyed.computeIfAbsent(pageMem, k -> new HashSet<>());

            grpIds.add(tup.get1().groupId());

            pageMem.onCacheGroupDestroyed(tup.get1().groupId());

            if (tup.get2())
                cctx.kernalContext().encryption().onCacheGroupDestroyed(gctx.groupId());
        }

        Collection<IgniteInternalFuture<Void>> clearFuts = new ArrayList<>(destroyed.size());

        // Asynchronously clear page memory of all destroyed groups, then wait for completion.
        for (Map.Entry<PageMemoryEx, Collection<Integer>> entry : destroyed.entrySet()) {
            final Collection<Integer> grpIds = entry.getValue();

            clearFuts.add(entry.getKey().clearAsync((grpId, pageIdg) -> grpIds.contains(grpId), false));
        }

        for (IgniteInternalFuture<Void> clearFut : clearFuts) {
            try {
                clearFut.get();
            }
            catch (IgniteCheckedException e) {
                log.error("Failed to clear page memory", e);
            }
        }

        if (cctx.pageStore() != null) {
            for (IgniteBiTuple<CacheGroupContext, Boolean> tup : stoppedGrps) {
                CacheGroupContext grp = tup.get1();

                try {
                    cctx.pageStore().shutdownForCacheGroup(grp, tup.get2());
                }
                catch (IgniteCheckedException e) {
                    U.error(log, "Failed to gracefully clean page store resources for destroyed cache " +
                        "[cache=" + grp.cacheOrGroupName() + "]", e);
                }
            }
        }
    }

    /**
     * Gets the checkpoint read lock. While this lock is held, checkpoint thread will not acquireSnapshotWorker memory
     * state.
     *
     * @throws IgniteException If failed.
*/ @Override public void checkpointReadLock() { if (checkpointLock.writeLock().isHeldByCurrentThread()) return; long timeout = checkpointReadLockTimeout; long start = U.currentTimeMillis(); boolean interruped = false; try { for (; ; ) { try { if (timeout > 0 && (U.currentTimeMillis() - start) >= timeout) failCheckpointReadLock(); try { if (timeout > 0) { if (!checkpointLock.readLock().tryLock(timeout - (U.currentTimeMillis() - start), TimeUnit.MILLISECONDS)) failCheckpointReadLock(); } else checkpointLock.readLock().lock(); } catch (InterruptedException e) { interruped = true; continue; } if (stopping) { checkpointLock.readLock().unlock(); throw new IgniteException(new NodeStoppingException("Failed to perform cache update: node is stopping.")); } if (checkpointLock.getReadHoldCount() > 1 || safeToUpdatePageMemories()) break; else { checkpointLock.readLock().unlock(); if (timeout > 0 && U.currentTimeMillis() - start >= timeout) failCheckpointReadLock(); try { checkpointer.wakeupForCheckpoint(0, "too many dirty pages").cpBeginFut .getUninterruptibly(); } catch (IgniteFutureTimeoutCheckedException e) { failCheckpointReadLock(); } catch (IgniteCheckedException e) { throw new IgniteException("Failed to wait for checkpoint begin.", e); } } } catch (CheckpointReadLockTimeoutException e) { log.error(e.getMessage(), e); timeout = 0; } } } finally { if (interruped) Thread.currentThread().interrupt(); } if (ASSERTION_ENABLED) CHECKPOINT_LOCK_HOLD_COUNT.set(CHECKPOINT_LOCK_HOLD_COUNT.get() + 1); } /** * Invokes critical failure processing. Always throws. * * @throws CheckpointReadLockTimeoutException If node was not invalidated as result of handling. * @throws IgniteException If node was invalidated as result of handling. 
     */
    private void failCheckpointReadLock() throws CheckpointReadLockTimeoutException, IgniteException {
        String msg = "Checkpoint read lock acquisition has been timed out.";

        IgniteException e = new IgniteException(msg);

        // If the failure handler invalidated the node, propagate the fatal exception instead.
        if (cctx.kernalContext().failure().process(new FailureContext(SYSTEM_CRITICAL_OPERATION_TIMEOUT, e)))
            throw e;

        throw new CheckpointReadLockTimeoutException(msg);
    }

    /** {@inheritDoc} */
    @Override public boolean checkpointLockIsHeldByThread() {
        return !ASSERTION_ENABLED || checkpointLock.isWriteLockedByCurrentThread() ||
            CHECKPOINT_LOCK_HOLD_COUNT.get() > 0 ||
            Thread.currentThread().getName().startsWith(CHECKPOINT_RUNNER_THREAD_PREFIX);
    }

    /**
     * @return {@code true} if all PageMemory instances are safe to update.
     */
    private boolean safeToUpdatePageMemories() {
        Collection<DataRegion> memPlcs = context().database().dataRegions();

        if (memPlcs == null)
            return true;

        for (DataRegion memPlc : memPlcs) {
            // Non-persistent regions never block updates.
            if (!memPlc.config().isPersistenceEnabled())
                continue;

            PageMemoryEx pageMemEx = (PageMemoryEx)memPlc.pageMemory();

            if (!pageMemEx.safeToUpdate())
                return false;
        }

        return true;
    }

    /**
     * Releases the checkpoint read lock.
     */
    @Override public void checkpointReadUnlock() {
        if (checkpointLock.writeLock().isHeldByCurrentThread())
            return;

        checkpointLock.readLock().unlock();

        if (checkpointer != null) {
            // If any persistent region became unsafe to update, nudge the checkpointer.
            Collection<DataRegion> dataRegs = context().database().dataRegions();

            if (dataRegs != null) {
                for (DataRegion dataReg : dataRegs) {
                    if (!dataReg.config().isPersistenceEnabled())
                        continue;

                    PageMemoryEx mem = (PageMemoryEx)dataReg.pageMemory();

                    if (mem != null && !mem.safeToUpdate()) {
                        checkpointer.wakeupForCheckpoint(0, "too many dirty pages");

                        break;
                    }
                }
            }
        }

        if (ASSERTION_ENABLED)
            CHECKPOINT_LOCK_HOLD_COUNT.set(CHECKPOINT_LOCK_HOLD_COUNT.get() - 1);
    }

    /** {@inheritDoc} */
    @Override public synchronized Map<Integer, Map<Integer, Long>> reserveHistoryForExchange() {
        assert reservedForExchange == null : reservedForExchange;

        reservedForExchange = new HashMap<>();

        Map</*grpId*/Integer, Set</*partId*/Integer>> applicableGroupsAndPartitions = partitionsApplicableForWalRebalance();

        Map</*grpId*/Integer, Map</*partId*/Integer, CheckpointEntry>> earliestValidCheckpoints;

        checkpointReadLock();

        try {
            earliestValidCheckpoints = cpHistory.searchAndReserveCheckpoints(applicableGroupsAndPartitions);
        }
        finally {
            checkpointReadUnlock();
        }

        Map</*grpId*/Integer, Map</*partId*/Integer, /*updCntr*/Long>> grpPartsWithCnts = new HashMap<>();

        for (Map.Entry<Integer, Map<Integer, CheckpointEntry>> e : earliestValidCheckpoints.entrySet()) {
            int grpId = e.getKey();

            for (Map.Entry<Integer, CheckpointEntry> e0 : e.getValue().entrySet()) {
                CheckpointEntry cpEntry = e0.getValue();

                int partId = e0.getKey();

                assert cctx.wal().reserved(cpEntry.checkpointMark())
                    : "WAL segment for checkpoint " + cpEntry + " has not reserved";

                Long updCntr = cpEntry.partitionCounter(cctx, grpId, partId);

                if (updCntr != null) {
                    reservedForExchange.computeIfAbsent(grpId, k -> new HashMap<>())
                        .put(partId, new T2<>(updCntr, cpEntry.checkpointMark()));

                    grpPartsWithCnts.computeIfAbsent(grpId, k -> new HashMap<>()).put(partId, updCntr);
                }
            }
        }

        return
        grpPartsWithCnts;
    }

    /**
     * @return Map of group id -> Set of partitions which can be used as suppliers for WAL rebalance.
     */
    private Map<Integer, Set<Integer>> partitionsApplicableForWalRebalance() {
        Map<Integer, Set<Integer>> res = new HashMap<>();

        for (CacheGroupContext grp : cctx.cache().cacheGroups()) {
            if (grp.isLocal())
                continue;

            for (GridDhtLocalPartition locPart : grp.topology().currentLocalPartitions()) {
                // Only owned partitions big enough to make historical rebalance worthwhile.
                if (locPart.state() == GridDhtPartitionState.OWNING && locPart.fullSize() > walRebalanceThreshold)
                    res.computeIfAbsent(grp.groupId(), k -> new HashSet<>()).add(locPart.id());
            }
        }

        return res;
    }

    /** {@inheritDoc} */
    @Override public synchronized void releaseHistoryForExchange() {
        if (reservedForExchange == null)
            return;

        // Releasing the earliest reserved pointer frees the whole reserved WAL range at once.
        FileWALPointer earliestPtr = null;

        for (Map.Entry<Integer, Map<Integer, T2<Long, WALPointer>>> e : reservedForExchange.entrySet()) {
            for (Map.Entry<Integer, T2<Long, WALPointer>> e0 : e.getValue().entrySet()) {
                FileWALPointer ptr = (FileWALPointer) e0.getValue().get2();

                if (earliestPtr == null || ptr.index() < earliestPtr.index())
                    earliestPtr = ptr;
            }
        }

        reservedForExchange = null;

        if (earliestPtr == null)
            return;

        assert cctx.wal().reserved(earliestPtr)
            : "Earliest checkpoint WAL pointer is not reserved for exchange: " + earliestPtr;

        try {
            cctx.wal().release(earliestPtr);
        }
        catch (IgniteCheckedException e) {
            log.error("Failed to release earliest checkpoint WAL pointer: " + earliestPtr, e);
        }
    }

    /** {@inheritDoc} */
    @Override public boolean reserveHistoryForPreloading(int grpId, int partId, long cntr) {
        CheckpointEntry cpEntry = cpHistory.searchCheckpointEntry(grpId, partId, cntr);

        if (cpEntry == null)
            return false;

        WALPointer ptr = cpEntry.checkpointMark();

        if (ptr == null)
            return false;

        boolean reserved = cctx.wal().reserve(ptr);

        if (reserved)
            reservedForPreloading.put(new T2<>(grpId, partId), new T2<>(cntr, ptr));

        return reserved;
    }

    /** {@inheritDoc} */
    @Override public void releaseHistoryForPreloading() {
        for (Map.Entry<T2<Integer, Integer>,
            T2<Long, WALPointer>> e : reservedForPreloading.entrySet()) {
            try {
                cctx.wal().release(e.getValue().get2());
            }
            catch (IgniteCheckedException ex) {
                U.error(log, "Could not release WAL reservation", ex);

                throw new IgniteException(ex);
            }
        }

        reservedForPreloading.clear();
    }

    /**
     * Wakes up the checkpointer and returns a future completed when the checkpoint begins.
     */
    @Nullable @Override public IgniteInternalFuture wakeupForCheckpoint(String reason) {
        Checkpointer cp = checkpointer;

        if (cp != null)
            return cp.wakeupForCheckpoint(0, reason).cpBeginFut;

        return null;
    }

    /** {@inheritDoc} */
    @Override public void waitForCheckpoint(String reason) throws IgniteCheckedException {
        Checkpointer cp = checkpointer;

        if (cp == null)
            return;

        CheckpointProgressSnapshot progSnapshot = cp.wakeupForCheckpoint(0, reason);

        IgniteInternalFuture fut1 = progSnapshot.cpFinishFut;

        fut1.get();

        if (!progSnapshot.started)
            return;

        // NOTE(review): a second checkpoint is awaited here - presumably because the first one was
        // already running when requested and may not cover this call's updates; verify with Checkpointer.
        IgniteInternalFuture fut2 = cp.wakeupForCheckpoint(0, reason).cpFinishFut;

        assert fut1 != fut2;

        fut2.get();
    }

    /** {@inheritDoc} */
    @Override public CheckpointFuture forceCheckpoint(String reason) {
        Checkpointer cp = checkpointer;

        if (cp == null)
            return null;

        return cp.wakeupForCheckpoint(0, reason);
    }

    /** {@inheritDoc} */
    @Override public WALPointer lastCheckpointMarkWalPointer() {
        CheckpointEntry lastCheckpointEntry = cpHistory == null ? null : cpHistory.lastCheckpoint();

        return lastCheckpointEntry == null ? null : lastCheckpointEntry.checkpointMark();
    }

    /**
     * @return Checkpoint directory.
     */
    public File checkpointDirectory() {
        return cpDir;
    }

    /**
     * @param lsnr Listener.
     */
    public void addCheckpointListener(DbCheckpointListener lsnr) {
        lsnrs.add(lsnr);
    }

    /**
     * @param lsnr Listener.
     */
    public void removeCheckpointListener(DbCheckpointListener lsnr) {
        lsnrs.remove(lsnr);
    }

    /**
     * @return Read checkpoint status.
     * @throws IgniteCheckedException If failed to read checkpoint status page.
*/ @SuppressWarnings("TooBroadScope") private CheckpointStatus readCheckpointStatus() throws IgniteCheckedException { long lastStartTs = 0; long lastEndTs = 0; UUID startId = CheckpointStatus.NULL_UUID; UUID endId = CheckpointStatus.NULL_UUID; File startFile = null; File endFile = null; WALPointer startPtr = CheckpointStatus.NULL_PTR; WALPointer endPtr = CheckpointStatus.NULL_PTR; File dir = cpDir; if (!dir.exists()) { log.warning("Read checkpoint status: checkpoint directory is not found."); return new CheckpointStatus(0, startId, startPtr, endId, endPtr); } File[] files = dir.listFiles(); for (File file : files) { Matcher matcher = CP_FILE_NAME_PATTERN.matcher(file.getName()); if (matcher.matches()) { long ts = Long.parseLong(matcher.group(1)); UUID id = UUID.fromString(matcher.group(2)); CheckpointEntryType type = CheckpointEntryType.valueOf(matcher.group(3)); if (type == CheckpointEntryType.START && ts > lastStartTs) { lastStartTs = ts; startId = id; startFile = file; } else if (type == CheckpointEntryType.END && ts > lastEndTs) { lastEndTs = ts; endId = id; endFile = file; } } } ByteBuffer buf = ByteBuffer.allocate(FileWALPointer.POINTER_SIZE); buf.order(ByteOrder.nativeOrder()); if (startFile != null) startPtr = readPointer(startFile, buf); if (endFile != null) endPtr = readPointer(endFile, buf); if (log.isInfoEnabled()) log.info("Read checkpoint status [startMarker=" + startFile + ", endMarker=" + endFile + ']'); return new CheckpointStatus(lastStartTs, startId, startPtr, endId, endPtr); } /** * Loads WAL pointer from CP file * * @param cpMarkerFile Checkpoint mark file. * @return WAL pointer. * @throws IgniteCheckedException If failed to read mark file. 
     */
    private WALPointer readPointer(File cpMarkerFile, ByteBuffer buf) throws IgniteCheckedException {
        buf.position(0);

        try (FileIO io = ioFactory.create(cpMarkerFile, READ)) {
            io.readFully(buf);

            buf.flip();

            return new FileWALPointer(buf.getLong(), buf.getInt(), buf.getInt());
        }
        catch (IOException e) {
            throw new IgniteCheckedException(
                "Failed to read checkpoint pointer from marker file: " + cpMarkerFile.getAbsolutePath(), e);
        }
    }

    /** {@inheritDoc} */
    @Override public void startMemoryRestore(GridKernalContext kctx) throws IgniteCheckedException {
        if (kctx.clientNode())
            return;

        checkpointReadLock();

        try {
            // Preform early regions startup before restoring state.
            initAndStartRegions(kctx.config().getDataStorageConfiguration());

            // Restore binary memory for all not WAL disabled cache groups.
            restoreBinaryMemory(
                groupsWithEnabledWal(),
                physicalRecords()
            );

            if (recoveryVerboseLogging && log.isInfoEnabled()) {
                log.info("Partition states information after BINARY RECOVERY phase:");

                dumpPartitionsInfo(cctx, log);
            }

            CheckpointStatus status = readCheckpointStatus();

            RestoreLogicalState logicalState = applyLogicalUpdates(
                status,
                groupsWithEnabledWal(),
                logicalRecords(),
                true
            );

            if (recoveryVerboseLogging && log.isInfoEnabled()) {
                log.info("Partition states information after LOGICAL RECOVERY phase:");

                dumpPartitionsInfo(cctx, log);
            }

            walTail = tailPointer(logicalState.lastReadRecordPointer().orElse(null));

            cctx.wal().onDeActivate(kctx);
        }
        catch (IgniteCheckedException e) {
            releaseFileLock();

            throw e;
        }
        finally {
            checkpointReadUnlock();
        }
    }

    /**
     * @param f Consumer.
     * @return Accumulated result for all page stores.
     */
    public long forAllPageStores(ToLongFunction<PageStore> f) {
        long res = 0;

        for (CacheGroupContext gctx : cctx.cache().cacheGroups())
            res += forGroupPageStores(gctx, f);

        return res;
    }

    /**
     * @param grpId Cache group id.
     * @param partId Partition ID.
     * @return Page store.
     * @throws IgniteCheckedException If failed.
     */
    public PageStore getPageStore(int grpId, int partId) throws IgniteCheckedException {
        return storeMgr.getStore(grpId, partId);
    }

    /**
     * @param gctx Group context.
     * @param f Consumer.
     * @return Accumulated result for all page stores.
     */
    public long forGroupPageStores(CacheGroupContext gctx, ToLongFunction<PageStore> f) {
        int groupId = gctx.groupId();

        long res = 0;

        try {
            Collection<PageStore> stores = storeMgr.getStores(groupId);

            if (stores != null) {
                for (PageStore store : stores)
                    res += f.applyAsLong(store);
            }
        }
        catch (IgniteCheckedException e) {
            throw new IgniteException(e);
        }

        return res;
    }

    /**
     * Calculates tail pointer for WAL at the end of logical recovery.
     *
     * @param from Start replay WAL from.
     * @return Tail pointer.
     * @throws IgniteCheckedException If failed.
     */
    private WALPointer tailPointer(WALPointer from) throws IgniteCheckedException {
        WALIterator it = cctx.wal().replay(from);

        try {
            // Drain the iterator to find the last record that can still be read.
            while (it.hasNextX()) {
                IgniteBiTuple<WALPointer, WALRecord> rec = it.nextX();

                if (rec == null)
                    break;
            }
        }
        finally {
            it.close();
        }

        return it.lastRead().map(WALPointer::next).orElse(null);
    }

    /**
     * Called when all partitions have been fully restored and pre-created on node start.
     *
     * Starts checkpointing process and initiates first checkpoint.
     *
     * @throws IgniteCheckedException If first checkpoint has failed.
     */
    @Override public void onStateRestored(AffinityTopologyVersion topVer) throws IgniteCheckedException {
        IgniteThread cpThread = new IgniteThread(cctx.igniteInstanceName(), "db-checkpoint-thread", checkpointer);

        cpThread.start();

        checkpointerThread = cpThread;

        CheckpointProgressSnapshot chp = checkpointer.wakeupForCheckpoint(0, "node started");

        if (chp != null)
            chp.cpBeginFut.get();
    }

    /**
     * @param status Checkpoint status.
     * @param cacheGroupsPredicate Cache groups to restore.
     * @throws IgniteCheckedException If failed.
     * @throws StorageException In case I/O error occurred during operations with storage.
     */
    private RestoreBinaryState performBinaryMemoryRestore(
        CheckpointStatus status,
        IgnitePredicate<Integer> cacheGroupsPredicate,
        IgniteBiPredicate<WALRecord.RecordType, WALPointer> recordTypePredicate,
        boolean finalizeState
    ) throws IgniteCheckedException {
        if (log.isInfoEnabled())
            log.info("Checking memory state [lastValidPos=" + status.endPtr + ", lastMarked="
                + status.startPtr + ", lastCheckpointId=" + status.cpStartId + ']');

        WALPointer recPtr = status.endPtr;

        boolean apply = status.needRestoreMemory();

        if (apply) {
            if (finalizeState)
                U.quietAndWarn(log, "Ignite node stopped in the middle of checkpoint. Will restore memory state and " +
                    "finish checkpoint on node start.");

            cctx.pageStore().beginRecover();

            // The checkpoint marker must point at a checkpoint record in the WAL.
            WALRecord rec = cctx.wal().read(status.startPtr);

            if (!(rec instanceof CheckpointRecord))
                throw new StorageException("Checkpoint marker doesn't point to checkpoint record " +
                    "[ptr=" + status.startPtr + ", rec=" + rec + "]");

            WALPointer cpMark = ((CheckpointRecord)rec).checkpointMark();

            if (cpMark != null) {
                log.info("Restoring checkpoint after logical recovery, will start physical recovery from " +
                    "back pointer: " + cpMark);

                recPtr = cpMark;
            }
        }
        else
            cctx.wal().notchLastCheckpointPtr(status.startPtr);

        AtomicReference<IgniteCheckedException> applyError = new AtomicReference<>();

        StripedExecutor exec = cctx.kernalContext().getStripedExecutorService();

        Semaphore semaphore = new Semaphore(semaphorePertmits(exec));

        long start = U.currentTimeMillis();

        long lastArchivedSegment = cctx.wal().lastArchivedSegment();

        WALIterator it = cctx.wal().replay(recPtr, recordTypePredicate);

        RestoreBinaryState restoreBinaryState = new RestoreBinaryState(status, it, lastArchivedSegment, cacheGroupsPredicate);

        AtomicLong applied = new AtomicLong();

        try {
            // Replay physical records, dispatching page updates to the striped executor.
            while (it.hasNextX()) {
                if (applyError.get() != null)
                    throw applyError.get();

                WALRecord rec = restoreBinaryState.next();

                if (rec == null)
                    break;

                switch (rec.type()) {
                    case PAGE_RECORD:
                        if (restoreBinaryState.needApplyBinaryUpdate()) {
                            PageSnapshot pageSnapshot = (PageSnapshot)rec;

                            // Here we do not require tag check because we may be applying memory changes after
                            // several repetitive restarts and the same pages may have changed several times.
                            int groupId = pageSnapshot.fullPageId().groupId();
                            int partId = partId(pageSnapshot.fullPageId().pageId());

                            stripedApplyPage((pageMem) -> {
                                try {
                                    applyPageSnapshot(pageMem, pageSnapshot);

                                    applied.incrementAndGet();
                                }
                                catch (IgniteCheckedException e) {
                                    U.error(log, "Failed to apply page snapshot, " + pageSnapshot);

                                    applyError.compareAndSet(null, e);
                                }
                            }, groupId, partId, exec, semaphore
                            );
                        }

                        break;

                    case PART_META_UPDATE_STATE:
                        PartitionMetaStateRecord metaStateRecord = (PartitionMetaStateRecord)rec;

                        {
                            int groupId = metaStateRecord.groupId();
                            int partId = metaStateRecord.partitionId();

                            stripedApplyPage((pageMem) -> {
                                GridDhtPartitionState state = fromOrdinal(metaStateRecord.state());

                                if (state == null || state == GridDhtPartitionState.EVICTED)
                                    schedulePartitionDestroy(groupId, partId);
                                else {
                                    try {
                                        cancelOrWaitPartitionDestroy(groupId, partId);
                                    }
                                    catch (IgniteCheckedException e) {
                                        U.error(log, "Failed to cancel or wait partition destroy, " + metaStateRecord);

                                        applyError.compareAndSet(null, e);
                                    }
                                }
                            }, groupId, partId, exec, semaphore);
                        }

                        break;

                    case PARTITION_DESTROY:
                        PartitionDestroyRecord destroyRecord = (PartitionDestroyRecord)rec;

                        {
                            int groupId = destroyRecord.groupId();
                            int partId = destroyRecord.partitionId();

                            stripedApplyPage((pageMem) -> {
                                pageMem.invalidate(groupId, partId);

                                schedulePartitionDestroy(groupId, partId);
                            }, groupId, partId, exec, semaphore);
                        }

                        break;

                    default:
                        if (restoreBinaryState.needApplyBinaryUpdate() && rec instanceof PageDeltaRecord) {
                            PageDeltaRecord pageDelta = (PageDeltaRecord)rec;

                            int groupId = pageDelta.groupId();
                            int partId = partId(pageDelta.pageId());

                            stripedApplyPage((pageMem) -> {
                                try {
                                    applyPageDelta(pageMem, pageDelta);

                                    applied.incrementAndGet();
                                }
                                catch (IgniteCheckedException e) {
                                    U.error(log, "Failed to apply page delta, " + pageDelta);

                                    applyError.compareAndSet(null, e);
                                }
                            }, groupId, partId, exec, semaphore);
                        }
                }
            }
        }
        finally {
            it.close();
        }

        awaitApplyComplete(exec, applyError);

        if (!finalizeState)
            return null;

        FileWALPointer lastReadPtr = restoreBinaryState.lastReadRecordPointer().orElse(null);

        if (status.needRestoreMemory()) {
            if (restoreBinaryState.needApplyBinaryUpdate())
                throw new StorageException("Failed to restore memory state (checkpoint marker is present " +
                    "on disk, but checkpoint record is missed in WAL) " +
                    "[cpStatus=" + status + ", lastRead=" + lastReadPtr + "]");

            log.info("Finished applying memory changes [changesApplied=" + applied +
                ", time=" + (U.currentTimeMillis() - start) + " ms]");

            assert applied.get() > 0;

            finalizeCheckpointOnRecovery(status.cpStartTs, status.cpStartId, status.startPtr, exec);
        }

        cpHistory.initialize(retreiveHistory());

        return restoreBinaryState;
    }

    /**
     * Calculate the maximum number of concurrent tasks for apply through the striped executor.
     *
     * @param exec Striped executor.
     * @return Number of permits.
     */
    private int semaphorePertmits(StripedExecutor exec) {
        // 4 task per-stripe by default.
        int permits = exec.stripes() * 4;

        long maxMemory = Runtime.getRuntime().maxMemory();

        // Heuristic calculation part of heap size as a maximum number of concurrent tasks.
        int permits0 = (int)((maxMemory * 0.2) / (4096 * 2));

        // May be for small heap. Get a low number of permits.
        if (permits0 < permits)
            permits = permits0;

        // Property for override any calculation.
        return getInteger(IGNITE_RECOVERY_SEMAPHORE_PERMITS, permits);
    }

    /**
     * @param exec Striped executor.
     * @param applyError Check error reference.
     */
    private void awaitApplyComplete(
        StripedExecutor exec,
        AtomicReference<IgniteCheckedException> applyError
    ) throws IgniteCheckedException {
        if (applyError.get() != null)
            throw applyError.get(); // Fail-fast check.
        else {
            try {
                // Await completion apply tasks in all stripes.
                exec.awaitComplete();
            }
            catch (InterruptedException e) {
                throw new IgniteInterruptedException(e);
            }

            // Checking error after all task applied.
            if (applyError.get() != null)
                throw applyError.get();
        }
    }

    /**
     * @param consumer Runnable task.
     * @param grpId Group Id.
     * @param partId Partition Id.
     * @param exec Striped executor.
     */
    public void stripedApplyPage(
        Consumer<PageMemoryEx> consumer,
        int grpId,
        int partId,
        StripedExecutor exec,
        Semaphore semaphore
    ) throws IgniteCheckedException {
        assert consumer != null;
        assert exec != null;
        assert semaphore != null;

        PageMemoryEx pageMem = getPageMemoryForCacheGroup(grpId);

        // No page memory for this group (e.g. the descriptor was removed) - nothing to apply.
        if (pageMem == null)
            return;

        stripedApply(() -> consumer.accept(pageMem), grpId, partId, exec, semaphore);
    }

    /**
     * @param run Runnable task.
     * @param grpId Group Id.
     * @param partId Partition Id.
     * @param exec Striped executor.
     */
    public void stripedApply(
        Runnable run,
        int grpId,
        int partId,
        StripedExecutor exec,
        Semaphore semaphore
    ) {
        assert run != null;
        assert exec != null;
        assert semaphore != null;

        int stripes = exec.stripes();

        // The same (group, partition) pair always maps to the same stripe, preserving per-partition order.
        int stripe = U.stripeIdx(stripes, grpId, partId);

        assert stripe >= 0 && stripe <= stripes : "idx=" + stripe + ", stripes=" + stripes;

        try {
            semaphore.acquire();
        }
        catch (InterruptedException e) {
            throw new IgniteInterruptedException(e);
        }

        exec.execute(stripe, () -> {
            // WA for avoid assert check in PageMemory, that current thread hold chpLock.
            CHECKPOINT_LOCK_HOLD_COUNT.set(1);

            try {
                run.run();
            }
            finally {
                CHECKPOINT_LOCK_HOLD_COUNT.set(0);

                semaphore.release();
            }
        });
    }

    /**
     * @param pageMem Page memory.
     * @param pageSnapshotRecord Page snapshot record.
     * @throws IgniteCheckedException If failed.
     */
    public void applyPageSnapshot(PageMemoryEx pageMem, PageSnapshot pageSnapshotRecord) throws IgniteCheckedException {
        int grpId = pageSnapshotRecord.fullPageId().groupId();
        long pageId = pageSnapshotRecord.fullPageId().pageId();

        long page = pageMem.acquirePage(grpId, pageId, IoStatisticsHolderNoOp.INSTANCE, true);

        try {
            long pageAddr = pageMem.writeLock(grpId, pageId, page, true);

            try {
                // Overwrite the page with the full snapshot content recorded in the WAL.
                PageUtils.putBytes(pageAddr, 0, pageSnapshotRecord.pageData());
            }
            finally {
                pageMem.writeUnlock(grpId, pageId, page, null, true, true);
            }
        }
        finally {
            pageMem.releasePage(grpId, pageId, page);
        }
    }

    /**
     * @param pageMem Page memory.
     * @param pageDeltaRecord Page delta record.
     * @throws IgniteCheckedException If failed.
     */
    private void applyPageDelta(PageMemoryEx pageMem, PageDeltaRecord pageDeltaRecord) throws IgniteCheckedException {
        int grpId = pageDeltaRecord.groupId();
        long pageId = pageDeltaRecord.pageId();

        // Here we do not require tag check because we may be applying memory changes after
        // several repetitive restarts and the same pages may have changed several times.
        long page = pageMem.acquirePage(grpId, pageId, IoStatisticsHolderNoOp.INSTANCE, true);

        try {
            long pageAddr = pageMem.writeLock(grpId, pageId, page, true);

            try {
                pageDeltaRecord.applyDelta(pageMem, pageAddr);
            }
            finally {
                pageMem.writeUnlock(grpId, pageId, page, null, true, true);
            }
        }
        finally {
            pageMem.releasePage(grpId, pageId, page);
        }
    }

    /**
     * Obtains PageMemory reference from cache descriptor instead of cache context.
     *
     * @param grpId Cache group id.
     * @return PageMemoryEx instance.
     * @throws IgniteCheckedException if no DataRegion is configured for a name obtained from cache descriptor.
     */
    private PageMemoryEx getPageMemoryForCacheGroup(int grpId) throws IgniteCheckedException {
        if (grpId == MetaStorage.METASTORAGE_CACHE_ID)
            return (PageMemoryEx)dataRegion(METASTORE_DATA_REGION_NAME).pageMemory();

        // TODO IGNITE-7792 add generic mapping.
        if (grpId == TxLog.TX_LOG_CACHE_ID)
            return (PageMemoryEx)dataRegion(TxLog.TX_LOG_CACHE_NAME).pageMemory();

        // TODO IGNITE-5075: cache descriptor can be removed.
        GridCacheSharedContext sharedCtx = context();

        CacheGroupDescriptor desc = sharedCtx.cache().cacheGroupDescriptors().get(grpId);

        if (desc == null)
            return null;

        String memPlcName = desc.config().getDataRegionName();

        return (PageMemoryEx)sharedCtx.database().dataRegion(memPlcName).pageMemory();
    }

    /**
     * Apply update from some iterator and with specific filters.
     *
     * @param it WalIterator.
     * @param recPredicate Wal record filter.
     * @param entryPredicate Entry filter.
     */
    public void applyUpdatesOnRecovery(
        @Nullable WALIterator it,
        IgniteBiPredicate<WALPointer, WALRecord> recPredicate,
        IgnitePredicate<DataEntry> entryPredicate
    ) throws IgniteCheckedException {
        if (it == null)
            return;

        // Re-apply the filtered records without logging them to the WAL again.
        cctx.walState().runWithOutWAL(() -> {
            while (it.hasNext()) {
                IgniteBiTuple<WALPointer, WALRecord> next = it.next();

                WALRecord rec = next.get2();

                if (!recPredicate.apply(next.get1(), rec))
                    break;

                switch (rec.type()) {
                    case MVCC_DATA_RECORD:
                    case DATA_RECORD:
                        checkpointReadLock();

                        try {
                            DataRecord dataRec = (DataRecord)rec;

                            for (DataEntry dataEntry : dataRec.writeEntries()) {
                                if (entryPredicate.apply(dataEntry)) {
                                    checkpointReadLock();

                                    try {
                                        int cacheId = dataEntry.cacheId();

                                        GridCacheContext cacheCtx = cctx.cacheContext(cacheId);

                                        if (cacheCtx != null)
                                            applyUpdate(cacheCtx, dataEntry);
                                        else if (log != null)
                                            log.warning("Cache is not started. Updates cannot be applied " +
                                                "[cacheId=" + cacheId + ']');
                                    }
                                    finally {
                                        checkpointReadUnlock();
                                    }
                                }
                            }
                        }
                        catch (IgniteCheckedException e) {
                            throw new IgniteException(e);
                        }
                        finally {
                            checkpointReadUnlock();
                        }

                        break;

                    case MVCC_TX_RECORD:
                        checkpointReadLock();

                        try {
                            MvccTxRecord txRecord = (MvccTxRecord)rec;

                            byte txState = convertToTxState(txRecord.state());

                            cctx.coordinators().updateState(txRecord.mvccVersion(), txState, true);
                        }
                        finally {
                            checkpointReadUnlock();
                        }

                        break;

                    default:
                        // Skip other records.
                }
            }
        });
    }

    /**
     * @param status Last registered checkpoint status.
     * @throws IgniteCheckedException If failed to apply updates.
     * @throws StorageException If IO exception occurred while reading write-ahead log.
     */
    private RestoreLogicalState applyLogicalUpdates(
        CheckpointStatus status,
        IgnitePredicate<Integer> cacheGroupsPredicate,
        IgniteBiPredicate<WALRecord.RecordType, WALPointer> recordTypePredicate,
        boolean skipFieldLookup
    ) throws IgniteCheckedException {
        if (log.isInfoEnabled())
            log.info("Applying lost cache updates since last checkpoint record [lastMarked="
                + status.startPtr + ", lastCheckpointId=" + status.cpStartId + ']');

        if (skipFieldLookup)
            cctx.kernalContext().query().skipFieldLookup(true);

        long start = U.currentTimeMillis();

        AtomicReference<IgniteCheckedException> applyError = new AtomicReference<>();

        AtomicLong applied = new AtomicLong();

        long lastArchivedSegment = cctx.wal().lastArchivedSegment();

        StripedExecutor exec = cctx.kernalContext().getStripedExecutorService();

        Semaphore semaphore = new Semaphore(semaphorePertmits(exec));

        WALIterator it = cctx.wal().replay(status.startPtr, recordTypePredicate);

        RestoreLogicalState restoreLogicalState = new RestoreLogicalState(it, lastArchivedSegment, cacheGroupsPredicate);

        try {
            while (it.hasNextX()) {
                WALRecord rec = restoreLogicalState.next();

                if (rec == null)
                    break;

                switch (rec.type()) {
                    case MVCC_DATA_RECORD:
                    case DATA_RECORD:
                    case ENCRYPTED_DATA_RECORD:
                        DataRecord dataRec = (DataRecord)rec;

                        for (DataEntry
dataEntry : dataRec.writeEntries()) { int cacheId = dataEntry.cacheId(); DynamicCacheDescriptor cacheDesc = cctx.cache().cacheDescriptor(cacheId); // Can empty in case recovery node on blt changed. if (cacheDesc == null) continue; stripedApply(() -> { GridCacheContext cacheCtx = cctx.cacheContext(cacheId); try { applyUpdate(cacheCtx, dataEntry); } catch (IgniteCheckedException e) { U.error(log, "Failed to apply data entry, dataEntry=" + dataEntry + ", ptr=" + dataRec.position()); applyError.compareAndSet(null, e); } applied.incrementAndGet(); }, cacheId, dataEntry.partitionId(), exec, semaphore); } break; case MVCC_TX_RECORD: MvccTxRecord txRecord = (MvccTxRecord)rec; byte txState = convertToTxState(txRecord.state()); cctx.coordinators().updateState(txRecord.mvccVersion(), txState, true); break; case PART_META_UPDATE_STATE: PartitionMetaStateRecord metaStateRecord = (PartitionMetaStateRecord)rec; GroupPartitionId groupPartitionId = new GroupPartitionId( metaStateRecord.groupId(), metaStateRecord.partitionId() ); PartitionRecoverState state = new PartitionRecoverState( (int)metaStateRecord.state(), metaStateRecord.updateCounter() ); restoreLogicalState.partitionRecoveryStates.put(groupPartitionId, state); break; case METASTORE_DATA_RECORD: MetastoreDataRecord metastoreDataRecord = (MetastoreDataRecord)rec; metaStorage.applyUpdate(metastoreDataRecord.key(), metastoreDataRecord.value()); break; case META_PAGE_UPDATE_NEXT_SNAPSHOT_ID: case META_PAGE_UPDATE_LAST_SUCCESSFUL_SNAPSHOT_ID: case META_PAGE_UPDATE_LAST_SUCCESSFUL_FULL_SNAPSHOT_ID: case META_PAGE_UPDATE_LAST_ALLOCATED_INDEX: PageDeltaRecord pageDelta = (PageDeltaRecord)rec; stripedApplyPage((pageMem) -> { try { applyPageDelta(pageMem, pageDelta); } catch (IgniteCheckedException e) { U.error(log, "Failed to apply page delta, " + pageDelta); applyError.compareAndSet(null, e); } }, pageDelta.groupId(), partId(pageDelta.pageId()), exec, semaphore); break; default: // Skip other records. 
                }
            }
        }
        finally {
            // Always close the WAL iterator and restore query field lookup, even on error.
            it.close();

            if (skipFieldLookup)
                cctx.kernalContext().query().skipFieldLookup(false);
        }

        // Wait until every apply task submitted to the striped executor has finished;
        // rethrows the first error collected in applyError.
        awaitApplyComplete(exec, applyError);

        if (log.isInfoEnabled())
            log.info("Finished applying WAL changes [updatesApplied=" + applied +
                ", time=" + (U.currentTimeMillis() - start) + " ms]");

        for (DatabaseLifecycleListener lsnr : getDatabaseListeners(cctx.kernalContext()))
            lsnr.afterLogicalUpdatesApplied(this, restoreLogicalState);

        return restoreLogicalState;
    }

    /**
     * Convert {@link TransactionState} to Mvcc {@link TxState}.
     *
     * @param state TransactionState.
     * @return TxState.
     * @throws IllegalStateException If the transaction state has no Mvcc counterpart.
     */
    private byte convertToTxState(TransactionState state) {
        switch (state) {
            case PREPARED:
                return TxState.PREPARED;

            case COMMITTED:
                return TxState.COMMITTED;

            case ROLLED_BACK:
                return TxState.ABORTED;

            default:
                throw new IllegalStateException("Unsupported TxState.");
        }
    }

    /**
     * Wal truncate callBack. Removes checkpoint files whose entries were dropped
     * from the checkpoint history by the truncation.
     *
     * @param highBound WALPointer.
     */
    public void onWalTruncated(WALPointer highBound) throws IgniteCheckedException {
        List<CheckpointEntry> removedFromHistory = cpHistory.onWalTruncated(highBound);

        for (CheckpointEntry cp : removedFromHistory)
            removeCheckpointFiles(cp);
    }

    /**
     * Applies a single logged data entry to the cache store during recovery.
     *
     * @param cacheCtx Cache context to apply an update.
     * @param dataEntry Data entry to apply.
     * @throws IgniteCheckedException If failed to restore.
     */
    private void applyUpdate(GridCacheContext cacheCtx, DataEntry dataEntry) throws IgniteCheckedException {
        int partId = dataEntry.partitionId();

        // Entries logged without an explicit partition carry -1; recompute it from affinity.
        if (partId == -1)
            partId = cacheCtx.affinity().partition(dataEntry.key());

        GridDhtLocalPartition locPart = cacheCtx.isLocal() ?
null : cacheCtx.topology().forceCreatePartition(partId); switch (dataEntry.op()) { case CREATE: case UPDATE: if (dataEntry instanceof MvccDataEntry) { cacheCtx.offheap().mvccApplyUpdate( cacheCtx, dataEntry.key(), dataEntry.value(), dataEntry.writeVersion(), dataEntry.expireTime(), locPart, ((MvccDataEntry)dataEntry).mvccVer()); } else { cacheCtx.offheap().update( cacheCtx, dataEntry.key(), dataEntry.value(), dataEntry.writeVersion(), dataEntry.expireTime(), locPart, null); } if (dataEntry.partitionCounter() != 0) cacheCtx.offheap().onPartitionInitialCounterUpdated(partId, dataEntry.partitionCounter()); break; case DELETE: if (dataEntry instanceof MvccDataEntry) { cacheCtx.offheap().mvccApplyUpdate( cacheCtx, dataEntry.key(), null, dataEntry.writeVersion(), 0L, locPart, ((MvccDataEntry)dataEntry).mvccVer()); } else cacheCtx.offheap().remove(cacheCtx, dataEntry.key(), partId, locPart); if (dataEntry.partitionCounter() != 0) cacheCtx.offheap().onPartitionInitialCounterUpdated(partId, dataEntry.partitionCounter()); break; case READ: // do nothing break; default: throw new IgniteCheckedException("Invalid operation for WAL entry update: " + dataEntry.op()); } } /** * @throws IgniteCheckedException If failed. */ private void finalizeCheckpointOnRecovery( long cpTs, UUID cpId, WALPointer walPtr, StripedExecutor exec ) throws IgniteCheckedException { assert cpTs != 0; long start = System.currentTimeMillis(); Collection<DataRegion> regions = dataRegions(); Collection<GridMultiCollectionWrapper<FullPageId>> res = new ArrayList(regions.size()); int pagesNum = 0; // Collect collection of dirty pages from all regions. for (DataRegion memPlc : regions) { if (memPlc.config().isPersistenceEnabled()){ GridMultiCollectionWrapper<FullPageId> nextCpPagesCol = ((PageMemoryEx)memPlc.pageMemory()).beginCheckpoint(); pagesNum += nextCpPagesCol.size(); res.add(nextCpPagesCol); } } // Sort and split all dirty pages set to several stripes. 
GridMultiCollectionWrapper<FullPageId> pages = splitAndSortCpPagesIfNeeded( new IgniteBiTuple<>(res, pagesNum), exec.stripes()); // Identity stores set for future fsync. Collection<PageStore> updStores = new GridConcurrentHashSet<>(); AtomicInteger cpPagesCnt = new AtomicInteger(); // Shared refernce for tracking exception during write pages. AtomicReference<IgniteCheckedException> writePagesError = new AtomicReference<>(); for (int i = 0; i < pages.collectionsSize(); i++) { // Calculate stripe index. int stripeIdx = i % exec.stripes(); // Inner collection index. int innerIdx = i; exec.execute(stripeIdx, () -> { // Local buffer for write pages. ByteBuffer writePageBuf = ByteBuffer.allocateDirect(pageSize()); writePageBuf.order(ByteOrder.nativeOrder()); Collection<FullPageId> pages0 = pages.innerCollection(innerIdx); FullPageId pageId = null; try { for (FullPageId fullId : pages0) { // Fail-fast break if some exception occurred. if (writePagesError.get() != null) break; writePageBuf.rewind(); PageMemoryEx pageMem = getPageMemoryForCacheGroup(fullId.groupId()); // Write page content to writePageBuf. Integer tag = pageMem.getForCheckpoint(fullId, writePageBuf, null); assert tag == null || tag != PageMemoryImpl.TRY_AGAIN_TAG : "Lock is held by other thread for page " + fullId; if (tag != null) { writePageBuf.rewind(); // Save pageId to local variable for future using if exception occurred. pageId = fullId; // Write writePageBuf to page store. PageStore store = storeMgr.writeInternal( fullId.groupId(), fullId.pageId(), writePageBuf, tag, true); writePageBuf.rewind(); // Save store for future fsync. updStores.add(store); } } // Add number of handled pages. cpPagesCnt.addAndGet(pages0.size()); } catch (IgniteCheckedException e) { U.error(log, "Failed to write page to pageStore, pageId=" + pageId); writePagesError.compareAndSet(null, e); } }); } // Await completion all write tasks. 
awaitApplyComplete(exec, writePagesError); long written = U.currentTimeMillis(); // Fsync all touched stores. for (PageStore updStore : updStores) updStore.sync(); long fsync = U.currentTimeMillis(); for (DataRegion memPlc : regions) { if (memPlc.config().isPersistenceEnabled()) ((PageMemoryEx)memPlc.pageMemory()).finishCheckpoint(); } ByteBuffer tmpWriteBuf = ByteBuffer.allocateDirect(pageSize()); tmpWriteBuf.order(ByteOrder.nativeOrder()); CheckpointEntry cp = prepareCheckpointEntry( tmpWriteBuf, cpTs, cpId, walPtr, null, CheckpointEntryType.END); writeCheckpointEntry(tmpWriteBuf, cp, CheckpointEntryType.END); cctx.pageStore().finishRecover(); if (log.isInfoEnabled()) log.info(String.format("Checkpoint finished [cpId=%s, pages=%d, markPos=%s, " + "pagesWrite=%dms, fsync=%dms, total=%dms]", cpId, cpPagesCnt.get(), walPtr, written - start, fsync - written, fsync - start)); } /** * Prepares checkpoint entry containing WAL pointer to checkpoint record. * Writes into given {@code ptrBuf} WAL pointer content. * * @param entryBuf Buffer to fill * @param cpTs Checkpoint timestamp. * @param cpId Checkpoint id. * @param ptr WAL pointer containing record. * @param rec Checkpoint WAL record. * @param type Checkpoint type. * @return Checkpoint entry. */ private CheckpointEntry prepareCheckpointEntry( ByteBuffer entryBuf, long cpTs, UUID cpId, WALPointer ptr, @Nullable CheckpointRecord rec, CheckpointEntryType type ) { assert ptr instanceof FileWALPointer; FileWALPointer filePtr = (FileWALPointer)ptr; entryBuf.rewind(); entryBuf.putLong(filePtr.index()); entryBuf.putInt(filePtr.fileOffset()); entryBuf.putInt(filePtr.length()); entryBuf.flip(); return createCheckPointEntry(cpTs, ptr, cpId, rec, type); } /** * Writes checkpoint entry buffer {@code entryBuf} to specified checkpoint file with 2-phase protocol. * * @param entryBuf Checkpoint entry buffer to write. * @param cp Checkpoint entry. * @param type Checkpoint entry type. 
* @throws StorageException If failed to write checkpoint entry. */ public void writeCheckpointEntry(ByteBuffer entryBuf, CheckpointEntry cp, CheckpointEntryType type) throws StorageException { String fileName = checkpointFileName(cp, type); String tmpFileName = fileName + FilePageStoreManager.TMP_SUFFIX; try { try (FileIO io = ioFactory.create(Paths.get(cpDir.getAbsolutePath(), skipSync ? fileName : tmpFileName).toFile(), StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)) { io.writeFully(entryBuf); entryBuf.clear(); if (!skipSync) io.force(true); } if (!skipSync) Files.move(Paths.get(cpDir.getAbsolutePath(), tmpFileName), Paths.get(cpDir.getAbsolutePath(), fileName)); } catch (IOException e) { throw new StorageException("Failed to write checkpoint entry [ptr=" + cp.checkpointMark() + ", cpTs=" + cp.timestamp() + ", cpId=" + cp.checkpointId() + ", type=" + type + "]", e); } } /** {@inheritDoc} */ @Override public AtomicInteger writtenPagesCounter() { return writtenPagesCntr; } /** {@inheritDoc} */ @Override public AtomicInteger syncedPagesCounter() { return syncedPagesCntr; } /** {@inheritDoc} */ @Override public AtomicInteger evictedPagesCntr() { return evictedPagesCntr; } /** {@inheritDoc} */ @Override public int currentCheckpointPagesCount() { return currCheckpointPagesCnt; } /** * @param cpTs Checkpoint timestamp. * @param cpId Checkpoint ID. * @param type Checkpoint type. * @return Checkpoint file name. */ private static String checkpointFileName(long cpTs, UUID cpId, CheckpointEntryType type) { return cpTs + "-" + cpId + "-" + type + ".bin"; } /** * @param cp Checkpoint entry. * @param type Checkpoint type. * @return Checkpoint file name. */ public static String checkpointFileName(CheckpointEntry cp, CheckpointEntryType type) { return checkpointFileName(cp.timestamp(), cp.checkpointId(), type); } /** * Replace thread local with buffers. Thread local should provide direct buffer with one page in length. 
* * @param threadBuf new thread-local with buffers for the checkpoint threads. */ public void setThreadBuf(final ThreadLocal<ByteBuffer> threadBuf) { this.threadBuf = threadBuf; } /** * @param cpTs Checkpoint timestamp. * @param ptr Wal pointer of checkpoint. * @param cpId Checkpoint ID. * @param rec Checkpoint record. * @param type Checkpoint type. * * @return Checkpoint entry. */ public CheckpointEntry createCheckPointEntry( long cpTs, WALPointer ptr, UUID cpId, @Nullable CheckpointRecord rec, CheckpointEntryType type ) { assert cpTs > 0; assert ptr != null; assert cpId != null; assert type != null; Map<Integer, CacheState> cacheGrpStates = null; // Do not hold groups state in-memory if there is no space in the checkpoint history to prevent possible OOM. // In this case the actual group states will be readed from WAL by demand. if (rec != null && cpHistory.hasSpace()) cacheGrpStates = rec.cacheGroupStates(); return new CheckpointEntry(cpTs, ptr, cpId, cacheGrpStates); } /** * @return Checkpoint history. */ @Nullable public CheckpointHistory checkpointHistory() { return cpHistory; } /** * Adds given partition to checkpointer destroy queue. * * @param grpId Group ID. * @param partId Partition ID. */ public void schedulePartitionDestroy(int grpId, int partId) { Checkpointer cp = checkpointer; if (cp != null) cp.schedulePartitionDestroy(cctx.cache().cacheGroup(grpId), grpId, partId); } /** * Cancels or wait for partition destroy. * * @param grpId Group ID. * @param partId Partition ID. * @throws IgniteCheckedException If failed. */ public void cancelOrWaitPartitionDestroy(int grpId, int partId) throws IgniteCheckedException { Checkpointer cp = checkpointer; if (cp != null) cp.cancelOrWaitPartitionDestroy(grpId, partId); } /** * Timeout for checkpoint read lock acquisition. * * @return Timeout for checkpoint read lock acquisition in milliseconds. 
     */
    @Override public long checkpointReadLockTimeout() {
        return checkpointReadLockTimeout;
    }

    /**
     * Sets timeout for checkpoint read lock acquisition.
     *
     * @param val New timeout in milliseconds, non-positive value denotes infinite timeout.
     */
    @Override public void checkpointReadLockTimeout(long val) {
        checkpointReadLockTimeout = val;
    }

    /**
     * Partition destroy queue.
     */
    private static class PartitionDestroyQueue {
        /** Pending destroy requests keyed by (group ID, partition ID). */
        private final ConcurrentMap<T2<Integer, Integer>, PartitionDestroyRequest> pendingReqs =
            new ConcurrentHashMap<>();

        /**
         * Enqueues a destroy request for the given partition.
         *
         * @param grpCtx Group context (may be {@code null} during crash recovery).
         * @param grpId Group ID.
         * @param partId Partition ID to destroy.
         */
        private void addDestroyRequest(@Nullable CacheGroupContext grpCtx, int grpId, int partId) {
            PartitionDestroyRequest req = new PartitionDestroyRequest(grpId, partId);

            PartitionDestroyRequest old = pendingReqs.putIfAbsent(new T2<>(grpId, partId), req);

            // The message is evaluated only when the assertion fails, i.e. when
            // old != null && grpCtx != null, so the grpCtx dereference below is safe.
            assert old == null || grpCtx == null : "Must wait for old destroy request to finish before adding a new one "
                + "[grpId=" + grpId
                + ", grpName=" + grpCtx.cacheOrGroupName()
                + ", partId=" + partId + ']';
        }

        /**
         * Atomically removes the request and marks destroy as begun.
         *
         * @param destroyId Destroy ID.
         * @return Destroy request to complete if was not concurrently cancelled.
         */
        private PartitionDestroyRequest beginDestroy(T2<Integer, Integer> destroyId) {
            PartitionDestroyRequest rmvd = pendingReqs.remove(destroyId);

            return rmvd == null ? null : rmvd.beginDestroy() ? rmvd : null;
        }

        /**
         * Attempts to cancel a pending destroy request.
         *
         * @param grpId Group ID.
         * @param partId Partition ID.
         * @return Destroy request to wait for if destroy has begun.
         */
        private PartitionDestroyRequest cancelDestroy(int grpId, int partId) {
            PartitionDestroyRequest rmvd = pendingReqs.remove(new T2<>(grpId, partId));

            return rmvd == null ? null : !rmvd.cancel() ? rmvd : null;
        }
    }

    /**
     * Partition destroy request.
     */
    private static class PartitionDestroyRequest {
        /** Cache group ID. */
        private final int grpId;

        /** Partition ID. */
        private final int partId;

        /** Destroy cancelled flag. */
        private boolean cancelled;

        /** Destroy future. Not null if partition destroy has begun.
*/ private GridFutureAdapter<Void> destroyFut; /** * @param grpId Group ID. * @param partId Partition ID. */ private PartitionDestroyRequest(int grpId, int partId) { this.grpId = grpId; this.partId = partId; } /** * Cancels partition destroy request. * * @return {@code False} if this request needs to be waited for. */ private synchronized boolean cancel() { if (destroyFut != null) { assert !cancelled; return false; } cancelled = true; return true; } /** * Initiates partition destroy. * * @return {@code True} if destroy request should be executed, {@code false} otherwise. */ private synchronized boolean beginDestroy() { if (cancelled) { assert destroyFut == null; return false; } if (destroyFut != null) return false; destroyFut = new GridFutureAdapter<>(); return true; } /** * */ private synchronized void onDone(Throwable err) { assert destroyFut != null; destroyFut.onDone(err); } /** * */ private void waitCompleted() throws IgniteCheckedException { GridFutureAdapter<Void> fut; synchronized (this) { assert destroyFut != null; fut = destroyFut; } fut.get(); } /** {@inheritDoc} */ @Override public String toString() { return "PartitionDestroyRequest [grpId=" + grpId + ", partId=" + partId + ']'; } } /** * Checkpointer object is used for notification on checkpoint begin, predicate is {@link #scheduledCp}<code>.nextCpTs - now * > 0 </code>. Method {@link #wakeupForCheckpoint} uses notify, {@link #waitCheckpointEvent} uses wait */ @SuppressWarnings("NakedNotify") public class Checkpointer extends GridWorker { /** Checkpoint started log message format. */ private static final String CHECKPOINT_STARTED_LOG_FORMAT = "Checkpoint started [checkpointId=%s, startPtr=%s," + " checkpointBeforeLockTime=%dms, checkpointLockWait=%dms, checkpointListenersExecuteTime=%dms, " + "checkpointLockHoldTime=%dms, walCpRecordFsyncDuration=%dms, %s pages=%d, reason='%s']"; /** Temporary write buffer. */ private final ByteBuffer tmpWriteBuf; /** Next scheduled checkpoint progress. 
*/ private volatile CheckpointProgress scheduledCp; /** Current checkpoint. This field is updated only by checkpoint thread. */ @Nullable private volatile CheckpointProgress curCpProgress; /** Shutdown now. */ private volatile boolean shutdownNow; /** */ private long lastCpTs; /** Pause detector. */ private final LongJVMPauseDetector pauseDetector; /** Long JVM pause threshold. */ private final int longJvmPauseThreshold = getInteger(IGNITE_JVM_PAUSE_DETECTOR_THRESHOLD, DEFAULT_JVM_PAUSE_DETECTOR_THRESHOLD); /** * @param gridName Grid name. * @param name Thread name. * @param log Logger. */ protected Checkpointer(@Nullable String gridName, String name, IgniteLogger log) { super(gridName, name, log, cctx.kernalContext().workersRegistry()); scheduledCp = new CheckpointProgress(U.currentTimeMillis() + checkpointFreq); tmpWriteBuf = ByteBuffer.allocateDirect(pageSize()); tmpWriteBuf.order(ByteOrder.nativeOrder()); pauseDetector = cctx.kernalContext().longJvmPauseDetector(); } /** * @return Progress of current chekpoint or {@code null}, if isn't checkpoint at this moment. 
*/ public @Nullable CheckpointProgress currentProgress(){ return curCpProgress; } /** {@inheritDoc} */ @Override protected void body() { Throwable err = null; try { while (!isCancelled()) { waitCheckpointEvent(); if (skipCheckpointOnNodeStop && (isCancelled() || shutdownNow)) { if (log.isInfoEnabled()) log.warning("Skipping last checkpoint because node is stopping."); return; } GridFutureAdapter<Void> enableChangeApplied = GridCacheDatabaseSharedManager.this.enableChangeApplied; if (enableChangeApplied != null) { enableChangeApplied.onDone(); GridCacheDatabaseSharedManager.this.enableChangeApplied = null; } if (checkpointsEnabled) doCheckpoint(); else { synchronized (this) { scheduledCp.nextCpTs = U.currentTimeMillis() + checkpointFreq; } } } } catch (Throwable t) { err = t; scheduledCp.cpFinishFut.onDone(t); throw t; } finally { if (err == null && !(stopping && isCancelled)) err = new IllegalStateException("Thread is terminated unexpectedly: " + name()); if (err instanceof OutOfMemoryError) cctx.kernalContext().failure().process(new FailureContext(CRITICAL_ERROR, err)); else if (err != null) cctx.kernalContext().failure().process(new FailureContext(SYSTEM_WORKER_TERMINATION, err)); } // Final run after the cancellation. if (checkpointsEnabled && !shutdownNow) { try { doCheckpoint(); scheduledCp.cpFinishFut.onDone(new NodeStoppingException("Node is stopping.")); } catch (Throwable e) { scheduledCp.cpFinishFut.onDone(e); } } } /** * */ private CheckpointProgressSnapshot wakeupForCheckpoint(long delayFromNow, String reason) { CheckpointProgress sched = scheduledCp; long next = U.currentTimeMillis() + delayFromNow; if (sched.nextCpTs <= next) return new CheckpointProgressSnapshot(sched); CheckpointProgressSnapshot ret; synchronized (this) { sched = scheduledCp; if (sched.nextCpTs > next) { sched.reason = reason; sched.nextCpTs = next; } ret = new CheckpointProgressSnapshot(sched); notifyAll(); } return ret; } /** * @param snapshotOperation Snapshot operation. 
*/ public IgniteInternalFuture wakeupForSnapshotCreation(SnapshotOperation snapshotOperation) { GridFutureAdapter<Object> ret; synchronized (this) { scheduledCp.nextCpTs = U.currentTimeMillis(); scheduledCp.reason = "snapshot"; scheduledCp.nextSnapshot = true; scheduledCp.snapshotOperation = snapshotOperation; ret = scheduledCp.cpBeginFut; notifyAll(); } return ret; } /** * */ private void doCheckpoint() { Checkpoint chp = null; try { CheckpointMetricsTracker tracker = new CheckpointMetricsTracker(); try { chp = markCheckpointBegin(tracker); } catch (Exception e) { if (curCpProgress != null) curCpProgress.cpFinishFut.onDone(e); // In case of checkpoint initialization error node should be invalidated and stopped. cctx.kernalContext().failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e)); throw new IgniteException(e); // Re-throw as unchecked exception to force stopping checkpoint thread. } updateHeartbeat(); currCheckpointPagesCnt = chp.pagesSize; writtenPagesCntr = new AtomicInteger(); syncedPagesCntr = new AtomicInteger(); evictedPagesCntr = new AtomicInteger(); boolean success = false; int destroyedPartitionsCnt; try { if (chp.hasDelta()) { // Identity stores set. ConcurrentLinkedHashMap<PageStore, LongAdder> updStores = new ConcurrentLinkedHashMap<>(); CountDownFuture doneWriteFut = new CountDownFuture( asyncRunner == null ? 1 : chp.cpPages.collectionsSize()); tracker.onPagesWriteStart(); final int totalPagesToWriteCnt = chp.cpPages.size(); if (asyncRunner != null) { for (int i = 0; i < chp.cpPages.collectionsSize(); i++) { Runnable write = new WriteCheckpointPages( tracker, chp.cpPages.innerCollection(i), updStores, doneWriteFut, totalPagesToWriteCnt, new Runnable() { @Override public void run() { updateHeartbeat(); } }, asyncRunner ); try { asyncRunner.execute(write); } catch (RejectedExecutionException ignore) { // Run the task synchronously. updateHeartbeat(); write.run(); } } } else { // Single-threaded checkpoint. 
updateHeartbeat(); Runnable write = new WriteCheckpointPages( tracker, chp.cpPages, updStores, doneWriteFut, totalPagesToWriteCnt, new Runnable() { @Override public void run() { updateHeartbeat(); } }, null); write.run(); } updateHeartbeat(); // Wait and check for errors. doneWriteFut.get(); // Must re-check shutdown flag here because threads may have skipped some pages. // If so, we should not put finish checkpoint mark. if (shutdownNow) { chp.progress.cpFinishFut.onDone(new NodeStoppingException("Node is stopping.")); return; } tracker.onFsyncStart(); if (!skipSync) { for (Map.Entry<PageStore, LongAdder> updStoreEntry : updStores.entrySet()) { if (shutdownNow) { chp.progress.cpFinishFut.onDone(new NodeStoppingException("Node is stopping.")); return; } blockingSectionBegin(); try { updStoreEntry.getKey().sync(); } finally { blockingSectionEnd(); } syncedPagesCntr.addAndGet(updStoreEntry.getValue().intValue()); } } } else { tracker.onPagesWriteStart(); tracker.onFsyncStart(); } snapshotMgr.afterCheckpointPageWritten(); destroyedPartitionsCnt = destroyEvictedPartitions(); // Must mark successful checkpoint only if there are no exceptions or interrupts. success = true; } finally { if (success) markCheckpointEnd(chp); } tracker.onEnd(); if (chp.hasDelta() || destroyedPartitionsCnt > 0) { if (printCheckpointStats) { if (log.isInfoEnabled()) { String walSegsCoveredMsg = prepareWalSegsCoveredMsg(chp.walSegsCoveredRange); log.info(String.format("Checkpoint finished [cpId=%s, pages=%d, markPos=%s, " + "walSegmentsCleared=%d, walSegmentsCovered=%s, markDuration=%dms, pagesWrite=%dms, fsync=%dms, " + "total=%dms]", chp.cpEntry != null ? chp.cpEntry.checkpointId() : "", chp.pagesSize, chp.cpEntry != null ? 
chp.cpEntry.checkpointMark() : "", chp.walFilesDeleted, walSegsCoveredMsg, tracker.markDuration(), tracker.pagesWriteDuration(), tracker.fsyncDuration(), tracker.totalDuration())); } } } updateMetrics(chp, tracker); } catch (IgniteCheckedException e) { if (chp != null) chp.progress.cpFinishFut.onDone(e); cctx.kernalContext().failure().process(new FailureContext(FailureType.CRITICAL_ERROR, e)); } } /** * @param chp Checkpoint. * @param tracker Tracker. */ private void updateMetrics(Checkpoint chp, CheckpointMetricsTracker tracker) { if (persStoreMetrics.metricsEnabled()) { persStoreMetrics.onCheckpoint( tracker.lockWaitDuration(), tracker.markDuration(), tracker.pagesWriteDuration(), tracker.fsyncDuration(), tracker.totalDuration(), chp.pagesSize, tracker.dataPagesWritten(), tracker.cowPagesWritten(), forAllPageStores(PageStore::size), forAllPageStores(PageStore::getSparseSize)); } } /** */ private String prepareWalSegsCoveredMsg(IgniteBiTuple<Long, Long> walRange) { String res; long startIdx = walRange.get1(); long endIdx = walRange.get2(); if (endIdx < 0 || endIdx < startIdx) res = "[]"; else if (endIdx == startIdx) res = "[" + endIdx + "]"; else res = "[" + startIdx + " - " + endIdx + "]"; return res; } /** * Processes all evicted partitions scheduled for destroy. * * @throws IgniteCheckedException If failed. * * @return The number of destroyed partition files. 
         */
        private int destroyEvictedPartitions() throws IgniteCheckedException {
            PartitionDestroyQueue destroyQueue = curCpProgress.destroyQueue;

            if (destroyQueue.pendingReqs.isEmpty())
                return 0;

            List<PartitionDestroyRequest> reqs = null;

            for (final PartitionDestroyRequest req : destroyQueue.pendingReqs.values()) {
                // Skip requests that were cancelled concurrently.
                if (!req.beginDestroy())
                    continue;

                final int grpId = req.grpId;
                final int partId = req.partId;

                CacheGroupContext grp = cctx.cache().cacheGroup(grpId);

                assert grp != null
                    : "Cache group is not initialized [grpId=" + grpId + "]";
                assert grp.offheap() instanceof GridCacheOffheapManager
                    : "Destroying partition files when persistence is off " + grp.offheap();

                final GridCacheOffheapManager offheap = (GridCacheOffheapManager) grp.offheap();

                Runnable destroyPartTask = () -> {
                    try {
                        offheap.destroyPartitionStore(grpId, partId);

                        req.onDone(null);

                        if (log.isDebugEnabled())
                            log.debug("Partition file has destroyed [grpId=" + grpId + ", partId=" + partId + "]");
                    }
                    catch (Exception e) {
                        req.onDone(new IgniteCheckedException(
                            "Partition file destroy has failed [grpId=" + grpId + ", partId=" + partId + "]", e));
                    }
                };

                // Offload file removal to the async pool when it is configured;
                // fall back to in-place execution if the pool rejects the task.
                if (asyncRunner != null) {
                    try {
                        asyncRunner.execute(destroyPartTask);
                    }
                    catch (RejectedExecutionException ignore) {
                        // Run the task synchronously.
                        destroyPartTask.run();
                    }
                }
                else
                    destroyPartTask.run();

                if (reqs == null)
                    reqs = new ArrayList<>();

                reqs.add(req);
            }

            // Block until every destroy started above completes (or fails with an exception).
            if (reqs != null)
                for (PartitionDestroyRequest req : reqs)
                    req.waitCompleted();

            destroyQueue.pendingReqs.clear();

            return reqs != null ? reqs.size() : 0;
        }

        /**
         * @param grpCtx Group context. Can be {@code null} in case of crash recovery.
         * @param grpId Group ID.
         * @param partId Partition ID.
*/ private void schedulePartitionDestroy(@Nullable CacheGroupContext grpCtx, int grpId, int partId) { synchronized (this) { scheduledCp.destroyQueue.addDestroyRequest(grpCtx, grpId, partId); } if (log.isDebugEnabled()) log.debug("Partition file has been scheduled to destroy [grpId=" + grpId + ", partId=" + partId + "]"); if (grpCtx != null) wakeupForCheckpoint(PARTITION_DESTROY_CHECKPOINT_TIMEOUT, "partition destroy"); } /** * @param grpId Group ID. * @param partId Partition ID. */ private void cancelOrWaitPartitionDestroy(int grpId, int partId) throws IgniteCheckedException { PartitionDestroyRequest req; synchronized (this) { req = scheduledCp.destroyQueue.cancelDestroy(grpId, partId); } if (req != null) req.waitCompleted(); CheckpointProgress cur; synchronized (this) { cur = curCpProgress; if (cur != null) req = cur.destroyQueue.cancelDestroy(grpId, partId); } if (req != null) req.waitCompleted(); if (req != null && log.isDebugEnabled()) log.debug("Partition file destroy has cancelled [grpId=" + grpId + ", partId=" + partId + "]"); } /** * */ private void waitCheckpointEvent() { boolean cancel = false; try { synchronized (this) { long remaining; while ((remaining = scheduledCp.nextCpTs - U.currentTimeMillis()) > 0 && !isCancelled()) { blockingSectionBegin(); try { wait(remaining); } finally { blockingSectionEnd(); } } } } catch (InterruptedException ignored) { Thread.currentThread().interrupt(); cancel = true; } if (cancel) isCancelled = true; } /** * */ @SuppressWarnings("TooBroadScope") private Checkpoint markCheckpointBegin(CheckpointMetricsTracker tracker) throws IgniteCheckedException { long cpTs = updateLastCheckpointTime(); CheckpointProgress curr = updateCurrentCheckpointProgress(); CheckpointRecord cpRec = new CheckpointRecord(memoryRecoveryRecordPtr); memoryRecoveryRecordPtr = null; CheckpointEntry cp = null; IgniteFuture snapFut = null; IgniteBiTuple<Collection<GridMultiCollectionWrapper<FullPageId>>, Integer> cpPagesTuple; boolean hasPages, 
hasPartitionsToDestroy; DbCheckpointContextImpl ctx0 = new DbCheckpointContextImpl(curr, new PartitionAllocationMap()); internalReadLock(); try { for (DbCheckpointListener lsnr : lsnrs) lsnr.beforeCheckpointBegin(ctx0); ctx0.awaitPendingTasksFinished(); } finally { internalReadUnlock(); } tracker.onLockWaitStart(); checkpointLock.writeLock().lock(); try { assert curCpProgress == curr : "Concurrent checkpoint begin should not be happened"; tracker.onMarkStart(); // Listeners must be invoked before we write checkpoint record to WAL. for (DbCheckpointListener lsnr : lsnrs) lsnr.onMarkCheckpointBegin(ctx0); ctx0.awaitPendingTasksFinished(); tracker.onListenersExecuteEnd(); if (curr.nextSnapshot) snapFut = snapshotMgr.onMarkCheckPointBegin(curr.snapshotOperation, ctx0.partitionStatMap()); fillCacheGroupState(cpRec); cpPagesTuple = beginAllCheckpoints(); hasPages = hasPageForWrite(cpPagesTuple.get1()); hasPartitionsToDestroy = !curr.destroyQueue.pendingReqs.isEmpty(); WALPointer cpPtr = null; if (hasPages || curr.nextSnapshot || hasPartitionsToDestroy) { // No page updates for this checkpoint are allowed from now on. 
cpPtr = cctx.wal().log(cpRec); if (cpPtr == null) cpPtr = CheckpointStatus.NULL_PTR; } if (hasPages || hasPartitionsToDestroy) { cp = prepareCheckpointEntry( tmpWriteBuf, cpTs, cpRec.checkpointId(), cpPtr, cpRec, CheckpointEntryType.START); cpHistory.addCheckpoint(cp); } } finally { checkpointLock.writeLock().unlock(); tracker.onLockRelease(); } DbCheckpointListener.Context ctx = createOnCheckpointBeginContext(ctx0, hasPages); curr.cpBeginFut.onDone(); for (DbCheckpointListener lsnr : lsnrs) lsnr.onCheckpointBegin(ctx); if (snapFut != null) { try { snapFut.get(); } catch (IgniteException e) { U.error(log, "Failed to wait for snapshot operation initialization: " + curr.snapshotOperation, e); } } if (hasPages || hasPartitionsToDestroy) { assert cp != null; assert cp.checkpointMark() != null; tracker.onWalCpRecordFsyncStart(); // Sync log outside the checkpoint write lock. cctx.wal().flush(cp.checkpointMark(), true); tracker.onWalCpRecordFsyncEnd(); writeCheckpointEntry(tmpWriteBuf, cp, CheckpointEntryType.START); GridMultiCollectionWrapper<FullPageId> cpPages = splitAndSortCpPagesIfNeeded( cpPagesTuple, persistenceCfg.getCheckpointThreads()); if (printCheckpointStats && log.isInfoEnabled()) { long possibleJvmPauseDur = possibleLongJvmPauseDuration(tracker); log.info( String.format( CHECKPOINT_STARTED_LOG_FORMAT, cpRec.checkpointId(), cp.checkpointMark(), tracker.beforeLockDuration(), tracker.lockWaitDuration(), tracker.listenersExecuteDuration(), tracker.lockHoldDuration(), tracker.walCpRecordFsyncDuration(), possibleJvmPauseDur > 0 ? 
                            "possibleJvmPauseDuration=" + possibleJvmPauseDur + "ms," : "",
                            cpPages.size(),
                            curr.reason
                        )
                    );
                }

                return new Checkpoint(cp, cpPages, curr);
            }
            else {
                // No dirty pages and no partitions to destroy: no checkpoint entry is written.
                if (curr.nextSnapshot)
                    cctx.wal().flush(null, true);

                if (printCheckpointStats) {
                    if (log.isInfoEnabled())
                        LT.info(log, String.format("Skipping checkpoint (no pages were modified) [" +
                            "checkpointBeforeLockTime=%dms, checkpointLockWait=%dms, " +
                            "checkpointListenersExecuteTime=%dms, checkpointLockHoldTime=%dms, reason='%s']",
                            tracker.beforeLockDuration(),
                            tracker.lockWaitDuration(),
                            tracker.listenersExecuteDuration(),
                            tracker.lockHoldDuration(),
                            curr.reason));
                }

                return new Checkpoint(null, new GridMultiCollectionWrapper<>(new Collection[0]), curr);
            }
        }

        /**
         * @param tracker Checkpoint metrics tracker.
         * @return Duration of possible JVM pause, if it was detected, or {@code -1} otherwise.
         */
        private long possibleLongJvmPauseDuration(CheckpointMetricsTracker tracker) {
            if (LongJVMPauseDetector.enabled()) {
                if (tracker.lockWaitDuration() + tracker.lockHoldDuration() > longJvmPauseThreshold) {
                    long now = System.currentTimeMillis();

                    // We must get last wake up time before search possible pause in events map.
                    long wakeUpTime = pauseDetector.getLastWakeUpTime();

                    IgniteBiTuple<Long, Long> lastLongPause = pauseDetector.getLastLongPause();

                    if (lastLongPause != null && tracker.checkpointStartTime() < lastLongPause.get1())
                        return lastLongPause.get2();

                    if (now - wakeUpTime > longJvmPauseThreshold)
                        return now - wakeUpTime;
                }
            }

            return -1L;
        }

        /**
         * Release the internal checkpoint read lock taken by {@code internalReadLock()}.
         * (Original javadoc on this pair was swapped with the method below.)
         */
        private void internalReadUnlock() {
            checkpointLock.readLock().unlock();

            if (ASSERTION_ENABLED)
                CHECKPOINT_LOCK_HOLD_COUNT.set(CHECKPOINT_LOCK_HOLD_COUNT.get() - 1);
        }

        /**
         * Take the checkpoint read lock for internal use; tracks the per-thread
         * hold count when assertions are enabled.
         */
        private void internalReadLock() {
            checkpointLock.readLock().lock();

            if (ASSERTION_ENABLED)
                CHECKPOINT_LOCK_HOLD_COUNT.set(CHECKPOINT_LOCK_HOLD_COUNT.get() + 1);
        }

        /**
         * Fill cache group state in checkpoint record.
         *
         * @param cpRec Checkpoint record for filling.
         * @throws IgniteCheckedException if fail.
         */
        private void fillCacheGroupState(CheckpointRecord cpRec) throws IgniteCheckedException {
            // Collector future is only needed when states are gathered in parallel by the async pool.
            GridCompoundFuture grpHandleFut = asyncRunner == null ? null : new GridCompoundFuture();

            for (CacheGroupContext grp : cctx.cache().cacheGroups()) {
                // Local and WAL-disabled groups are not recorded in the checkpoint record.
                if (grp.isLocal() || !grp.walEnabled())
                    continue;

                Runnable r = () -> {
                    ArrayList<GridDhtLocalPartition> parts = new ArrayList<>(grp.topology().localPartitions().size());

                    for (GridDhtLocalPartition part : grp.topology().currentLocalPartitions())
                        parts.add(part);

                    CacheState state = new CacheState(parts.size());

                    // Record id, size, update counter and state ordinal of every local partition.
                    for (GridDhtLocalPartition part : parts) {
                        state.addPartitionState(
                            part.id(),
                            part.dataStore().fullSize(),
                            part.updateCounter(),
                            (byte)part.state().ordinal()
                        );
                    }

                    // The checkpoint record is shared between collector tasks - guard the mutation.
                    synchronized (cpRec) {
                        cpRec.addCacheGroupState(grp.groupId(), state);
                    }
                };

                if (asyncRunner == null)
                    r.run();
                else
                    try {
                        GridFutureAdapter<?> res = new GridFutureAdapter<>();

                        asyncRunner.execute(U.wrapIgniteFuture(r, res));

                        grpHandleFut.add(res);
                    }
                    catch (RejectedExecutionException e) {
                        assert false : "Task should never be rejected by async runner";

                        throw new IgniteException(e); //to protect from disabled asserts and call to failure handler
                    }
            }

            // Wait for all async collector tasks before the record can be written to the WAL.
            if (grpHandleFut != null) {
                grpHandleFut.markInitialized();

                grpHandleFut.get();
            }
        }

        /**
         * Produces a timestamp for the new checkpoint that is strictly greater than the previous one.
         *
         * @return Last checkpoint time.
         */
        private long updateLastCheckpointTime() {
            long cpTs = System.currentTimeMillis();

            // This can happen in an unlikely event of two checkpoints happening
            // within a currentTimeMillis() granularity window.
            if (cpTs == lastCpTs)
                cpTs++;

            lastCpTs = cpTs;

            return cpTs;
        }

        /**
         * Update current checkpoint progress by scheduled.
         *
         * @return Current checkpoint progress.
         */
        @NotNull private GridCacheDatabaseSharedManager.CheckpointProgress updateCurrentCheckpointProgress() {
            final CheckpointProgress curr;

            synchronized (this) {
                curr = scheduledCp;

                curr.started = true;

                // "timeout" means nobody requested this checkpoint explicitly - it fired on schedule.
                if (curr.reason == null)
                    curr.reason = "timeout";

                // It is important that we assign a new progress object before checkpoint mark in page memory.
                scheduledCp = new CheckpointProgress(U.currentTimeMillis() + checkpointFreq);

                curCpProgress = curr;
            }

            return curr;
        }

        /**
         * Wraps the given checkpoint context so that {@code hasPages()} returns the precomputed value
         * instead of throwing; all other calls are delegated unchanged.
         */
        private DbCheckpointListener.Context createOnCheckpointBeginContext(
            DbCheckpointListener.Context delegate,
            boolean hasPages
        ) {
            return new DbCheckpointListener.Context() {
                /** {@inheritDoc} */
                @Override public boolean nextSnapshot() {
                    return delegate.nextSnapshot();
                }

                /** {@inheritDoc} */
                @Override public PartitionAllocationMap partitionStatMap() {
                    return delegate.partitionStatMap();
                }

                /** {@inheritDoc} */
                @Override public boolean needToSnapshot(String cacheOrGrpName) {
                    return delegate.needToSnapshot(cacheOrGrpName);
                }

                /** {@inheritDoc} */
                @Override public @Nullable Executor executor() {
                    return delegate.executor();
                }

                /** {@inheritDoc} */
                @Override public boolean hasPages() {
                    return hasPages;
                }
            };
        }

        /**
         * Check that at least one collection is not empty.
         *
         * @param cpPagesCollWrapper Collection of {@link GridMultiCollectionWrapper} checkpoint pages.
         */
        private boolean hasPageForWrite(Collection<GridMultiCollectionWrapper<FullPageId>> cpPagesCollWrapper) {
            boolean hasPages = false;

            for (Collection c : cpPagesCollWrapper)
                if (!c.isEmpty()) {
                    hasPages = true;

                    break;
                }

            return hasPages;
        }

        /**
         * @return tuple with collections of FullPageIds obtained from each PageMemory and overall number of dirty
         * pages.
         */
        private IgniteBiTuple<Collection<GridMultiCollectionWrapper<FullPageId>>, Integer> beginAllCheckpoints() {
            Collection<GridMultiCollectionWrapper<FullPageId>> res = new ArrayList(dataRegions().size());

            int pagesNum = 0;

            // Collect the dirty page set of every persistence-enabled data region.
            for (DataRegion memPlc : dataRegions()) {
                if (!memPlc.config().isPersistenceEnabled())
                    continue;

                GridMultiCollectionWrapper<FullPageId> nextCpPagesCol = ((PageMemoryEx)memPlc.pageMemory()).beginCheckpoint();

                pagesNum += nextCpPagesCol.size();

                res.add(nextCpPagesCol);
            }

            currCheckpointPagesCnt = pagesNum;

            return new IgniteBiTuple<>(res, pagesNum);
        }

        /**
         * Finishes the checkpoint: releases page-memory checkpoint state, writes the END marker
         * (for non-empty checkpoints) and prunes outdated checkpoint history.
         *
         * @param chp Checkpoint snapshot.
         */
        private void markCheckpointEnd(Checkpoint chp) throws IgniteCheckedException {
            synchronized (this) {
                writtenPagesCntr = null;
                syncedPagesCntr = null;
                evictedPagesCntr = null;

                for (DataRegion memPlc : dataRegions()) {
                    if (!memPlc.config().isPersistenceEnabled())
                        continue;

                    ((PageMemoryEx)memPlc.pageMemory()).finishCheckpoint();
                }

                currCheckpointPagesCnt = 0;
            }

            // END marker is written only if this checkpoint actually contained dirty pages.
            if (chp.hasDelta()) {
                CheckpointEntry cp = prepareCheckpointEntry(
                    tmpWriteBuf,
                    chp.cpEntry.timestamp(),
                    chp.cpEntry.checkpointId(),
                    chp.cpEntry.checkpointMark(),
                    null,
                    CheckpointEntryType.END);

                writeCheckpointEntry(tmpWriteBuf, cp, CheckpointEntryType.END);

                cctx.wal().notchLastCheckpointPtr(chp.cpEntry.checkpointMark());
            }

            List<CheckpointEntry> removedFromHistory = cpHistory.onCheckpointFinished(chp, truncateWalOnCpFinish);

            for (CheckpointEntry cp : removedFromHistory)
                removeCheckpointFiles(cp);

            if (chp.progress != null)
                chp.progress.cpFinishFut.onDone();
        }

        /** {@inheritDoc} */
        @Override public void cancel() {
            if (log.isDebugEnabled())
                log.debug("Cancelling grid runnable: " + this);

            // Do not interrupt runner thread.
            isCancelled = true;

            synchronized (this) {
                notifyAll();
            }
        }

        /**
         * Requests the checkpointer to stop writing pages as soon as possible and cancels it.
         */
        public void shutdownNow() {
            shutdownNow = true;

            if (!isCancelled)
                cancel();
        }

        /**
         * Context with information about current snapshots.
         */
        private class DbCheckpointContextImpl implements DbCheckpointListener.Context {
            /** Current checkpoint progress. */
            private final CheckpointProgress curr;

            /** Partition map. */
            private final PartitionAllocationMap map;

            /** Pending tasks from executor. */
            private GridCompoundFuture pendingTaskFuture;

            /**
             * @param curr Current checkpoint progress.
             * @param map Partition map.
             */
            private DbCheckpointContextImpl(CheckpointProgress curr, PartitionAllocationMap map) {
                this.curr = curr;
                this.map = map;
                // Pending-task tracking is only needed when listeners can offload work to the async pool.
                this.pendingTaskFuture = asyncRunner == null ? null : new GridCompoundFuture();
            }

            /** {@inheritDoc} */
            @Override public boolean nextSnapshot() {
                return curr.nextSnapshot;
            }

            /** {@inheritDoc} */
            @Override public PartitionAllocationMap partitionStatMap() {
                return map;
            }

            /** {@inheritDoc} */
            @Override public boolean needToSnapshot(String cacheOrGrpName) {
                return curr.snapshotOperation.cacheGroupIds().contains(CU.cacheId(cacheOrGrpName));
            }

            /** {@inheritDoc} */
            @Override public Executor executor() {
                // Each submitted task is tracked so awaitPendingTasksFinished() can join them.
                return asyncRunner == null ? null : cmd -> {
                    try {
                        GridFutureAdapter<?> res = new GridFutureAdapter<>();

                        asyncRunner.execute(U.wrapIgniteFuture(cmd, res));

                        pendingTaskFuture.add(res);
                    }
                    catch (RejectedExecutionException e) {
                        assert false : "A task should never be rejected by async runner";
                    }
                };
            }

            /** {@inheritDoc} */
            @Override public boolean hasPages() {
                // The dirty page set is not known until the begin phase completes;
                // createOnCheckpointBeginContext() supplies the real value later.
                throw new IllegalStateException(
                    "Property is unknown at this moment. You should use onCheckpointBegin() method."
                );
            }

            /**
             * Await all async tasks from executor was finished.
             *
             * @throws IgniteCheckedException if fail.
             */
            public void awaitPendingTasksFinished() throws IgniteCheckedException {
                GridCompoundFuture pendingFut = this.pendingTaskFuture;

                // Swap in a fresh future so subsequent listener callbacks accumulate separately.
                this.pendingTaskFuture = new GridCompoundFuture();

                if (pendingFut != null) {
                    pendingFut.markInitialized();

                    pendingFut.get();
                }
            }
        }
    }

    /**
     * Reorders list of checkpoint pages and splits them into needed number of sublists according to
     * {@link DataStorageConfiguration#getCheckpointThreads()} and
     * {@link DataStorageConfiguration#getCheckpointWriteOrder()}.
     *
     * @param cpPagesTuple Checkpoint pages tuple.
     * @param threads Checkpoint runner threads.
     */
    private GridMultiCollectionWrapper<FullPageId> splitAndSortCpPagesIfNeeded(
        IgniteBiTuple<Collection<GridMultiCollectionWrapper<FullPageId>>, Integer> cpPagesTuple,
        int threads
    ) throws IgniteCheckedException {
        FullPageId[] pagesArr = new FullPageId[cpPagesTuple.get2()];

        int realPagesArrSize = 0;

        // Flatten all per-region page collections into a single array.
        for (GridMultiCollectionWrapper<FullPageId> colWrapper : cpPagesTuple.get1()) {
            for (int i = 0; i < colWrapper.collectionsSize(); i++)
                for (FullPageId page : colWrapper.innerCollection(i)) {
                    if (realPagesArrSize == pagesArr.length)
                        throw new AssertionError("Incorrect estimated dirty pages number: " + pagesArr.length);

                    pagesArr[realPagesArrSize++] = page;
                }
        }

        FullPageId fakeMaxFullPageId = new FullPageId(Long.MAX_VALUE, Integer.MAX_VALUE);

        // Some pages may have been replaced, need to fill end of array with fake ones to prevent NPE during sort.
        for (int i = realPagesArrSize; i < pagesArr.length; i++)
            pagesArr[i] = fakeMaxFullPageId;

        if (persistenceCfg.getCheckpointWriteOrder() == CheckpointWriteOrder.SEQUENTIAL) {
            // Order by (groupId, effectivePageId) so page stores are written mostly sequentially.
            Comparator<FullPageId> cmp = new Comparator<FullPageId>() {
                @Override public int compare(FullPageId o1, FullPageId o2) {
                    int cmp = Long.compare(o1.groupId(), o2.groupId());

                    if (cmp != 0)
                        return cmp;

                    return Long.compare(PageIdUtils.effectivePageId(o1.pageId()),
                        PageIdUtils.effectivePageId(o2.pageId()));
                }
            };

            if (pagesArr.length >= parallelSortThreshold)
                parallelSortInIsolatedPool(pagesArr, cmp);
            else
                Arrays.sort(pagesArr, cmp);
        }

        int pagesSubLists = threads == 1 ? 1 : threads * 4;
        // Splitting pages to (threads * 4) subtasks. If any thread will be faster, it will help slower threads.

        Collection[] pagesSubListArr = new Collection[pagesSubLists];

        for (int i = 0; i < pagesSubLists; i++) {
            int from = (int)((long)realPagesArrSize * i / pagesSubLists);

            int to = (int)((long)realPagesArrSize * (i + 1) / pagesSubLists);

            // Views over the shared array avoid copying the (potentially huge) page id set.
            pagesSubListArr[i] = new GridReadOnlyArrayView(pagesArr, from, to);
        }

        return new GridMultiCollectionWrapper<FullPageId>(pagesSubListArr);
    }

    /**
     * Performs parallel sort in isolated fork join pool.
     *
     * @param pagesArr Pages array.
     * @param cmp Cmp.
     */
    private static void parallelSortInIsolatedPool(
        FullPageId[] pagesArr,
        Comparator<FullPageId> cmp
    ) throws IgniteCheckedException {
        ForkJoinPool.ForkJoinWorkerThreadFactory factory = new ForkJoinPool.ForkJoinWorkerThreadFactory() {
            @Override public ForkJoinWorkerThread newThread(ForkJoinPool pool) {
                ForkJoinWorkerThread worker = ForkJoinPool.defaultForkJoinWorkerThreadFactory.newThread(pool);

                worker.setName("checkpoint-pages-sorter-" + worker.getPoolIndex());

                return worker;
            }
        };

        // A dedicated pool keeps Arrays.parallelSort() from competing with tasks in the common pool.
        ForkJoinPool forkJoinPool = new ForkJoinPool(PARALLEL_SORT_THREADS + 1, factory, null, false);

        ForkJoinTask sortTask = forkJoinPool.submit(() -> Arrays.parallelSort(pagesArr, cmp));

        try {
            sortTask.get();
        }
        catch (InterruptedException e) {
            throw new IgniteInterruptedCheckedException(e);
        }
        catch (ExecutionException e) {
            throw new IgniteCheckedException("Failed to perform pages array parallel sort", e.getCause());
        }

        forkJoinPool.shutdown();
    }

    /** Pages write task */
    private class WriteCheckpointPages implements Runnable {
        /** Checkpoint metrics tracker. */
        private final CheckpointMetricsTracker tracker;

        /** Collection of page IDs to write under this task. Overall pages to write may be greater than this collection */
        private final Collection<FullPageId> writePageIds;

        /** Per-store counters of pages written, updated as each page hits its store. */
        private final ConcurrentLinkedHashMap<PageStore, LongAdder> updStores;

        /** Completion future for this write task. */
        private final CountDownFuture doneFut;

        /** Total pages to write, counter may be greater than {@link #writePageIds} size */
        private final int totalPagesToWrite;

        /** Action to be performed before every page write. */
        private final Runnable beforePageWrite;

        /** If any pages were skipped, new task with remaining pages will be submitted here. */
        private final ExecutorService retryWriteExecutor;

        /**
         * Creates task for write pages
         *
         * @param tracker Checkpoint metrics tracker.
         * @param writePageIds Collection of page IDs to write.
         * @param updStores Per-store written-pages counters.
         * @param doneFut Completion future.
         * @param totalPagesToWrite total pages to be written under this checkpoint
         * @param beforePageWrite Action to be performed before every page write.
         * @param retryWriteExecutor Retry write executor.
         */
        private WriteCheckpointPages(
            final CheckpointMetricsTracker tracker,
            final Collection<FullPageId> writePageIds,
            final ConcurrentLinkedHashMap<PageStore, LongAdder> updStores,
            final CountDownFuture doneFut,
            final int totalPagesToWrite,
            final Runnable beforePageWrite,
            final ExecutorService retryWriteExecutor
        ) {
            this.tracker = tracker;
            this.writePageIds = writePageIds;
            this.updStores = updStores;
            this.doneFut = doneFut;
            this.totalPagesToWrite = totalPagesToWrite;
            this.beforePageWrite = beforePageWrite;
            this.retryWriteExecutor = retryWriteExecutor;
        }

        /** {@inheritDoc} */
        @Override public void run() {
            snapshotMgr.beforeCheckpointPageWritten();

            Collection<FullPageId> writePageIds = this.writePageIds;

            try {
                List<FullPageId> pagesToRetry = writePages(writePageIds);

                if (pagesToRetry.isEmpty())
                    doneFut.onDone((Void)null);
                else {
                    if (retryWriteExecutor == null) {
                        // No retry executor: loop in-place until every skipped page is written.
                        while (!pagesToRetry.isEmpty())
                            pagesToRetry = writePages(pagesToRetry);

                        doneFut.onDone((Void)null);
                    }
                    else {
                        // Submit current retry pages to the end of the queue to avoid starvation.
                        WriteCheckpointPages retryWritesTask = new WriteCheckpointPages(
                            tracker,
                            pagesToRetry,
                            updStores,
                            doneFut,
                            totalPagesToWrite,
                            beforePageWrite,
                            retryWriteExecutor);

                        retryWriteExecutor.submit(retryWritesTask);
                    }
                }
            }
            catch (Throwable e) {
                doneFut.onDone(e);
            }
        }

        /**
         * Writes the given pages to their page stores, collecting pages that could not be copied now.
         *
         * @param writePageIds Collections of pages to write.
         * @return pagesToRetry Pages which should be retried.
         */
        private List<FullPageId> writePages(Collection<FullPageId> writePageIds) throws IgniteCheckedException {
            ByteBuffer tmpWriteBuf = threadBuf.get();

            List<FullPageId> pagesToRetry = new ArrayList<>();

            for (FullPageId fullId : writePageIds) {
                // Stop writing immediately once node shutdown was requested.
                if (checkpointer.shutdownNow)
                    break;

                tmpWriteBuf.rewind();

                beforePageWrite.run();

                snapshotMgr.beforePageWrite(fullId);

                int grpId = fullId.groupId();

                PageMemoryEx pageMem;

                // TODO IGNITE-7792 add generic mapping.
                if (grpId == MetaStorage.METASTORAGE_CACHE_ID)
                    pageMem = (PageMemoryEx)metaStorage.pageMemory();
                else if (grpId == TxLog.TX_LOG_CACHE_ID)
                    pageMem = (PageMemoryEx)dataRegion(TxLog.TX_LOG_CACHE_NAME).pageMemory();
                else {
                    CacheGroupContext grp = context().cache().cacheGroup(grpId);

                    DataRegion region = grp != null ? grp.dataRegion() : null;

                    // Group may have been destroyed concurrently - nothing left to write then.
                    if (region == null || !region.config().isPersistenceEnabled())
                        continue;

                    pageMem = (PageMemoryEx)region.pageMemory();
                }

                Integer tag = pageMem.getForCheckpoint(
                    fullId, tmpWriteBuf, persStoreMetrics.metricsEnabled() ? tracker : null);

                if (tag != null) {
                    // Page could not be copied right now (TRY_AGAIN_TAG) - queue it for a later retry.
                    if (tag == PageMemoryImpl.TRY_AGAIN_TAG) {
                        pagesToRetry.add(fullId);

                        continue;
                    }

                    assert PageIO.getType(tmpWriteBuf) != 0 : "Invalid state. Type is 0! pageId = " + U.hexLong(fullId.pageId());
                    assert PageIO.getVersion(tmpWriteBuf) != 0 : "Invalid state. Version is 0! pageId = " + U.hexLong(fullId.pageId());

                    tmpWriteBuf.rewind();

                    if (persStoreMetrics.metricsEnabled()) {
                        int pageType = PageIO.getType(tmpWriteBuf);

                        if (PageIO.isDataPageType(pageType))
                            tracker.onDataPageWritten();
                    }

                    writtenPagesCntr.incrementAndGet();

                    PageStore store = storeMgr.writeInternal(grpId, fullId.pageId(), tmpWriteBuf, tag, true);

                    updStores.computeIfAbsent(store, k -> new LongAdder()).increment();
                }
            }

            return pagesToRetry;
        }
    }

    /**
     * Snapshot of a single checkpoint run: its entry, the dirty pages to write and the progress handle.
     */
    public static class Checkpoint {
        /** Checkpoint entry. */
        @Nullable private final CheckpointEntry cpEntry;

        /** Checkpoint pages. */
        private final GridMultiCollectionWrapper<FullPageId> cpPages;

        /** Checkpoint progress status. */
        private final CheckpointProgress progress;

        /** Number of deleted WAL files. */
        private int walFilesDeleted;

        /** WAL segments fully covered by this checkpoint. */
        private IgniteBiTuple<Long, Long> walSegsCoveredRange;

        /** Number of checkpoint pages (cached from {@link #cpPages}). */
        private final int pagesSize;

        /**
         * @param cpEntry Checkpoint entry.
         * @param cpPages Pages to write to the page store.
         * @param progress Checkpoint progress status.
         */
        private Checkpoint(
            @Nullable CheckpointEntry cpEntry,
            @NotNull GridMultiCollectionWrapper<FullPageId> cpPages,
            CheckpointProgress progress
        ) {
            this.cpEntry = cpEntry;
            this.cpPages = cpPages;
            this.progress = progress;

            pagesSize = cpPages.size();
        }

        /**
         * @return {@code true} if this checkpoint contains at least one dirty page.
         */
        public boolean hasDelta() {
            return pagesSize != 0;
        }

        /**
         * @param walFilesDeleted Wal files deleted.
         */
        public void walFilesDeleted(int walFilesDeleted) {
            this.walFilesDeleted = walFilesDeleted;
        }

        /**
         * @param walSegsCoveredRange WAL segments fully covered by this checkpoint.
         */
        public void walSegsCoveredRange(final IgniteBiTuple<Long, Long> walSegsCoveredRange) {
            this.walSegsCoveredRange = walSegsCoveredRange;
        }
    }

    /**
     * Start/end identity and WAL pointers of a checkpoint; used to decide whether recovery is needed.
     */
    public static class CheckpointStatus {
        /** Null checkpoint UUID. */
        private static final UUID NULL_UUID = new UUID(0L, 0L);

        /** Null WAL pointer. */
        public static final WALPointer NULL_PTR = new FileWALPointer(0, 0, 0);

        /** Checkpoint start timestamp. */
        private long cpStartTs;

        /** ID of the started checkpoint. */
        private UUID cpStartId;

        /** WAL pointer to the checkpoint start marker. */
        @GridToStringInclude
        private WALPointer startPtr;

        /** ID of the finished checkpoint. */
        private UUID cpEndId;

        /** WAL pointer to the checkpoint end marker. */
        @GridToStringInclude
        private WALPointer endPtr;

        /**
         * @param cpStartId Checkpoint start ID.
         * @param startPtr Checkpoint start pointer.
         * @param cpEndId Checkpoint end ID.
         * @param endPtr Checkpoint end pointer.
         */
        private CheckpointStatus(long cpStartTs, UUID cpStartId, WALPointer startPtr, UUID cpEndId, WALPointer endPtr) {
            this.cpStartTs = cpStartTs;
            this.cpStartId = cpStartId;
            this.startPtr = startPtr;
            this.cpEndId = cpEndId;
            this.endPtr = endPtr;
        }

        /**
         * @return {@code True} if need perform binary memory recovery. Only records {@link PageDeltaRecord}
         * and {@link PageSnapshot} needs to be applyed from {@link #cpStartId}.
         */
        public boolean needRestoreMemory() {
            // Memory must be restored when the last checkpoint started but never finished.
            return !F.eq(cpStartId, cpEndId) && !F.eq(NULL_UUID, cpStartId);
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(CheckpointStatus.class, this);
        }
    }

    /**
     * Data class representing the state of running/scheduled checkpoint.
     */
    public static class CheckpointProgress {
        /** Scheduled time of checkpoint. */
        private volatile long nextCpTs;

        /** Checkpoint begin phase future. */
        private GridFutureAdapter cpBeginFut = new GridFutureAdapter<>();

        /** Checkpoint finish phase future. */
        private GridFutureAdapter cpFinishFut = new GridFutureAdapter<Void>() {
            @Override protected boolean onDone(@Nullable Void res, @Nullable Throwable err, boolean cancel) {
                // A failed finish also fails the begin phase, so begin-phase waiters never hang.
                if (err != null && !cpBeginFut.isDone())
                    cpBeginFut.onDone(err);

                return super.onDone(res, err, cancel);
            }
        };

        /** Flag indicates that snapshot operation will be performed after checkpoint. */
        private volatile boolean nextSnapshot;

        /** Flag indicates that checkpoint is started. */
        private volatile boolean started;

        /** Snapshot operation that should be performed if {@link #nextSnapshot} set to true. */
        private volatile SnapshotOperation snapshotOperation;

        /** Partitions destroy queue. */
        private final PartitionDestroyQueue destroyQueue = new PartitionDestroyQueue();

        /** Wakeup reason. */
        private String reason;

        /**
         * @param nextCpTs Next checkpoint timestamp.
         */
        private CheckpointProgress(long nextCpTs) {
            this.nextCpTs = nextCpTs;
        }

        /** @return {@code True} if the begin phase of this checkpoint is done. */
        public boolean started() {
            return cpBeginFut.isDone();
        }

        /** @return {@code True} if the finish phase of this checkpoint is done. */
        public boolean finished() {
            return cpFinishFut.isDone();
        }
    }

    /**
     * Immutable view over a {@link CheckpointProgress}, exposed to callers as a {@link CheckpointFuture}.
     */
    private static class CheckpointProgressSnapshot implements CheckpointFuture {
        /** Whether the checkpoint was started when the snapshot was taken. */
        private final boolean started;

        /** Begin phase future of the snapshotted checkpoint. */
        private final GridFutureAdapter<Object> cpBeginFut;

        /** Finish phase future of the snapshotted checkpoint. */
        private final GridFutureAdapter<Object> cpFinishFut;

        /** @param cpProgress Checkpoint progress to snapshot. */
        CheckpointProgressSnapshot(CheckpointProgress cpProgress) {
            started = cpProgress.started;
            cpBeginFut = cpProgress.cpBeginFut;
            cpFinishFut = cpProgress.cpFinishFut;
        }

        /** {@inheritDoc} */
        @Override public GridFutureAdapter beginFuture() {
            return cpBeginFut;
        }

        /** {@inheritDoc} */
        @Override public GridFutureAdapter<Object> finishFuture() {
            return cpFinishFut;
        }

        /** {@inheritDoc} */
        @Override public boolean started() {
            return started;
        }
    }

    /**
     * Holds a file lock on a "lock" file under the given path, recording the owning node's identity in it.
     */
    public static class FileLockHolder implements AutoCloseable {
        /** Lock file name. */
        private static final String lockFileName = "lock";

        /** File. */
        private File file;

        /** Channel. */
        private RandomAccessFile lockFile;

        /** Lock. */
        private volatile FileLock lock;

        /** Kernal context to generate Id of locked node in file. */
        @NotNull private GridKernalContext ctx;

        /** Logger. */
        private IgniteLogger log;

        /**
         * @param path Path.
         */
        public FileLockHolder(String path, @NotNull GridKernalContext ctx, IgniteLogger log) {
            try {
                file = Paths.get(path, lockFileName).toFile();

                lockFile = new RandomAccessFile(file, "rw");

                this.ctx = ctx;
                this.log = log;
            }
            catch (IOException e) {
                throw new IgniteException(e);
            }
        }

        /**
         * @param lockWaitTimeMillis During which time thread will try capture file lock.
         * @throws IgniteCheckedException If failed to capture file lock.
         */
        public void tryLock(long lockWaitTimeMillis) throws IgniteCheckedException {
            assert lockFile != null;

            FileChannel ch = lockFile.getChannel();

            // Build a human-readable descriptor of this node: id, addresses and ports.
            SB sb = new SB();

            //write node id
            sb.a("[").a(ctx.localNodeId().toString()).a("]");

            //write ip addresses
            final GridDiscoveryManager discovery = ctx.discovery();

            if (discovery != null) { //discovery may be not up and running
                final ClusterNode node = discovery.localNode();

                if (node != null)
                    sb.a(node.addresses());
            }

            //write ports
            sb.a("[");

            Iterator<GridPortRecord> it = ctx.ports().records().iterator();

            while (it.hasNext()) {
                GridPortRecord rec = it.next();

                sb.a(rec.protocol()).a(":").a(rec.port());

                if (it.hasNext())
                    sb.a(", ");
            }

            sb.a("]");

            String failMsg;

            try {
                String content = null;

                // Try to get lock, if not available wait 1 sec and re-try.
                for (int i = 0; i < lockWaitTimeMillis; i += 1000) {
                    try {
                        lock = ch.tryLock(0, 1, false);

                        if (lock != null && lock.isValid()) {
                            // Lock acquired: record who holds it for diagnostics.
                            writeContent(sb.toString());

                            return;
                        }
                    }
                    catch (OverlappingFileLockException ignore) {
                        // Lock is held by another process in this JVM scope; read its descriptor for the warning.
                        if (content == null)
                            content = readContent();

                        log.warning("Failed to acquire file lock. Will try again in 1s " +
                            "[nodeId=" + ctx.localNodeId() + ", holder=" + content +
                            ", path=" + file.getAbsolutePath() + ']');
                    }

                    U.sleep(1000);
                }

                if (content == null)
                    content = readContent();

                failMsg = "Failed to acquire file lock [holder=" + content + ", time=" + (lockWaitTimeMillis / 1000) +
                    " sec, path=" + file.getAbsolutePath() + ']';
            }
            catch (Exception e) {
                throw new IgniteCheckedException(e);
            }

            if (failMsg != null)
                throw new IgniteCheckedException(failMsg);
        }

        /**
         * Write node id (who captured lock) into lock file.
         *
         * @param content Node id.
         * @throws IOException if some fail while write node it.
         */
        private void writeContent(String content) throws IOException {
            FileChannel ch = lockFile.getChannel();

            byte[] bytes = content.getBytes();

            ByteBuffer buf = ByteBuffer.allocate(bytes.length);
            buf.put(bytes);

            buf.flip();

            // Offset 1: byte 0 is the region locked by tryLock(0, 1, false).
            ch.write(buf, 1);

            ch.force(false);
        }

        /**
         * Reads the holder descriptor previously written by {@link #writeContent(String)}.
         */
        private String readContent() throws IOException {
            FileChannel ch = lockFile.getChannel();

            ByteBuffer buf = ByteBuffer.allocate((int)(ch.size() - 1));

            ch.read(buf, 1);

            String content = new String(buf.array());

            buf.clear();

            return content;
        }

        /** Locked or not. */
        public boolean isLocked() {
            return lock != null && lock.isValid();
        }

        /** Releases file lock */
        public void release() {
            U.releaseQuiet(lock);
        }

        /** Closes file channel */
        @Override public void close() {
            release();

            U.closeQuiet(lockFile);
        }

        /**
         * @return Absolute path to lock file.
         */
        private String lockPath() {
            return file.getAbsolutePath();
        }
    }

    /** {@inheritDoc} */
    @Override public DataStorageMetrics persistentStoreMetrics() {
        return new DataStorageMetricsSnapshot(persStoreMetrics);
    }

    /**
     * @return Live (mutable) persistent store metrics implementation.
     */
    public DataStorageMetricsImpl persistentStoreMetricsImpl() {
        return persStoreMetrics;
    }

    /** {@inheritDoc} */
    @Override public MetaStorage metaStorage() {
        return metaStorage;
    }

    /** {@inheritDoc} */
    @Override public void notifyMetaStorageSubscribersOnReadyForRead() throws IgniteCheckedException {
        metastorageLifecycleLsnrs = cctx.kernalContext().internalSubscriptionProcessor().getMetastorageSubscribers();

        readMetastore();
    }

    /** {@inheritDoc} */
    @Override public boolean walEnabled(int grpId, boolean local) {
        if (local)
            return !initiallyLocalWalDisabledGrps.contains(grpId);
        else
            return !initiallyGlobalWalDisabledGrps.contains(grpId);
    }

    /** {@inheritDoc} */
    @Override public void walEnabled(int grpId, boolean enabled, boolean local) {
        String key = walGroupIdToKey(grpId, local);

        checkpointReadLock();

        try {
            if (enabled)
                metaStorage.remove(key);
            else {
                // Presence of the key in the metastorage means "WAL disabled" for the group.
                metaStorage.write(key, true);

                lastCheckpointInapplicableForWalRebalance(grpId);
            }
        }
        catch (IgniteCheckedException e)
        {
            throw new IgniteException("Failed to write cache group WAL state [grpId=" + grpId +
                ", enabled=" + enabled + ']', e);
        }
        finally {
            checkpointReadUnlock();
        }
    }

    /**
     * Checks that checkpoint with timestamp {@code cpTs} is inapplicable as start point for WAL rebalance for given group {@code grpId}.
     *
     * @param cpTs Checkpoint timestamp.
     * @param grpId Group ID.
     * @return {@code true} if checkpoint {@code cpTs} is inapplicable as start point for WAL rebalance for {@code grpId}.
     * @throws IgniteCheckedException If failed to check.
     */
    public boolean isCheckpointInapplicableForWalRebalance(Long cpTs, int grpId) throws IgniteCheckedException {
        // Presence of the marker key in the metastorage means "inapplicable".
        return metaStorage.read(checkpointInapplicableCpAndGroupIdToKey(cpTs, grpId)) != null;
    }

    /**
     * Set last checkpoint as inapplicable for WAL rebalance for given group {@code grpId}.
     *
     * @param grpId Group ID.
     */
    @Override public void lastCheckpointInapplicableForWalRebalance(int grpId) {
        checkpointReadLock();

        try {
            CheckpointEntry lastCp = cpHistory.lastCheckpoint();
            long lastCpTs = lastCp != null ? lastCp.timestamp() : 0;

            if (lastCpTs != 0)
                metaStorage.write(checkpointInapplicableCpAndGroupIdToKey(lastCpTs, grpId), true);
        }
        catch (IgniteCheckedException e) {
            log.error("Failed to mark last checkpoint as inapplicable for WAL rebalance for group: " + grpId, e);
        }
        finally {
            checkpointReadUnlock();
        }
    }

    /**
     * Populates the initially-disabled WAL group sets from WAL-state keys stored in the metastorage.
     */
    private void fillWalDisabledGroups() {
        assert metaStorage != null;

        try {
            metaStorage.iterate(WAL_KEY_PREFIX, (key, val) -> {
                T2<Integer, Boolean> t2 = walKeyToGroupIdAndLocalFlag(key);

                if (t2 != null) {
                    if (t2.get2())
                        initiallyLocalWalDisabledGrps.add(t2.get1());
                    else
                        initiallyGlobalWalDisabledGrps.add(t2.get1());
                }
            }, false);
        }
        catch (IgniteCheckedException e) {
            throw new IgniteException("Failed to read cache groups WAL state.", e);
        }
    }

    /**
     * Convert cache group ID to WAL state key.
     *
     * @param grpId Group ID.
     * @param local Whether the key denotes node-local or global WAL state.
     * @return Key.
     */
    private static String walGroupIdToKey(int grpId, boolean local) {
        if (local)
            return WAL_LOCAL_KEY_PREFIX + grpId;
        else
            return WAL_GLOBAL_KEY_PREFIX + grpId;
    }

    /**
     * Convert checkpoint timestamp and cache group ID to key for {@link #CHECKPOINT_INAPPLICABLE_FOR_REBALANCE} metastorage records.
     *
     * @param cpTs Checkpoint timestamp.
     * @param grpId Group ID.
     * @return Key.
     */
    private static String checkpointInapplicableCpAndGroupIdToKey(long cpTs, int grpId) {
        return CHECKPOINT_INAPPLICABLE_FOR_REBALANCE + cpTs + "-" + grpId;
    }

    /**
     * Convert WAL state key to cache group ID.
     *
     * @param key Key.
     * @return Group ID with local flag, or {@code null} if the key has an unknown prefix.
     */
    private static T2<Integer, Boolean> walKeyToGroupIdAndLocalFlag(String key) {
        if (key.startsWith(WAL_LOCAL_KEY_PREFIX))
            return new T2<>(Integer.parseInt(key.substring(WAL_LOCAL_KEY_PREFIX.length())), true);
        else if (key.startsWith(WAL_GLOBAL_KEY_PREFIX))
            return new T2<>(Integer.parseInt(key.substring(WAL_GLOBAL_KEY_PREFIX.length())), false);
        else
            return null;
    }

    /**
     * Method dumps partitions info see {@link #dumpPartitionsInfo(CacheGroupContext, IgniteLogger)}
     * for all persistent cache groups.
     *
     * @param cctx Shared context.
     * @param log Logger.
     * @throws IgniteCheckedException If failed.
     */
    private static void dumpPartitionsInfo(GridCacheSharedContext cctx, IgniteLogger log) throws IgniteCheckedException {
        for (CacheGroupContext grp : cctx.cache().cacheGroups()) {
            if (grp.isLocal() || !grp.persistenceEnabled())
                continue;

            dumpPartitionsInfo(grp, log);
        }
    }

    /**
     * Retrieves from page memory meta information about given {@code grp} group partitions
     * and dumps this information to log INFO level.
     *
     * @param grp Cache group.
     * @param log Logger.
     * @throws IgniteCheckedException If failed.
     */
    private static void dumpPartitionsInfo(CacheGroupContext grp, IgniteLogger log) throws IgniteCheckedException {
        PageMemoryEx pageMem = (PageMemoryEx)grp.dataRegion().pageMemory();

        IgnitePageStoreManager pageStore = grp.shared().pageStore();

        assert pageStore != null : "Persistent cache should have initialize page store manager.";

        for (int p = 0; p < grp.affinity().partitions(); p++) {
            // Partition is present in memory: report its in-memory state.
            if (grp.topology().localPartition(p) != null) {
                GridDhtLocalPartition part = grp.topology().localPartition(p);

                log.info("Partition [grp=" + grp.cacheOrGroupName()
                    + ", id=" + p
                    + ", state=" + part.state()
                    + ", counter=" + part.updateCounter()
                    + ", size=" + part.fullSize() + "]");

                continue;
            }

            // Partition is not in memory: fall back to the on-disk page store, if it exists.
            if (!pageStore.exists(grp.groupId(), p))
                continue;

            pageStore.ensure(grp.groupId(), p);

            // Only the file header exists - there is no meta page to read state from.
            if (pageStore.pages(grp.groupId(), p) <= 1) {
                log.info("Partition [grp=" + grp.cacheOrGroupName()
                    + ", id=" + p
                    + ", state=N/A (only file header) ]");

                continue;
            }

            long partMetaId = pageMem.partitionMetaPageId(grp.groupId(), p);
            long partMetaPage = pageMem.acquirePage(grp.groupId(), partMetaId);

            try {
                long pageAddr = pageMem.readLock(grp.groupId(), partMetaId, partMetaPage);

                try {
                    PagePartitionMetaIO io = PagePartitionMetaIO.VERSIONS.forPage(pageAddr);

                    GridDhtPartitionState partitionState = fromOrdinal(io.getPartitionState(pageAddr));

                    String state = partitionState != null ? partitionState.toString() : "N/A";

                    long updateCounter = io.getUpdateCounter(pageAddr);
                    long size = io.getSize(pageAddr);

                    log.info("Partition [grp=" + grp.cacheOrGroupName()
                        + ", id=" + p
                        + ", state=" + state
                        + ", counter=" + updateCounter
                        + ", size=" + size + "]");
                }
                finally {
                    pageMem.readUnlock(grp.groupId(), partMetaId, partMetaPage);
                }
            }
            finally {
                pageMem.releasePage(grp.groupId(), partMetaId, partMetaPage);
            }
        }
    }

    /**
     * Recovery lifecycle for read-write metastorage.
     */
    private class MetastorageRecoveryLifecycle implements DatabaseLifecycleListener {
        /** {@inheritDoc} */
        @Override public void beforeBinaryMemoryRestore(IgniteCacheDatabaseSharedManager mgr) throws IgniteCheckedException {
            cctx.pageStore().initializeForMetastorage();
        }

        /** {@inheritDoc} */
        @Override public void afterBinaryMemoryRestore(
            IgniteCacheDatabaseSharedManager mgr,
            RestoreBinaryState restoreState
        ) throws IgniteCheckedException {
            assert metaStorage == null;

            // Binary memory is consistent at this point, so the read-write metastorage can be created.
            metaStorage = createMetastorage(false);
        }
    }

    /**
     * @return Cache group predicate that passes only Metastorage cache group id.
     */
    private IgnitePredicate<Integer> onlyMetastorageGroup() {
        return groupId -> MetaStorage.METASTORAGE_CACHE_ID == groupId;
    }

    /**
     * @return Cache group predicate that passes only cache groups with enabled WAL.
     */
    private IgnitePredicate<Integer> groupsWithEnabledWal() {
        return groupId -> !initiallyGlobalWalDisabledGrps.contains(groupId)
            && !initiallyLocalWalDisabledGrps.contains(groupId);
    }

    /**
     * @return WAL records predicate that passes only Metastorage data records.
     */
    private IgniteBiPredicate<WALRecord.RecordType, WALPointer> onlyMetastorageRecords() {
        return (type, ptr) -> type == METASTORE_DATA_RECORD;
    }

    /**
     * @return WAL records predicate that passes only physical and mixed WAL records.
     */
    private IgniteBiPredicate<WALRecord.RecordType, WALPointer> physicalRecords() {
        return (type, ptr) -> type.purpose() == WALRecord.RecordPurpose.PHYSICAL
            || type.purpose() == WALRecord.RecordPurpose.MIXED;
    }

    /**
     * @return WAL records predicate that passes only logical and mixed WAL records.
     */
    private IgniteBiPredicate<WALRecord.RecordType, WALPointer> logicalRecords() {
        return (type, ptr) -> type.purpose() == WALRecord.RecordPurpose.LOGICAL
            || type.purpose() == WALRecord.RecordPurpose.MIXED;
    }

    /**
     * Abstract class to create restore context.
     */
    private abstract class RestoreStateContext {
        /** Last archived segment. */
        protected final long lastArchivedSegment;

        /** WAL iterator.
         */
        private final WALIterator iterator;

        /** Only {@link WalRecordCacheGroupAware} records satisfied this predicate will be applied. */
        private final IgnitePredicate<Integer> cacheGroupPredicate;

        /**
         * @param iterator WAL iterator.
         * @param lastArchivedSegment Last archived segment index.
         * @param cacheGroupPredicate Cache groups predicate.
         */
        protected RestoreStateContext(
            WALIterator iterator,
            long lastArchivedSegment,
            IgnitePredicate<Integer> cacheGroupPredicate
        ) {
            this.iterator = iterator;
            this.lastArchivedSegment = lastArchivedSegment;
            this.cacheGroupPredicate = cacheGroupPredicate;
        }

        /**
         * Advance iterator to the next record.
         *
         * @return WALRecord entry.
         * @throws IgniteCheckedException If CRC check fail during binary recovery state or another exception occurring.
         */
        public WALRecord next() throws IgniteCheckedException {
            try {
                for (;;) {
                    if (!iterator.hasNextX())
                        return null;

                    IgniteBiTuple<WALPointer, WALRecord> tup = iterator.nextX();

                    if (tup == null)
                        return null;

                    WALRecord rec = tup.get2();

                    WALPointer ptr = tup.get1();

                    rec.position(ptr);

                    // Filter out records by group id.
                    if (rec instanceof WalRecordCacheGroupAware) {
                        WalRecordCacheGroupAware grpAwareRecord = (WalRecordCacheGroupAware) rec;

                        if (!cacheGroupPredicate.apply(grpAwareRecord.groupId()))
                            continue;
                    }

                    // Filter out data entries by group id.
                    if (rec instanceof DataRecord)
                        rec = filterEntriesByGroupId((DataRecord) rec);

                    return rec;
                }
            }
            catch (IgniteCheckedException e) {
                boolean throwsCRCError = throwsCRCError();

                if (X.hasCause(e, IgniteDataIntegrityViolationException.class)) {
                    // A tolerated CRC failure ends iteration instead of failing recovery;
                    // see throwsCRCError() for where the boundary lies.
                    if (throwsCRCError)
                        throw e;
                    else
                        return null;
                }

                log.error("There is an error during restore state [throwsCRCError=" + throwsCRCError + ']', e);

                throw e;
            }
        }

        /**
         * Filter outs data entries from given data record that not satisfy {@link #cacheGroupPredicate}.
         *
         * @param record Original data record.
         * @return Data record with filtered data entries.
         */
        private DataRecord filterEntriesByGroupId(DataRecord record) {
            List<DataEntry> filteredEntries = record.writeEntries().stream()
                .filter(entry -> {
                    int cacheId = entry.cacheId();

                    return cctx.cacheContext(cacheId) != null && cacheGroupPredicate.apply(cctx.cacheContext(cacheId).groupId());
                })
                .collect(Collectors.toList());

            return record.setWriteEntries(filteredEntries);
        }

        /**
         *
         * @return Last read WAL record pointer.
         */
        public Optional<FileWALPointer> lastReadRecordPointer() {
            return iterator.lastRead().map(ptr -> (FileWALPointer)ptr);
        }

        /**
         *
         * @return Flag indicates need throws CRC exception or not.
         */
        public boolean throwsCRCError() {
            // CRC errors at or before the last archived segment are fatal;
            // errors past it are tolerated.
            return lastReadRecordPointer().filter(ptr -> ptr.index() <= lastArchivedSegment).isPresent();
        }
    }

    /**
     * Restore memory context. Tracks the safety of binary recovery.
     */
    public class RestoreBinaryState extends RestoreStateContext {
        /** Checkpoint status. */
        private final CheckpointStatus status;

        /** The flag indicates need to apply the binary update or no needed. */
        private boolean needApplyBinaryUpdates;

        /**
         * @param status Checkpoint status.
         * @param iterator WAL iterator.
         * @param lastArchivedSegment Last archived segment index.
         * @param cacheGroupsPredicate Cache groups predicate.
         */
        public RestoreBinaryState(
            CheckpointStatus status,
            WALIterator iterator,
            long lastArchivedSegment,
            IgnitePredicate<Integer> cacheGroupsPredicate
        ) {
            super(iterator, lastArchivedSegment, cacheGroupsPredicate);

            this.status = status;

            // Binary updates are only needed when the last checkpoint did not finish.
            this.needApplyBinaryUpdates = status.needRestoreMemory();
        }

        /**
         * Advance iterator to the next record.
         *
         * @return WALRecord entry.
         * @throws IgniteCheckedException If CRC check fail during binary recovery state or another exception occurring.
*/ @Override public WALRecord next() throws IgniteCheckedException { WALRecord rec = super.next(); if (rec == null) return null; if (rec.type() == CHECKPOINT_RECORD) { CheckpointRecord cpRec = (CheckpointRecord)rec; // We roll memory up until we find a checkpoint start record registered in the status. if (F.eq(cpRec.checkpointId(), status.cpStartId)) { log.info("Found last checkpoint marker [cpId=" + cpRec.checkpointId() + ", pos=" + rec.position() + ']'); needApplyBinaryUpdates = false; } else if (!F.eq(cpRec.checkpointId(), status.cpEndId)) U.warn(log, "Found unexpected checkpoint marker, skipping [cpId=" + cpRec.checkpointId() + ", expCpId=" + status.cpStartId + ", pos=" + rec.position() + ']'); } return rec; } /** * * @return Flag indicates need apply binary record or not. */ public boolean needApplyBinaryUpdate() { return needApplyBinaryUpdates; } /** * * @return Flag indicates need throws CRC exception or not. */ @Override public boolean throwsCRCError() { log.info("Throws CRC error check [needApplyBinaryUpdates=" + needApplyBinaryUpdates + ", lastArchivedSegment=" + lastArchivedSegment + ", lastRead=" + lastReadRecordPointer() + ']'); if (needApplyBinaryUpdates) return true; return super.throwsCRCError(); } } /** * Restore logical state context. Tracks the safety of logical recovery. */ public class RestoreLogicalState extends RestoreStateContext { /** States of partitions recovered during applying logical updates. */ private final Map<GroupPartitionId, PartitionRecoverState> partitionRecoveryStates = new HashMap<>(); /** * @param lastArchivedSegment Last archived segment index. */ public RestoreLogicalState(WALIterator iterator, long lastArchivedSegment, IgnitePredicate<Integer> cacheGroupsPredicate) { super(iterator, lastArchivedSegment, cacheGroupsPredicate); } /** * @return Map of restored partition states for cache groups. 
*/ public Map<GroupPartitionId, PartitionRecoverState> partitionRecoveryStates() { return Collections.unmodifiableMap(partitionRecoveryStates); } } /** Indicates checkpoint read lock acquisition failure which did not lead to node invalidation. */ private static class CheckpointReadLockTimeoutException extends IgniteCheckedException { /** */ private static final long serialVersionUID = 0L; /** */ private CheckpointReadLockTimeoutException(String msg) { super(msg); } } }
IGNITE-11536 Unused imports fix Signed-off-by: Ivan Rakov <[email protected]>
modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/GridCacheDatabaseSharedManager.java
IGNITE-11536 Unused imports fix
<ide><path>odules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/GridCacheDatabaseSharedManager.java <ide> import java.util.regex.Matcher; <ide> import java.util.regex.Pattern; <ide> import java.util.stream.Collectors; <del> <add>import org.apache.ignite.DataRegionMetricsProvider; <ide> import org.apache.ignite.DataStorageMetrics; <ide> import org.apache.ignite.IgniteCheckedException; <ide> import org.apache.ignite.IgniteException; <ide> import org.apache.ignite.IgniteInterruptedException; <ide> import org.apache.ignite.IgniteLogger; <ide> import org.apache.ignite.IgniteSystemProperties; <del>import org.apache.ignite.DataRegionMetricsProvider; <ide> import org.apache.ignite.cluster.ClusterNode; <ide> import org.apache.ignite.configuration.CacheConfiguration; <ide> import org.apache.ignite.configuration.CheckpointWriteOrder; <ide> import org.apache.ignite.failure.FailureContext; <ide> import org.apache.ignite.failure.FailureType; <ide> import org.apache.ignite.internal.GridKernalContext; <del>import org.apache.ignite.internal.IgniteEx; <ide> import org.apache.ignite.internal.IgniteFutureTimeoutCheckedException; <ide> import org.apache.ignite.internal.IgniteInternalFuture; <ide> import org.apache.ignite.internal.IgniteInterruptedCheckedException; <ide> import static org.apache.ignite.failure.FailureType.CRITICAL_ERROR; <ide> import static org.apache.ignite.failure.FailureType.SYSTEM_CRITICAL_OPERATION_TIMEOUT; <ide> import static org.apache.ignite.failure.FailureType.SYSTEM_WORKER_TERMINATION; <del>import static org.apache.ignite.internal.IgnitionEx.grid; <ide> import static org.apache.ignite.internal.LongJVMPauseDetector.DEFAULT_JVM_PAUSE_DETECTOR_THRESHOLD; <ide> import static org.apache.ignite.internal.pagemem.PageIdUtils.partId; <ide> import static org.apache.ignite.internal.pagemem.wal.record.WALRecord.RecordType.CHECKPOINT_RECORD;
Java
mit
4a6f211d8a171f6105d5f561c3205d975a814385
0
nunull/QuickStarter,nunull/QuickStarter,nunull/QuickStarter,nunull/QuickStarter
package de.dqi11.quickStarter.gui; import java.awt.Color; import java.awt.Dimension; import java.awt.Font; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.io.FileInputStream; import java.util.LinkedList; import java.util.Observable; import javax.swing.BorderFactory; import javax.swing.DefaultListModel; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JPanel; import javax.swing.JTextField; import javax.swing.SwingUtilities; import javax.swing.border.Border; import javax.swing.border.CompoundBorder; import javax.swing.border.EmptyBorder; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import de.dqi11.quickStarter.controller.Search; import de.dqi11.quickStarter.modules.ModuleAction; /** * Represents the main-application window, which * shows the search-field and ModuleActions. */ public class MainWindow extends Observable { private final int WIDTH = 500; private final int TEXTFIELD_HEIGHT = 50; private final int ADVICESLIST_MAXHEIGHT = 450; private boolean visible; private JFrame mainFrame; private JPanel mainPanel; private JTextField textField; private JLabel errorLabel; private JList<ModuleAction> advicesList; private DefaultListModel<ModuleAction> moduleActionsListModel; private KeyListener keyListener; private DocumentListener documentListener; private LinkedList<ModuleAction> moduleActions; private Font defaultFont; private Font boldFont; /** * Constructor. */ public MainWindow() { visible = false; } /** * Initializes the whole GUI. */ public void init() { initListeners(); initFonts(); initMainFrame(); initMainPanel(); initTextField(); initErrorLabel(); initModuleActionsPanel(); } /** * Initializes the actions. */ private void initListeners() { keyListener = new KeyListener() { @Override public void keyTyped(KeyEvent e) { // Nothing to do here. } @Override public void keyReleased(KeyEvent e) { // Nothing to do here. 
} @Override public void keyPressed(KeyEvent e) { // escape-key if(e.getKeyCode() == KeyEvent.VK_ESCAPE) { toggleApplication(); // enter-key } else if(e.getKeyCode() == KeyEvent.VK_ENTER) { invokeSelectedModuleAction(); // down-arrow-key } else if(e.getKeyCode() == KeyEvent.VK_DOWN) { e.consume(); selectNext(); // up-arrow-key } else if(e.getKeyCode() == KeyEvent.VK_UP) { e.consume(); selectPrevious(); } } }; documentListener = new DocumentListener() { @Override public void removeUpdate(DocumentEvent e) { setChanged(); notifyObservers(); } @Override public void insertUpdate(DocumentEvent e) { setChanged(); notifyObservers(); } @Override public void changedUpdate(DocumentEvent e) { // Nothing to do here. } }; } /** * Initializes all fonts. */ private void initFonts() { // Initialize default font. try { defaultFont = Font.createFont(Font.TRUETYPE_FONT, new FileInputStream("res/fonts/ubuntu/Ubuntu-Light.ttf")); defaultFont = defaultFont.deriveFont(20f); } catch (Exception e) { defaultFont = new Font("Arial", Font.PLAIN, 20); } // Initialize bold font. try { boldFont = Font.createFont(Font.TRUETYPE_FONT, new FileInputStream("res/fonts/ubuntu/Ubuntu-Bold.ttf")); boldFont = boldFont.deriveFont(20f); } catch (Exception e) { boldFont = new Font("Arial", Font.BOLD, 20); } } /** * Initializes the mainFrame. */ private void initMainFrame() { mainFrame = new JFrame(); mainFrame.setUndecorated(true); mainFrame.setSize(WIDTH-20, TEXTFIELD_HEIGHT + ADVICESLIST_MAXHEIGHT); // mainFrame.setShape(new RoundRectangle2D.Double(10, 10, 100, 100, 50, 50)); mainFrame.setLocationRelativeTo(null); mainFrame.setOpacity(0.8f); mainFrame.setBackground(Color.BLACK); } /** * Initializes the mainPanel. */ private void initMainPanel() { mainPanel = new JPanel(); mainPanel.setOpaque(false); mainFrame.setContentPane(mainPanel); } /** * Initializes the textField. 
*/ private void initTextField() { textField = new JTextField(); Border line = BorderFactory.createLineBorder(Color.BLACK); Border empty = new EmptyBorder(0, 10, 0, 0); CompoundBorder border = new CompoundBorder(line, empty); textField.setBorder(border); textField.setPreferredSize(new Dimension(WIDTH, TEXTFIELD_HEIGHT)); textField.setFont(boldFont); textField.setBackground(Color.BLACK); textField.setForeground(Color.WHITE); textField.setCaretColor(Color.WHITE); textField.addKeyListener(keyListener); textField.getDocument().addDocumentListener(documentListener); mainPanel.add(textField); } private void initErrorLabel() { errorLabel = new JLabel(); mainPanel.add(errorLabel); } /** * Initializes the advicesPanel; */ private void initModuleActionsPanel() { moduleActionsListModel = new DefaultListModel<ModuleAction>(); advicesList = new JList<ModuleAction>(moduleActionsListModel); advicesList.setCellRenderer(new ModuleActionListCellRenderer(defaultFont)); advicesList.setPreferredSize(new Dimension(WIDTH, ADVICESLIST_MAXHEIGHT)); advicesList.setOpaque(false); mainPanel.add(advicesList); } /** * Updates the shown module actions. */ public void updateModuleActions() { moduleActionsListModel.clear(); for(ModuleAction moduleAction : moduleActions) { moduleActionsListModel.addElement(moduleAction); } selectFirst(); } /** * Toggles the visibility of the application. */ public void toggleApplication() { visible = !visible; textField.setText(""); mainFrame.setVisible(visible); if(visible) { mainFrame.setAlwaysOnTop(true); mainFrame.toFront(); mainFrame.requestFocus(); mainFrame.setAlwaysOnTop(false); } } /** * Returns the current search-string. * * @return The search-string. */ public String getSearchString() { return textField.getText(); } /** * Selects the first ModuleAction. */ public void selectFirst() { advicesList.setSelectedIndex(0); } /** * Selects the next ModuleAction. 
*/ public void selectNext() { int index = advicesList.getSelectedIndex()+1; advicesList.setSelectedIndex(index > advicesList.getLastVisibleIndex() ? advicesList.getLastVisibleIndex() : index); } /** * Selects the previous ModuleAction. */ public void selectPrevious() { int index = advicesList.getSelectedIndex()-1; advicesList.setSelectedIndex(index < 0 ? 0 : index); } /** * Returns the index of the selected ModuleAction. * * @return the index */ public int getSelectedIndex() { return advicesList.getSelectedIndex(); } /** * Returns the selected ModuleAction. * * @return the selected ModuleAction. */ public ModuleAction getSelectedModuleAction() { return advicesList.getSelectedValue(); } /** * Invokes the currently selected ModuleAction. */ public void invokeSelectedModuleAction() { Search search = new Search(getSearchString()); ModuleAction moduleAction = getSelectedModuleAction(); if(moduleAction != null) { ModuleWindow moduleWindow = moduleAction.getModuleWindow(search); toggleApplication(); if(moduleWindow == null) { moduleAction.invoke(search); } else { moduleWindow.show(); } } } /** * Sets the ModuleActions and updates the GUI. * * @param moduleActions The List of ModuleActions. */ public void setModuleActions(LinkedList<ModuleAction> moduleActions) { this.moduleActions = moduleActions; updateModuleActions(); } }
src/de/dqi11/quickStarter/gui/MainWindow.java
package de.dqi11.quickStarter.gui; import java.awt.Color; import java.awt.Dimension; import java.awt.Font; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.io.FileInputStream; import java.util.LinkedList; import java.util.Observable; import javax.swing.BorderFactory; import javax.swing.DefaultListModel; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JPanel; import javax.swing.JTextField; import javax.swing.border.Border; import javax.swing.border.CompoundBorder; import javax.swing.border.EmptyBorder; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import de.dqi11.quickStarter.controller.Search; import de.dqi11.quickStarter.modules.ModuleAction; /** * Represents the main-application window, which * shows the search-field and ModuleActions. */ public class MainWindow extends Observable { private final int WIDTH = 500; private final int TEXTFIELD_HEIGHT = 50; private final int ADVICESLIST_MAXHEIGHT = 450; private boolean visible; private JFrame mainFrame; private JPanel mainPanel; private JTextField textField; private JLabel errorLabel; private JList<ModuleAction> advicesList; private DefaultListModel<ModuleAction> moduleActionsListModel; private KeyListener keyListener; private DocumentListener documentListener; private LinkedList<ModuleAction> moduleActions; private Font defaultFont; private Font boldFont; /** * Constructor. */ public MainWindow() { visible = false; } /** * Initializes the whole GUI. */ public void init() { initListeners(); initFonts(); initMainFrame(); initMainPanel(); initTextField(); initErrorLabel(); initModuleActionsPanel(); } /** * Initializes the actions. */ private void initListeners() { keyListener = new KeyListener() { @Override public void keyTyped(KeyEvent e) { // Nothing to do here. } @Override public void keyReleased(KeyEvent e) { // Nothing to do here. 
} @Override public void keyPressed(KeyEvent e) { // escape-key if(e.getKeyCode() == KeyEvent.VK_ESCAPE) { toggleApplication(); // enter-key } else if(e.getKeyCode() == KeyEvent.VK_ENTER) { invokeSelectedModuleAction(); // down-arrow-key } else if(e.getKeyCode() == KeyEvent.VK_DOWN) { e.consume(); selectNext(); // up-arrow-key } else if(e.getKeyCode() == KeyEvent.VK_UP) { e.consume(); selectPrevious(); } } }; documentListener = new DocumentListener() { @Override public void removeUpdate(DocumentEvent e) { setChanged(); notifyObservers(); } @Override public void insertUpdate(DocumentEvent e) { setChanged(); notifyObservers(); } @Override public void changedUpdate(DocumentEvent e) { // Nothing to do here. } }; } /** * Initializes all fonts. */ private void initFonts() { // Initialize default font. try { defaultFont = Font.createFont(Font.TRUETYPE_FONT, new FileInputStream("res/fonts/ubuntu/Ubuntu-Light.ttf")); defaultFont = defaultFont.deriveFont(20f); } catch (Exception e) { defaultFont = new Font("Arial", Font.PLAIN, 20); } // Initialize bold font. try { boldFont = Font.createFont(Font.TRUETYPE_FONT, new FileInputStream("res/fonts/ubuntu/Ubuntu-Bold.ttf")); boldFont = boldFont.deriveFont(20f); } catch (Exception e) { boldFont = new Font("Arial", Font.BOLD, 20); } } /** * Initializes the mainFrame. */ private void initMainFrame() { mainFrame = new JFrame(); mainFrame.setUndecorated(true); mainFrame.setSize(WIDTH-20, TEXTFIELD_HEIGHT + ADVICESLIST_MAXHEIGHT); // mainFrame.setShape(new RoundRectangle2D.Double(10, 10, 100, 100, 50, 50)); mainFrame.setLocationRelativeTo(null); mainFrame.setOpacity(0.8f); mainFrame.setBackground(Color.BLACK); } /** * Initializes the mainPanel. */ private void initMainPanel() { mainPanel = new JPanel(); mainPanel.setOpaque(false); mainFrame.setContentPane(mainPanel); } /** * Initializes the textField. 
*/ private void initTextField() { textField = new JTextField(); Border line = BorderFactory.createLineBorder(Color.BLACK); Border empty = new EmptyBorder(0, 10, 0, 0); CompoundBorder border = new CompoundBorder(line, empty); textField.setBorder(border); textField.setPreferredSize(new Dimension(WIDTH, TEXTFIELD_HEIGHT)); textField.setFont(boldFont); textField.setBackground(Color.BLACK); textField.setForeground(Color.WHITE); textField.setCaretColor(Color.WHITE); textField.addKeyListener(keyListener); textField.getDocument().addDocumentListener(documentListener); mainPanel.add(textField); } private void initErrorLabel() { errorLabel = new JLabel(); mainPanel.add(errorLabel); } /** * Initializes the advicesPanel; */ private void initModuleActionsPanel() { moduleActionsListModel = new DefaultListModel<ModuleAction>(); advicesList = new JList<ModuleAction>(moduleActionsListModel); advicesList.setCellRenderer(new ModuleActionListCellRenderer(defaultFont)); advicesList.setPreferredSize(new Dimension(WIDTH, ADVICESLIST_MAXHEIGHT)); advicesList.setOpaque(false); mainPanel.add(advicesList); } /** * Updates the shown module actions. */ public void updateModuleActions() { moduleActionsListModel.clear(); for(ModuleAction moduleAction : moduleActions) { moduleActionsListModel.addElement(moduleAction); } selectFirst(); } /** * Toggles the visibility of the application. */ public void toggleApplication() { visible = !visible; textField.setText(""); mainFrame.setVisible(visible); } /** * Returns the current search-string. * * @return The search-string. */ public String getSearchString() { return textField.getText(); } /** * Selects the first ModuleAction. */ public void selectFirst() { advicesList.setSelectedIndex(0); } /** * Selects the next ModuleAction. */ public void selectNext() { int index = advicesList.getSelectedIndex()+1; advicesList.setSelectedIndex(index > advicesList.getLastVisibleIndex() ? 
advicesList.getLastVisibleIndex() : index); } /** * Selects the previous ModuleAction. */ public void selectPrevious() { int index = advicesList.getSelectedIndex()-1; advicesList.setSelectedIndex(index < 0 ? 0 : index); } /** * Returns the index of the selected ModuleAction. * * @return the index */ public int getSelectedIndex() { return advicesList.getSelectedIndex(); } /** * Returns the selected ModuleAction. * * @return the selected ModuleAction. */ public ModuleAction getSelectedModuleAction() { return advicesList.getSelectedValue(); } /** * Invokes the currently selected ModuleAction. */ public void invokeSelectedModuleAction() { Search search = new Search(getSearchString()); ModuleAction moduleAction = getSelectedModuleAction(); if(moduleAction != null) { ModuleWindow moduleWindow = moduleAction.getModuleWindow(search); toggleApplication(); if(moduleWindow == null) { moduleAction.invoke(search); } else { moduleWindow.show(); } } } /** * Sets the ModuleActions and updates the GUI. * * @param moduleActions The List of ModuleActions. */ public void setModuleActions(LinkedList<ModuleAction> moduleActions) { this.moduleActions = moduleActions; updateModuleActions(); } }
Fixed a GUI issue (Mac OSX) Toggling the GUI via the global keyboard shortcut does work now (application is bought to the front).
src/de/dqi11/quickStarter/gui/MainWindow.java
Fixed a GUI issue
<ide><path>rc/de/dqi11/quickStarter/gui/MainWindow.java <ide> import javax.swing.JList; <ide> import javax.swing.JPanel; <ide> import javax.swing.JTextField; <add>import javax.swing.SwingUtilities; <ide> import javax.swing.border.Border; <ide> import javax.swing.border.CompoundBorder; <ide> import javax.swing.border.EmptyBorder; <ide> */ <ide> public void toggleApplication() { <ide> visible = !visible; <add> <ide> textField.setText(""); <ide> mainFrame.setVisible(visible); <add> if(visible) { <add> mainFrame.setAlwaysOnTop(true); <add> mainFrame.toFront(); <add> mainFrame.requestFocus(); <add> mainFrame.setAlwaysOnTop(false); <add> } <ide> } <ide> <ide> /**
Java
lgpl-2.1
87a80ff7dfb2b46559ce083d970405c5c3687b25
0
loopingz/nuxeo-drive,ssdi-drive/nuxeo-drive,arameshkumar/base-nuxeo-drive,arameshkumar/nuxeo-drive,arameshkumar/base-nuxeo-drive,rsoumyassdi/nuxeo-drive,DirkHoffmann/nuxeo-drive,IsaacYangSLA/nuxeo-drive,rsoumyassdi/nuxeo-drive,arameshkumar/nuxeo-drive,IsaacYangSLA/nuxeo-drive,ssdi-drive/nuxeo-drive,IsaacYangSLA/nuxeo-drive,DirkHoffmann/nuxeo-drive,arameshkumar/nuxeo-drive,arameshkumar/base-nuxeo-drive,loopingz/nuxeo-drive,rsoumyassdi/nuxeo-drive,loopingz/nuxeo-drive,DirkHoffmann/nuxeo-drive,DirkHoffmann/nuxeo-drive,rsoumyassdi/nuxeo-drive,loopingz/nuxeo-drive,IsaacYangSLA/nuxeo-drive,ssdi-drive/nuxeo-drive,IsaacYangSLA/nuxeo-drive,arameshkumar/base-nuxeo-drive,loopingz/nuxeo-drive,arameshkumar/nuxeo-drive,DirkHoffmann/nuxeo-drive
/* * (C) Copyright 2012 Nuxeo SA (http://nuxeo.com/) and contributors. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Lesser General Public License * (LGPL) version 2.1 which accompanies this distribution, and is available at * http://www.gnu.org/licenses/lgpl.html * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * Contributors: * Antoine Taillefer <[email protected]> */ package org.nuxeo.drive.operations; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.Serializable; import java.security.Principal; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.type.TypeReference; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.nuxeo.drive.adapter.FileSystemItem; import org.nuxeo.drive.adapter.FolderItem; import org.nuxeo.drive.adapter.impl.DefaultSyncRootFolderItem; import org.nuxeo.drive.adapter.impl.DefaultTopLevelFolderItem; import org.nuxeo.drive.adapter.impl.DocumentBackedFileItem; import org.nuxeo.drive.adapter.impl.DocumentBackedFolderItem; import org.nuxeo.drive.service.FileSystemItemAdapterService; import org.nuxeo.drive.service.NuxeoDriveManager; import org.nuxeo.ecm.automation.client.Session; import org.nuxeo.ecm.automation.client.jaxrs.impl.HttpAutomationClient; import org.nuxeo.ecm.automation.client.model.Blob; import org.nuxeo.ecm.automation.client.model.StringBlob; import 
org.nuxeo.ecm.automation.test.EmbeddedAutomationServerFeature; import org.nuxeo.ecm.core.api.ClientException; import org.nuxeo.ecm.core.api.CoreSession; import org.nuxeo.ecm.core.api.DocumentModel; import org.nuxeo.ecm.core.api.IdRef; import org.nuxeo.ecm.core.api.NuxeoPrincipal; import org.nuxeo.ecm.core.api.PathRef; import org.nuxeo.ecm.core.api.security.ACE; import org.nuxeo.ecm.core.api.security.ACL; import org.nuxeo.ecm.core.api.security.ACP; import org.nuxeo.ecm.core.api.security.SecurityConstants; import org.nuxeo.ecm.core.storage.sql.DatabaseHelper; import org.nuxeo.ecm.core.storage.sql.DatabaseMySQL; import org.nuxeo.ecm.core.test.TransactionalFeature; import org.nuxeo.ecm.core.test.annotations.Granularity; import org.nuxeo.ecm.core.test.annotations.RepositoryConfig; import org.nuxeo.ecm.directory.api.DirectoryService; import org.nuxeo.ecm.platform.usermanager.UserManager; import org.nuxeo.runtime.api.Framework; import org.nuxeo.runtime.test.runner.Deploy; import org.nuxeo.runtime.test.runner.Features; import org.nuxeo.runtime.test.runner.FeaturesRunner; import org.nuxeo.runtime.test.runner.Jetty; import org.nuxeo.runtime.transaction.TransactionHelper; import com.google.inject.Inject; /** * Tests the {@link FileSystemItem} related operations. 
* * @author Antoine Taillefer */ @RunWith(FeaturesRunner.class) @Features({ TransactionalFeature.class, EmbeddedAutomationServerFeature.class }) @Deploy({ "org.nuxeo.ecm.platform.filemanager.core", "org.nuxeo.ecm.platform.mimetype.api", "org.nuxeo.ecm.platform.mimetype.core", "org.nuxeo.ecm.platform.types.core", "org.nuxeo.ecm.webapp.base:OSGI-INF/ecm-types-contrib.xml", "org.nuxeo.drive.core", "org.nuxeo.drive.operations" }) @RepositoryConfig(cleanup = Granularity.METHOD) @Jetty(port = 18080) public class TestFileSystemItemOperations { private static final String SYNC_ROOT_FOLDER_ITEM_ID_PREFIX = "defaultSyncRootFolderItemFactory#test#"; private static final String DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX = "defaultFileSystemItemFactory#test#"; @Inject protected CoreSession session; @Inject protected DirectoryService directoryService; @Inject protected FileSystemItemAdapterService fileSystemItemAdapterService; @Inject protected NuxeoDriveManager nuxeoDriveManager; @Inject protected HttpAutomationClient automationClient; @Inject protected Session clientSession; protected DocumentModel syncRoot1; protected DocumentModel syncRoot2; protected DocumentModel file1; protected DocumentModel file2; protected DocumentModel file3; protected DocumentModel file4; protected DocumentModel subFolder1; protected ObjectMapper mapper; /** * Initializes the test hierarchy. 
* * <pre> * topLevel * |-- folder1 (syncRoot1) * | |-- file1 * | |-- subFolder1 * | |-- file3 * | |-- file4 * |-- folder2 (syncRoot2) * | |-- file2 * </pre> */ @Before public void init() throws Exception { Principal administrator = session.getPrincipal(); // Create 2 sync roots syncRoot1 = session.createDocument(session.createDocumentModel("/", "folder1", "Folder")); syncRoot2 = session.createDocument(session.createDocumentModel("/", "folder2", "Folder")); // Register sync roots nuxeoDriveManager.registerSynchronizationRoot(administrator, syncRoot1, session); nuxeoDriveManager.registerSynchronizationRoot(administrator, syncRoot2, session); // Create 1 file in each sync root file1 = session.createDocumentModel("/folder1", "file1", "File"); org.nuxeo.ecm.core.api.Blob blob = new org.nuxeo.ecm.core.api.impl.blob.StringBlob( "The content of file 1."); blob.setFilename("First file.odt"); file1.setPropertyValue("file:content", (Serializable) blob); file1 = session.createDocument(file1); file2 = session.createDocumentModel("/folder2", "file2", "File"); blob = new org.nuxeo.ecm.core.api.impl.blob.StringBlob( "The content of file 2."); blob.setFilename("Second file.odt"); file2.setPropertyValue("file:content", (Serializable) blob); file2 = session.createDocument(file2); // Create a sub-folder in sync root 1 subFolder1 = session.createDocument(session.createDocumentModel( "/folder1", "subFolder1", "Folder")); // Create 2 files in sub-folder file3 = session.createDocumentModel("/folder1/subFolder1", "file3", "File"); blob = new org.nuxeo.ecm.core.api.impl.blob.StringBlob( "The content of file 3."); blob.setFilename("Third file.odt"); file3.setPropertyValue("file:content", (Serializable) blob); file3 = session.createDocument(file3); file4 = session.createDocumentModel("/folder1/subFolder1", "file4", "File"); blob = new org.nuxeo.ecm.core.api.impl.blob.StringBlob( "The content of file 4."); blob.setFilename("Fourth file.odt"); file4.setPropertyValue("file:content", 
(Serializable) blob); file4 = session.createDocument(file4); session.save(); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); mapper = new ObjectMapper(); } @Test public void testGetTopLevelChildren() throws Exception { Blob topLevelFolderJSON = (Blob) clientSession.newRequest( NuxeoDriveGetTopLevelFolder.ID).execute(); assertNotNull(topLevelFolderJSON); FolderItem topLevelFolder = mapper.readValue( topLevelFolderJSON.getStream(), new TypeReference<DefaultTopLevelFolderItem>() { }); Blob topLevelChildrenJSON = (Blob) clientSession.newRequest( NuxeoDriveGetChildren.ID).set("id", topLevelFolder.getId()).execute(); List<DefaultSyncRootFolderItem> topLevelChildren = mapper.readValue( topLevelChildrenJSON.getStream(), new TypeReference<List<DefaultSyncRootFolderItem>>() { }); assertNotNull(topLevelChildren); assertEquals(2, topLevelChildren.size()); DefaultSyncRootFolderItem child = topLevelChildren.get(0); assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId(), child.getId()); assertTrue(child.getParentId().endsWith( "DefaultTopLevelFolderItemFactory#")); assertEquals("folder1", child.getName()); assertTrue(child.isFolder()); assertEquals("Administrator", child.getCreator()); assertTrue(child.getCanRename()); assertTrue(child.getCanDelete()); assertTrue(child.getCanCreateChild()); child = topLevelChildren.get(1); assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId(), child.getId()); assertTrue(child.getParentId().endsWith( "DefaultTopLevelFolderItemFactory#")); assertEquals("folder2", child.getName()); assertTrue(child.isFolder()); assertEquals("Administrator", child.getCreator()); assertTrue(child.getCanRename()); assertTrue(child.getCanDelete()); assertTrue(child.getCanCreateChild()); } @Test public void testFileSystemItemExists() throws Exception { // Non existing file system item Blob fileSystemItemExistsJSON = (Blob) clientSession.newRequest( NuxeoDriveFileSystemItemExists.ID).set("id", 
SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + "badId").execute(); assertNotNull(fileSystemItemExistsJSON); String fileSystemItemExists = mapper.readValue( fileSystemItemExistsJSON.getStream(), String.class); assertEquals("false", fileSystemItemExists); // Existing file system item fileSystemItemExistsJSON = (Blob) clientSession.newRequest( NuxeoDriveFileSystemItemExists.ID).set("id", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId()).execute(); assertNotNull(fileSystemItemExistsJSON); fileSystemItemExists = mapper.readValue( fileSystemItemExistsJSON.getStream(), String.class); assertEquals("true", fileSystemItemExists); // Deleted file system item file1.followTransition("delete"); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); fileSystemItemExistsJSON = (Blob) clientSession.newRequest( NuxeoDriveFileSystemItemExists.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).execute(); assertNotNull(fileSystemItemExistsJSON); fileSystemItemExists = mapper.readValue( fileSystemItemExistsJSON.getStream(), String.class); assertEquals("false", fileSystemItemExists); } @Test public void testGetFileSystemItem() throws Exception { // Get top level folder String topLevelFolderItemId = fileSystemItemAdapterService.getTopLevelFolderItemFactory().getTopLevelFolderItem( session.getPrincipal()).getId(); Blob fileSystemItemJSON = (Blob) clientSession.newRequest( NuxeoDriveGetFileSystemItem.ID).set("id", topLevelFolderItemId).execute(); assertNotNull(fileSystemItemJSON); DefaultTopLevelFolderItem topLevelFolderItem = mapper.readValue( fileSystemItemJSON.getStream(), DefaultTopLevelFolderItem.class); assertNotNull(topLevelFolderItem); assertEquals(topLevelFolderItemId, topLevelFolderItem.getId()); assertNull(topLevelFolderItem.getParentId()); 
assertEquals("Nuxeo Drive", topLevelFolderItem.getName()); assertTrue(topLevelFolderItem.isFolder()); assertEquals("system", topLevelFolderItem.getCreator()); assertFalse(topLevelFolderItem.getCanRename()); assertFalse(topLevelFolderItem.getCanDelete()); assertFalse(topLevelFolderItem.getCanCreateChild()); // Get sync root fileSystemItemJSON = (Blob) clientSession.newRequest( NuxeoDriveGetFileSystemItem.ID).set("id", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId()).execute(); assertNotNull(fileSystemItemJSON); DefaultSyncRootFolderItem syncRootFolderItem = mapper.readValue( fileSystemItemJSON.getStream(), DefaultSyncRootFolderItem.class); assertNotNull(syncRootFolderItem); assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId(), syncRootFolderItem.getId()); assertTrue(syncRootFolderItem.getParentId().endsWith( "DefaultTopLevelFolderItemFactory#")); assertEquals("folder1", syncRootFolderItem.getName()); assertTrue(syncRootFolderItem.isFolder()); assertEquals("Administrator", syncRootFolderItem.getCreator()); assertTrue(syncRootFolderItem.getCanRename()); assertTrue(syncRootFolderItem.getCanDelete()); assertTrue(syncRootFolderItem.getCanCreateChild()); // Get file in sync root fileSystemItemJSON = (Blob) clientSession.newRequest( NuxeoDriveGetFileSystemItem.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).execute(); assertNotNull(fileSystemItemJSON); DocumentBackedFileItem fileItem = mapper.readValue( fileSystemItemJSON.getStream(), DocumentBackedFileItem.class); assertNotNull(fileItem); assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId(), fileItem.getId()); assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId(), fileItem.getParentId()); assertEquals("First file.odt", fileItem.getName()); assertFalse(fileItem.isFolder()); assertEquals("Administrator", fileItem.getCreator()); assertTrue(fileItem.getCanRename()); assertTrue(fileItem.getCanDelete()); assertTrue(fileItem.getCanUpdate()); assertEquals("nxbigfile/test/" + 
file1.getId() + "/blobholder:0/First%20file.odt", fileItem.getDownloadURL()); assertEquals("md5", fileItem.getDigestAlgorithm()); assertEquals( ((org.nuxeo.ecm.core.api.Blob) file1.getPropertyValue("file:content")).getDigest(), fileItem.getDigest()); // Get deleted file file1.followTransition("delete"); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); fileSystemItemJSON = (Blob) clientSession.newRequest( NuxeoDriveGetFileSystemItem.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).execute(); assertNotNull(fileSystemItemJSON); assertNull(mapper.readValue(fileSystemItemJSON.getStream(), Object.class)); } @Test public void testGetChildren() throws Exception { // Get children of sub-folder of sync root 1 Blob childrenJSON = (Blob) clientSession.newRequest( NuxeoDriveGetChildren.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + subFolder1.getId()).execute(); assertNotNull(childrenJSON); List<DocumentBackedFileItem> children = mapper.readValue( childrenJSON.getStream(), new TypeReference<List<DocumentBackedFileItem>>() { }); assertNotNull(children); assertEquals(2, children.size()); // Don't check children order against MySQL database because of the // milliseconds limitation boolean ordered = !(DatabaseHelper.DATABASE instanceof DatabaseMySQL); checkChildren(children, subFolder1.getId(), file3.getId(), file4.getId(), ordered); } @Test public void testCreateFolder() throws Exception { Blob newFolderJSON = (Blob) clientSession.newRequest( NuxeoDriveCreateFolder.ID).set("parentId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).set( "name", "newFolder").execute(); assertNotNull(newFolderJSON); DocumentBackedFolderItem newFolder = mapper.readValue( newFolderJSON.getStream(), DocumentBackedFolderItem.class); 
assertNotNull(newFolder); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) session.save(); DocumentModel newFolderDoc = session.getDocument(new PathRef( "/folder2/newFolder")); assertEquals("Folder", newFolderDoc.getType()); assertEquals("newFolder", newFolderDoc.getTitle()); assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + newFolderDoc.getId(), newFolder.getId()); assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId(), newFolder.getParentId()); assertEquals("newFolder", newFolder.getName()); assertTrue(newFolder.isFolder()); assertEquals("Administrator", newFolder.getCreator()); assertTrue(newFolder.getCanRename()); assertTrue(newFolder.getCanDelete()); assertTrue(newFolder.getCanCreateChild()); } @Test public void testCreateFile() throws Exception { StringBlob blob = new StringBlob("This is the content of a new file."); blob.setFileName("New file.odt"); Blob newFileJSON = (Blob) clientSession.newRequest( NuxeoDriveCreateFile.ID).set("parentId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + subFolder1.getId()).setInput( blob).execute(); assertNotNull(newFileJSON); DocumentBackedFileItem newFile = mapper.readValue( newFileJSON.getStream(), DocumentBackedFileItem.class); assertNotNull(newFile); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) session.save(); DocumentModel newFileDoc = session.getDocument(new PathRef( "/folder1/subFolder1/New file.odt")); assertEquals("File", newFileDoc.getType()); assertEquals("New file.odt", newFileDoc.getTitle()); org.nuxeo.ecm.core.api.Blob newFileBlob = (org.nuxeo.ecm.core.api.Blob) newFileDoc.getPropertyValue("file:content"); assertNotNull(newFileBlob); assertEquals("New file.odt", newFileBlob.getFilename()); assertEquals("This is the content of 
a new file.", newFileBlob.getString()); assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + newFileDoc.getId(), newFile.getId()); assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + subFolder1.getId(), newFile.getParentId()); assertEquals("New file.odt", newFile.getName()); assertFalse(newFile.isFolder()); assertEquals("Administrator", newFile.getCreator()); assertTrue(newFile.getCanRename()); assertTrue(newFile.getCanDelete()); assertTrue(newFile.getCanUpdate()); assertEquals("nxbigfile/test/" + newFileDoc.getId() + "/blobholder:0/New%20file.odt", newFile.getDownloadURL()); assertEquals("md5", newFile.getDigestAlgorithm()); assertEquals(newFileBlob.getDigest(), newFile.getDigest()); } @Test public void testUpdateFile() throws Exception { StringBlob blob = new StringBlob( "This is the updated content of file 1."); blob.setFileName("Updated file 1.odt"); Blob updatedFileJSON = (Blob) clientSession.newRequest( NuxeoDriveUpdateFile.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).setInput( blob).execute(); assertNotNull(updatedFileJSON); DocumentBackedFileItem updatedFile = mapper.readValue( updatedFileJSON.getStream(), DocumentBackedFileItem.class); assertNotNull(updatedFile); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) session.save(); DocumentModel updatedFileDoc = session.getDocument(new IdRef( file1.getId())); assertEquals("File", updatedFileDoc.getType()); assertEquals("file1", updatedFileDoc.getTitle()); org.nuxeo.ecm.core.api.Blob updatedFileBlob = (org.nuxeo.ecm.core.api.Blob) updatedFileDoc.getPropertyValue("file:content"); assertNotNull(updatedFileBlob); assertEquals("Updated file 1.odt", updatedFileBlob.getFilename()); assertEquals("This is the updated content of file 1.", updatedFileBlob.getString()); assertEquals( DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + updatedFileDoc.getId(), updatedFile.getId()); 
assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId(), updatedFile.getParentId()); assertEquals("Updated file 1.odt", updatedFile.getName()); assertFalse(updatedFile.isFolder()); assertEquals("Administrator", updatedFile.getCreator()); assertTrue(updatedFile.getCanRename()); assertTrue(updatedFile.getCanDelete()); assertTrue(updatedFile.getCanUpdate()); assertEquals("nxbigfile/test/" + updatedFileDoc.getId() + "/blobholder:0/Updated%20file%201.odt", updatedFile.getDownloadURL()); assertEquals("md5", updatedFile.getDigestAlgorithm()); assertEquals(updatedFileBlob.getDigest(), updatedFile.getDigest()); } @Test public void testDelete() throws Exception { // ------------------------------------------------------ // Delete file in sync root: should trash it // ------------------------------------------------------ clientSession.newRequest(NuxeoDriveDelete.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).execute(); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); DocumentModel deletedFileDoc = session.getDocument(new IdRef( file1.getId())); assertEquals("deleted", deletedFileDoc.getCurrentLifeCycleState()); // ------------------------------------------------------ // Delete sync root: should unregister it // ------------------------------------------------------ clientSession.newRequest(NuxeoDriveDelete.ID).set("id", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); assertFalse(nuxeoDriveManager.getSynchronizationRootReferences(session).contains( new IdRef(syncRoot2.getId()))); // ------------------------------------------------------ // Delete top level folder: should be unsupported // 
------------------------------------------------------ try { clientSession.newRequest(NuxeoDriveDelete.ID).set( "id", fileSystemItemAdapterService.getTopLevelFolderItemFactory().getTopLevelFolderItem( session.getPrincipal()).getId()).execute(); fail("Top level folder item deletion should be unsupported."); } catch (Exception e) { assertEquals("Failed to invoke operation: NuxeoDrive.Delete", e.getMessage()); } } @Test public void testRename() throws Exception { // ------------------------------------------------------ // File // ------------------------------------------------------ Blob renamedFSItemJSON = (Blob) clientSession.newRequest( NuxeoDriveRename.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set("name", "Renamed file 1.odt").execute(); assertNotNull(renamedFSItemJSON); DocumentBackedFileItem renamedFileItem = mapper.readValue( renamedFSItemJSON.getStream(), DocumentBackedFileItem.class); assertNotNull(renamedFileItem); assertEquals("Renamed file 1.odt", renamedFileItem.getName()); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) session.save(); DocumentModel renamedFileDoc = session.getDocument(new IdRef( file1.getId())); assertEquals("file1", renamedFileDoc.getTitle()); org.nuxeo.ecm.core.api.Blob renamedFileBlob = (org.nuxeo.ecm.core.api.Blob) renamedFileDoc.getPropertyValue("file:content"); assertNotNull(renamedFileBlob); assertEquals("Renamed file 1.odt", renamedFileBlob.getFilename()); assertEquals("nxbigfile/test/" + file1.getId() + "/blobholder:0/Renamed%20file%201.odt", renamedFileItem.getDownloadURL()); assertEquals("md5", renamedFileItem.getDigestAlgorithm()); assertEquals(renamedFileBlob.getDigest(), renamedFileItem.getDigest()); // ------------------------------------------------------ // Folder // ------------------------------------------------------ renamedFSItemJSON = (Blob) 
clientSession.newRequest(NuxeoDriveRename.ID).set( "id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + subFolder1.getId()).set( "name", "Renamed sub-folder 1").execute(); assertNotNull(renamedFSItemJSON); DocumentBackedFolderItem renamedFolderItem = mapper.readValue( renamedFSItemJSON.getStream(), DocumentBackedFolderItem.class); assertNotNull(renamedFolderItem); assertEquals("Renamed sub-folder 1", renamedFolderItem.getName()); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); DocumentModel renamedFolderDoc = session.getDocument(new IdRef( subFolder1.getId())); assertEquals("Renamed sub-folder 1", renamedFolderDoc.getTitle()); // ------------------------------------------------------ // Sync root // ------------------------------------------------------ renamedFSItemJSON = (Blob) clientSession.newRequest(NuxeoDriveRename.ID).set( "id", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId()).set( "name", "New name for sync root").execute(); assertNotNull(renamedFSItemJSON); DefaultSyncRootFolderItem renamedSyncRootItem = mapper.readValue( renamedFSItemJSON.getStream(), DefaultSyncRootFolderItem.class); assertNotNull(renamedSyncRootItem); assertEquals("New name for sync root", renamedSyncRootItem.getName()); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); DocumentModel renamedSyncRootDoc = session.getDocument(new IdRef( syncRoot1.getId())); assertEquals("New name for sync root", renamedSyncRootDoc.getTitle()); // ------------------------------------------------------ // Top level folder // 
------------------------------------------------------ try { clientSession.newRequest(NuxeoDriveRename.ID).set( "id", fileSystemItemAdapterService.getTopLevelFolderItemFactory().getTopLevelFolderItem( session.getPrincipal()).getId()).set("name", "New name for top level folder").execute(); fail("Top level folder renaming shoud be unsupported."); } catch (Exception e) { assertEquals("Failed to invoke operation: NuxeoDrive.Rename", e.getMessage()); } } @Test public void testCanMove() throws Exception { // ------------------------------------------------------ // File to File => false // ------------------------------------------------------ Blob canMoveFSItemJSON = (Blob) clientSession.newRequest( NuxeoDriveCanMove.ID).set("srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file2.getId()).execute(); assertNotNull(canMoveFSItemJSON); String canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); assertEquals("false", canMoveFSItem); // ------------------------------------------------------ // Sync root => false // ------------------------------------------------------ canMoveFSItemJSON = (Blob) clientSession.newRequest( NuxeoDriveCanMove.ID).set("srcId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); assertEquals("false", canMoveFSItem); // ------------------------------------------------------ // Top level folder => false // ------------------------------------------------------ canMoveFSItemJSON = (Blob) clientSession.newRequest( NuxeoDriveCanMove.ID).set( "srcId", fileSystemItemAdapterService.getTopLevelFolderItemFactory().getTopLevelFolderItem( session.getPrincipal()).getId()).set("destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); 
canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); assertEquals("false", canMoveFSItem); // -------------------------------------------------------- // No REMOVE permission on the source backing doc => false // -------------------------------------------------------- Principal joe = createUser("joe", "joe"); DocumentModel rootDoc = session.getRootDocument(); setPermission(rootDoc, "joe", SecurityConstants.READ, true); nuxeoDriveManager.registerSynchronizationRoot(joe, syncRoot1, session); nuxeoDriveManager.registerSynchronizationRoot(joe, syncRoot2, session); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); Session joeSession = automationClient.getSession("joe", "joe"); canMoveFSItemJSON = (Blob) joeSession.newRequest(NuxeoDriveCanMove.ID).set( "srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); assertEquals("false", canMoveFSItem); // ------------------------------------------------------------------- // No ADD_CHILDREN permission on the destination backing doc => false // ------------------------------------------------------------------- setPermission(syncRoot1, "joe", SecurityConstants.WRITE, true); canMoveFSItemJSON = (Blob) joeSession.newRequest(NuxeoDriveCanMove.ID).set( "srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); assertEquals("false", canMoveFSItem); // ---------------------------------------------------------------------- // REMOVE permission on the source backing doc + REMOVE_CHILDREN // permission on its parent + ADD_CHILDREN permission on the destination // backing doc => true // 
---------------------------------------------------------------------- setPermission(syncRoot2, "joe", SecurityConstants.WRITE, true); nuxeoDriveManager.unregisterSynchronizationRoot(joe, syncRoot2, session); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); canMoveFSItemJSON = (Blob) joeSession.newRequest(NuxeoDriveCanMove.ID).set( "srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); // syncRoot2 is not registered as a sync root for joe assertEquals("false", canMoveFSItem); nuxeoDriveManager.registerSynchronizationRoot(joe, syncRoot2, session); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); canMoveFSItemJSON = (Blob) joeSession.newRequest(NuxeoDriveCanMove.ID).set( "srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); // syncRoot2 is now a registered root for joe assertEquals("true", canMoveFSItem); // ---------------------------------------------------------------------- // Reset permissions // ---------------------------------------------------------------------- resetPermissions(rootDoc, "joe"); resetPermissions(syncRoot1, "joe"); resetPermissions(syncRoot2, "joe"); deleteUser("joe"); } @Test public void testMove() throws Exception { // ------------------------------------------------------ // File to File => fail // ------------------------------------------------------ try { clientSession.newRequest(NuxeoDriveMove.ID).set("srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file2.getId()).execute(); fail("Move to a non folder item 
should fail."); } catch (Exception e) { assertEquals("Failed to invoke operation: NuxeoDrive.Move", e.getMessage()); } // ------------------------------------------------------ // Sync root => fail // ------------------------------------------------------ try { clientSession.newRequest(NuxeoDriveMove.ID).set("srcId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); fail("Should not be able to move a synchronization root folder item."); } catch (Exception e) { assertEquals("Failed to invoke operation: NuxeoDrive.Move", e.getMessage()); } // ------------------------------------------------------ // Top level folder => fail // ------------------------------------------------------ try { clientSession.newRequest(NuxeoDriveMove.ID).set( "srcId", fileSystemItemAdapterService.getTopLevelFolderItemFactory().getTopLevelFolderItem( session.getPrincipal()).getId()).set("destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); fail("Should not be able to move the top level folder item."); } catch (Exception e) { assertEquals("Failed to invoke operation: NuxeoDrive.Move", e.getMessage()); } // ------------------------------------------------------ // File to Folder => succeed // ------------------------------------------------------ Blob movedFSItemJSON = (Blob) clientSession.newRequest( NuxeoDriveMove.ID).set("srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(movedFSItemJSON); DocumentBackedFileItem movedFileItem = mapper.readValue( movedFSItemJSON.getStream(), DocumentBackedFileItem.class); assertNotNull(movedFileItem); assertEquals("First file.odt", movedFileItem.getName()); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String repositoryName, Principal // principal) 
session.save(); DocumentModel movedFileDoc = session.getDocument(new IdRef( file1.getId())); assertEquals("/folder2/file1", movedFileDoc.getPathAsString()); assertEquals("file1", movedFileDoc.getTitle()); org.nuxeo.ecm.core.api.Blob movedFileBlob = (org.nuxeo.ecm.core.api.Blob) movedFileDoc.getPropertyValue("file:content"); assertNotNull(movedFileBlob); assertEquals("First file.odt", movedFileBlob.getFilename()); assertEquals("md5", movedFileItem.getDigestAlgorithm()); assertEquals(movedFileBlob.getDigest(), movedFileItem.getDigest()); } @Test public void testConflictedNames() throws Exception { // Try a canonical example with the Administrator user Blob jsonOut = (Blob) clientSession.newRequest( NuxeoDriveGenerateConflictedItemName.ID).set("name", "My file (with accents \u00e9).doc").execute(); assertNotNull(jsonOut); String newName = mapper.readValue(jsonOut.getStream(), String.class); assertTrue(newName.startsWith("My file (with accents \u00e9) (Administrator - ")); assertTrue(newName.endsWith(").doc")); // Try with a filename with filename extension jsonOut = (Blob) clientSession.newRequest( NuxeoDriveGenerateConflictedItemName.ID).set("name", "My file").execute(); assertNotNull(jsonOut); newName = mapper.readValue(jsonOut.getStream(), String.class); assertTrue(newName.startsWith("My file (Administrator - ")); assertTrue(newName.endsWith(")")); // Test with a user that has a firstname and a lastname // Joe Strummer likes conflicting files createUser("joe", "joe", "Joe", "Strummer"); Session joeSession = automationClient.getSession("joe", "joe"); jsonOut = (Blob) joeSession.newRequest( NuxeoDriveGenerateConflictedItemName.ID).set("name", "The Clashing File.xls").execute(); assertNotNull(jsonOut); newName = mapper.readValue(jsonOut.getStream(), String.class); assertTrue(newName.startsWith("The Clashing File (Joe Strummer - ")); assertTrue(newName.endsWith(").xls")); deleteUser("joe"); } protected NuxeoPrincipal createUser(String userName, String password) throws 
ClientException { return createUser(userName, password, null, null); } protected NuxeoPrincipal createUser(String userName, String password, String firstName, String lastName) throws ClientException { org.nuxeo.ecm.directory.Session userDir = directoryService.getDirectory( "userDirectory").getSession(); try { Map<String, Object> user = new HashMap<String, Object>(); user.put("username", userName); user.put("password", password); user.put("firstName", firstName); user.put("lastName", lastName); userDir.createEntry(user); } finally { userDir.close(); } UserManager userManager = Framework.getLocalService(UserManager.class); return userManager.getPrincipal(userName); } protected void deleteUser(String userName) throws ClientException { org.nuxeo.ecm.directory.Session userDir = directoryService.getDirectory( "userDirectory").getSession(); try { userDir.deleteEntry(userName); } finally { userDir.close(); } } protected void setPermission(DocumentModel doc, String userName, String permission, boolean isGranted) throws ClientException { ACP acp = session.getACP(doc.getRef()); ACL localACL = acp.getOrCreateACL(ACL.LOCAL_ACL); localACL.add(new ACE(userName, permission, isGranted)); session.setACP(doc.getRef(), acp, true); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); } protected void resetPermissions(DocumentModel doc, String userName) throws ClientException { ACP acp = session.getACP(doc.getRef()); ACL localACL = acp.getOrCreateACL(ACL.LOCAL_ACL); Iterator<ACE> localACLIt = localACL.iterator(); while (localACLIt.hasNext()) { ACE ace = localACLIt.next(); if (userName.equals(ace.getUsername())) { localACLIt.remove(); } } session.setACP(doc.getRef(), acp, true); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); } protected void checkChildren(List<DocumentBackedFileItem> folderChildren, String folderId, String child1Id, String child2Id, boolean ordered) throws Exception { boolean isChild1Found = false; 
boolean isChild2Found = false; int childrenCount = 0; for (DocumentBackedFileItem fsItem : folderChildren) { // Check child 1 if (!isChild1Found && (DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + child1Id).equals(fsItem.getId())) { if (!ordered || ordered && childrenCount == 0) { assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + folderId, fsItem.getParentId()); assertEquals("Third file.odt", fsItem.getName()); assertFalse(fsItem.isFolder()); assertEquals("Administrator", fsItem.getCreator()); assertTrue(fsItem.getCanRename()); assertTrue(fsItem.getCanDelete()); assertTrue(fsItem.getCanUpdate()); assertEquals("nxbigfile/test/" + file3.getId() + "/blobholder:0/Third%20file.odt", fsItem.getDownloadURL()); assertEquals("md5", fsItem.getDigestAlgorithm()); assertEquals( ((org.nuxeo.ecm.core.api.Blob) file3.getPropertyValue("file:content")).getDigest(), fsItem.getDigest()); isChild1Found = true; childrenCount++; } } // Check child 2 else if (!isChild2Found && (DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + child2Id).equals(fsItem.getId())) { if (!ordered || ordered && childrenCount == 1) { assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + folderId, fsItem.getParentId()); assertEquals("Fourth file.odt", fsItem.getName()); assertFalse(fsItem.isFolder()); assertEquals("Administrator", fsItem.getCreator()); assertTrue(fsItem.getCanRename()); assertTrue(fsItem.getCanDelete()); assertTrue(fsItem.getCanUpdate()); assertEquals("nxbigfile/test/" + file4.getId() + "/blobholder:0/Fourth%20file.odt", fsItem.getDownloadURL()); assertEquals("md5", fsItem.getDigestAlgorithm()); assertEquals( ((org.nuxeo.ecm.core.api.Blob) file4.getPropertyValue("file:content")).getDigest(), fsItem.getDigest()); } } else { fail(String.format( "FileSystemItem %s doesn't match any expected.", fsItem.getId())); } } } }
nuxeo-drive-server/nuxeo-drive-operations/src/test/java/org/nuxeo/drive/operations/TestFileSystemItemOperations.java
/* * (C) Copyright 2012 Nuxeo SA (http://nuxeo.com/) and contributors. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Lesser General Public License * (LGPL) version 2.1 which accompanies this distribution, and is available at * http://www.gnu.org/licenses/lgpl.html * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * Contributors: * Antoine Taillefer <[email protected]> */ package org.nuxeo.drive.operations; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.Serializable; import java.security.Principal; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.type.TypeReference; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.nuxeo.drive.adapter.FileSystemItem; import org.nuxeo.drive.adapter.FolderItem; import org.nuxeo.drive.adapter.impl.DefaultSyncRootFolderItem; import org.nuxeo.drive.adapter.impl.DefaultTopLevelFolderItem; import org.nuxeo.drive.adapter.impl.DocumentBackedFileItem; import org.nuxeo.drive.adapter.impl.DocumentBackedFolderItem; import org.nuxeo.drive.service.FileSystemItemAdapterService; import org.nuxeo.drive.service.NuxeoDriveManager; import org.nuxeo.ecm.automation.client.Session; import org.nuxeo.ecm.automation.client.jaxrs.impl.HttpAutomationClient; import org.nuxeo.ecm.automation.client.model.Blob; import org.nuxeo.ecm.automation.client.model.StringBlob; import 
org.nuxeo.ecm.automation.test.EmbeddedAutomationServerFeature; import org.nuxeo.ecm.core.api.ClientException; import org.nuxeo.ecm.core.api.CoreSession; import org.nuxeo.ecm.core.api.DocumentModel; import org.nuxeo.ecm.core.api.IdRef; import org.nuxeo.ecm.core.api.NuxeoPrincipal; import org.nuxeo.ecm.core.api.PathRef; import org.nuxeo.ecm.core.api.security.ACE; import org.nuxeo.ecm.core.api.security.ACL; import org.nuxeo.ecm.core.api.security.ACP; import org.nuxeo.ecm.core.api.security.SecurityConstants; import org.nuxeo.ecm.core.storage.sql.DatabaseHelper; import org.nuxeo.ecm.core.storage.sql.DatabaseMySQL; import org.nuxeo.ecm.core.test.TransactionalFeature; import org.nuxeo.ecm.core.test.annotations.Granularity; import org.nuxeo.ecm.core.test.annotations.RepositoryConfig; import org.nuxeo.ecm.directory.api.DirectoryService; import org.nuxeo.ecm.platform.usermanager.UserManager; import org.nuxeo.runtime.api.Framework; import org.nuxeo.runtime.test.runner.Deploy; import org.nuxeo.runtime.test.runner.Features; import org.nuxeo.runtime.test.runner.FeaturesRunner; import org.nuxeo.runtime.test.runner.Jetty; import org.nuxeo.runtime.transaction.TransactionHelper; import com.google.inject.Inject; /** * Tests the {@link FileSystemItem} related operations. 
* * @author Antoine Taillefer */ @RunWith(FeaturesRunner.class) @Features({ TransactionalFeature.class, EmbeddedAutomationServerFeature.class }) @Deploy({ "org.nuxeo.ecm.platform.filemanager.core", "org.nuxeo.ecm.platform.mimetype.api", "org.nuxeo.ecm.platform.mimetype.core", "org.nuxeo.ecm.platform.types.core", "org.nuxeo.ecm.webapp.base:OSGI-INF/ecm-types-contrib.xml", "org.nuxeo.drive.core", "org.nuxeo.drive.operations" }) @RepositoryConfig(cleanup = Granularity.METHOD) @Jetty(port = 18080) public class TestFileSystemItemOperations { private static final String SYNC_ROOT_FOLDER_ITEM_ID_PREFIX = "defaultSyncRootFolderItemFactory#test#"; private static final String DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX = "defaultFileSystemItemFactory#test#"; @Inject protected CoreSession session; @Inject protected DirectoryService directoryService; @Inject protected FileSystemItemAdapterService fileSystemItemAdapterService; @Inject protected NuxeoDriveManager nuxeoDriveManager; @Inject protected HttpAutomationClient automationClient; @Inject protected Session clientSession; protected DocumentModel syncRoot1; protected DocumentModel syncRoot2; protected DocumentModel file1; protected DocumentModel file2; protected DocumentModel file3; protected DocumentModel file4; protected DocumentModel subFolder1; protected ObjectMapper mapper; /** * Initializes the test hierarchy. 
* * <pre> * topLevel * |-- folder1 (syncRoot1) * | |-- file1 * | |-- subFolder1 * | |-- file3 * | |-- file4 * |-- folder2 (syncRoot2) * | |-- file2 * </pre> */ @Before public void init() throws Exception { Principal administrator = session.getPrincipal(); // Create 2 sync roots syncRoot1 = session.createDocument(session.createDocumentModel("/", "folder1", "Folder")); syncRoot2 = session.createDocument(session.createDocumentModel("/", "folder2", "Folder")); // Register sync roots nuxeoDriveManager.registerSynchronizationRoot(administrator, syncRoot1, session); nuxeoDriveManager.registerSynchronizationRoot(administrator, syncRoot2, session); // Create 1 file in each sync root file1 = session.createDocumentModel("/folder1", "file1", "File"); org.nuxeo.ecm.core.api.Blob blob = new org.nuxeo.ecm.core.api.impl.blob.StringBlob( "The content of file 1."); blob.setFilename("First file.odt"); file1.setPropertyValue("file:content", (Serializable) blob); file1 = session.createDocument(file1); file2 = session.createDocumentModel("/folder2", "file2", "File"); blob = new org.nuxeo.ecm.core.api.impl.blob.StringBlob( "The content of file 2."); blob.setFilename("Second file.odt"); file2.setPropertyValue("file:content", (Serializable) blob); file2 = session.createDocument(file2); // Create a sub-folder in sync root 1 subFolder1 = session.createDocument(session.createDocumentModel( "/folder1", "subFolder1", "Folder")); // Create 2 files in sub-folder file3 = session.createDocumentModel("/folder1/subFolder1", "file3", "File"); blob = new org.nuxeo.ecm.core.api.impl.blob.StringBlob( "The content of file 3."); blob.setFilename("Third file.odt"); file3.setPropertyValue("file:content", (Serializable) blob); file3 = session.createDocument(file3); file4 = session.createDocumentModel("/folder1/subFolder1", "file4", "File"); blob = new org.nuxeo.ecm.core.api.impl.blob.StringBlob( "The content of file 4."); blob.setFilename("Fourth file.odt"); file4.setPropertyValue("file:content", 
(Serializable) blob); file4 = session.createDocument(file4); session.save(); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); mapper = new ObjectMapper(); } @Test public void testGetTopLevelChildren() throws Exception { Blob topLevelFolderJSON = (Blob) clientSession.newRequest( NuxeoDriveGetTopLevelFolder.ID).execute(); assertNotNull(topLevelFolderJSON); FolderItem topLevelFolder = mapper.readValue( topLevelFolderJSON.getStream(), new TypeReference<DefaultTopLevelFolderItem>() { }); Blob topLevelChildrenJSON = (Blob) clientSession.newRequest( NuxeoDriveGetChildren.ID).set("id", topLevelFolder.getId()).execute(); List<DefaultSyncRootFolderItem> topLevelChildren = mapper.readValue( topLevelChildrenJSON.getStream(), new TypeReference<List<DefaultSyncRootFolderItem>>() { }); assertNotNull(topLevelChildren); assertEquals(2, topLevelChildren.size()); DefaultSyncRootFolderItem child = topLevelChildren.get(0); assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId(), child.getId()); assertTrue(child.getParentId().endsWith( "DefaultTopLevelFolderItemFactory#")); assertEquals("folder1", child.getName()); assertTrue(child.isFolder()); assertEquals("Administrator", child.getCreator()); assertTrue(child.getCanRename()); assertTrue(child.getCanDelete()); assertTrue(child.getCanCreateChild()); child = topLevelChildren.get(1); assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId(), child.getId()); assertTrue(child.getParentId().endsWith( "DefaultTopLevelFolderItemFactory#")); assertEquals("folder2", child.getName()); assertTrue(child.isFolder()); assertEquals("Administrator", child.getCreator()); assertTrue(child.getCanRename()); assertTrue(child.getCanDelete()); assertTrue(child.getCanCreateChild()); } @Test public void testFileSystemItemExists() throws Exception { // Non existing file system item Blob fileSystemItemExistsJSON = (Blob) clientSession.newRequest( NuxeoDriveFileSystemItemExists.ID).set("id", 
SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + "badId").execute(); assertNotNull(fileSystemItemExistsJSON); String fileSystemItemExists = mapper.readValue( fileSystemItemExistsJSON.getStream(), String.class); assertEquals("false", fileSystemItemExists); // Existing file system item fileSystemItemExistsJSON = (Blob) clientSession.newRequest( NuxeoDriveFileSystemItemExists.ID).set("id", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId()).execute(); assertNotNull(fileSystemItemExistsJSON); fileSystemItemExists = mapper.readValue( fileSystemItemExistsJSON.getStream(), String.class); assertEquals("true", fileSystemItemExists); // Deleted file system item file1.followTransition("delete"); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); fileSystemItemExistsJSON = (Blob) clientSession.newRequest( NuxeoDriveFileSystemItemExists.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).execute(); assertNotNull(fileSystemItemExistsJSON); fileSystemItemExists = mapper.readValue( fileSystemItemExistsJSON.getStream(), String.class); assertEquals("false", fileSystemItemExists); } @Test public void testGetFileSystemItem() throws Exception { // Get top level folder String topLevelFolderItemId = fileSystemItemAdapterService.getTopLevelFolderItemFactory().getTopLevelFolderItem( session.getPrincipal()).getId(); Blob fileSystemItemJSON = (Blob) clientSession.newRequest( NuxeoDriveGetFileSystemItem.ID).set("id", topLevelFolderItemId).execute(); assertNotNull(fileSystemItemJSON); DefaultTopLevelFolderItem topLevelFolderItem = mapper.readValue( fileSystemItemJSON.getStream(), DefaultTopLevelFolderItem.class); assertNotNull(topLevelFolderItem); assertEquals(topLevelFolderItemId, topLevelFolderItem.getId()); assertNull(topLevelFolderItem.getParentId()); 
assertEquals("Nuxeo Drive", topLevelFolderItem.getName()); assertTrue(topLevelFolderItem.isFolder()); assertEquals("system", topLevelFolderItem.getCreator()); assertFalse(topLevelFolderItem.getCanRename()); assertFalse(topLevelFolderItem.getCanDelete()); assertFalse(topLevelFolderItem.getCanCreateChild()); // Get sync root fileSystemItemJSON = (Blob) clientSession.newRequest( NuxeoDriveGetFileSystemItem.ID).set("id", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId()).execute(); assertNotNull(fileSystemItemJSON); DefaultSyncRootFolderItem syncRootFolderItem = mapper.readValue( fileSystemItemJSON.getStream(), DefaultSyncRootFolderItem.class); assertNotNull(syncRootFolderItem); assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId(), syncRootFolderItem.getId()); assertTrue(syncRootFolderItem.getParentId().endsWith( "DefaultTopLevelFolderItemFactory#")); assertEquals("folder1", syncRootFolderItem.getName()); assertTrue(syncRootFolderItem.isFolder()); assertEquals("Administrator", syncRootFolderItem.getCreator()); assertTrue(syncRootFolderItem.getCanRename()); assertTrue(syncRootFolderItem.getCanDelete()); assertTrue(syncRootFolderItem.getCanCreateChild()); // Get file in sync root fileSystemItemJSON = (Blob) clientSession.newRequest( NuxeoDriveGetFileSystemItem.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).execute(); assertNotNull(fileSystemItemJSON); DocumentBackedFileItem fileItem = mapper.readValue( fileSystemItemJSON.getStream(), DocumentBackedFileItem.class); assertNotNull(fileItem); assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId(), fileItem.getId()); assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId(), fileItem.getParentId()); assertEquals("First file.odt", fileItem.getName()); assertFalse(fileItem.isFolder()); assertEquals("Administrator", fileItem.getCreator()); assertTrue(fileItem.getCanRename()); assertTrue(fileItem.getCanDelete()); assertTrue(fileItem.getCanUpdate()); assertEquals("nxbigfile/test/" + 
file1.getId() + "/blobholder:0/First%20file.odt", fileItem.getDownloadURL()); assertEquals("md5", fileItem.getDigestAlgorithm()); assertEquals( ((org.nuxeo.ecm.core.api.Blob) file1.getPropertyValue("file:content")).getDigest(), fileItem.getDigest()); // Get deleted file file1.followTransition("delete"); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); fileSystemItemJSON = (Blob) clientSession.newRequest( NuxeoDriveGetFileSystemItem.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).execute(); assertNotNull(fileSystemItemJSON); assertNull(mapper.readValue(fileSystemItemJSON.getStream(), Object.class)); } @Test public void testGetChildren() throws Exception { // Get children of sub-folder of sync root 1 Blob childrenJSON = (Blob) clientSession.newRequest( NuxeoDriveGetChildren.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + subFolder1.getId()).execute(); assertNotNull(childrenJSON); List<DocumentBackedFileItem> children = mapper.readValue( childrenJSON.getStream(), new TypeReference<List<DocumentBackedFileItem>>() { }); assertNotNull(children); assertEquals(2, children.size()); // Don't check children order against MySQL database because of the // milliseconds limitation boolean ordered = !(DatabaseHelper.DATABASE instanceof DatabaseMySQL); checkChildren(children, subFolder1.getId(), file3.getId(), file4.getId(), ordered); } @Test public void testCreateFolder() throws Exception { Blob newFolderJSON = (Blob) clientSession.newRequest( NuxeoDriveCreateFolder.ID).set("parentId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).set( "name", "newFolder").execute(); assertNotNull(newFolderJSON); DocumentBackedFolderItem newFolder = mapper.readValue( newFolderJSON.getStream(), DocumentBackedFolderItem.class); 
assertNotNull(newFolder); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) session.save(); DocumentModel newFolderDoc = session.getDocument(new PathRef( "/folder2/newFolder")); assertEquals("Folder", newFolderDoc.getType()); assertEquals("newFolder", newFolderDoc.getTitle()); assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + newFolderDoc.getId(), newFolder.getId()); assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId(), newFolder.getParentId()); assertEquals("newFolder", newFolder.getName()); assertTrue(newFolder.isFolder()); assertEquals("Administrator", newFolder.getCreator()); assertTrue(newFolder.getCanRename()); assertTrue(newFolder.getCanDelete()); assertTrue(newFolder.getCanCreateChild()); } @Test public void testCreateFile() throws Exception { StringBlob blob = new StringBlob("This is the content of a new file."); blob.setFileName("New file.odt"); Blob newFileJSON = (Blob) clientSession.newRequest( NuxeoDriveCreateFile.ID).set("parentId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + subFolder1.getId()).setInput( blob).execute(); assertNotNull(newFileJSON); DocumentBackedFileItem newFile = mapper.readValue( newFileJSON.getStream(), DocumentBackedFileItem.class); assertNotNull(newFile); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) session.save(); DocumentModel newFileDoc = session.getDocument(new PathRef( "/folder1/subFolder1/New file.odt")); assertEquals("File", newFileDoc.getType()); assertEquals("New file.odt", newFileDoc.getTitle()); org.nuxeo.ecm.core.api.Blob newFileBlob = (org.nuxeo.ecm.core.api.Blob) newFileDoc.getPropertyValue("file:content"); assertNotNull(newFileBlob); assertEquals("New file.odt", newFileBlob.getFilename()); assertEquals("This is the content of 
a new file.", newFileBlob.getString()); assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + newFileDoc.getId(), newFile.getId()); assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + subFolder1.getId(), newFile.getParentId()); assertEquals("New file.odt", newFile.getName()); assertFalse(newFile.isFolder()); assertEquals("Administrator", newFile.getCreator()); assertTrue(newFile.getCanRename()); assertTrue(newFile.getCanDelete()); assertTrue(newFile.getCanUpdate()); assertEquals("nxbigfile/test/" + newFileDoc.getId() + "/blobholder:0/New%20file.odt", newFile.getDownloadURL()); assertEquals("md5", newFile.getDigestAlgorithm()); assertEquals(newFileBlob.getDigest(), newFile.getDigest()); } @Test public void testUpdateFile() throws Exception { StringBlob blob = new StringBlob( "This is the updated content of file 1."); blob.setFileName("Updated file 1.odt"); Blob updatedFileJSON = (Blob) clientSession.newRequest( NuxeoDriveUpdateFile.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).setInput( blob).execute(); assertNotNull(updatedFileJSON); DocumentBackedFileItem updatedFile = mapper.readValue( updatedFileJSON.getStream(), DocumentBackedFileItem.class); assertNotNull(updatedFile); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) session.save(); DocumentModel updatedFileDoc = session.getDocument(new IdRef( file1.getId())); assertEquals("File", updatedFileDoc.getType()); assertEquals("file1", updatedFileDoc.getTitle()); org.nuxeo.ecm.core.api.Blob updatedFileBlob = (org.nuxeo.ecm.core.api.Blob) updatedFileDoc.getPropertyValue("file:content"); assertNotNull(updatedFileBlob); assertEquals("Updated file 1.odt", updatedFileBlob.getFilename()); assertEquals("This is the updated content of file 1.", updatedFileBlob.getString()); assertEquals( DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + updatedFileDoc.getId(), updatedFile.getId()); 
assertEquals(SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId(), updatedFile.getParentId()); assertEquals("Updated file 1.odt", updatedFile.getName()); assertFalse(updatedFile.isFolder()); assertEquals("Administrator", updatedFile.getCreator()); assertTrue(updatedFile.getCanRename()); assertTrue(updatedFile.getCanDelete()); assertTrue(updatedFile.getCanUpdate()); assertEquals("nxbigfile/test/" + updatedFileDoc.getId() + "/blobholder:0/Updated%20file%201.odt", updatedFile.getDownloadURL()); assertEquals("md5", updatedFile.getDigestAlgorithm()); assertEquals(updatedFileBlob.getDigest(), updatedFile.getDigest()); } @Test public void testDelete() throws Exception { // ------------------------------------------------------ // Delete file in sync root: should trash it // ------------------------------------------------------ clientSession.newRequest(NuxeoDriveDelete.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).execute(); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); DocumentModel deletedFileDoc = session.getDocument(new IdRef( file1.getId())); assertEquals("deleted", deletedFileDoc.getCurrentLifeCycleState()); // ------------------------------------------------------ // Delete sync root: should unregister it // ------------------------------------------------------ clientSession.newRequest(NuxeoDriveDelete.ID).set("id", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); assertFalse(nuxeoDriveManager.getSynchronizationRootReferences(session).contains( new IdRef(syncRoot2.getId()))); // ------------------------------------------------------ // Delete top level folder: should be unsupported // 
------------------------------------------------------ try { clientSession.newRequest(NuxeoDriveDelete.ID).set( "id", fileSystemItemAdapterService.getTopLevelFolderItemFactory().getTopLevelFolderItem( session.getPrincipal()).getId()).execute(); fail("Top level folder item deletion should be unsupported."); } catch (Exception e) { assertEquals("Failed to execute operation: NuxeoDrive.Delete", e.getMessage()); } } @Test public void testRename() throws Exception { // ------------------------------------------------------ // File // ------------------------------------------------------ Blob renamedFSItemJSON = (Blob) clientSession.newRequest( NuxeoDriveRename.ID).set("id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set("name", "Renamed file 1.odt").execute(); assertNotNull(renamedFSItemJSON); DocumentBackedFileItem renamedFileItem = mapper.readValue( renamedFSItemJSON.getStream(), DocumentBackedFileItem.class); assertNotNull(renamedFileItem); assertEquals("Renamed file 1.odt", renamedFileItem.getName()); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) session.save(); DocumentModel renamedFileDoc = session.getDocument(new IdRef( file1.getId())); assertEquals("file1", renamedFileDoc.getTitle()); org.nuxeo.ecm.core.api.Blob renamedFileBlob = (org.nuxeo.ecm.core.api.Blob) renamedFileDoc.getPropertyValue("file:content"); assertNotNull(renamedFileBlob); assertEquals("Renamed file 1.odt", renamedFileBlob.getFilename()); assertEquals("nxbigfile/test/" + file1.getId() + "/blobholder:0/Renamed%20file%201.odt", renamedFileItem.getDownloadURL()); assertEquals("md5", renamedFileItem.getDigestAlgorithm()); assertEquals(renamedFileBlob.getDigest(), renamedFileItem.getDigest()); // ------------------------------------------------------ // Folder // ------------------------------------------------------ renamedFSItemJSON = (Blob) 
clientSession.newRequest(NuxeoDriveRename.ID).set( "id", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + subFolder1.getId()).set( "name", "Renamed sub-folder 1").execute(); assertNotNull(renamedFSItemJSON); DocumentBackedFolderItem renamedFolderItem = mapper.readValue( renamedFSItemJSON.getStream(), DocumentBackedFolderItem.class); assertNotNull(renamedFolderItem); assertEquals("Renamed sub-folder 1", renamedFolderItem.getName()); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); DocumentModel renamedFolderDoc = session.getDocument(new IdRef( subFolder1.getId())); assertEquals("Renamed sub-folder 1", renamedFolderDoc.getTitle()); // ------------------------------------------------------ // Sync root // ------------------------------------------------------ renamedFSItemJSON = (Blob) clientSession.newRequest(NuxeoDriveRename.ID).set( "id", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId()).set( "name", "New name for sync root").execute(); assertNotNull(renamedFSItemJSON); DefaultSyncRootFolderItem renamedSyncRootItem = mapper.readValue( renamedFSItemJSON.getStream(), DefaultSyncRootFolderItem.class); assertNotNull(renamedSyncRootItem); assertEquals("New name for sync root", renamedSyncRootItem.getName()); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String // repositoryName, Principal principal) TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); DocumentModel renamedSyncRootDoc = session.getDocument(new IdRef( syncRoot1.getId())); assertEquals("New name for sync root", renamedSyncRootDoc.getTitle()); // ------------------------------------------------------ // Top level folder // 
------------------------------------------------------ try { clientSession.newRequest(NuxeoDriveRename.ID).set( "id", fileSystemItemAdapterService.getTopLevelFolderItemFactory().getTopLevelFolderItem( session.getPrincipal()).getId()).set("name", "New name for top level folder").execute(); fail("Top level folder renaming shoud be unsupported."); } catch (Exception e) { assertEquals("Failed to execute operation: NuxeoDrive.Rename", e.getMessage()); } } @Test public void testCanMove() throws Exception { // ------------------------------------------------------ // File to File => false // ------------------------------------------------------ Blob canMoveFSItemJSON = (Blob) clientSession.newRequest( NuxeoDriveCanMove.ID).set("srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file2.getId()).execute(); assertNotNull(canMoveFSItemJSON); String canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); assertEquals("false", canMoveFSItem); // ------------------------------------------------------ // Sync root => false // ------------------------------------------------------ canMoveFSItemJSON = (Blob) clientSession.newRequest( NuxeoDriveCanMove.ID).set("srcId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); assertEquals("false", canMoveFSItem); // ------------------------------------------------------ // Top level folder => false // ------------------------------------------------------ canMoveFSItemJSON = (Blob) clientSession.newRequest( NuxeoDriveCanMove.ID).set( "srcId", fileSystemItemAdapterService.getTopLevelFolderItemFactory().getTopLevelFolderItem( session.getPrincipal()).getId()).set("destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); 
canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); assertEquals("false", canMoveFSItem); // -------------------------------------------------------- // No REMOVE permission on the source backing doc => false // -------------------------------------------------------- Principal joe = createUser("joe", "joe"); DocumentModel rootDoc = session.getRootDocument(); setPermission(rootDoc, "joe", SecurityConstants.READ, true); nuxeoDriveManager.registerSynchronizationRoot(joe, syncRoot1, session); nuxeoDriveManager.registerSynchronizationRoot(joe, syncRoot2, session); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); Session joeSession = automationClient.getSession("joe", "joe"); canMoveFSItemJSON = (Blob) joeSession.newRequest(NuxeoDriveCanMove.ID).set( "srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); assertEquals("false", canMoveFSItem); // ------------------------------------------------------------------- // No ADD_CHILDREN permission on the destination backing doc => false // ------------------------------------------------------------------- setPermission(syncRoot1, "joe", SecurityConstants.WRITE, true); canMoveFSItemJSON = (Blob) joeSession.newRequest(NuxeoDriveCanMove.ID).set( "srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); assertEquals("false", canMoveFSItem); // ---------------------------------------------------------------------- // REMOVE permission on the source backing doc + REMOVE_CHILDREN // permission on its parent + ADD_CHILDREN permission on the destination // backing doc => true // 
---------------------------------------------------------------------- setPermission(syncRoot2, "joe", SecurityConstants.WRITE, true); nuxeoDriveManager.unregisterSynchronizationRoot(joe, syncRoot2, session); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); canMoveFSItemJSON = (Blob) joeSession.newRequest(NuxeoDriveCanMove.ID).set( "srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); // syncRoot2 is not registered as a sync root for joe assertEquals("false", canMoveFSItem); nuxeoDriveManager.registerSynchronizationRoot(joe, syncRoot2, session); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); canMoveFSItemJSON = (Blob) joeSession.newRequest(NuxeoDriveCanMove.ID).set( "srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(canMoveFSItemJSON); canMoveFSItem = mapper.readValue(canMoveFSItemJSON.getStream(), String.class); // syncRoot2 is now a registered root for joe assertEquals("true", canMoveFSItem); // ---------------------------------------------------------------------- // Reset permissions // ---------------------------------------------------------------------- resetPermissions(rootDoc, "joe"); resetPermissions(syncRoot1, "joe"); resetPermissions(syncRoot2, "joe"); deleteUser("joe"); } @Test public void testMove() throws Exception { // ------------------------------------------------------ // File to File => fail // ------------------------------------------------------ try { clientSession.newRequest(NuxeoDriveMove.ID).set("srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file2.getId()).execute(); fail("Move to a non folder item 
should fail."); } catch (Exception e) { assertEquals("Failed to execute operation: NuxeoDrive.Move", e.getMessage()); } // ------------------------------------------------------ // Sync root => fail // ------------------------------------------------------ try { clientSession.newRequest(NuxeoDriveMove.ID).set("srcId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); fail("Should not be able to move a synchronization root folder item."); } catch (Exception e) { assertEquals("Failed to execute operation: NuxeoDrive.Move", e.getMessage()); } // ------------------------------------------------------ // Top level folder => fail // ------------------------------------------------------ try { clientSession.newRequest(NuxeoDriveMove.ID).set( "srcId", fileSystemItemAdapterService.getTopLevelFolderItemFactory().getTopLevelFolderItem( session.getPrincipal()).getId()).set("destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); fail("Should not be able to move the top level folder item."); } catch (Exception e) { assertEquals("Failed to execute operation: NuxeoDrive.Move", e.getMessage()); } // ------------------------------------------------------ // File to Folder => succeed // ------------------------------------------------------ Blob movedFSItemJSON = (Blob) clientSession.newRequest( NuxeoDriveMove.ID).set("srcId", DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file1.getId()).set( "destId", SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); assertNotNull(movedFSItemJSON); DocumentBackedFileItem movedFileItem = mapper.readValue( movedFSItemJSON.getStream(), DocumentBackedFileItem.class); assertNotNull(movedFileItem); assertEquals("First file.odt", movedFileItem.getName()); // Need to flush VCS cache to be aware of changes in the session used by // the file system item obtained by // FileSystemItemManager#getSession(String repositoryName, Principal // principal) 
session.save(); DocumentModel movedFileDoc = session.getDocument(new IdRef( file1.getId())); assertEquals("/folder2/file1", movedFileDoc.getPathAsString()); assertEquals("file1", movedFileDoc.getTitle()); org.nuxeo.ecm.core.api.Blob movedFileBlob = (org.nuxeo.ecm.core.api.Blob) movedFileDoc.getPropertyValue("file:content"); assertNotNull(movedFileBlob); assertEquals("First file.odt", movedFileBlob.getFilename()); assertEquals("md5", movedFileItem.getDigestAlgorithm()); assertEquals(movedFileBlob.getDigest(), movedFileItem.getDigest()); } @Test public void testConflictedNames() throws Exception { // Try a canonical example with the Administrator user Blob jsonOut = (Blob) clientSession.newRequest( NuxeoDriveGenerateConflictedItemName.ID).set("name", "My file (with accents \u00e9).doc").execute(); assertNotNull(jsonOut); String newName = mapper.readValue(jsonOut.getStream(), String.class); assertTrue(newName.startsWith("My file (with accents \u00e9) (Administrator - ")); assertTrue(newName.endsWith(").doc")); // Try with a filename with filename extension jsonOut = (Blob) clientSession.newRequest( NuxeoDriveGenerateConflictedItemName.ID).set("name", "My file").execute(); assertNotNull(jsonOut); newName = mapper.readValue(jsonOut.getStream(), String.class); assertTrue(newName.startsWith("My file (Administrator - ")); assertTrue(newName.endsWith(")")); // Test with a user that has a firstname and a lastname // Joe Strummer likes conflicting files createUser("joe", "joe", "Joe", "Strummer"); Session joeSession = automationClient.getSession("joe", "joe"); jsonOut = (Blob) joeSession.newRequest( NuxeoDriveGenerateConflictedItemName.ID).set("name", "The Clashing File.xls").execute(); assertNotNull(jsonOut); newName = mapper.readValue(jsonOut.getStream(), String.class); assertTrue(newName.startsWith("The Clashing File (Joe Strummer - ")); assertTrue(newName.endsWith(").xls")); deleteUser("joe"); } protected NuxeoPrincipal createUser(String userName, String password) throws 
ClientException { return createUser(userName, password, null, null); } protected NuxeoPrincipal createUser(String userName, String password, String firstName, String lastName) throws ClientException { org.nuxeo.ecm.directory.Session userDir = directoryService.getDirectory( "userDirectory").getSession(); try { Map<String, Object> user = new HashMap<String, Object>(); user.put("username", userName); user.put("password", password); user.put("firstName", firstName); user.put("lastName", lastName); userDir.createEntry(user); } finally { userDir.close(); } UserManager userManager = Framework.getLocalService(UserManager.class); return userManager.getPrincipal(userName); } protected void deleteUser(String userName) throws ClientException { org.nuxeo.ecm.directory.Session userDir = directoryService.getDirectory( "userDirectory").getSession(); try { userDir.deleteEntry(userName); } finally { userDir.close(); } } protected void setPermission(DocumentModel doc, String userName, String permission, boolean isGranted) throws ClientException { ACP acp = session.getACP(doc.getRef()); ACL localACL = acp.getOrCreateACL(ACL.LOCAL_ACL); localACL.add(new ACE(userName, permission, isGranted)); session.setACP(doc.getRef(), acp, true); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); } protected void resetPermissions(DocumentModel doc, String userName) throws ClientException { ACP acp = session.getACP(doc.getRef()); ACL localACL = acp.getOrCreateACL(ACL.LOCAL_ACL); Iterator<ACE> localACLIt = localACL.iterator(); while (localACLIt.hasNext()) { ACE ace = localACLIt.next(); if (userName.equals(ace.getUsername())) { localACLIt.remove(); } } session.setACP(doc.getRef(), acp, true); TransactionHelper.commitOrRollbackTransaction(); TransactionHelper.startTransaction(); } protected void checkChildren(List<DocumentBackedFileItem> folderChildren, String folderId, String child1Id, String child2Id, boolean ordered) throws Exception { boolean isChild1Found = false; 
boolean isChild2Found = false; int childrenCount = 0; for (DocumentBackedFileItem fsItem : folderChildren) { // Check child 1 if (!isChild1Found && (DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + child1Id).equals(fsItem.getId())) { if (!ordered || ordered && childrenCount == 0) { assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + folderId, fsItem.getParentId()); assertEquals("Third file.odt", fsItem.getName()); assertFalse(fsItem.isFolder()); assertEquals("Administrator", fsItem.getCreator()); assertTrue(fsItem.getCanRename()); assertTrue(fsItem.getCanDelete()); assertTrue(fsItem.getCanUpdate()); assertEquals("nxbigfile/test/" + file3.getId() + "/blobholder:0/Third%20file.odt", fsItem.getDownloadURL()); assertEquals("md5", fsItem.getDigestAlgorithm()); assertEquals( ((org.nuxeo.ecm.core.api.Blob) file3.getPropertyValue("file:content")).getDigest(), fsItem.getDigest()); isChild1Found = true; childrenCount++; } } // Check child 2 else if (!isChild2Found && (DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + child2Id).equals(fsItem.getId())) { if (!ordered || ordered && childrenCount == 1) { assertEquals(DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + folderId, fsItem.getParentId()); assertEquals("Fourth file.odt", fsItem.getName()); assertFalse(fsItem.isFolder()); assertEquals("Administrator", fsItem.getCreator()); assertTrue(fsItem.getCanRename()); assertTrue(fsItem.getCanDelete()); assertTrue(fsItem.getCanUpdate()); assertEquals("nxbigfile/test/" + file4.getId() + "/blobholder:0/Fourth%20file.odt", fsItem.getDownloadURL()); assertEquals("md5", fsItem.getDigestAlgorithm()); assertEquals( ((org.nuxeo.ecm.core.api.Blob) file4.getPropertyValue("file:content")).getDigest(), fsItem.getDigest()); } } else { fail(String.format( "FileSystemItem %s doesn't match any expected.", fsItem.getId())); } } } }
NXP-12876: align tests on webexception updates
nuxeo-drive-server/nuxeo-drive-operations/src/test/java/org/nuxeo/drive/operations/TestFileSystemItemOperations.java
NXP-12876: align tests on webexception updates
<ide><path>uxeo-drive-server/nuxeo-drive-operations/src/test/java/org/nuxeo/drive/operations/TestFileSystemItemOperations.java <ide> session.getPrincipal()).getId()).execute(); <ide> fail("Top level folder item deletion should be unsupported."); <ide> } catch (Exception e) { <del> assertEquals("Failed to execute operation: NuxeoDrive.Delete", <add> assertEquals("Failed to invoke operation: NuxeoDrive.Delete", <ide> e.getMessage()); <ide> } <ide> } <ide> "New name for top level folder").execute(); <ide> fail("Top level folder renaming shoud be unsupported."); <ide> } catch (Exception e) { <del> assertEquals("Failed to execute operation: NuxeoDrive.Rename", <add> assertEquals("Failed to invoke operation: NuxeoDrive.Rename", <ide> e.getMessage()); <ide> } <ide> } <ide> DEFAULT_FILE_SYSTEM_ITEM_ID_PREFIX + file2.getId()).execute(); <ide> fail("Move to a non folder item should fail."); <ide> } catch (Exception e) { <del> assertEquals("Failed to execute operation: NuxeoDrive.Move", <add> assertEquals("Failed to invoke operation: NuxeoDrive.Move", <ide> e.getMessage()); <ide> } <ide> <ide> SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); <ide> fail("Should not be able to move a synchronization root folder item."); <ide> } catch (Exception e) { <del> assertEquals("Failed to execute operation: NuxeoDrive.Move", <add> assertEquals("Failed to invoke operation: NuxeoDrive.Move", <ide> e.getMessage()); <ide> } <ide> <ide> SYNC_ROOT_FOLDER_ITEM_ID_PREFIX + syncRoot2.getId()).execute(); <ide> fail("Should not be able to move the top level folder item."); <ide> } catch (Exception e) { <del> assertEquals("Failed to execute operation: NuxeoDrive.Move", <add> assertEquals("Failed to invoke operation: NuxeoDrive.Move", <ide> e.getMessage()); <ide> } <ide>
Java
mit
9ff85309d01b116b131bb3e03f76c8ce45beabe8
0
zabawaba99/reflector
package com.zabawaba.reflector; import com.zabawaba.reflector.classes.SampleOne; import org.junit.Before; import org.junit.Test; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; public class ReflectorConstructorTest { private SampleOne sample; private String expected; private Constructor<SampleOne> constructor; @Before public void setup() throws NoSuchMethodException, SecurityException { sample = new SampleOne(); expected = "foobar"; constructor = SampleOne.class.getConstructor(); } @Test public void testGetConstructor() { ReflectorConstructor rConstructor = new ReflectorConstructor(sample, constructor); assertEquals(constructor, rConstructor.getConstructor()); } @Test public void testNewInstance() throws InstantiationException, IllegalArgumentException, InvocationTargetException { ReflectorConstructor rConstructor = new ReflectorConstructor(sample, constructor); assertNotNull(expected, rConstructor.newInstance()); } }
src/test/java/com/zabawaba/reflector/ReflectorConstructorTest.java
package com.zabawaba.reflector; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import org.junit.Before; import org.junit.Test; import com.zabawaba.reflector.classes.SampleOne; public class ReflectorConstructorTest { private SampleOne sample; private String expected; private Constructor<SampleOne> constructor; @Before public void setup() throws NoSuchMethodException, SecurityException { sample = new SampleOne(); expected = "foobar"; constructor = SampleOne.class.getConstructor(SampleOne.class); } @Test public void testGetConstructor() { ReflectorConstructor rConstructor = new ReflectorConstructor(sample, constructor); assertEquals(constructor, rConstructor.getConstructor()); } @Test public void testNewInstance() throws InstantiationException, IllegalArgumentException, InvocationTargetException { ReflectorConstructor rConstructor = new ReflectorConstructor(sample, constructor); assertNotNull(expected, rConstructor.newInstance()); } }
Fixing tests. SampleOne class does not have a constructor with parameters
src/test/java/com/zabawaba/reflector/ReflectorConstructorTest.java
Fixing tests.
<ide><path>rc/test/java/com/zabawaba/reflector/ReflectorConstructorTest.java <ide> package com.zabawaba.reflector; <ide> <del>import static org.junit.Assert.assertEquals; <del>import static org.junit.Assert.assertNotNull; <add>import com.zabawaba.reflector.classes.SampleOne; <add>import org.junit.Before; <add>import org.junit.Test; <ide> <ide> import java.lang.reflect.Constructor; <ide> import java.lang.reflect.InvocationTargetException; <ide> <del>import org.junit.Before; <del>import org.junit.Test; <del> <del>import com.zabawaba.reflector.classes.SampleOne; <add>import static org.junit.Assert.assertEquals; <add>import static org.junit.Assert.assertNotNull; <ide> <ide> public class ReflectorConstructorTest { <ide> <ide> public void setup() throws NoSuchMethodException, SecurityException { <ide> sample = new SampleOne(); <ide> expected = "foobar"; <del> constructor = SampleOne.class.getConstructor(SampleOne.class); <add> constructor = SampleOne.class.getConstructor(); <ide> } <ide> <ide> @Test
Java
epl-1.0
b1f1fb6d164d8796207a156ebfc100810b6996cb
0
gameduell/eclipselink.runtime,gameduell/eclipselink.runtime,gameduell/eclipselink.runtime,gameduell/eclipselink.runtime
/******************************************************************************* * Copyright (c) 2005, 2009 SAP. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * SAP - initial API and implementation ******************************************************************************/ package org.eclipse.persistence.testing.models.wdf.jpa1.employee; import javax.persistence.Basic; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Table; import javax.persistence.TableGenerator; @Entity @Table(name = "TMP_HOBBY") @TableGenerator(name = "StringIdGenerator", table = "TMP_STRING_GEN", pkColumnName = "BEAN_NAME", valueColumnName = "MAX_ID") public class Hobby { private String id; private String description; private String category; public Hobby() { } public Hobby(String aDescription) { description = aDescription; } public Hobby(String txt, String aDescription) { id = txt; description = aDescription; } @Id @GeneratedValue(strategy = GenerationType.TABLE, generator = "StringIdGenerator") public String getId() { return id; } public void setId(String id) { this.id = id; } @Basic public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } @Basic public String getCategory() { return category; } public void setCategory(final String aCategory) { category = aCategory; } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof Hobby)) { return false; } final Hobby other = 
(Hobby) obj; if (id == null && other.id != null) { return false; } if (id != null && !id.equals(other.id)) { return false; } if (description == null && other.description != null) { return false; } if (description != null && !description.equals(other.description)) { return false; } if (category != null && !category.equals(other.category)) { return false; } if (category == null && other.category != null) { return false; } return true; } @Override public int hashCode() { int result = 17 + id.hashCode(); result *= 37; if (description != null) { result += description.hashCode(); } result += 17; result *= 37; if (category != null) { result += category.hashCode(); } return result; } @Override public String toString() { return id + ":" + category + ":" + description; } }
jpa/eclipselink.jpa.wdf.test/src/org/eclipse/persistence/testing/models/wdf/jpa1/employee/Hobby.java
/******************************************************************************* * Copyright (c) 2005, 2009 SAP. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * SAP - initial API and implementation ******************************************************************************/ package org.eclipse.persistence.testing.models.wdf.jpa1.employee; import javax.persistence.Basic; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Table; import javax.persistence.TableGenerator; import javax.persistence.UniqueConstraint; @Entity @Table(name = "TMP_HOBBY", uniqueConstraints = @UniqueConstraint(columnNames = "Hutzliputz")) // intentionally broken unique constraint @TableGenerator(name = "StringIdGenerator", table = "TMP_STRING_GEN", pkColumnName = "BEAN_NAME", valueColumnName = "MAX_ID") public class Hobby { private String id; private String description; private String category; public Hobby() { } public Hobby(String aDescription) { description = aDescription; } public Hobby(String txt, String aDescription) { id = txt; description = aDescription; } @Id @GeneratedValue(strategy = GenerationType.TABLE, generator = "StringIdGenerator") public String getId() { return id; } public void setId(String id) { this.id = id; } @Basic public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } @Basic public String getCategory() { return category; } public void setCategory(final String aCategory) { category = aCategory; 
} @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof Hobby)) { return false; } final Hobby other = (Hobby) obj; if (id == null && other.id != null) { return false; } if (id != null && !id.equals(other.id)) { return false; } if (description == null && other.description != null) { return false; } if (description != null && !description.equals(other.description)) { return false; } if (category != null && !category.equals(other.category)) { return false; } if (category == null && other.category != null) { return false; } return true; } @Override public int hashCode() { int result = 17 + id.hashCode(); result *= 37; if (description != null) { result += description.hashCode(); } result += 17; result *= 37; if (category != null) { result += category.hashCode(); } return result; } @Override public String toString() { return id + ":" + category + ":" + description; } }
remove "intentionally broken" unique constraint
jpa/eclipselink.jpa.wdf.test/src/org/eclipse/persistence/testing/models/wdf/jpa1/employee/Hobby.java
remove "intentionally broken" unique constraint
<ide><path>pa/eclipselink.jpa.wdf.test/src/org/eclipse/persistence/testing/models/wdf/jpa1/employee/Hobby.java <ide> import javax.persistence.Id; <ide> import javax.persistence.Table; <ide> import javax.persistence.TableGenerator; <del>import javax.persistence.UniqueConstraint; <ide> <ide> @Entity <del>@Table(name = "TMP_HOBBY", uniqueConstraints = @UniqueConstraint(columnNames = "Hutzliputz")) <del>// intentionally broken unique constraint <add>@Table(name = "TMP_HOBBY") <ide> @TableGenerator(name = "StringIdGenerator", table = "TMP_STRING_GEN", pkColumnName = "BEAN_NAME", valueColumnName = "MAX_ID") <ide> public class Hobby { <ide> private String id;
Java
apache-2.0
e492f114526045178cc7baf53554003e938710a1
0
AdrianBZG/Twitter-Follow-Exploit
package logic; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.firefox.FirefoxBinary; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.firefox.FirefoxProfile; import org.openqa.selenium.support.ui.ExpectedConditions; import org.openqa.selenium.support.ui.WebDriverWait; import core.DataCache; import core.TwitterData; import core.UINamesData; import randomfactory.HTTPRequestFactory; public class TwitterTask { public static final String GECKO_DRIVER_PATH = "PATH_TO_GECKODRIVER_(geckodriver.exe)"; public static String DEFAULT_USER_PASSWORD = "wantedpassword9999"; public static final String TOR_ROOT_PATH = "PATH_TO_TOR_BROWSER_FOLDER"; public static final String TOR_EXE_PATH = TOR_ROOT_PATH + "Tor Browser\\Browser\\firefox.exe"; public static final String TOR_PROFILE_PATH = TOR_ROOT_PATH + "Tor Browser\\Browser\\TorBrowser\\Data\\Browser\\profile.default"; public static void perform() { System.setProperty("webdriver.gecko.driver", GECKO_DRIVER_PATH); // Tor Browser configuration File torProfileDir = new File(TOR_PROFILE_PATH); FirefoxBinary binary = new FirefoxBinary(new File(TOR_EXE_PATH)); FirefoxProfile torProfile = new FirefoxProfile(torProfileDir); torProfile.setPreference("webdriver.load.strategy", "unstable"); // int randomNum = 0 + (int)(Math.random() * UINamesData.QUERIES_BY_REGION.size()); DataCache.newUserCreated(HTTPRequestFactory.performRequestInBackground(UINamesData.QUERIES_BY_REGION.get(randomNum))); try { binary.startProfile(torProfile, torProfileDir, ""); } catch (IOException e) { e.printStackTrace(); } FirefoxProfile profile = new FirefoxProfile(); profile.setPreference("network.proxy.type", 1); profile.setPreference("network.proxy.socks", "127.0.0.1"); profile.setPreference("network.proxy.socks_port", 9150); FirefoxDriver driver 
= new FirefoxDriver(profile); System.setProperty("webdriver.gecko.driver", GECKO_DRIVER_PATH); driver.get(TwitterData.TWITTER_REG_URL); WebDriverWait wait = new WebDriverWait(driver, 10); WebElement element = wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("full-name"))); driver.findElement(By.id("full-name")).sendKeys(DataCache.lastResponseObject.getName() + " " + DataCache.lastResponseObject.getSurname()); driver.findElement(By.id("email")).sendKeys(DataCache.lastResponseObject.getEmail()); try { Thread.sleep(5000); } catch (InterruptedException e1) { e1.printStackTrace(); } driver.findElement(By.id("password")).sendKeys(DEFAULT_USER_PASSWORD); driver.findElement(By.id("submit_button")).click(); try { Thread.sleep(5000); } catch (InterruptedException e) { e.printStackTrace(); } driver.get(TwitterData.TWITTER_STEP2_REG_URL); WebElement element2 = wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("username"))); randomNum = 1 + (int)(Math.random() * 50); driver.findElement(By.id("username")).sendKeys(DataCache.lastResponseObject.getName().toLowerCase() + DataCache.lastResponseObject.getSurname().toLowerCase() + randomNum); driver.findElement(By.id("submit_button")).click(); try { Thread.sleep(5000); } catch (InterruptedException e) { e.printStackTrace(); } driver.get(TwitterData.TWITTER_BASE_URL + TwitterData.TWITTER_USER_TO_FOLLOW); WebElement element3 = wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//li[contains(@class, 'follow-button')]/li[1]/a"))); driver.findElement(By.xpath("//li[contains(@class, 'follow-button')]/li[1]/a")).click(); try { Thread.sleep(5000); } catch (InterruptedException e) { e.printStackTrace(); } //Tidy up now killBrowser(); } private static void killBrowser() { Runtime rt = Runtime.getRuntime(); try { rt.exec("taskkill /F /IM firefox.exe"); while (processIsRunning("firefox.exe")) { Thread.sleep(100); } } catch (Exception e) { e.printStackTrace(); } } private static boolean 
processIsRunning(String process) { boolean processIsRunning = false; String line; try { Process proc = Runtime.getRuntime().exec("wmic.exe"); BufferedReader input = new BufferedReader(new InputStreamReader(proc.getInputStream())); OutputStreamWriter oStream = new OutputStreamWriter(proc.getOutputStream()); oStream.write("process where name='" + process + "'"); oStream.flush(); oStream.close(); while ((line = input.readLine()) != null) { if (line.toLowerCase().contains("caption")) { processIsRunning = true; break; } } input.close(); } catch (IOException e) { e.printStackTrace(); } return processIsRunning; } }
src/main/java/logic/TwitterTask.java
package logic; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.firefox.FirefoxBinary; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.firefox.FirefoxProfile; import org.openqa.selenium.support.ui.ExpectedConditions; import org.openqa.selenium.support.ui.WebDriverWait; import core.DataCache; import core.TwitterData; import core.UINamesData; import randomfactory.HTTPRequestFactory; public class TwitterTask { public static final String GECKO_DRIVER_PATH = "PATH_TO_GECKODRIVER_(geckodriver.exe)"; public static String DEFAULT_USER_PASSWORD = "wantedpassword9999"; public static final String TOR_ROOT_PATH = "PATH_TO_TOR_BROWSER_FOLDER"; public static final String TOR_EXE_PATH = TOR_ROOT_PATH + "Tor Browser\\Browser\\firefox.exe"; public static final String TOR_PROFILE_PATH = TOR_ROOT_PATH + "Tor Browser\\Browser\\TorBrowser\\Data\\Browser\\profile.default"; public static void perform() { System.setProperty("webdriver.gecko.driver", GECKO_DRIVER_PATH); // Tor Browser configuration File torProfileDir = new File(TOR_PROFILE_PATH); FirefoxBinary binary = new FirefoxBinary(new File(TOR_EXE_PATH)); FirefoxProfile torProfile = new FirefoxProfile(torProfileDir); torProfile.setPreference("webdriver.load.strategy", "unstable"); // int randomNum = 0 + (int)(Math.random() * UINamesData.QUERIES_BY_REGION.size()); DataCache.newUserCreated(HTTPRequestFactory.performRequestInBackground(UINamesData.QUERIES_BY_REGION.get(randomNum))); try { binary.startProfile(torProfile, torProfileDir, ""); } catch (IOException e) { e.printStackTrace(); } FirefoxProfile profile = new FirefoxProfile(); profile.setPreference("network.proxy.type", 1); profile.setPreference("network.proxy.socks", "127.0.0.1"); profile.setPreference("network.proxy.socks_port", 9150); FirefoxDriver driver 
= new FirefoxDriver(profile); System.setProperty("webdriver.gecko.driver", GECKO_DRIVER_PATH); driver.get(TwitterData.TWITTER_REG_URL); WebDriverWait wait = new WebDriverWait(driver, 10); WebElement element = wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("full-name"))); driver.findElement(By.id("full-name")).sendKeys(DataCache.lastResponseObject.getName() + " " + DataCache.lastResponseObject.getSurname()); driver.findElement(By.id("email")).sendKeys(DataCache.lastResponseObject.getEmail()); try { Thread.sleep(5000); } catch (InterruptedException e1) { e1.printStackTrace(); } driver.findElement(By.id("password")).sendKeys(DEFAULT_USER_PASSWORD); driver.findElement(By.id("submit_button")).click(); try { Thread.sleep(5000); } catch (InterruptedException e) { e.printStackTrace(); } driver.get(TwitterData.TWITTER_STEP2_REG_URL); WebElement element2 = wait.until(ExpectedConditions.visibilityOfElementLocated(By.id("username"))); randomNum = 1 + (int)(Math.random() * 50); driver.findElement(By.id("username")).sendKeys(DataCache.lastResponseObject.getName().toLowerCase() + DataCache.lastResponseObject.getSurname().toLowerCase() + randomNum); driver.findElement(By.id("submit_button")).click(); try { Thread.sleep(5000); } catch (InterruptedException e) { e.printStackTrace(); } driver.get(TwitterData.TWITTER_BASE_URL + TwitterData.TWITTER_USER_TO_FOLLOW); WebElement element3 = wait.until(ExpectedConditions.visibilityOfElementLocated(By.xpath("//li[contains(@class, 'follow-button')]/li[1]/a"))); driver.findElement(By.xpath("//li[contains(@class, 'follow-button')]/li[1]/a")).click(); try { Thread.sleep(5000); } catch (InterruptedException e) { e.printStackTrace(); } //Tidy up now. driver.quit(); } }
Killing background process after each task to get new identity
src/main/java/logic/TwitterTask.java
Killing background process after each task to get new identity
<ide><path>rc/main/java/logic/TwitterTask.java <ide> e.printStackTrace(); <ide> } <ide> <del> //Tidy up now. <del> driver.quit(); <add> //Tidy up now <add> killBrowser(); <add> } <add> <add> private static void killBrowser() { <add> Runtime rt = Runtime.getRuntime(); <add> <add> try { <add> rt.exec("taskkill /F /IM firefox.exe"); <add> while (processIsRunning("firefox.exe")) { <add> Thread.sleep(100); <add> } <add> } catch (Exception e) { <add> e.printStackTrace(); <add> } <add> } <add> <add> private static boolean processIsRunning(String process) { <add> boolean processIsRunning = false; <add> String line; <add> try { <add> Process proc = Runtime.getRuntime().exec("wmic.exe"); <add> BufferedReader input = new BufferedReader(new InputStreamReader(proc.getInputStream())); <add> OutputStreamWriter oStream = new OutputStreamWriter(proc.getOutputStream()); <add> oStream.write("process where name='" + process + "'"); <add> oStream.flush(); <add> oStream.close(); <add> while ((line = input.readLine()) != null) { <add> if (line.toLowerCase().contains("caption")) { <add> processIsRunning = true; <add> break; <add> } <add> } <add> input.close(); <add> } catch (IOException e) { <add> e.printStackTrace(); <add> } <add> return processIsRunning; <ide> } <ide> }
Java
apache-2.0
7ccb47256a4f9903f06cba61c155bbc586b9dce9
0
kalyanreddyemani/hpg-bigdata,pawanpal01/hpg-bigdata,kalyanreddyemani/hpg-bigdata,kalyanreddyemani/hpg-bigdata,pawanpal01/hpg-bigdata,opencb/hpg-bigdata,pawanpal01/hpg-bigdata,opencb/hpg-bigdata,opencb/hpg-bigdata
/* * Copyright 2015 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.hpg.bigdata.tools.stats.alignment.mr; import java.io.IOException; import org.apache.avro.mapred.AvroKey; import org.apache.avro.mapreduce.AvroJob; import org.apache.avro.mapreduce.AvroKeyInputFormat; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.ga4gh.models.ReadAlignment; import org.opencb.hpg.bigdata.tools.io.ReadAlignmentStatsWritable; import org.opencb.hpg.bigdata.tools.io.ReadStatsWritable; public class ReadAlignmentStatsMR { public static class ReadAlignmentStatsMapper extends Mapper<AvroKey<ReadAlignment>, NullWritable, LongWritable, ReadAlignmentStatsWritable> { int newKey; int numRecords; final int MAX_NUM_AVRO_RECORDS = 1000; public void setup(Context context) { newKey = 0; numRecords = 0; } @Override public void map(AvroKey<ReadAlignment> key, NullWritable value, Context context) throws IOException, InterruptedException { ReadAlignmentStatsWritable stats = new ReadAlignmentStatsWritable(); stats.updateByReadAlignment(key.datum()); context.write(new 
LongWritable(newKey), stats); // count records and update new key numRecords++; if (numRecords >= MAX_NUM_AVRO_RECORDS) { newKey++; numRecords = 0; } } } public static class ReadAlignmentStatsCombiner extends Reducer<LongWritable, ReadAlignmentStatsWritable, LongWritable, ReadAlignmentStatsWritable> { public void reduce(LongWritable key, Iterable<ReadAlignmentStatsWritable> values, Context context) throws IOException, InterruptedException { ReadAlignmentStatsWritable stats = new ReadAlignmentStatsWritable(); for (ReadAlignmentStatsWritable value : values) { stats.update(value); } context.write(new LongWritable(1), stats); } } public static class ReadAlignmentStatsReducer extends Reducer<LongWritable, ReadAlignmentStatsWritable, Text, NullWritable> { public void reduce(LongWritable key, Iterable<ReadAlignmentStatsWritable> values, Context context) throws IOException, InterruptedException { ReadAlignmentStatsWritable stats = new ReadAlignmentStatsWritable(); for (ReadAlignmentStatsWritable value : values) { stats.update(value); } context.write(new Text(stats.toJSON()), NullWritable.get()); } } public static int run(String input, String output) throws Exception { Configuration conf = new Configuration(); Job job = Job.getInstance(conf, "ReadAlignmentStatsMR"); job.setJarByClass(ReadAlignmentStatsMR.class); // input AvroJob.setInputKeySchema(job, ReadAlignment.getClassSchema()); FileInputFormat.setInputPaths(job, new Path(input)); job.setInputFormatClass(AvroKeyInputFormat.class); // output FileOutputFormat.setOutputPath(job, new Path(output)); job.setOutputKeyClass(ReadStatsWritable.class); job.setOutputValueClass(NullWritable.class); // mapper job.setMapperClass(ReadAlignmentStatsMapper.class); job.setMapOutputKeyClass(LongWritable.class); job.setMapOutputValueClass(ReadAlignmentStatsWritable.class); // combiner job.setCombinerClass(ReadAlignmentStatsCombiner.class); // reducer job.setReducerClass(ReadAlignmentStatsReducer.class); job.setNumReduceTasks(1); return 
(job.waitForCompletion(true) ? 0 : 1); } }
hpg-bigdata-tools/src/main/java/org/opencb/hpg/bigdata/tools/stats/alignment/mr/ReadAlignmentStatsMR.java
/* * Copyright 2015 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.hpg.bigdata.tools.stats.alignment.mr; import java.io.IOException; import org.apache.avro.mapred.AvroKey; import org.apache.avro.mapreduce.AvroJob; import org.apache.avro.mapreduce.AvroKeyInputFormat; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.ga4gh.models.ReadAlignment; import org.opencb.hpg.bigdata.tools.io.ReadAlignmentStatsWritable; import org.opencb.hpg.bigdata.tools.io.ReadStatsWritable; public class ReadAlignmentStatsMR { public static class ReadAlignmentStatsMapper extends Mapper<AvroKey<ReadAlignment>, NullWritable, LongWritable, ReadAlignmentStatsWritable> { @Override public void map(AvroKey<ReadAlignment> key, NullWritable value, Context context) throws IOException, InterruptedException { ReadAlignmentStatsWritable stats = new ReadAlignmentStatsWritable(); stats.updateByReadAlignment(key.datum()); context.write(new LongWritable(1), stats); } } public static class ReadAlignmentStatsReducer extends Reducer<LongWritable, ReadAlignmentStatsWritable, Text, NullWritable> 
{ public void reduce(LongWritable key, Iterable<ReadAlignmentStatsWritable> values, Context context) throws IOException, InterruptedException { ReadAlignmentStatsWritable stats = new ReadAlignmentStatsWritable(); for (ReadAlignmentStatsWritable value : values) { stats.update(value); } context.write(new Text(stats.toJSON()), NullWritable.get()); } } public static int run(String input, String output) throws Exception { Configuration conf = new Configuration(); Job job = Job.getInstance(conf, "ReadAlignmentStatsMR"); job.setJarByClass(ReadAlignmentStatsMR.class); // input AvroJob.setInputKeySchema(job, ReadAlignment.getClassSchema()); FileInputFormat.setInputPaths(job, new Path(input)); job.setInputFormatClass(AvroKeyInputFormat.class); // output FileOutputFormat.setOutputPath(job, new Path(output)); job.setOutputKeyClass(ReadStatsWritable.class); job.setOutputValueClass(NullWritable.class); // mapper job.setMapperClass(ReadAlignmentStatsMapper.class); job.setMapOutputKeyClass(LongWritable.class); job.setMapOutputValueClass(ReadAlignmentStatsWritable.class); // reducer job.setReducerClass(ReadAlignmentStatsReducer.class); job.setNumReduceTasks(1); return (job.waitForCompletion(true) ? 0 : 1); } }
stats: add a combiner function to compute BAM stats in MapReduce. #9
hpg-bigdata-tools/src/main/java/org/opencb/hpg/bigdata/tools/stats/alignment/mr/ReadAlignmentStatsMR.java
stats: add a combiner function to compute BAM stats in MapReduce. #9
<ide><path>pg-bigdata-tools/src/main/java/org/opencb/hpg/bigdata/tools/stats/alignment/mr/ReadAlignmentStatsMR.java <ide> <ide> public static class ReadAlignmentStatsMapper extends Mapper<AvroKey<ReadAlignment>, NullWritable, LongWritable, ReadAlignmentStatsWritable> { <ide> <add> int newKey; <add> int numRecords; <add> final int MAX_NUM_AVRO_RECORDS = 1000; <add> <add> public void setup(Context context) { <add> newKey = 0; <add> numRecords = 0; <add> } <add> <ide> @Override <ide> public void map(AvroKey<ReadAlignment> key, NullWritable value, Context context) throws IOException, InterruptedException { <ide> ReadAlignmentStatsWritable stats = new ReadAlignmentStatsWritable(); <ide> stats.updateByReadAlignment(key.datum()); <add> context.write(new LongWritable(newKey), stats); <add> <add> // count records and update new key <add> numRecords++; <add> if (numRecords >= MAX_NUM_AVRO_RECORDS) { <add> newKey++; <add> numRecords = 0; <add> } <add> } <add> } <add> <add> public static class ReadAlignmentStatsCombiner extends Reducer<LongWritable, ReadAlignmentStatsWritable, LongWritable, ReadAlignmentStatsWritable> { <add> <add> public void reduce(LongWritable key, Iterable<ReadAlignmentStatsWritable> values, Context context) throws IOException, InterruptedException { <add> ReadAlignmentStatsWritable stats = new ReadAlignmentStatsWritable(); <add> for (ReadAlignmentStatsWritable value : values) { <add> stats.update(value); <add> } <ide> context.write(new LongWritable(1), stats); <ide> } <ide> } <del> <ide> public static class ReadAlignmentStatsReducer extends Reducer<LongWritable, ReadAlignmentStatsWritable, Text, NullWritable> { <ide> <ide> public void reduce(LongWritable key, Iterable<ReadAlignmentStatsWritable> values, Context context) throws IOException, InterruptedException { <ide> job.setMapperClass(ReadAlignmentStatsMapper.class); <ide> job.setMapOutputKeyClass(LongWritable.class); <ide> job.setMapOutputValueClass(ReadAlignmentStatsWritable.class); <del> <add> <add> 
// combiner <add> job.setCombinerClass(ReadAlignmentStatsCombiner.class); <add> <ide> // reducer <ide> job.setReducerClass(ReadAlignmentStatsReducer.class); <ide> job.setNumReduceTasks(1);
Java
apache-2.0
7455610aa52183942315f9e793cb03818006a75f
0
tarasane/h2o-3,kyoren/https-github.com-h2oai-h2o-3,junwucs/h2o-3,nilbody/h2o-3,junwucs/h2o-3,nilbody/h2o-3,jangorecki/h2o-3,jangorecki/h2o-3,weaver-viii/h2o-3,ChristosChristofidis/h2o-3,madmax983/h2o-3,junwucs/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,weaver-viii/h2o-3,printedheart/h2o-3,jangorecki/h2o-3,PawarPawan/h2o-v3,PawarPawan/h2o-v3,bospetersen/h2o-3,junwucs/h2o-3,tarasane/h2o-3,ChristosChristofidis/h2o-3,mrgloom/h2o-3,mrgloom/h2o-3,YzPaul3/h2o-3,michalkurka/h2o-3,ChristosChristofidis/h2o-3,pchmieli/h2o-3,brightchen/h2o-3,pchmieli/h2o-3,kyoren/https-github.com-h2oai-h2o-3,kyoren/https-github.com-h2oai-h2o-3,h2oai/h2o-3,PawarPawan/h2o-v3,weaver-viii/h2o-3,datachand/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,bospetersen/h2o-3,PawarPawan/h2o-v3,junwucs/h2o-3,madmax983/h2o-3,printedheart/h2o-3,mrgloom/h2o-3,ChristosChristofidis/h2o-3,pchmieli/h2o-3,h2oai/h2o-3,PawarPawan/h2o-v3,mrgloom/h2o-3,nilbody/h2o-3,pchmieli/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,h2oai/h2o-3,h2oai/h2o-3,spennihana/h2o-3,mathemage/h2o-3,YzPaul3/h2o-3,tarasane/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,kyoren/https-github.com-h2oai-h2o-3,mathemage/h2o-3,kyoren/https-github.com-h2oai-h2o-3,h2oai/h2o-dev,mathemage/h2o-3,brightchen/h2o-3,michalkurka/h2o-3,brightchen/h2o-3,mathemage/h2o-3,bospetersen/h2o-3,h2oai/h2o-dev,datachand/h2o-3,PawarPawan/h2o-v3,mrgloom/h2o-3,bospetersen/h2o-3,mathemage/h2o-3,ChristosChristofidis/h2o-3,jangorecki/h2o-3,datachand/h2o-3,mrgloom/h2o-3,YzPaul3/h2o-3,mrgloom/h2o-3,ChristosChristofidis/h2o-3,bospetersen/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,weaver-viii/h2o-3,spennihana/h2o-3,tarasane/h2o-3,YzPaul3/h2o-3,madmax983/h2o-3,bospetersen/h2o-3,jangorecki/h2o-3,weaver-viii/h2o-3,bospetersen/h2o-3,pchmieli/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,mathemage/h2o-3,printedheart/h2o-3,PawarPawan/h2o-v3,michalkurka/h2o-3,tarasane/h2o-3,tarasane/h2o-3,mathemage/h2o-3,nilbody/h2o-3,nilbody/h2o-3,brightchen/h2o-3,datachand/h2o-3,pchmieli/h2o-3,datachand/h2o-3,madmax983/h2o-3,spennihana/h2
o-3,YzPaul3/h2o-3,datachand/h2o-3,pchmieli/h2o-3,nilbody/h2o-3,brightchen/h2o-3,h2oai/h2o-3,spennihana/h2o-3,junwucs/h2o-3,michalkurka/h2o-3,printedheart/h2o-3,weaver-viii/h2o-3,weaver-viii/h2o-3,nilbody/h2o-3,jangorecki/h2o-3,brightchen/h2o-3,datachand/h2o-3,printedheart/h2o-3,h2oai/h2o-dev,printedheart/h2o-3,spennihana/h2o-3,madmax983/h2o-3,tarasane/h2o-3,madmax983/h2o-3,kyoren/https-github.com-h2oai-h2o-3,h2oai/h2o-3,brightchen/h2o-3,kyoren/https-github.com-h2oai-h2o-3,ChristosChristofidis/h2o-3,junwucs/h2o-3,madmax983/h2o-3,printedheart/h2o-3,YzPaul3/h2o-3
package hex.deeplearning; import hex.ConfusionMatrix; import hex.deeplearning.DeepLearningParameters.ClassSamplingMethod; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import water.*; import water.fvec.Frame; import water.fvec.NFSFileVec; import water.parser.ParseDataset; import water.rapids.Env; import water.rapids.Exec; import water.util.Log; import java.util.Arrays; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Random; import static hex.ConfusionMatrix.buildCM; public class DeepLearningProstateTest extends TestUtil { @BeforeClass() public static void setup() { stall_till_cloudsize(1); } @Test public void run() throws Exception { runFraction(0.000025f); } public void runFraction(float fraction) { long seed = 0xDECAF; Random rng = new Random(seed); String[] datasets = new String[2]; int[][] responses = new int[datasets.length][]; datasets[0] = "smalldata/logreg/prostate.csv"; responses[0] = new int[]{1,2,8}; //CAPSULE (binomial), AGE (regression), GLEASON (multi-class) datasets[1] = "smalldata/iris/iris.csv"; responses[1] = new int[]{4}; //Iris-type (multi-class) HashSet<Long> checkSums = new LinkedHashSet<>(); int testcount = 0; int count = 0; for (int i = 0; i < datasets.length; ++i) { final String dataset = datasets[i]; NFSFileVec nfs = NFSFileVec.make(find_test_file(dataset)); Frame frame = ParseDataset.parse(Key.make(), nfs._key); NFSFileVec vnfs = NFSFileVec.make(find_test_file(dataset)); Frame vframe = ParseDataset.parse(Key.make(), vnfs._key); Scope.enter(); try { for (int resp : responses[i]) { boolean classification = !(i == 0 && resp == 2); if (classification && !frame.vec(resp).isEnum()) { Scope.track(frame.replace(resp, frame.vec(resp).toEnum())._key); DKV.put(frame._key, frame); } for (DeepLearningParameters.Loss loss : new DeepLearningParameters.Loss[]{ DeepLearningParameters.Loss.Automatic, DeepLearningParameters.Loss.CrossEntropy, 
DeepLearningParameters.Loss.Huber, DeepLearningParameters.Loss.Absolute, DeepLearningParameters.Loss.MeanSquare }) { if ( !classification && loss == DeepLearningParameters.Loss.CrossEntropy ) continue; for (boolean elastic_averaging : new boolean[]{ true, false, }) { for (boolean replicate : new boolean[]{ true, false, }) { for (DeepLearningParameters.Activation activation : new DeepLearningParameters.Activation[]{ DeepLearningParameters.Activation.Tanh, DeepLearningParameters.Activation.TanhWithDropout, DeepLearningParameters.Activation.Rectifier, DeepLearningParameters.Activation.RectifierWithDropout, DeepLearningParameters.Activation.Maxout, // DeepLearningParameters.Activation.MaxoutWithDropout }) { for (boolean load_balance : new boolean[]{ true, false, }) { for (boolean shuffle : new boolean[]{ true, false, }) { for (boolean balance_classes : new boolean[]{ true, false, }) { for (ClassSamplingMethod csm : new ClassSamplingMethod[]{ ClassSamplingMethod.Stratified, ClassSamplingMethod.Uniform }) { for (int scoretraining : new int[]{ 200, 20, 0, }) { for (int scorevalidation : new int[]{ 200, 20, 0, }) { for (int vf : new int[]{ 0, //no validation 1, //same as source -1, //different validation frame }) { for (int n_folds : new int[]{ 0, 3, }) { if (n_folds != 0 && vf != 0) continue; for (boolean keep_cv_splits : new boolean[]{false}) { //otherwise it leaks for (boolean overwrite_with_best_model : new boolean[]{false, true}) { for (int train_samples_per_iteration : new int[]{ -2, //auto-tune -1, //N epochs per iteration 0, //1 epoch per iteration rng.nextInt(200), // <1 epoch per iteration 500, //>1 epoch per iteration }) { DeepLearningModel model1 = null, model2 = null; count++; if (fraction < rng.nextFloat()) continue; try { Scope.enter(); Log.info("**************************)"); Log.info("Starting test #" + count); Log.info("**************************)"); final double epochs = 7 + rng.nextDouble() + rng.nextInt(4); final int[] hidden = new int[]{1 + 
rng.nextInt(4), 1 + rng.nextInt(6)}; Frame valid = null; //no validation if (vf == 1) valid = frame; //use the same frame for validation else if (vf == -1) valid = vframe; //different validation frame (here: from the same file) long myseed = rng.nextLong(); // build the model, with all kinds of shuffling/rebalancing/sampling DeepLearningParameters p = new DeepLearningParameters(); { Log.info("Using seed: " + myseed); p._model_id = Key.make(Key.make().toString() + "first"); p._train = frame._key; p._response_column = frame._names[resp]; p._valid = valid==null ? null : valid._key; p._hidden = hidden; p._input_dropout_ratio = 0.1; p._hidden_dropout_ratios = null; p._activation = activation; // p.best_model_key = best_model_key; p._overwrite_with_best_model = overwrite_with_best_model; p._epochs = epochs; p._loss = loss; p._nfolds = n_folds; p._keep_cross_validation_splits = keep_cv_splits; p._seed = myseed; p._train_samples_per_iteration = train_samples_per_iteration; p._force_load_balance = load_balance; p._replicate_training_data = replicate; p._shuffle_training_data = shuffle; p._score_training_samples = scoretraining; p._score_validation_samples = scorevalidation; p._classification_stop = -1; p._regression_stop = -1; p._balance_classes = classification && balance_classes; p._quiet_mode = false; p._score_validation_sampling = csm; p._elastic_averaging = elastic_averaging; // Log.info(new String(p.writeJSON(new AutoBuffer()).buf()).replace(",","\n")); DeepLearning dl = new DeepLearning(p); try { model1 = dl.trainModel().get(); checkSums.add(model1.checksum()); } catch (Throwable t) { throw t; } finally { dl.remove(); } Log.info("Trained for " + model1.epoch_counter + " epochs."); assert( ((p._train_samples_per_iteration <= 0 || p._train_samples_per_iteration >= frame.numRows()) && model1.epoch_counter > epochs) || Math.abs(model1.epoch_counter - epochs)/epochs < 0.20 ); // check that iteration is of the expected length - check via when first scoring happens if 
(p._train_samples_per_iteration == 0) { // no sampling - every node does its share of the full data if (!replicate) assert((double)model1._output._scoring_history.get(1,3) == 1); // sampling on each node - replicated data else assert((double)model1._output._scoring_history.get(1,3) > 0.7 && (double)model1._output._scoring_history.get(1,3) < 1.3) : ("First scoring at " + model1._output._scoring_history.get(1,3) + " epochs, should be closer to 1!" + "\n" + model1.toString()); } else if (p._train_samples_per_iteration == -1) { // no sampling - every node does its share of the full data if (!replicate) assert ((double) model1._output._scoring_history.get(1, 3) == 1); // every node passes over the full dataset else assert ((double) model1._output._scoring_history.get(1, 3) == H2O.CLOUD.size()); } if (n_folds != 0) { assert(model1._output._validation_metrics != null); } } assert(model1.model_info().get_params()._l1 == 0); assert(model1.model_info().get_params()._l2 == 0); // Do some more training via checkpoint restart // For n_folds, continue without n_folds (not yet implemented) - from now on, model2 will have n_folds=0... DeepLearningParameters p2 = new DeepLearningParameters(); Assert.assertTrue(model1.model_info().get_processed_total() >= frame.numRows() * epochs); { p2._model_id = Key.make(); p2._checkpoint = model1._key; p2._nfolds = 0; p2._train = frame._key; p2._activation = activation; p2._hidden = hidden; p2._valid = valid == null ? 
null : valid._key; p2._l1 = 1e-3; p2._l2 = 1e-3; p2._response_column = frame._names[resp]; p2._overwrite_with_best_model = overwrite_with_best_model; p2._epochs = epochs; p2._replicate_training_data = rng.nextBoolean(); p2._seed = myseed; p2._train_samples_per_iteration = train_samples_per_iteration; p2._balance_classes = classification && balance_classes; p2._elastic_averaging = rng.nextBoolean(); DeepLearning dl = new DeepLearning(p2); try { model2 = dl.trainModel().get(); } catch (Throwable t) { throw t; } finally { dl.remove(); } } assert(model1._parms != p2); assert(model1.model_info().get_params() != model2.model_info().get_params()); assert(model1.model_info().get_params()._l1 == 0); assert(model1.model_info().get_params()._l2 == 0); Assert.assertTrue(model2.model_info().get_processed_total() >= frame.numRows() * 2 * epochs); assert(p != p2); assert(p != model1.model_info().get_params()); assert(p2 != model2.model_info().get_params()); if (p._loss == DeepLearningParameters.Loss.Automatic) { assert(p._loss == DeepLearningParameters.Loss.Automatic); assert(p2._loss == DeepLearningParameters.Loss.Automatic); assert(model1.model_info().get_params()._loss != DeepLearningParameters.Loss.Automatic); assert(model2.model_info().get_params()._loss != DeepLearningParameters.Loss.Automatic); } assert(p._hidden_dropout_ratios == null); assert(p2._hidden_dropout_ratios == null); if (p._activation.toString().contains("WithDropout")) { assert(model1.model_info().get_params()._hidden_dropout_ratios != null); assert(model2.model_info().get_params()._hidden_dropout_ratios != null); assert(Arrays.equals( model1.model_info().get_params()._hidden_dropout_ratios, model2.model_info().get_params()._hidden_dropout_ratios)); } assert(p._l1 == 0); assert(p._l2 == 0); assert(p2._l1 == 1e-3); assert(p2._l2 == 1e-3); assert(model1.model_info().get_params()._l1 == 0); assert(model1.model_info().get_params()._l2 == 0); assert(model2.model_info().get_params()._l1 == 1e-3); 
assert(model2.model_info().get_params()._l2 == 1e-3); if (valid == null) valid = frame; double threshold = 0; if (model2._output.isClassifier()) { Frame pred = null, pred2 = null; try { pred = model2.score(valid); // Build a POJO, validate same results Assert.assertTrue(model2.testJavaScoring(valid,pred,1e-6)); hex.ModelMetrics mm = hex.ModelMetrics.getFromDKV(model2, valid); double error; // binary if (model2._output.nclasses() == 2) { assert (resp == 1); threshold = mm.auc().defaultThreshold(); error = mm.auc().defaultErr(); // check that auc.cm() is the right CM Assert.assertEquals(new ConfusionMatrix(mm.auc().defaultCM(), model2._output._domains[resp]).err(), error, 1e-15); // check that calcError() is consistent as well (for CM=null, AUC!=null) Assert.assertEquals(mm.cm().err(), error, 1e-15); // check that the labels made with the default threshold are consistent with the CM that's reported by the AUC object ConfusionMatrix cm = buildCM(valid.vecs()[resp].toEnum(), pred.vecs()[0].toEnum()); Log.info("CM from pre-made labels:"); Log.info(cm.toASCII()); // Assert.assertEquals(cm.err(), error, 1e-4); //FIXME // manually make labels with AUC-given default threshold String ast = "(= ([ %pred2 \"null\" #0) (G ([ %pred2 \"null\" #2) #"+threshold+"))"; // confirm that orig CM was made with threshold 0.5 // put pred2 into DKV, and allow access pred2 = new Frame(Key.make("pred2"), pred.names(), pred.vecs()); pred2.delete_and_lock(null); pred2.unlock(null); Env ev = Exec.exec(ast); try { pred2 = ev.popAry(); // pop0 pops w/o lowering refs, let remove_and_unlock handle cleanup } finally { if (ev != null) ev.remove_and_unlock(); } cm = buildCM(valid.vecs()[resp].toEnum(), pred2.vecs()[0].toEnum()); Log.info("CM from self-made labels:"); Log.info(cm.toASCII()); Assert.assertEquals(cm.err(), error, 1e-4); //AUC-given F1-optimal threshold might not reproduce AUC-given CM-error identically, but should match up to 1% } } finally { if (pred != null) pred.delete(); if (pred2 != 
null) pred2.delete(); } } //classifier else { Frame pred = model2.score(valid); // Build a POJO, validate same results Assert.assertTrue(model2.testJavaScoring(frame,pred,1e-6)); pred.delete(); } Log.info("Parameters combination " + count + ": PASS"); testcount++; } catch (Throwable t) { t.printStackTrace(); throw new RuntimeException(t); } finally { if (model1 != null) { model1.delete_xval_models(); model1.delete(); } if (model2 != null) { model2.delete_xval_models(); model2.delete(); } Scope.exit(); } } } } } } } } } } } } } } } } } } finally { frame.delete(); vframe.delete(); Scope.exit(); } } Assert.assertTrue(checkSums.size() == testcount); Log.info("\n\n============================================="); Log.info("Tested " + testcount + " out of " + count + " parameter combinations."); Log.info("============================================="); } public static class Mid extends DeepLearningProstateTest { @Test @Ignore public void run() throws Exception { runFraction(0.01f); } //for nightly tests } public static class Short extends DeepLearningProstateTest { @Test @Ignore public void run() throws Exception { runFraction(0.001f); } } }
h2o-algos/src/test/java/hex/deeplearning/DeepLearningProstateTest.java
package hex.deeplearning; import hex.ConfusionMatrix; import hex.deeplearning.DeepLearningParameters.ClassSamplingMethod; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import water.*; import water.fvec.Frame; import water.fvec.NFSFileVec; import water.parser.ParseDataset; import water.rapids.Env; import water.rapids.Exec; import water.util.Log; import java.util.Arrays; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Random; import static hex.ConfusionMatrix.buildCM; public class DeepLearningProstateTest extends TestUtil { @BeforeClass() public static void setup() { stall_till_cloudsize(1); } @Test public void run() throws Exception { runFraction(0.000025f); } public void runFraction(float fraction) { long seed = 0xDECAF; Random rng = new Random(seed); String[] datasets = new String[2]; int[][] responses = new int[datasets.length][]; datasets[0] = "smalldata/logreg/prostate.csv"; responses[0] = new int[]{1,2,8}; //CAPSULE (binomial), AGE (regression), GLEASON (multi-class) datasets[1] = "smalldata/iris/iris.csv"; responses[1] = new int[]{4}; //Iris-type (multi-class) HashSet<Long> checkSums = new LinkedHashSet<>(); int testcount = 0; int count = 0; for (int i = 0; i < datasets.length; ++i) { final String dataset = datasets[i]; NFSFileVec nfs = NFSFileVec.make(find_test_file(dataset)); Frame frame = ParseDataset.parse(Key.make(), nfs._key); NFSFileVec vnfs = NFSFileVec.make(find_test_file(dataset)); Frame vframe = ParseDataset.parse(Key.make(), vnfs._key); Scope.enter(); try { for (int resp : responses[i]) { boolean classification = !(i == 0 && resp == 2); if (classification && !frame.vec(resp).isEnum()) { Scope.track(frame.replace(resp, frame.vec(resp).toEnum())._key); DKV.put(frame._key, frame); } for (DeepLearningParameters.Loss loss : new DeepLearningParameters.Loss[]{ DeepLearningParameters.Loss.Automatic, DeepLearningParameters.Loss.CrossEntropy, 
DeepLearningParameters.Loss.Huber, DeepLearningParameters.Loss.Absolute, DeepLearningParameters.Loss.MeanSquare }) { if ( !classification && loss == DeepLearningParameters.Loss.CrossEntropy ) continue; for (boolean elastic_averaging : new boolean[]{ true, false, }) { for (boolean replicate : new boolean[]{ true, false, }) { for (DeepLearningParameters.Activation activation : new DeepLearningParameters.Activation[]{ DeepLearningParameters.Activation.Tanh, DeepLearningParameters.Activation.TanhWithDropout, DeepLearningParameters.Activation.Rectifier, DeepLearningParameters.Activation.RectifierWithDropout, DeepLearningParameters.Activation.Maxout, // DeepLearningParameters.Activation.MaxoutWithDropout }) { for (boolean load_balance : new boolean[]{ true, false, }) { for (boolean shuffle : new boolean[]{ true, false, }) { for (boolean balance_classes : new boolean[]{ true, false, }) { for (ClassSamplingMethod csm : new ClassSamplingMethod[]{ ClassSamplingMethod.Stratified, ClassSamplingMethod.Uniform }) { for (int scoretraining : new int[]{ 200, 20, 0, }) { for (int scorevalidation : new int[]{ 200, 20, 0, }) { for (int vf : new int[]{ 0, //no validation 1, //same as source -1, //different validation frame }) { for (int n_folds : new int[]{ 0, // 2, }) { if (n_folds != 0 && vf != 0) continue; for (boolean keep_cv_splits : new boolean[]{false}) { //otherwise it leaks for (boolean overwrite_with_best_model : new boolean[]{false, true}) { for (int train_samples_per_iteration : new int[]{ -2, //auto-tune -1, //N epochs per iteration 0, //1 epoch per iteration rng.nextInt(200), // <1 epoch per iteration 500, //>1 epoch per iteration }) { DeepLearningModel model1 = null, model2 = null; count++; if (fraction < rng.nextFloat()) continue; try { Scope.enter(); Log.info("**************************)"); Log.info("Starting test #" + count); Log.info("**************************)"); final double epochs = 7 + rng.nextDouble() + rng.nextInt(4); final int[] hidden = new int[]{1 + 
rng.nextInt(4), 1 + rng.nextInt(6)}; Frame valid = null; //no validation if (vf == 1) valid = frame; //use the same frame for validation else if (vf == -1) valid = vframe; //different validation frame (here: from the same file) long myseed = rng.nextLong(); // build the model, with all kinds of shuffling/rebalancing/sampling DeepLearningParameters p = new DeepLearningParameters(); { Log.info("Using seed: " + myseed); p._model_id = Key.make(Key.make().toString() + "first"); p._train = frame._key; p._response_column = frame._names[resp]; p._valid = valid==null ? null : valid._key; p._hidden = hidden; p._input_dropout_ratio = 0.1; p._hidden_dropout_ratios = null; p._activation = activation; // p.best_model_key = best_model_key; p._overwrite_with_best_model = overwrite_with_best_model; p._epochs = epochs; p._loss = loss; if (n_folds > 0) { H2O.unimpl(); // p._n_folds = n_folds; } p._keep_cross_validation_splits = keep_cv_splits; p._seed = myseed; p._train_samples_per_iteration = train_samples_per_iteration; p._force_load_balance = load_balance; p._replicate_training_data = replicate; p._shuffle_training_data = shuffle; p._score_training_samples = scoretraining; p._score_validation_samples = scorevalidation; p._classification_stop = -1; p._regression_stop = -1; p._balance_classes = classification && balance_classes; p._quiet_mode = false; p._score_validation_sampling = csm; p._elastic_averaging = elastic_averaging; // Log.info(new String(p.writeJSON(new AutoBuffer()).buf()).replace(",","\n")); DeepLearning dl = new DeepLearning(p); try { model1 = dl.trainModel().get(); checkSums.add(model1.checksum()); } catch (Throwable t) { throw t; } finally { dl.remove(); } Log.info("Trained for " + model1.epoch_counter + " epochs."); assert( ((p._train_samples_per_iteration <= 0 || p._train_samples_per_iteration >= frame.numRows()) && model1.epoch_counter > epochs) || Math.abs(model1.epoch_counter - epochs)/epochs < 0.20 ); // check that iteration is of the expected length - check 
via when first scoring happens if (p._train_samples_per_iteration == 0) { // no sampling - every node does its share of the full data if (!replicate) assert((double)model1._output._scoring_history.get(1,3) == 1); // sampling on each node - replicated data else assert((double)model1._output._scoring_history.get(1,3) > 0.7 && (double)model1._output._scoring_history.get(1,3) < 1.3) : ("First scoring at " + model1._output._scoring_history.get(1,3) + " epochs, should be closer to 1!" + "\n" + model1.toString()); } else if (p._train_samples_per_iteration == -1) { // no sampling - every node does its share of the full data if (!replicate) assert ((double) model1._output._scoring_history.get(1, 3) == 1); // every node passes over the full dataset else assert ((double) model1._output._scoring_history.get(1, 3) == H2O.CLOUD.size()); } if (n_folds != 0) // test HTML of cv models { throw H2O.unimpl(); // for (Key k : model1.get_params().xval_models) { // DeepLearningModel cv_model = UKV.get(k); // StringBuilder sb = new StringBuilder(); // cv_model.generateHTML("cv", sb); // cv_model.delete(); // } } } assert(model1.model_info().get_params()._l1 == 0); assert(model1.model_info().get_params()._l2 == 0); // Do some more training via checkpoint restart // For n_folds, continue without n_folds (not yet implemented) - from now on, model2 will have n_folds=0... DeepLearningParameters p2 = new DeepLearningParameters(); Assert.assertTrue(model1.model_info().get_processed_total() >= frame.numRows() * epochs); { p2._model_id = Key.make(); p2._checkpoint = model1._key; // p._n_folds = 0; p2._train = frame._key; p2._activation = activation; p2._hidden = hidden; p2._valid = valid == null ? 
null : valid._key; p2._l1 = 1e-3; p2._l2 = 1e-3; p2._response_column = frame._names[resp]; p2._overwrite_with_best_model = overwrite_with_best_model; p2._epochs = epochs; p2._replicate_training_data = rng.nextBoolean(); p2._seed = myseed; p2._train_samples_per_iteration = train_samples_per_iteration; p2._balance_classes = classification && balance_classes; p2._elastic_averaging = rng.nextBoolean(); DeepLearning dl = new DeepLearning(p2); try { model2 = dl.trainModel().get(); } catch (Throwable t) { throw t; } finally { dl.remove(); } } assert(model1._parms != p2); assert(model1.model_info().get_params() != model2.model_info().get_params()); assert(model1.model_info().get_params()._l1 == 0); assert(model1.model_info().get_params()._l2 == 0); Assert.assertTrue(model2.model_info().get_processed_total() >= frame.numRows() * 2 * epochs); assert(p != p2); assert(p != model1.model_info().get_params()); assert(p2 != model2.model_info().get_params()); if (p._loss == DeepLearningParameters.Loss.Automatic) { assert(p._loss == DeepLearningParameters.Loss.Automatic); assert(p2._loss == DeepLearningParameters.Loss.Automatic); assert(model1.model_info().get_params()._loss != DeepLearningParameters.Loss.Automatic); assert(model2.model_info().get_params()._loss != DeepLearningParameters.Loss.Automatic); } assert(p._hidden_dropout_ratios == null); assert(p2._hidden_dropout_ratios == null); if (p._activation.toString().contains("WithDropout")) { assert(model1.model_info().get_params()._hidden_dropout_ratios != null); assert(model2.model_info().get_params()._hidden_dropout_ratios != null); assert(Arrays.equals( model1.model_info().get_params()._hidden_dropout_ratios, model2.model_info().get_params()._hidden_dropout_ratios)); } assert(p._l1 == 0); assert(p._l2 == 0); assert(p2._l1 == 1e-3); assert(p2._l2 == 1e-3); assert(model1.model_info().get_params()._l1 == 0); assert(model1.model_info().get_params()._l2 == 0); assert(model2.model_info().get_params()._l1 == 1e-3); 
assert(model2.model_info().get_params()._l2 == 1e-3); if (valid == null) valid = frame; double threshold = 0; if (model2._output.isClassifier()) { Frame pred = null, pred2 = null; try { pred = model2.score(valid); // Build a POJO, validate same results Assert.assertTrue(model2.testJavaScoring(valid,pred,1e-6)); hex.ModelMetrics mm = hex.ModelMetrics.getFromDKV(model2, valid); double error; // binary if (model2._output.nclasses() == 2) { assert (resp == 1); threshold = mm.auc().defaultThreshold(); error = mm.auc().defaultErr(); // check that auc.cm() is the right CM Assert.assertEquals(new ConfusionMatrix(mm.auc().defaultCM(), model2._output._domains[resp]).err(), error, 1e-15); // check that calcError() is consistent as well (for CM=null, AUC!=null) Assert.assertEquals(mm.cm().err(), error, 1e-15); // check that the labels made with the default threshold are consistent with the CM that's reported by the AUC object ConfusionMatrix cm = buildCM(valid.vecs()[resp].toEnum(), pred.vecs()[0].toEnum()); Log.info("CM from pre-made labels:"); Log.info(cm.toASCII()); // Assert.assertEquals(cm.err(), error, 1e-4); //FIXME // manually make labels with AUC-given default threshold String ast = "(= ([ %pred2 \"null\" #0) (G ([ %pred2 \"null\" #2) #"+threshold+"))"; // confirm that orig CM was made with threshold 0.5 // put pred2 into DKV, and allow access pred2 = new Frame(Key.make("pred2"), pred.names(), pred.vecs()); pred2.delete_and_lock(null); pred2.unlock(null); Env ev = Exec.exec(ast); try { pred2 = ev.popAry(); // pop0 pops w/o lowering refs, let remove_and_unlock handle cleanup } finally { if (ev != null) ev.remove_and_unlock(); } cm = buildCM(valid.vecs()[resp].toEnum(), pred2.vecs()[0].toEnum()); Log.info("CM from self-made labels:"); Log.info(cm.toASCII()); Assert.assertEquals(cm.err(), error, 1e-4); //AUC-given F1-optimal threshold might not reproduce AUC-given CM-error identically, but should match up to 1% } } finally { if (pred != null) pred.delete(); if (pred2 != 
null) pred2.delete(); } } //classifier else { Frame pred = model2.score(valid); // Build a POJO, validate same results Assert.assertTrue(model2.testJavaScoring(frame,pred,1e-6)); pred.delete(); } Log.info("Parameters combination " + count + ": PASS"); testcount++; } catch (Throwable t) { t.printStackTrace(); throw new RuntimeException(t); } finally { if (model1 != null) { model1.delete_xval_models(); model1.delete(); } if (model2 != null) { model2.delete_xval_models(); model2.delete(); } Scope.exit(); } } } } } } } } } } } } } } } } } } finally { frame.delete(); vframe.delete(); Scope.exit(); } } Assert.assertTrue(checkSums.size() == testcount); Log.info("\n\n============================================="); Log.info("Tested " + testcount + " out of " + count + " parameter combinations."); Log.info("============================================="); } public static class Mid extends DeepLearningProstateTest { @Test @Ignore public void run() throws Exception { runFraction(0.01f); } //for nightly tests } public static class Short extends DeepLearningProstateTest { @Test @Ignore public void run() throws Exception { runFraction(0.001f); } } }
PUBDEV-1451: Add JUnits for cross-val for DL.
h2o-algos/src/test/java/hex/deeplearning/DeepLearningProstateTest.java
PUBDEV-1451: Add JUnits for cross-val for DL.
<ide><path>2o-algos/src/test/java/hex/deeplearning/DeepLearningProstateTest.java <ide> }) { <ide> for (int n_folds : new int[]{ <ide> 0, <del>// 2, <add> 3, <ide> }) { <ide> if (n_folds != 0 && vf != 0) continue; <ide> for (boolean keep_cv_splits : new boolean[]{false}) { //otherwise it leaks <ide> p._overwrite_with_best_model = overwrite_with_best_model; <ide> p._epochs = epochs; <ide> p._loss = loss; <del> if (n_folds > 0) { <del> H2O.unimpl(); <del> // p._n_folds = n_folds; <del> } <add> p._nfolds = n_folds; <ide> p._keep_cross_validation_splits = keep_cv_splits; <ide> p._seed = myseed; <ide> p._train_samples_per_iteration = train_samples_per_iteration; <ide> else assert ((double) model1._output._scoring_history.get(1, 3) == H2O.CLOUD.size()); <ide> } <ide> <del> if (n_folds != 0) <del> // test HTML of cv models <del> { <del> throw H2O.unimpl(); <del>// for (Key k : model1.get_params().xval_models) { <del>// DeepLearningModel cv_model = UKV.get(k); <del>// StringBuilder sb = new StringBuilder(); <del>// cv_model.generateHTML("cv", sb); <del>// cv_model.delete(); <del>// } <add> if (n_folds != 0) { <add> assert(model1._output._validation_metrics != null); <ide> } <ide> } <ide> <ide> { <ide> p2._model_id = Key.make(); <ide> p2._checkpoint = model1._key; <del> // p._n_folds = 0; <add> p2._nfolds = 0; <ide> p2._train = frame._key; <ide> p2._activation = activation; <ide> p2._hidden = hidden;
Java
bsd-3-clause
00e0fd97cfa0a571a323e8eec2e369263cb92049
0
NCIP/cagrid,NCIP/cagrid,NCIP/cagrid,NCIP/cagrid
package org.cagrid.grape; import java.awt.Component; import java.awt.Dimension; import java.awt.Insets; import java.awt.Point; import javax.swing.DefaultDesktopManager; import javax.swing.JComponent; import javax.swing.JDesktopPane; import javax.swing.JDialog; import javax.swing.JInternalFrame; import javax.swing.JScrollPane; import javax.swing.JViewport; import javax.swing.border.Border; import org.cagrid.grape.model.Dimensions; import org.cagrid.grape.model.RenderOptions; /** * @author <A href="mailto:[email protected]">Stephen Langella </A> * @author <A href="mailto:[email protected]">Scott Oster </A> * @author <A href="mailto:[email protected]">Shannon Hastings </A> * @created Oct 14, 2004 * @version $Id: ArgumentManagerTable.java,v 1.2 2004/10/15 16:35:16 langella * Exp $ */ public class MDIDesktopPane extends JDesktopPane { private static int FRAME_OFFSET = 20; private MDIDesktopManager manager; public MDIDesktopPane() { manager = new MDIDesktopManager(this); setDesktopManager(manager); setDragMode(JDesktopPane.OUTLINE_DRAG_MODE); } public void setBounds(int x, int y, int w, int h) { super.setBounds(x, y, w, h); checkDesktopSize(); } public Component add(JInternalFrame frame) { JInternalFrame[] array = getAllFrames(); Point p; int w; int h; Component retval = super.add(frame); checkDesktopSize(); if (array.length > 0) { p = array[0].getLocation(); p.x = p.x + FRAME_OFFSET; p.y = p.y + FRAME_OFFSET; } else { p = new Point(0, 0); } frame.setLocation(p.x, p.y); if (frame.isResizable()) { w = getWidth() - (getWidth() / 8); h = getHeight() - (getHeight() / 8); if (w < frame.getMinimumSize().getWidth()) w = (int) frame.getMinimumSize().getWidth(); if (h < frame.getMinimumSize().getHeight()) h = (int) frame.getMinimumSize().getHeight(); frame.setSize(w, h); } frame.show(); return retval; } public Component add(JInternalFrame frame, Dimensions dim, RenderOptions options) { JInternalFrame[] array = getAllFrames(); Point p; int w; int h; Component retval = 
super.add(frame); checkDesktopSize(); if (array.length > 0) { p = array[0].getLocation(); p.x = p.x + FRAME_OFFSET; p.y = p.y + FRAME_OFFSET; } else { p = new Point(0, 0); } frame.setLocation(p.x, p.y); if (frame.isResizable()) { w = getWidth() - (getWidth() / 8); h = getHeight() - (getHeight() / 8); if (w < frame.getMinimumSize().getWidth()) w = (int) frame.getMinimumSize().getWidth(); if (h < frame.getMinimumSize().getHeight()) h = (int) frame.getMinimumSize().getHeight(); frame.setSize(w, h); } if (dim != null) { frame.setSize(dim.getWidth(), dim.getHeight()); } setRenderOptions(frame, options);; frame.show(); return retval; } public void show(JDialog dialog, Dimensions dim, RenderOptions options) { JInternalFrame[] array = getAllFrames(); Point p; checkDesktopSize(); if (array.length > 0) { p = array[0].getLocation(); p.x = p.x + FRAME_OFFSET; p.y = p.y + FRAME_OFFSET; } else { p = new Point(0, 0); } dialog.setLocation(p.x, p.y); if (dim != null) { dialog.setSize(dim.getWidth(), dim.getHeight()); } setRenderOptions(dialog, options); dialog.setVisible(true); } private void setRenderOptions(JInternalFrame frame, RenderOptions options) { if (options != null) { if (options.isCentered()) { // Determine the new location of the window Dimension paneSize = this.getSize(); Dimension frameSize = frame.getSize(); frame.setLocation( (paneSize.width / 2) - (frameSize.width / 2), (paneSize.height / 2) - (frameSize.height / 2)); } if (options.isMaximized()) { try { frame.setMaximum(true); } catch (Exception e) { } } } } private void setRenderOptions(JDialog dialog, RenderOptions options) { if (options != null) { if (options.isCentered()) { // Determine the new location of the window Dimension paneSize = this.getSize(); Dimension dialogSize = dialog.getSize(); dialog.setLocation( (paneSize.width / 2) - (dialogSize.width / 2), (paneSize.height / 2) - (dialogSize.height / 2)); } if (options.isMaximized()) { try { dialog.setSize(this.getSize()); } catch (Exception e) { } } } } 
public void remove(Component c) { super.remove(c); checkDesktopSize(); } /** * Cascade all internal frames */ public void cascadeFrames() { int x = 0; int y = 0; JInternalFrame allFrames[] = getAllFrames(); manager.setNormalSize(); int frameHeight = (getBounds().height - 5) - allFrames.length * FRAME_OFFSET; int frameWidth = (getBounds().width - 5) - allFrames.length * FRAME_OFFSET; for (int i = allFrames.length - 1; i >= 0; i--) { allFrames[i].setSize(frameWidth, frameHeight); allFrames[i].setLocation(x, y); x = x + FRAME_OFFSET; y = y + FRAME_OFFSET; } } /** * Tile all internal frames */ public void tileFrames() { java.awt.Component allFrames[] = getAllFrames(); manager.setNormalSize(); int frameHeight = getBounds().height / allFrames.length; int y = 0; for (int i = 0; i < allFrames.length; i++) { allFrames[i].setSize(getBounds().width, frameHeight); allFrames[i].setLocation(0, y); y = y + frameHeight; } } /** * Sets all component size properties ( maximum, minimum, preferred) to the * given dimension. */ public void setAllSize(Dimension d) { setMinimumSize(d); setMaximumSize(d); setPreferredSize(d); } /** * Sets all component size properties ( maximum, minimum, preferred) to the * given width and height. */ public void setAllSize(int width, int height) { setAllSize(new Dimension(width, height)); } private void checkDesktopSize() { if (getParent() != null && isVisible()) manager.resizeDesktop(); } } /** * Private class used to replace the standard DesktopManager for JDesktopPane. * Used to provide scrollbar functionality. 
*/ class MDIDesktopManager extends DefaultDesktopManager { private MDIDesktopPane desktop; public MDIDesktopManager(MDIDesktopPane desktop) { this.desktop = desktop; } public void endResizingFrame(JComponent f) { super.endResizingFrame(f); resizeDesktop(); } public void endDraggingFrame(JComponent f) { super.endDraggingFrame(f); resizeDesktop(); } public void setNormalSize() { JScrollPane scrollPane = getScrollPane(); int x = 0; int y = 0; if (scrollPane != null) { Dimension d = scrollPane.getVisibleRect().getSize(); if (scrollPane.getBorder() != null) { Insets scrollInsets = getScrollPaneInsets(); d.setSize(d.getWidth() - scrollInsets.left - scrollInsets.right, d.getHeight() - scrollInsets.top - scrollInsets.bottom); } d.setSize(d.getWidth() - 20, d.getHeight() - 20); desktop.setAllSize(x, y); scrollPane.invalidate(); scrollPane.validate(); } } private Insets getScrollPaneInsets() { JScrollPane scrollPane = getScrollPane(); if (scrollPane == null) { return new Insets(0, 0, 0, 0); } Border border = getScrollPane().getBorder(); if (border == null) { return new Insets(0, 0, 0, 0); } return border.getBorderInsets(scrollPane); } private JScrollPane getScrollPane() { if (desktop.getParent() instanceof JViewport) { JViewport viewPort = (JViewport) desktop.getParent(); if (viewPort.getParent() instanceof JScrollPane) return (JScrollPane) viewPort.getParent(); } return null; } protected void resizeDesktop() { int x = 0; int y = 0; JScrollPane scrollPane = getScrollPane(); if (scrollPane != null) { JInternalFrame allFrames[] = desktop.getAllFrames(); for (int i = 0; i < allFrames.length; i++) { if (allFrames[i].getX() + allFrames[i].getWidth() > x) { x = allFrames[i].getX() + allFrames[i].getWidth(); } if (allFrames[i].getY() + allFrames[i].getHeight() > y) { y = allFrames[i].getY() + allFrames[i].getHeight(); } } Dimension d = scrollPane.getVisibleRect().getSize(); if (scrollPane.getBorder() != null) { Insets scrollInsets = getScrollPaneInsets(); d.setSize(d.getWidth() - 
scrollInsets.left - scrollInsets.right, d.getHeight() - scrollInsets.top - scrollInsets.bottom); } if (x <= d.getWidth()) x = ((int) d.getWidth()) - 20; if (y <= d.getHeight()) y = ((int) d.getHeight()) - 20; desktop.setAllSize(x, y); scrollPane.invalidate(); scrollPane.validate(); } } }
cagrid-1-0/caGrid/projects/grape/src/org/cagrid/grape/MDIDesktopPane.java
package org.cagrid.grape; import java.awt.Component; import java.awt.Dimension; import java.awt.Insets; import java.awt.Point; import javax.swing.DefaultDesktopManager; import javax.swing.JComponent; import javax.swing.JDesktopPane; import javax.swing.JDialog; import javax.swing.JInternalFrame; import javax.swing.JScrollPane; import javax.swing.JViewport; import javax.swing.border.Border; import org.cagrid.grape.model.Dimensions; import org.cagrid.grape.model.RenderOptions; /** * @author <A href="mailto:[email protected]">Stephen Langella </A> * @author <A href="mailto:[email protected]">Scott Oster </A> * @author <A href="mailto:[email protected]">Shannon Hastings </A> * @created Oct 14, 2004 * @version $Id: ArgumentManagerTable.java,v 1.2 2004/10/15 16:35:16 langella * Exp $ */ public class MDIDesktopPane extends JDesktopPane { private static int FRAME_OFFSET = 20; private MDIDesktopManager manager; public MDIDesktopPane() { manager = new MDIDesktopManager(this); setDesktopManager(manager); setDragMode(JDesktopPane.OUTLINE_DRAG_MODE); } public void setBounds(int x, int y, int w, int h) { super.setBounds(x, y, w, h); checkDesktopSize(); } public Component add(JInternalFrame frame) { JInternalFrame[] array = getAllFrames(); Point p; int w; int h; Component retval = super.add(frame); checkDesktopSize(); if (array.length > 0) { p = array[0].getLocation(); p.x = p.x + FRAME_OFFSET; p.y = p.y + FRAME_OFFSET; } else { p = new Point(0, 0); } frame.setLocation(p.x, p.y); if (frame.isResizable()) { w = getWidth() - (getWidth() / 8); h = getHeight() - (getHeight() / 8); if (w < frame.getMinimumSize().getWidth()) w = (int) frame.getMinimumSize().getWidth(); if (h < frame.getMinimumSize().getHeight()) h = (int) frame.getMinimumSize().getHeight(); frame.setSize(w, h); } frame.show(); return retval; } public Component add(JInternalFrame frame, Dimensions dim, RenderOptions options) { JInternalFrame[] array = getAllFrames(); Point p; Component retval = super.add(frame); 
checkDesktopSize(); if (array.length > 0) { p = array[0].getLocation(); p.x = p.x + FRAME_OFFSET; p.y = p.y + FRAME_OFFSET; } else { p = new Point(0, 0); } frame.setLocation(p.x, p.y); if (dim != null) { frame.setSize(dim.getWidth(), dim.getHeight()); } setRenderOptions(frame, options); frame.show(); return retval; } public void show(JDialog dialog, Dimensions dim, RenderOptions options) { JInternalFrame[] array = getAllFrames(); Point p; checkDesktopSize(); if (array.length > 0) { p = array[0].getLocation(); p.x = p.x + FRAME_OFFSET; p.y = p.y + FRAME_OFFSET; } else { p = new Point(0, 0); } dialog.setLocation(p.x, p.y); if (dim != null) { dialog.setSize(dim.getWidth(), dim.getHeight()); } setRenderOptions(dialog, options); dialog.setVisible(true); } private void setRenderOptions(JInternalFrame frame, RenderOptions options) { if (options != null) { if (options.isCentered()) { // Determine the new location of the window Dimension paneSize = this.getSize(); Dimension frameSize = frame.getSize(); frame.setLocation( (paneSize.width / 2) - (frameSize.width / 2), (paneSize.height / 2) - (frameSize.height / 2)); } if (options.isMaximized()) { try { frame.setMaximum(true); } catch (Exception e) { } } } } private void setRenderOptions(JDialog dialog, RenderOptions options) { if (options != null) { if (options.isCentered()) { // Determine the new location of the window Dimension paneSize = this.getSize(); Dimension dialogSize = dialog.getSize(); dialog.setLocation( (paneSize.width / 2) - (dialogSize.width / 2), (paneSize.height / 2) - (dialogSize.height / 2)); } if (options.isMaximized()) { try { dialog.setSize(this.getSize()); } catch (Exception e) { } } } } public void remove(Component c) { super.remove(c); checkDesktopSize(); } /** * Cascade all internal frames */ public void cascadeFrames() { int x = 0; int y = 0; JInternalFrame allFrames[] = getAllFrames(); manager.setNormalSize(); int frameHeight = (getBounds().height - 5) - allFrames.length * FRAME_OFFSET; int 
frameWidth = (getBounds().width - 5) - allFrames.length * FRAME_OFFSET; for (int i = allFrames.length - 1; i >= 0; i--) { allFrames[i].setSize(frameWidth, frameHeight); allFrames[i].setLocation(x, y); x = x + FRAME_OFFSET; y = y + FRAME_OFFSET; } } /** * Tile all internal frames */ public void tileFrames() { java.awt.Component allFrames[] = getAllFrames(); manager.setNormalSize(); int frameHeight = getBounds().height / allFrames.length; int y = 0; for (int i = 0; i < allFrames.length; i++) { allFrames[i].setSize(getBounds().width, frameHeight); allFrames[i].setLocation(0, y); y = y + frameHeight; } } /** * Sets all component size properties ( maximum, minimum, preferred) to the * given dimension. */ public void setAllSize(Dimension d) { setMinimumSize(d); setMaximumSize(d); setPreferredSize(d); } /** * Sets all component size properties ( maximum, minimum, preferred) to the * given width and height. */ public void setAllSize(int width, int height) { setAllSize(new Dimension(width, height)); } private void checkDesktopSize() { if (getParent() != null && isVisible()) manager.resizeDesktop(); } } /** * Private class used to replace the standard DesktopManager for JDesktopPane. * Used to provide scrollbar functionality. 
*/ class MDIDesktopManager extends DefaultDesktopManager { private MDIDesktopPane desktop; public MDIDesktopManager(MDIDesktopPane desktop) { this.desktop = desktop; } public void endResizingFrame(JComponent f) { super.endResizingFrame(f); resizeDesktop(); } public void endDraggingFrame(JComponent f) { super.endDraggingFrame(f); resizeDesktop(); } public void setNormalSize() { JScrollPane scrollPane = getScrollPane(); int x = 0; int y = 0; if (scrollPane != null) { Dimension d = scrollPane.getVisibleRect().getSize(); if (scrollPane.getBorder() != null) { Insets scrollInsets = getScrollPaneInsets(); d.setSize(d.getWidth() - scrollInsets.left - scrollInsets.right, d.getHeight() - scrollInsets.top - scrollInsets.bottom); } d.setSize(d.getWidth() - 20, d.getHeight() - 20); desktop.setAllSize(x, y); scrollPane.invalidate(); scrollPane.validate(); } } private Insets getScrollPaneInsets() { JScrollPane scrollPane = getScrollPane(); if (scrollPane == null) { return new Insets(0, 0, 0, 0); } Border border = getScrollPane().getBorder(); if (border == null) { return new Insets(0, 0, 0, 0); } return border.getBorderInsets(scrollPane); } private JScrollPane getScrollPane() { if (desktop.getParent() instanceof JViewport) { JViewport viewPort = (JViewport) desktop.getParent(); if (viewPort.getParent() instanceof JScrollPane) return (JScrollPane) viewPort.getParent(); } return null; } protected void resizeDesktop() { int x = 0; int y = 0; JScrollPane scrollPane = getScrollPane(); if (scrollPane != null) { JInternalFrame allFrames[] = desktop.getAllFrames(); for (int i = 0; i < allFrames.length; i++) { if (allFrames[i].getX() + allFrames[i].getWidth() > x) { x = allFrames[i].getX() + allFrames[i].getWidth(); } if (allFrames[i].getY() + allFrames[i].getHeight() > y) { y = allFrames[i].getY() + allFrames[i].getHeight(); } } Dimension d = scrollPane.getVisibleRect().getSize(); if (scrollPane.getBorder() != null) { Insets scrollInsets = getScrollPaneInsets(); d.setSize(d.getWidth() - 
scrollInsets.left - scrollInsets.right, d.getHeight() - scrollInsets.top - scrollInsets.bottom); } if (x <= d.getWidth()) x = ((int) d.getWidth()) - 20; if (y <= d.getHeight()) y = ((int) d.getHeight()) - 20; desktop.setAllSize(x, y); scrollPane.invalidate(); scrollPane.validate(); } } }
*** empty log message ***
cagrid-1-0/caGrid/projects/grape/src/org/cagrid/grape/MDIDesktopPane.java
*** empty log message ***
<ide><path>agrid-1-0/caGrid/projects/grape/src/org/cagrid/grape/MDIDesktopPane.java <ide> public Component add(JInternalFrame frame, Dimensions dim, RenderOptions options) { <ide> JInternalFrame[] array = getAllFrames(); <ide> Point p; <add> int w; <add> int h; <ide> Component retval = super.add(frame); <ide> checkDesktopSize(); <ide> if (array.length > 0) { <ide> p = new Point(0, 0); <ide> } <ide> frame.setLocation(p.x, p.y); <add> if (frame.isResizable()) { <add> w = getWidth() - (getWidth() / 8); <add> h = getHeight() - (getHeight() / 8); <add> if (w < frame.getMinimumSize().getWidth()) <add> w = (int) frame.getMinimumSize().getWidth(); <add> if (h < frame.getMinimumSize().getHeight()) <add> h = (int) frame.getMinimumSize().getHeight(); <add> frame.setSize(w, h); <add> } <ide> if (dim != null) { <ide> frame.setSize(dim.getWidth(), dim.getHeight()); <ide> } <del> setRenderOptions(frame, options); <add> setRenderOptions(frame, options);; <ide> frame.show(); <ide> return retval; <ide> }
Java
apache-2.0
error: pathspec 'gemma-core/src/main/resources/ubic/gemma/javaspaces/gigaspaces/LoggingEntry.java' did not match any file(s) known to git
33090a90edd78324ea5572f0e2d0956f3e41cea5
1
ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma
/* * The Gemma project * * Copyright (c) 2007 University of British Columbia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ubic.gemma.javaspaces.gigaspaces; import net.jini.core.entry.Entry; /** * @author keshav * @version $Id$ */ public class LoggingEntry implements Entry { /** * */ private static final long serialVersionUID = 1L; String message = null; /** * * */ public LoggingEntry() { } /** * @param message */ public LoggingEntry( String message ) { this.message = message; } }
gemma-core/src/main/resources/ubic/gemma/javaspaces/gigaspaces/LoggingEntry.java
a logging entry "template"
gemma-core/src/main/resources/ubic/gemma/javaspaces/gigaspaces/LoggingEntry.java
a logging entry "template"
<ide><path>emma-core/src/main/resources/ubic/gemma/javaspaces/gigaspaces/LoggingEntry.java <add>/* <add> * The Gemma project <add> * <add> * Copyright (c) 2007 University of British Columbia <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. <add> * <add> */ <add>package ubic.gemma.javaspaces.gigaspaces; <add> <add>import net.jini.core.entry.Entry; <add> <add>/** <add> * @author keshav <add> * @version $Id$ <add> */ <add>public class LoggingEntry implements Entry { <add> <add> /** <add> * <add> */ <add> private static final long serialVersionUID = 1L; <add> String message = null; <add> <add> /** <add> * <add> * <add> */ <add> public LoggingEntry() { <add> <add> } <add> <add> /** <add> * @param message <add> */ <add> public LoggingEntry( String message ) { <add> this.message = message; <add> } <add> <add>}
Java
apache-2.0
de9aae5d433d64f8c3c36cb58874bb8a849033c1
0
etirelli/jbpm-form-modeler,mbiarnes/jbpm-form-modeler,mbiarnes/jbpm-form-modeler,porcelli-forks/jbpm-form-modeler,droolsjbpm/jbpm-form-modeler,mbiarnes/jbpm-form-modeler,droolsjbpm/jbpm-form-modeler,baldimir/jbpm-form-modeler,baldimir/jbpm-form-modeler,porcelli-forks/jbpm-form-modeler,etirelli/jbpm-form-modeler,porcelli-forks/jbpm-form-modeler,droolsjbpm/jbpm-form-modeler,baldimir/jbpm-form-modeler,etirelli/jbpm-form-modeler
/* * Copyright 2012 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.uberfire.backend.server.impl; import java.net.URI; import javax.annotation.PostConstruct; import javax.enterprise.context.ApplicationScoped; import javax.enterprise.inject.Produces; import javax.inject.Inject; import javax.inject.Named; import org.kie.commons.io.IOService; import org.kie.commons.io.impl.IOServiceDotFileImpl; import org.kie.commons.java.nio.file.FileSystem; import org.kie.commons.java.nio.file.FileSystemAlreadyExistsException; import org.uberfire.backend.repositories.Repository; import org.uberfire.backend.repositories.RepositoryService; import org.kie.commons.services.cdi.Startup; @ApplicationScoped @Startup public class AppSetup { //private static final String REPO_PLAYGROUND = "jbpm-playground"; //private static final String ORIGIN_URL = "https://github.com/guvnorngtestuser1/jbpm-console-ng-playground.git"; private static final String REPO_PLAYGROUND = "uf-playground"; private static final String ORIGIN_URL = "https://github.com/wmedvede/guvnorng-playground.git"; private final IOService ioService = new IOServiceDotFileImpl(); @Produces @Named("ioStrategy") public IOService ioService() { return ioService; } @Inject private RepositoryService repositoryService; @PostConstruct public void onStartup() { Repository repository = repositoryService.getRepository(REPO_PLAYGROUND); if(repository == null) { /* final String userName = "guvnorngtestuser1"; final String password = 
"test1234"; */ final String userName = "wmedvede"; final String password = "med0077"; repositoryService.cloneRepository("git", REPO_PLAYGROUND, ORIGIN_URL, userName, password); repository = repositoryService.getRepository(REPO_PLAYGROUND); } try { ioService.newFileSystem(URI.create(repository.getUri()), repository.getEnvironment()); } catch (FileSystemAlreadyExistsException e) { ioService.getFileSystem(URI.create(repository.getUri())); } } }
jbpm-form-modeler-showcase/src/main/java/org/uberfire/backend/server/impl/AppSetup.java
/* * Copyright 2012 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.uberfire.backend.server.impl; import java.net.URI; import javax.annotation.PostConstruct; import javax.enterprise.context.ApplicationScoped; import javax.enterprise.inject.Produces; import javax.inject.Inject; import javax.inject.Named; import org.kie.commons.io.IOService; import org.kie.commons.io.impl.IOServiceDotFileImpl; import org.kie.commons.java.nio.file.FileSystem; import org.kie.commons.java.nio.file.FileSystemAlreadyExistsException; import org.uberfire.backend.repositories.Repository; import org.uberfire.backend.repositories.RepositoryService; import org.kie.commons.services.cdi.Startup; @ApplicationScoped @Startup public class AppSetup { //private static final String REPO_PLAYGROUND = "jbpm-playground"; //private static final String ORIGIN_URL = "https://github.com/guvnorngtestuser1/jbpm-console-ng-playground.git"; private static final String REPO_PLAYGROUND = "uf-playground"; private static final String ORIGIN_URL = "https://github.com/nmiraschTest/guvnorng-playground.git"; private final IOService ioService = new IOServiceDotFileImpl(); @Produces @Named("ioStrategy") public IOService ioService() { return ioService; } @Inject private RepositoryService repositoryService; @PostConstruct public void onStartup() { Repository repository = repositoryService.getRepository(REPO_PLAYGROUND); if(repository == null) { /* final String userName = "guvnorngtestuser1"; final String password = 
"test1234"; */ final String userName = "nmiraschTest"; final String password = "test1234"; repositoryService.cloneRepository("git", REPO_PLAYGROUND, ORIGIN_URL, userName, password); repository = repositoryService.getRepository(REPO_PLAYGROUND); } try { ioService.newFileSystem(URI.create(repository.getUri()), repository.getEnvironment()); } catch (FileSystemAlreadyExistsException e) { ioService.getFileSystem(URI.create(repository.getUri())); } } }
changed to a wellformed guvnor_repo
jbpm-form-modeler-showcase/src/main/java/org/uberfire/backend/server/impl/AppSetup.java
changed to a wellformed guvnor_repo
<ide><path>bpm-form-modeler-showcase/src/main/java/org/uberfire/backend/server/impl/AppSetup.java <ide> //private static final String REPO_PLAYGROUND = "jbpm-playground"; <ide> //private static final String ORIGIN_URL = "https://github.com/guvnorngtestuser1/jbpm-console-ng-playground.git"; <ide> private static final String REPO_PLAYGROUND = "uf-playground"; <del> private static final String ORIGIN_URL = "https://github.com/nmiraschTest/guvnorng-playground.git"; <add> private static final String ORIGIN_URL = "https://github.com/wmedvede/guvnorng-playground.git"; <ide> <ide> <ide> private final IOService ioService = new IOServiceDotFileImpl(); <ide> final String password = "test1234"; <ide> */ <ide> <del> final String userName = "nmiraschTest"; <del> final String password = "test1234"; <add> final String userName = "wmedvede"; <add> final String password = "med0077"; <ide> <ide> repositoryService.cloneRepository("git", REPO_PLAYGROUND, ORIGIN_URL, userName, password); <ide> repository = repositoryService.getRepository(REPO_PLAYGROUND);
Java
agpl-3.0
820f2f52a84b2077138e9d287803ff2a77089fa4
0
fviale/programming,PaulKh/scale-proactive,lpellegr/programming,mnip91/programming-multiactivities,acontes/scheduling,acontes/scheduling,lpellegr/programming,fviale/programming,ow2-proactive/programming,acontes/programming,paraita/programming,mnip91/programming-multiactivities,acontes/scheduling,paraita/programming,PaulKh/scale-proactive,jrochas/scale-proactive,paraita/programming,paraita/programming,lpellegr/programming,acontes/programming,ow2-proactive/programming,acontes/scheduling,PaulKh/scale-proactive,mnip91/proactive-component-monitoring,jrochas/scale-proactive,acontes/programming,PaulKh/scale-proactive,mnip91/proactive-component-monitoring,lpellegr/programming,jrochas/scale-proactive,fviale/programming,fviale/programming,PaulKh/scale-proactive,fviale/programming,lpellegr/programming,mnip91/proactive-component-monitoring,paraita/programming,jrochas/scale-proactive,acontes/scheduling,mnip91/programming-multiactivities,mnip91/proactive-component-monitoring,mnip91/proactive-component-monitoring,jrochas/scale-proactive,ow2-proactive/programming,acontes/programming,mnip91/programming-multiactivities,PaulKh/scale-proactive,acontes/programming,ow2-proactive/programming,ow2-proactive/programming,lpellegr/programming,fviale/programming,acontes/scheduling,jrochas/scale-proactive,paraita/programming,acontes/programming,acontes/scheduling,acontes/programming,mnip91/programming-multiactivities,jrochas/scale-proactive,PaulKh/scale-proactive,mnip91/programming-multiactivities,mnip91/proactive-component-monitoring,ow2-proactive/programming
package org.objectweb.proactive.ic2d.gui.jobmonitor; import java.awt.Component; import javax.swing.Icon; import javax.swing.JTree; import javax.swing.tree.DefaultTreeCellRenderer; import org.objectweb.proactive.ic2d.gui.jobmonitor.data.DataTreeNode; public class JobMonitorTreeCellRenderer extends DefaultTreeCellRenderer implements JobMonitorConstants { public Component getTreeCellRendererComponent (JTree tree, Object value, boolean sel, boolean expanded, boolean leaf, int row,boolean hasFocus) { super.getTreeCellRendererComponent (tree, value, sel, expanded, leaf, row, hasFocus); DataTreeNode currentNode = (DataTreeNode) value; int key = currentNode.getKey(); Icon icon = Icons.getIconForKey(key); if (icon != null) setIcon (icon); return this; } }
src/org/objectweb/proactive/ic2d/gui/jobmonitor/JobMonitorTreeCellRenderer.java
package org.objectweb.proactive.ic2d.gui.jobmonitor; import java.awt.Component; import javax.swing.*; import javax.swing.tree.DefaultTreeCellRenderer; import org.objectweb.proactive.ic2d.gui.jobmonitor.data.DataTreeNode; public class JobMonitorTreeCellRenderer extends DefaultTreeCellRenderer implements JobMonitorConstants { private static final String IMAGES_DIRECTORY = "images/"; private static final String NODE_ICON_GIF = "node_icon.gif"; private static final String VN_ICON_GIF = "vn_icon.gif"; private static final String JVM_ICON_GIF = "jvm_icon.gif"; private static final String AO_ICON_GIF = "ao_icon.gif"; private static final String JOB_ICON_GIF = "job_icon.gif"; private static final String HOST_ICON_GIF = "host_icon.gif"; private static final String NODE_ICON = IMAGES_DIRECTORY + NODE_ICON_GIF; private static final String VN_ICON = IMAGES_DIRECTORY + VN_ICON_GIF; private static final String JVM_ICON = IMAGES_DIRECTORY + JVM_ICON_GIF; private static final String AO_ICON = IMAGES_DIRECTORY + AO_ICON_GIF; private static final String JOB_ICON = IMAGES_DIRECTORY + JOB_ICON_GIF; private static final String HOST_ICON = IMAGES_DIRECTORY + HOST_ICON_GIF; private static Icon job, host, jvm, vn, node, ao; public JobMonitorTreeCellRenderer () { if (job == null) { host = createImageIcon (HOST_ICON); job = createImageIcon (JOB_ICON); ao = createImageIcon (AO_ICON); jvm = createImageIcon (JVM_ICON); vn = createImageIcon (VN_ICON); node = createImageIcon (NODE_ICON); } } public ImageIcon createImageIcon (String path) { java.net.URL imgURL = JobMonitorPanel.class.getResource (path); if (imgURL != null) return new ImageIcon (imgURL); else { // System.err.println ("Couldn't find file: " + path); return null; } } public Component getTreeCellRendererComponent (JTree tree, Object value, boolean sel, boolean expanded, boolean leaf, int row,boolean hasFocus) { super.getTreeCellRendererComponent (tree, value, sel, expanded, leaf, row, hasFocus); DataTreeNode currentNode = 
(DataTreeNode) value; int key = currentNode.getKey(); Icon icon = Icons.getIconForKey(key); if (icon != null) setIcon (icon); return this; } }
Cleanup now that we have Icons.java git-svn-id: 9146c88ff6d39b48099bf954d15d68f687b3fa69@1257 28e8926c-6b08-0410-baaa-805c5e19b8d6
src/org/objectweb/proactive/ic2d/gui/jobmonitor/JobMonitorTreeCellRenderer.java
Cleanup now that we have Icons.java
<ide><path>rc/org/objectweb/proactive/ic2d/gui/jobmonitor/JobMonitorTreeCellRenderer.java <ide> <ide> import java.awt.Component; <ide> <del>import javax.swing.*; <add>import javax.swing.Icon; <add>import javax.swing.JTree; <ide> import javax.swing.tree.DefaultTreeCellRenderer; <ide> <ide> import org.objectweb.proactive.ic2d.gui.jobmonitor.data.DataTreeNode; <ide> <ide> public class JobMonitorTreeCellRenderer extends DefaultTreeCellRenderer implements JobMonitorConstants <ide> { <del> private static final String IMAGES_DIRECTORY = "images/"; <del> <del> private static final String NODE_ICON_GIF = "node_icon.gif"; <del> private static final String VN_ICON_GIF = "vn_icon.gif"; <del> private static final String JVM_ICON_GIF = "jvm_icon.gif"; <del> private static final String AO_ICON_GIF = "ao_icon.gif"; <del> private static final String JOB_ICON_GIF = "job_icon.gif"; <del> private static final String HOST_ICON_GIF = "host_icon.gif"; <del> <del> private static final String NODE_ICON = IMAGES_DIRECTORY + NODE_ICON_GIF; <del> private static final String VN_ICON = IMAGES_DIRECTORY + VN_ICON_GIF; <del> private static final String JVM_ICON = IMAGES_DIRECTORY + JVM_ICON_GIF; <del> private static final String AO_ICON = IMAGES_DIRECTORY + AO_ICON_GIF; <del> private static final String JOB_ICON = IMAGES_DIRECTORY + JOB_ICON_GIF; <del> private static final String HOST_ICON = IMAGES_DIRECTORY + HOST_ICON_GIF; <del> <del> private static Icon job, host, jvm, vn, node, ao; <del> <del> public JobMonitorTreeCellRenderer () <del> { <del> if (job == null) <del> { <del> host = createImageIcon (HOST_ICON); <del> job = createImageIcon (JOB_ICON); <del> ao = createImageIcon (AO_ICON); <del> jvm = createImageIcon (JVM_ICON); <del> vn = createImageIcon (VN_ICON); <del> node = createImageIcon (NODE_ICON); <del> } <del> } <del> <del> public ImageIcon createImageIcon (String path) <del> { <del> java.net.URL imgURL = JobMonitorPanel.class.getResource (path); <del> if (imgURL != null) <del> return 
new ImageIcon (imgURL); <del> else <del> { <del>// System.err.println ("Couldn't find file: " + path); <del> return null; <del> } <del> } <del> <ide> public Component getTreeCellRendererComponent (JTree tree, Object value, boolean sel, boolean expanded, boolean leaf, int row,boolean hasFocus) <ide> { <ide> super.getTreeCellRendererComponent (tree, value, sel, expanded, leaf, row, hasFocus);
Java
apache-2.0
6d1b4083f9460d9ee33a48faf9381261fbe347ee
0
thymeleaf/thymeleaf,thymeleaf/thymeleaf
/* * ============================================================================= * * Copyright (c) 2011-2016, The THYMELEAF team (http://www.thymeleaf.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ============================================================================= */ package org.thymeleaf.standard.processor; import org.thymeleaf.templatemode.TemplateMode; /** * * @author Daniel Fern&aacute;ndez * * @since 3.0.0 * */ public final class StandardDOMEventAttributeTagProcessor extends AbstractStandardAttributeModifierTagProcessor { public static final int PRECEDENCE = 1000; // These attributes should be removed even if their value evaluates to null or empty string. // The reason why we don't let all these attributes to be processed by the default processor is that some other attribute // processors executing afterwards (e.g. th:field) might need attribute values already processed by these. 
public static final String[] ATTR_NAMES = new String[] { "onabort", "onafterprint", "onbeforeprint", "onbeforeunload", "onblur", "oncanplay", "oncanplaythrough", "onchange", "onclick", "oncontextmenu", "ondblclick", "ondrag", "ondragend", "ondragenter", "ondragleave", "ondragover", "ondragstart", "ondrop", "ondurationchange", "onemptied", "onended", "onerror", "onfocus", "onformchange", "onforminput", "onhashchange", "oninput", "oninvalid", "onkeydown", "onkeypress", "onkeyup", "onload", "onloadeddata", "onloadedmetadata", "onloadstart", "onmessage", "onmousedown", "onmousemove", "onmouseout", "onmouseover", "onmouseup", "onmousewheel", "onoffline", "ononline", "onpause", "onplay", "onplaying", "onpopstate", "onprogress", "onratechange", "onreadystatechange", "onredo", "onreset", "onresize", "onscroll", "onseeked", "onseeking", "onselect", "onshow", "onstalled", "onstorage", "onsubmit", "onsuspend", "ontimeupdate", "onundo", "onunload", "onvolumechange", "onwaiting" }; public StandardDOMEventAttributeTagProcessor(final String dialectPrefix, final String attrName) { super(TemplateMode.HTML, dialectPrefix, attrName, PRECEDENCE, true, true); } }
src/main/java/org/thymeleaf/standard/processor/StandardDOMEventAttributeTagProcessor.java
/* * ============================================================================= * * Copyright (c) 2011-2016, The THYMELEAF team (http://www.thymeleaf.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ============================================================================= */ package org.thymeleaf.standard.processor; import org.thymeleaf.templatemode.TemplateMode; /** * * @author Daniel Fern&aacute;ndez * * @since 3.0.0 * */ public final class StandardDOMEventAttributeTagProcessor extends AbstractStandardAttributeModifierTagProcessor { public static final int PRECEDENCE = 1000; // These attributes should be removed even if their value evaluates to null or empty string. // The reason why we don't let all these attributes to be processed by the default processor is that some other attribute // processors executing afterwards (e.g. th:field) might need attribute values already processed by these. 
public static final String[] ATTR_NAMES = new String[] { "onabort", "onafterprint", "onbeforeprint", "onbeforeunload", "onblur", "oncanplay", "oncanplaythrough", "onchange", "onclick", "oncontextmenu", "ondblclick", "ondrag", "ondragend", "ondragenter", "ondragleave", "ondragover", "ondragstart", "ondrop", "ondurationchange", "onemptied", "onended", "onerror", "onfocus", "onformchange", "onforminput", "onhashchange", "oninput", "oninvalid", "onkeydown", "onkeypress", "onkeyup", "onload", "onloadeddata", "onloadedmetadata", "onloadstart", "onmessage", "onmousedown", "onmousemove", "onmouseout", "onmouseover", "onmouseup", "onmousewheel", "onoffline", "ononline", "onpause", "onplay", "onplaying", "onpopstate", "onprogress", "onratechange", "onreadystatechange", "onredo", "onreset", "onresize", "onscroll", "onseeked", "onseeking", "onselect", "onshow", "onstalled", "onstorage", "onsubmit", "onsuspend", "ontimeupdate", "onundo", "onunload", "onvolumechange", "onwaiting" }; public StandardDOMEventAttributeTagProcessor(final String dialectPrefix, final String attrName) { super(TemplateMode.HTML, dialectPrefix, attrName, PRECEDENCE, true); } }
For #649 - Set the th:on* processors to use restricted expression evaluation mode
src/main/java/org/thymeleaf/standard/processor/StandardDOMEventAttributeTagProcessor.java
For #649 - Set the th:on* processors to use restricted expression evaluation mode
<ide><path>rc/main/java/org/thymeleaf/standard/processor/StandardDOMEventAttributeTagProcessor.java <ide> <ide> <ide> public StandardDOMEventAttributeTagProcessor(final String dialectPrefix, final String attrName) { <del> super(TemplateMode.HTML, dialectPrefix, attrName, PRECEDENCE, true); <add> super(TemplateMode.HTML, dialectPrefix, attrName, PRECEDENCE, true, true); <ide> } <ide> <ide>
Java
apache-2.0
error: pathspec 'src/main/java/org/openmhealth/dsu/controller/ApiController.java' did not match any file(s) known to git
c1336b4a96b1408884cb9339346b640066ff3da7
1
smalldatalab/omh-dsu,openmhealth/omh-dsu-ri,openmhealth/omh-dsu-ri,smalldatalab/omh-dsu,smalldatalab/omh-dsu,smalldatalab/omh-dsu
/* * Copyright 2014 Open mHealth * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openmhealth.dsu.controller; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; /** * A specialisation of the {@link Controller} annotation that controls the version number of the DSU API. * @author Emerson Farrugia */ @Controller @RequestMapping("/v2") public @interface ApiController { }
src/main/java/org/openmhealth/dsu/controller/ApiController.java
Add @Controller specialisation to control the API version.
src/main/java/org/openmhealth/dsu/controller/ApiController.java
Add @Controller specialisation to control the API version.
<ide><path>rc/main/java/org/openmhealth/dsu/controller/ApiController.java <add>/* <add> * Copyright 2014 Open mHealth <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. <add> */ <add> <add>package org.openmhealth.dsu.controller; <add> <add>import org.springframework.stereotype.Controller; <add>import org.springframework.web.bind.annotation.RequestMapping; <add> <add> <add>/** <add> * A specialisation of the {@link Controller} annotation that controls the version number of the DSU API. <add> * @author Emerson Farrugia <add> */ <add>@Controller <add>@RequestMapping("/v2") <add>public @interface ApiController { <add> <add>}
JavaScript
mit
522a1d303f3546daf313cb25754f62dde20cbeb8
0
uditalias/babel-plugin-remove-imports
/* * Babel plugin to remove unwanted `import` declarations when building * packages with babel transforms. * * PLEASE USE WITH CAUTION and check your RegExp expressions carefully. */ module.exports = function (regex) { return function(babel) { return new babel.Transformer('babel-plugin-remove-imports', { ImportDeclaration: function (node, parent) { //node.source.value contains the import declaration var text = node.source.value; //convert regex to Array of regexp if we have a single item if(!(regex instanceof Array)) { regex = [regex]; } //iterate over all regexps to find a truthy one, //when found, remove the `import` node from the code for(var i = 0, len = regex.length; i < len; i++) { if(isRegexExpressionTruthy(text, regex[i])) { this.dangerouslyRemove(); break; } } } }); } }; //check whether the text (import declaration) matches the given regex function isRegexExpressionTruthy(text, regex) { return (regex instanceof RegExp) ? regex.test(text) : false; }
index.js
/* * Babel plugin to remove unwanted `import` declarations when building * packages with babel transforms. * * PLEASE USE WITH COUTION and check your RegExp expressions carefully. */ module.exports = function (regex) { return function(babel) { return new babel.Transformer('babel-plugin-remove-imports', { ImportDeclaration: function (node, parent) { //node.source.value contains the import declaration var text = node.source.value; //convert regex to Array of regexp if we have a single item if(!(regex instanceof Array)) { regex = [regex]; } //iterate over all regexps to find a truthy one, //when found, remove the `import` node from the code for(var i = 0, len = regex.length; i < len; i++) { if(isRegexExpressionTruthy(text, regex[i])) { this.dangerouslyRemove(); break; } } } }); } }; //check whether the text (import declaration) is matches the given regex function isRegexExpressionTruthy(text, regex) { return (regex instanceof RegExp) ? regex.test(text) : false; }
Typo fixes
index.js
Typo fixes
<ide><path>ndex.js <ide> * Babel plugin to remove unwanted `import` declarations when building <ide> * packages with babel transforms. <ide> * <del> * PLEASE USE WITH COUTION and check your RegExp expressions carefully. <add> * PLEASE USE WITH CAUTION and check your RegExp expressions carefully. <ide> */ <ide> <ide> module.exports = function (regex) { <ide> } <ide> }; <ide> <del>//check whether the text (import declaration) is matches the given regex <add>//check whether the text (import declaration) matches the given regex <ide> function isRegexExpressionTruthy(text, regex) { <ide> return (regex instanceof RegExp) ? regex.test(text) : false; <ide> }
JavaScript
bsd-3-clause
6f577a792ed74a16c1c55c3c0059411b447eafc9
0
peterborkuti/alloy-ui,zxdgoal/alloy-ui,zsagia/alloy-ui,ipeychev/alloy-ui,mthadley/alloy-ui,fernandosouza/alloy-ui,pei-jung/alloy-ui,inacionery/alloy-ui,ampratt/alloy-ui,henvic/alloy-ui,pei-jung/alloy-ui,fernandosouza/alloy-ui,mairatma/alloy-ui,thiago-rocha/alloy-ui,zsigmondrab/alloy-ui,thiago-rocha/alloy-ui,4talesa/alloy-ui,drewbrokke/alloy-ui,4talesa/alloy-ui,ambrinchaudhary/alloy-ui,modulexcite/alloy-ui,ericyanLr/alloy-ui,mairatma/alloy-ui,drewbrokke/alloy-ui,zenorocha/alloy-ui,antoniopol06/alloy-ui,eduardolundgren/alloy-ui,ambrinchaudhary/alloy-ui,peterborkuti/alloy-ui,zxdgoal/alloy-ui,bryceosterhaus/alloy-ui,bryceosterhaus/alloy-ui,zsagia/alloy-ui,Khamull/alloy-ui,seedtigo/alloy-ui,ampratt/alloy-ui,Khamull/alloy-ui,westonhancock/alloy-ui,mthadley/alloy-ui,samanzanedo/alloy-ui,jonathanmccann/alloy-ui,dsanz/alloy-ui,henvic/alloy-ui,antoniopol06/alloy-ui,zenorocha/alloy-ui,zsigmondrab/alloy-ui,ericchin/alloy-ui,BryanEngler/alloy-ui,ipeychev/alloy-ui,dsanz/alloy-ui,crimago/alloy-ui,dsanz/alloy-ui,inacionery/alloy-ui,seedtigo/alloy-ui,ericchin/alloy-ui,modulexcite/alloy-ui,BryanEngler/alloy-ui,brianchandotcom/alloy-ui,ericyanLr/alloy-ui,Preston-Crary/alloy-ui,crimago/alloy-ui,dsanz/alloy-ui,westonhancock/alloy-ui,Preston-Crary/alloy-ui,jonathanmccann/alloy-ui
/** * The Diagram Builder Connector * * @module aui-diagram-builder * @submodule aui-diagram-builder-connector */ var Lang = A.Lang, isArray = Lang.isArray, isBoolean = Lang.isBoolean, isObject = Lang.isObject, isString = Lang.isString, AArray = A.Array, // The first Bernstein basis polynomials (n=3), // http://en.wikipedia.org/wiki/B%C3%A9zier_curve The t in the function for // a linear Bézier curve can be thought of as describing how far B(t) is // from P0 to P1. For example when t=0.25, B(t) is one quarter of the way // from point P0 to P1. As t varies from 0 to 1, B(t) describes a straight // line from P0 to P1. B1 = function(t) { return (t * t * t); }, B2 = function(t) { return (3 * t * t * (1 - t)); }, B3 = function(t) { return (3 * t * (1 - t) * (1 - t)); }, B4 = function(t) { return ((1 - t) * (1 - t) * (1 - t)); }, // Find a Cubic Bezier point based on the control points. Consider the first // two control points as the start and end point respectively. getCubicBezier = function(t, startPos, endPos, cp1, cp2) { var x = startPos[0] * B1(t) + cp1[0] * B2(t) + cp2[0] * B3(t) + endPos[0] * B4(t); var y = startPos[1] * B1(t) + cp1[1] * B2(t) + cp2[1] * B3(t) + endPos[1] * B4(t); return [x, y]; }, isGraphic = function(val) { return A.instanceOf(val, A.Graphic); }, toDegrees = function(angleRadians) { return angleRadians * 180 / Math.PI; }, sign = function(x) { return x === 0 ? 0 : (x < 0 ? 
-1 : 1); }, ARROW_POINTS = 'arrowPoints', BUILDER = 'builder', CLICK = 'click', COLOR = 'color', CONNECTOR = 'connector', FILL = 'fill', GRAPHIC = 'graphic', LAZY_DRAW = 'lazyDraw', MOUSEENTER = 'mouseenter', MOUSELEAVE = 'mouseleave', NAME = 'name', NODE_NAME = 'nodeName', P1 = 'p1', P2 = 'p2', PATH = 'path', SELECTED = 'selected', SHAPE = 'shape', SHAPE_ARROW = 'shapeArrow', SHAPE_ARROW_HOVER = 'shapeArrowHover', SHAPE_ARROW_SELECTED = 'shapeArrowSelected', SHAPE_HOVER = 'shapeHover', SHAPE_SELECTED = 'shapeSelected', SHOW_NAME = 'showName', STROKE = 'stroke', VISIBLE = 'visible', getCN = A.getClassName, CSS_DIAGRAM_BUILDER_CONNECTOR_NAME = getCN('diagram', 'builder', 'connector', 'name'), CSS_HIDE = getCN('hide'); A.PolygonUtil = { ARROW_POINTS: [ [-12, -6], [-8, 0], [-12, 6], [6, 0] ], drawArrow: function(shape, x1, y1, x2, y2, arrowPoints) { var instance = this; var angle = Math.atan2(y2 - y1, x2 - x1); shape.moveTo(x2, y2); // Slide the arrow position along the line in 5px in polar coordinates x2 = x2 - 5 * Math.cos(angle); y2 = y2 - 5 * Math.sin(angle); instance.drawPolygon( shape, instance.translatePoints(instance.rotatePoints(arrowPoints || instance.ARROW_POINTS, angle), x2, y2) ); }, drawPolygon: function(shape, points) { var instance = this; shape.moveTo(points[0][0], points[0][1]); AArray.each(points, function(p, i) { if (i > 0) { shape.lineTo(points[i][0], points[i][1]); } }); shape.lineTo(points[0][0], points[0][1]); }, translatePoints: function(points, x, y) { var instance = this; var xy = []; AArray.each(points, function(p, i) { xy.push([points[i][0] + x, points[i][1] + y]); }); return xy; }, rotatePoints: function(points, angle) { var instance = this; var xy = []; AArray.each(points, function(p, i) { xy.push(instance.rotatePoint(angle, points[i][0], points[i][1])); }); return xy; }, rotatePoint: function(angle, x, y) { return [ (x * Math.cos(angle)) - (y * Math.sin(angle)), (x * Math.sin(angle)) + (y * Math.cos(angle)) ]; } }; /** * A base class 
for Connector. * * @class A.Connector * @extends Base * @param {Object} config Object literal specifying widget configuration * properties. * @constructor */ A.Connector = A.Base.create('line', A.Base, [], { SERIALIZABLE_ATTRS: [COLOR, LAZY_DRAW, NAME, SHAPE_SELECTED, SHAPE_HOVER, /*SHAPE,*/ P1, P2], shape: null, shapeArrow: null, /** * Construction logic executed during `A.Connector` instantiation. Lifecycle. * * @method initializer * @param config * @protected */ initializer: function(config) { var instance = this; var lazyDraw = instance.get(LAZY_DRAW); instance.after({ nameChange: instance._afterNameChange, p1Change: instance.draw, p2Change: instance.draw, selectedChange: instance._afterSelectedChange, showNameChange: instance._afterShowNameChange, visibleChange: instance._afterVisibleChange }); instance._initShapes(); if (!lazyDraw) { instance.draw(); } instance._uiSetVisible(instance.get(VISIBLE)); instance._uiSetName(instance.get(NAME)); instance._uiSetSelected(instance.get(SELECTED), !lazyDraw); instance._uiSetShowName(instance.get(SHOW_NAME)); }, /** * Destructor lifecycle implementation for the `A.Connector` class. * * @method destructor * @protected */ destructor: function() { var instance = this; instance.shape.destroy(); instance.shapeArrow.destroy(); instance.get(NODE_NAME).remove(); }, /** * Responsible for drawing the connectors. 
* * @method draw */ draw: function() { var instance = this; var shape = instance.shape; var shapeArrow = instance.shapeArrow; var p1 = instance.get(P1), p2 = instance.get(P2), c1 = instance.toCoordinate(p1), c2 = instance.toCoordinate(p2), x1 = c1[0], y1 = c1[1], x2 = c2[0], y2 = c2[1], dx = Math.max(Math.abs(x1 - x2) / 2, 10), dy = Math.max(Math.abs(y1 - y2) / 2, 10), curveArgs = null, nQuadrantSections = 8, angle = toDegrees(Math.atan2(y2 - y1, x2 - x1)), pseudoQuadrant = Math.round(Math.abs(angle) / (360 / nQuadrantSections)); if (sign(angle) < 0) { curveArgs = [ [x1 + dx, y1, x2 - dx, y2, x2, y2], // 3,6 [x1 + dx, y1, x2, y1 - dy, x2, y2], // 3,5 [x1, y1 - dy, x2, y1 - dy, x2, y2], // 0,5 [x1 - dx, y1, x2, y1 - dy, x2, y2], // 2,5 [x1 - dx, y1, x2 + dx, y2, x2, y2] // 2,7 ]; } else { curveArgs = [ [x1 + dx, y1, x2 - dx, y2, x2, y2], // 3,6 [x1 + dx, y1, x2, y1 + dy, x2, y2], // 3,4 [x1, y1 + dy, x2, y1 + dy, x2, y2], // 1,4 [x1 - dx, y1, x2, y1 + dy, x2, y2], // 2,4 [x1 - dx, y1, x2 + dx, y2, x2, y2] // 2,7 ]; } var cp = curveArgs[pseudoQuadrant]; shape.clear(); shape.moveTo(x1, y1); shape.curveTo.apply(shape, cp); shape.end(); // Extract the angle from a segment of the current Cubic Bezier curve to // rotate the arrow. The segment should be an extremities for better // angle extraction, on this particular case t = [0 to 0.025]. var xy1 = getCubicBezier(0, [x1, y1], [x2, y2], [cp[0], cp[1]], [cp[2], cp[3]]), xy2 = getCubicBezier(0.075, [x1, y1], [x2, y2], [cp[0], cp[1]], [cp[2], cp[3]]), centerXY = getCubicBezier(0.5, [x1, y1], [x2, y2], [cp[0], cp[1]], [cp[2], cp[3]]); shapeArrow.clear(); A.PolygonUtil.drawArrow(shapeArrow, xy2[0], xy2[1], xy1[0], xy1[1], instance.get(ARROW_POINTS)); shapeArrow.end(); if (instance.get(SHOW_NAME)) { instance.get(NODE_NAME).center(instance.toXY(centerXY)); } return instance; }, /** * Gets the list of properties from the property model. 
* * @method getProperties * @return {Array} */ getProperties: function() { var instance = this; var propertyModel = instance.getPropertyModel(); AArray.each(propertyModel, function(property) { property.value = instance.get(property.attributeName); }); return propertyModel; }, /** * Gets the model defition of a property. * * @method getPropertyModel * @return {Array} */ getPropertyModel: function() { var instance = this; var strings = instance.getStrings(); return [{ attributeName: NAME, editor: new A.TextCellEditor({ validator: { rules: { value: { required: true } } } }), name: strings[NAME] }]; }, /** * Gets the collection of strings used to label elements of the UI. * * @method getStrings */ getStrings: function() { return A.Connector.STRINGS; }, /** * Sets the visibility to `false`. * * @method hide */ hide: function() { var instance = this; instance.set(VISIBLE, false); return instance; }, /** * Sets the visibility to `true`. * * @method show */ show: function() { var instance = this; instance.set(VISIBLE, true); return instance; }, /** * Converts X and Y positions to a coordinate. * * @method toCoordinate * @attribute coord */ toCoordinate: function(coord) { var instance = this; return instance._offsetXY(coord, -1); }, /** * Converts serializable attributes to JSON format. * * @method toJSON * @return {Object} */ toJSON: function() { var instance = this; var output = {}; AArray.each(instance.SERIALIZABLE_ATTRS, function(attributeName) { output[attributeName] = instance.get(attributeName); }); return output; }, /** * Converts a coordinate to X and Y positions. * * @method toXY * @attribute coord */ toXY: function(coord) { var instance = this; return instance._offsetXY(coord, 1); }, /** * Fires after `name` attribute value change. * * @method _afterNameChange * @param event * @protected */ _afterNameChange: function(event) { var instance = this; instance._uiSetName(event.newVal); instance.draw(); }, /** * Fires after `selected` attribute value change. 
* * @method _afterSelectedChange * @param event * @protected */ _afterSelectedChange: function(event) { var instance = this; instance._uiSetSelected(event.newVal); }, /** * Fires after `showName` attribute value change. * * @method _afterShowNameChange * @param event * @protected */ _afterShowNameChange: function(event) { var instance = this; instance._uiSetShowName(event.newVal); }, /** * Fires after `visible` attribute value change. * * @method _afterVisibleChange * @param event * @protected */ _afterVisibleChange: function(event) { var instance = this; instance._uiSetVisible(event.newVal); }, /** * Adds shapes in the UI and bind its events. * * @method _initShapes * @protected */ _initShapes: function() { var instance = this; var shape = instance.shape = instance.get(GRAPHIC).addShape( instance.get(SHAPE) ); var shapeArrow = instance.shapeArrow = instance.get(GRAPHIC).addShape( instance.get(SHAPE_ARROW) ); shape.on(CLICK, A.bind(instance._onShapeClick, instance)); shape.on(MOUSEENTER, A.bind(instance._onShapeMouseEnter, instance)); shape.on(MOUSELEAVE, A.bind(instance._onShapeMouseLeave, instance)); shapeArrow.on(CLICK, A.bind(instance._onShapeClick, instance)); instance.get(NODE_NAME).on(CLICK, A.bind(instance._onShapeClick, instance)); }, /** * Calculates the distance relative to the graphic. * * @method _offsetXY * @param xy * @param sign * @protected */ _offsetXY: function(xy, sign) { var instance = this; var offsetXY = instance.get(GRAPHIC).getXY(); return [xy[0] + offsetXY[0] * sign, xy[1] + offsetXY[1] * sign]; }, /** * Fires when shape is clicked. 
* * @method _onShapeClick * @param event * @protected */ _onShapeClick: function(event) { var instance = this; var builder = instance.get(BUILDER); var selected = instance.get(SELECTED); if (builder) { if (event.hasModifier()) { builder.closeEditProperties(); } else { builder.unselectConnectors(); if (selected) { builder.closeEditProperties(); } else { builder.editConnector(instance); } } } instance.set(SELECTED, !selected); event.halt(); }, /** * Fires when mouse enters a shape. * * @method _onShapeMouseEnter * @param event * @protected */ _onShapeMouseEnter: function(event) { var instance = this; if (!instance.get(SELECTED)) { var shapeHover = instance.get(SHAPE_HOVER); var shapeArrowHover = instance.get(SHAPE_ARROW_HOVER); if (shapeHover) { instance._updateShape(instance.shape, shapeHover); } if (shapeArrowHover) { instance._updateShape(instance.shapeArrow, shapeArrowHover); } } }, /** * Fires when mouse leaves a shape. * * @method _onShapeMouseLeave * @param event * @protected */ _onShapeMouseLeave: function(event) { var instance = this; if (!instance.get(SELECTED)) { instance._updateShape(instance.shape, instance.get(SHAPE)); instance._updateShape(instance.shapeArrow, instance.get(SHAPE_ARROW)); } }, /** * Set the `nodeName` attribute. * * @method _setNodeName * @param val * @protected */ _setNodeName: function(val) { var instance = this; if (!A.instanceOf(val, A.Node)) { val = new A.Node.create(val); instance.get(BUILDER).canvas.append(val.unselectable()); } return val; }, /** * Set the `shape` attribute. * * @method _setShape * @param val * @protected */ _setShape: function(val) { var instance = this; return A.merge({ type: PATH, stroke: { color: instance.get(COLOR), weight: 2, opacity: 1 } }, val ); }, /** * Set the `shapeArrow` attribute. 
* * @method _setShapeArrow * @param val * @protected */ _setShapeArrow: function(val) { var instance = this; return A.merge({ type: PATH, fill: { color: instance.get(COLOR), opacity: 1 }, stroke: { color: instance.get(COLOR), weight: 2, opacity: 1 } }, val ); }, /** * Sets the `name` attribute in the UI. * * @method _uiSetName * @param val * @protected */ _uiSetName: function(val) { var instance = this; instance.get(NODE_NAME).html(val); }, /** * Sets the `selected` attribute in the UI. * * @method _uiSetSelected * @param val * @param draw * @protected */ _uiSetSelected: function(val, draw) { var instance = this; instance._updateShape( instance.shape, val ? instance.get(SHAPE_SELECTED) : instance.get(SHAPE), draw); instance._updateShape( instance.shapeArrow, val ? instance.get(SHAPE_ARROW_SELECTED) : instance.get(SHAPE_ARROW), draw); }, /** * Sets the `showName` attribute in the UI. * * @method _uiSetShowName * @param val * @protected */ _uiSetShowName: function(val) { var instance = this; instance.get(NODE_NAME).toggleClass(CSS_HIDE, !val); }, /** * Sets the `visible` attribute in the UI. * * @method _uiSetVisible * @param val * @protected */ _uiSetVisible: function(val) { var instance = this; instance.shape.set(VISIBLE, val); instance.shapeArrow.set(VISIBLE, val); instance._uiSetShowName(val && instance.get(SHOW_NAME)); }, /** * Updates shape's fill and stroke. * * @method _updateShape * @param shape * @param cShape * @param draw * @protected */ _updateShape: function(shape, cShape, draw) { var instance = this; if (cShape.hasOwnProperty(FILL)) { shape.set(FILL, cShape[FILL]); } if (cShape.hasOwnProperty(STROKE)) { shape.set(STROKE, cShape[STROKE]); } if (draw !== false) { instance.draw(); } } }, { /** * Static property used to define the default attribute * configuration for the `A.Connector`. * * @property ATTRS * @type Object * @static */ ATTRS: { /** * Arrow points from `A.PolygonUtil` instance. 
* * @attribute arrowPoints * @default 'arrowPoints' * @type String */ arrowPoints: { value: A.PolygonUtil.ARROW_POINTS }, /** * Stores an instance of `A.DiagramBuilder`. * * @attribute builder */ builder: {}, /** * The color used in the connector. * * @attribute color * @default '#27aae1' * @type String */ color: { value: '#27aae1', validator: isString }, /** * Graphic used to represent the connector. * * @attribute graphic * @type Graphic */ graphic: { validator: isGraphic }, /** * Determine if the draw should be delayed or not. * * @attribute lazyDraw * @default false * @type Boolean */ lazyDraw: { value: false, validator: isBoolean }, /** * The name of the connector. * * @attribute name * @type String */ name: { valueFn: function() { var instance = this; return CONNECTOR + (++A.Env._uidx); }, validator: isString }, /** * The connector node name. * * @attribute nodeName * @type String * @writeOnce */ nodeName: { setter: '_setNodeName', value: '<span class="' + CSS_DIAGRAM_BUILDER_CONNECTOR_NAME + '"></span>', writeOnce: true }, /** * Origin connector position. * * @attribute p1 * @default [0, 0] * @type Array */ p1: { value: [0, 0], validator: isArray }, /** * Destination connector position. * * @attribute p2 * @default [0, 0] * @type Array */ p2: { value: [0, 0], validator: isArray }, /** * Checks if a connector is selected or not. * * @attribute selected * @default false * @type Boolean */ selected: { value: false, validator: isBoolean }, /** * Graphic used to represent the connector's shape. * * @attribute shape * @default null */ shape: { value: null, setter: '_setShape' }, /** * Graphic used to represent the connector's shape arrow. * * @attribute shapeArrow * @default null */ shapeArrow: { value: null, setter: '_setShapeArrow' }, /** * Collection of styles applied when mouse is over the shape arrow. 
* * @attribute shapeArrowHover * @type Object */ shapeArrowHover: { value: { fill: { color: '#ffd700' }, stroke: { color: '#ffd700', weight: 5, opacity: 0.8 } } }, /** * Collection of styles applied when shape arrow is selected. * * @attribute shapeArrowSelected * @type Object */ shapeArrowSelected: { value: { fill: { color: '#ff6600' }, stroke: { color: '#ff6600', weight: 5, opacity: 0.8 } } }, /** * Collection of styles applied when mouse is over the shape. * * @attribute shapeHover * @type Object */ shapeHover: { value: { stroke: { color: '#ffd700', weight: 5, opacity: 0.8 } } }, /** * Collection of styles applied when shape is selected. * * @attribute shapeSelected * @type Object */ shapeSelected: { value: { stroke: { color: '#ff6600', weight: 5, opacity: 0.8 } } }, /** * Sets the visibility of the connector name. * * @attribute showName * @default true * @type Boolean */ showName: { validator: isBoolean, value: true }, /** * Stores the uid, source and target data from a connector. * * @attribute transition * @default {} * @type Object */ transition: { value: {}, validator: isObject }, /** * Indicates whether or not the connector is visible. * * @attribute visible * @default true * @type Boolean */ visible: { validator: isBoolean, value: true } }, /** * Collection of strings used to label elements of the UI. * * @property STRINGS * @type Object * @static */ STRINGS: { name: 'Name' } });
src/aui-diagram-builder/js/aui-diagram-builder-connector.js
/** * The Diagram Builder Connector * * @module aui-diagram-builder * @submodule aui-diagram-builder-connector */ var Lang = A.Lang, isArray = Lang.isArray, isBoolean = Lang.isBoolean, isObject = Lang.isObject, isString = Lang.isString, AArray = A.Array, // The first Bernstein basis polynomials (n=3), // http://en.wikipedia.org/wiki/B%C3%A9zier_curve The t in the function for // a linear Bézier curve can be thought of as describing how far B(t) is // from P0 to P1. For example when t=0.25, B(t) is one quarter of the way // from point P0 to P1. As t varies from 0 to 1, B(t) describes a straight // line from P0 to P1. B1 = function(t) { return (t * t * t); }, B2 = function(t) { return (3 * t * t * (1 - t)); }, B3 = function(t) { return (3 * t * (1 - t) * (1 - t)); }, B4 = function(t) { return ((1 - t) * (1 - t) * (1 - t)); }, // Find a Cubic Bezier point based on the control points. Consider the first // two control points as the start and end point respectively. getCubicBezier = function(t, startPos, endPos, cp1, cp2) { var x = startPos[0] * B1(t) + cp1[0] * B2(t) + cp2[0] * B3(t) + endPos[0] * B4(t); var y = startPos[1] * B1(t) + cp1[1] * B2(t) + cp2[1] * B3(t) + endPos[1] * B4(t); return [x, y]; }, isGraphic = function(val) { return A.instanceOf(val, A.Graphic); }, toDegrees = function(angleRadians) { return angleRadians * 180 / Math.PI; }, sign = function(x) { return x === 0 ? 0 : (x < 0 ? 
-1 : 1); }, ARROW_POINTS = 'arrowPoints', BUILDER = 'builder', CLICK = 'click', COLOR = 'color', CONNECTOR = 'connector', FILL = 'fill', GRAPHIC = 'graphic', LAZY_DRAW = 'lazyDraw', MOUSEENTER = 'mouseenter', MOUSELEAVE = 'mouseleave', NAME = 'name', NODE_NAME = 'nodeName', P1 = 'p1', P2 = 'p2', PATH = 'path', SELECTED = 'selected', SHAPE = 'shape', SHAPE_ARROW = 'shapeArrow', SHAPE_ARROW_HOVER = 'shapeArrowHover', SHAPE_ARROW_SELECTED = 'shapeArrowSelected', SHAPE_HOVER = 'shapeHover', SHAPE_SELECTED = 'shapeSelected', SHOW_NAME = 'showName', STROKE = 'stroke', VISIBLE = 'visible', getCN = A.getClassName, CSS_DIAGRAM_BUILDER_CONNECTOR_NAME = getCN('diagram', 'builder', 'connector', 'name'), CSS_HIDE = getCN('hide'); A.PolygonUtil = { ARROW_POINTS: [ [-12, -6], [-8, 0], [-12, 6], [6, 0] ], drawArrow: function(shape, x1, y1, x2, y2, arrowPoints) { var instance = this; var angle = Math.atan2(y2 - y1, x2 - x1); shape.moveTo(x2, y2); // Slide the arrow position along the line in 5px in polar coordinates x2 = x2 - 5 * Math.cos(angle); y2 = y2 - 5 * Math.sin(angle); instance.drawPolygon( shape, instance.translatePoints(instance.rotatePoints(arrowPoints || instance.ARROW_POINTS, angle), x2, y2) ); }, drawPolygon: function(shape, points) { var instance = this; shape.moveTo(points[0][0], points[0][1]); AArray.each(points, function(p, i) { if (i > 0) { shape.lineTo(points[i][0], points[i][1]); } }); shape.lineTo(points[0][0], points[0][1]); }, translatePoints: function(points, x, y) { var instance = this; var xy = []; AArray.each(points, function(p, i) { xy.push([points[i][0] + x, points[i][1] + y]); }); return xy; }, rotatePoints: function(points, angle) { var instance = this; var xy = []; AArray.each(points, function(p, i) { xy.push(instance.rotatePoint(angle, points[i][0], points[i][1])); }); return xy; }, rotatePoint: function(angle, x, y) { return [ (x * Math.cos(angle)) - (y * Math.sin(angle)), (x * Math.sin(angle)) + (y * Math.cos(angle)) ]; } }; /** * A base class 
for Connector. * * @class A.Connector * @extends Base * @param {Object} config Object literal specifying widget configuration * properties. * @constructor */ A.Connector = A.Base.create('line', A.Base, [], { SERIALIZABLE_ATTRS: [COLOR, LAZY_DRAW, NAME, SHAPE_SELECTED, SHAPE_HOVER, /*SHAPE,*/ P1, P2], shape: null, shapeArrow: null, /** * Construction logic executed during `A.Connector` instantiation. Lifecycle. * * @method initializer * @param config * @protected */ initializer: function(config) { var instance = this; var lazyDraw = instance.get(LAZY_DRAW); instance.after({ nameChange: instance._afterNameChange, p1Change: instance.draw, p2Change: instance.draw, selectedChange: instance._afterSelectedChange, showNameChange: instance._afterShowNameChange, visibleChange: instance._afterVisibleChange }); instance._initShapes(); if (!lazyDraw) { instance.draw(); } instance._uiSetVisible(instance.get(VISIBLE)); instance._uiSetName(instance.get(NAME)); instance._uiSetSelected(instance.get(SELECTED), !lazyDraw); instance._uiSetShowName(instance.get(SHOW_NAME)); }, /** * Destructor lifecycle implementation for the `A.Connector` class. * * @method destructor * @protected */ destructor: function() { var instance = this; instance.shape.destroy(); instance.shapeArrow.destroy(); instance.get(NODE_NAME).remove(); }, /** * Responsible for drawing the connectors. 
* * @method draw */ draw: function() { var instance = this; var shape = instance.shape; var shapeArrow = instance.shapeArrow; var p1 = instance.get(P1), p2 = instance.get(P2), c1 = instance.toCoordinate(p1), c2 = instance.toCoordinate(p2), x1 = c1[0], y1 = c1[1], x2 = c2[0], y2 = c2[1], dx = Math.max(Math.abs(x1 - x2) / 2, 10), dy = Math.max(Math.abs(y1 - y2) / 2, 10), curveArgs = null, nQuadrantSections = 8, angle = toDegrees(Math.atan2(y2 - y1, x2 - x1)), pseudoQuadrant = Math.round(Math.abs(angle) / (360 / nQuadrantSections)); if (sign(angle) < 0) { curveArgs = [ [x1 + dx, y1, x2 - dx, y2, x2, y2], // 3,6 [x1 + dx, y1, x2, y1 - dy, x2, y2], // 3,5 [x1, y1 - dy, x2, y1 - dy, x2, y2], // 0,5 [x1 - dx, y1, x2, y1 - dy, x2, y2], // 2,5 [x1 - dx, y1, x2 + dx, y2, x2, y2] // 2,7 ]; } else { curveArgs = [ [x1 + dx, y1, x2 - dx, y2, x2, y2], // 3,6 [x1 + dx, y1, x2, y1 + dy, x2, y2], // 3,4 [x1, y1 + dy, x2, y1 + dy, x2, y2], // 1,4 [x1 - dx, y1, x2, y1 + dy, x2, y2], // 2,4 [x1 - dx, y1, x2 + dx, y2, x2, y2] // 2,7 ]; } var cp = curveArgs[pseudoQuadrant]; shape.clear(); shape.moveTo(x1, y1); shape.curveTo.apply(shape, cp); shape.end(); // Extract the angle from a segment of the current Cubic Bezier curve to // rotate the arrow. The segment should be an extremities for better // angle extraction, on this particular case t = [0 to 0.025]. var xy1 = getCubicBezier(0, [x1, y1], [x2, y2], [cp[0], cp[1]], [cp[2], cp[3]]), xy2 = getCubicBezier(0.075, [x1, y1], [x2, y2], [cp[0], cp[1]], [cp[2], cp[3]]), centerXY = getCubicBezier(0.5, [x1, y1], [x2, y2], [cp[0], cp[1]], [cp[2], cp[3]]); shapeArrow.clear(); A.PolygonUtil.drawArrow(shapeArrow, xy2[0], xy2[1], xy1[0], xy1[1], instance.get(ARROW_POINTS)); shapeArrow.end(); if (instance.get(SHOW_NAME)) { instance.get(NODE_NAME).center(instance.toXY(centerXY)); } return instance; }, /** * Gets the list of properties from the property model. 
* * @method getProperties * @return {Array} */ getProperties: function() { var instance = this; var propertyModel = instance.getPropertyModel(); AArray.each(propertyModel, function(property) { property.value = instance.get(property.attributeName); }); return propertyModel; }, /** * Gets the model defition of a property. * * @method getPropertyModel * @return {Array} */ getPropertyModel: function() { var instance = this; var strings = instance.getStrings(); return [{ attributeName: NAME, editor: new A.TextCellEditor({ validator: { rules: { value: { required: true } } } }), name: strings[NAME] }]; }, /** * Gets the collection of strings used to label elements of the UI. * * @method getStrings */ getStrings: function() { return A.Connector.STRINGS; }, /** * Sets the visibility to `false`. * * @method hide */ hide: function() { var instance = this; instance.set(VISIBLE, false); return instance; }, /** * Sets the visibility to `true`. * * @method show */ show: function() { var instance = this; instance.set(VISIBLE, true); return instance; }, /** * Converts X and Y positions to a coordinate. * * @method toCoordinate * @attribute coord */ toCoordinate: function(coord) { var instance = this; return instance._offsetXY(coord, -1); }, /** * Converts serializable attributes to JSON format. * * @method toJSON * @return {Object} */ toJSON: function() { var instance = this; var output = {}; AArray.each(instance.SERIALIZABLE_ATTRS, function(attributeName) { output[attributeName] = instance.get(attributeName); }); return output; }, /** * Converts a coordinate to X and Y positions. * * @method toXY * @attribute coord */ toXY: function(coord) { var instance = this; return instance._offsetXY(coord, 1); }, /** * Fires after `name` attribute value change. * * @method _afterNameChange * @param event * @protected */ _afterNameChange: function(event) { var instance = this; instance._uiSetName(event.newVal); instance.draw(); }, /** * Fires after `selected` attribute value change. 
* * @method _afterSelectedChange * @param event * @protected */ _afterSelectedChange: function(event) { var instance = this; instance._uiSetSelected(event.newVal); }, /** * Fires after `showName` attribute value change. * * @method _afterShowNameChange * @param event * @protected */ _afterShowNameChange: function(event) { var instance = this; instance._uiSetShowName(event.newVal); }, /** * Fires after `visible` attribute value change. * * @method _afterVisibleChange * @param event * @protected */ _afterVisibleChange: function(event) { var instance = this; instance._uiSetVisible(event.newVal); }, /** * Adds shapes in the UI and bind its events. * * @method _initShapes * @protected */ _initShapes: function() { var instance = this; var shape = instance.shape = instance.get(GRAPHIC).addShape( instance.get(SHAPE) ); var shapeArrow = instance.shapeArrow = instance.get(GRAPHIC).addShape( instance.get(SHAPE_ARROW) ); shape.on(CLICK, A.bind(instance._onShapeClick, instance)); shape.on(MOUSEENTER, A.bind(instance._onShapeMouseEnter, instance)); shape.on(MOUSELEAVE, A.bind(instance._onShapeMouseLeave, instance)); shapeArrow.on(CLICK, A.bind(instance._onShapeClick, instance)); instance.get(NODE_NAME).on(CLICK, A.bind(instance._onShapeClick, instance)); }, /** * Calculates the distance relative to the graphic. * * @method _offsetXY * @param xy * @param sign * @protected */ _offsetXY: function(xy, sign) { var instance = this; var offsetXY = instance.get(GRAPHIC).getXY(); return [xy[0] + offsetXY[0] * sign, xy[1] + offsetXY[1] * sign]; }, /** * Fires when shape is clicked. 
* * @method _onShapeClick * @param event * @protected */ _onShapeClick: function(event) { var instance = this; var builder = instance.get(BUILDER); var selected = instance.get(SELECTED); if (builder) { if (event.hasModifier()) { builder.closeEditProperties(); } else { builder.unselectConnectors(); if (selected) { builder.closeEditProperties(); } else { builder.editConnector(instance); } } } instance.set(SELECTED, !selected); event.halt(); }, /** * Fires when mouse enters a shape. * * @method _onShapeMouseEnter * @param event * @protected */ _onShapeMouseEnter: function(event) { var instance = this; if (!instance.get(SELECTED)) { var shapeHover = instance.get(SHAPE_HOVER); var shapeArrowHover = instance.get(SHAPE_ARROW_HOVER); if (shapeHover) { instance._updateShape(instance.shape, shapeHover); } if (shapeArrowHover) { instance._updateShape(instance.shapeArrow, shapeArrowHover); } } }, /** * Fires when mouse leaves a shape. * * @method _onShapeMouseLeave * @param event * @protected */ _onShapeMouseLeave: function(event) { var instance = this; if (!instance.get(SELECTED)) { instance._updateShape(instance.shape, instance.get(SHAPE)); instance._updateShape(instance.shapeArrow, instance.get(SHAPE_ARROW)); } }, /** * Set the `nodeName` attribute. * * @method _setNodeName * @param val * @protected */ _setNodeName: function(val) { var instance = this; if (!A.instanceOf(val, A.Node)) { val = new A.Node.create(val); instance.get(BUILDER).canvas.append(val.unselectable()); } return val; }, /** * Set the `shape` attribute. * * @method _setShape * @param val * @protected */ _setShape: function(val) { var instance = this; return A.merge({ type: PATH, stroke: { color: instance.get(COLOR), weight: 2, opacity: 1 } }, val ); }, /** * Set the `shapeArrow` attribute. 
* * @method _setShapeArrow * @param val * @protected */ _setShapeArrow: function(val) { var instance = this; return A.merge({ type: PATH, fill: { color: instance.get(COLOR), opacity: 1 }, stroke: { color: instance.get(COLOR), weight: 2, opacity: 1 } }, val ); }, /** * Sets the `name` attribute in the UI. * * @method _uiSetName * @param val * @protected */ _uiSetName: function(val) { var instance = this; instance.get(NODE_NAME).html(val); }, /** * Sets the `selected` attribute in the UI. * * @method _uiSetSelected * @param val * @param draw * @protected */ _uiSetSelected: function(val, draw) { var instance = this; instance._updateShape(instance.shape, val ? instance.get(SHAPE_SELECTED) : instance.get(SHAPE), draw); instance._updateShape(instance.shapeArrow, val ? instance.get(SHAPE_ARROW_SELECTED) : instance.get(SHAPE_ARROW), draw); }, /** * Sets the `showName` attribute in the UI. * * @method _uiSetShowName * @param val * @protected */ _uiSetShowName: function(val) { var instance = this; instance.get(NODE_NAME).toggleClass(CSS_HIDE, !val); }, /** * Sets the `visible` attribute in the UI. * * @method _uiSetVisible * @param val * @protected */ _uiSetVisible: function(val) { var instance = this; instance.shape.set(VISIBLE, val); instance.shapeArrow.set(VISIBLE, val); instance._uiSetShowName(val && instance.get(SHOW_NAME)); }, /** * Updates shape's fill and stroke. * * @method _updateShape * @param shape * @param cShape * @param draw * @protected */ _updateShape: function(shape, cShape, draw) { var instance = this; if (cShape.hasOwnProperty(FILL)) { shape.set(FILL, cShape[FILL]); } if (cShape.hasOwnProperty(STROKE)) { shape.set(STROKE, cShape[STROKE]); } if (draw !== false) { instance.draw(); } } }, { /** * Static property used to define the default attribute * configuration for the `A.Connector`. * * @property ATTRS * @type Object * @static */ ATTRS: { /** * Arrow points from `A.PolygonUtil` instance. 
* * @attribute arrowPoints * @default 'arrowPoints' * @type String */ arrowPoints: { value: A.PolygonUtil.ARROW_POINTS }, /** * Stores an instance of `A.DiagramBuilder`. * * @attribute builder */ builder: {}, /** * The color used in the connector. * * @attribute color * @default '#27aae1' * @type String */ color: { value: '#27aae1', validator: isString }, /** * Graphic used to represent the connector. * * @attribute graphic * @type Graphic */ graphic: { validator: isGraphic }, /** * Determine if the draw should be delayed or not. * * @attribute lazyDraw * @default false * @type Boolean */ lazyDraw: { value: false, validator: isBoolean }, /** * The name of the connector. * * @attribute name * @type String */ name: { valueFn: function() { var instance = this; return CONNECTOR + (++A.Env._uidx); }, validator: isString }, /** * The connector node name. * * @attribute nodeName * @type String * @writeOnce */ nodeName: { setter: '_setNodeName', value: '<span class="' + CSS_DIAGRAM_BUILDER_CONNECTOR_NAME + '"></span>', writeOnce: true }, /** * Origin connector position. * * @attribute p1 * @default [0, 0] * @type Array */ p1: { value: [0, 0], validator: isArray }, /** * Destination connector position. * * @attribute p2 * @default [0, 0] * @type Array */ p2: { value: [0, 0], validator: isArray }, /** * Checks if a connector is selected or not. * * @attribute selected * @default false * @type Boolean */ selected: { value: false, validator: isBoolean }, /** * Graphic used to represent the connector's shape. * * @attribute shape * @default null */ shape: { value: null, setter: '_setShape' }, /** * Graphic used to represent the connector's shape arrow. * * @attribute shapeArrow * @default null */ shapeArrow: { value: null, setter: '_setShapeArrow' }, /** * Collection of styles applied when mouse is over the shape arrow. 
* * @attribute shapeArrowHover * @type Object */ shapeArrowHover: { value: { fill: { color: '#ffd700' }, stroke: { color: '#ffd700', weight: 5, opacity: 0.8 } } }, /** * Collection of styles applied when shape arrow is selected. * * @attribute shapeArrowSelected * @type Object */ shapeArrowSelected: { value: { fill: { color: '#ff6600' }, stroke: { color: '#ff6600', weight: 5, opacity: 0.8 } } }, /** * Collection of styles applied when mouse is over the shape. * * @attribute shapeHover * @type Object */ shapeHover: { value: { stroke: { color: '#ffd700', weight: 5, opacity: 0.8 } } }, /** * Collection of styles applied when shape is selected. * * @attribute shapeSelected * @type Object */ shapeSelected: { value: { stroke: { color: '#ff6600', weight: 5, opacity: 0.8 } } }, /** * Sets the visibility of the connector name. * * @attribute showName * @default true * @type Boolean */ showName: { validator: isBoolean, value: true }, /** * Stores the uid, source and target data from a connector. * * @attribute transition * @default {} * @type Object */ transition: { value: {}, validator: isObject }, /** * Indicates whether or not the connector is visible. * * @attribute visible * @default true * @type Boolean */ visible: { validator: isBoolean, value: true } }, /** * Collection of strings used to label elements of the UI. * * @property STRINGS * @type Object * @static */ STRINGS: { name: 'Name' } });
AUI-1058 Source formatting
src/aui-diagram-builder/js/aui-diagram-builder-connector.js
AUI-1058 Source formatting
<ide><path>rc/aui-diagram-builder/js/aui-diagram-builder-connector.js <ide> [x1 + dx, y1, x2, y1 - dy, x2, y2], // 3,5 <ide> [x1, y1 - dy, x2, y1 - dy, x2, y2], // 0,5 <ide> [x1 - dx, y1, x2, y1 - dy, x2, y2], // 2,5 <del> [x1 - dx, y1, x2 + dx, y2, x2, y2] // 2,7 <add> [x1 - dx, y1, x2 + dx, y2, x2, y2] // 2,7 <ide> ]; <ide> } <ide> else { <ide> [x1 + dx, y1, x2, y1 + dy, x2, y2], // 3,4 <ide> [x1, y1 + dy, x2, y1 + dy, x2, y2], // 1,4 <ide> [x1 - dx, y1, x2, y1 + dy, x2, y2], // 2,4 <del> [x1 - dx, y1, x2 + dx, y2, x2, y2] // 2,7 <add> [x1 - dx, y1, x2 + dx, y2, x2, y2] // 2,7 <ide> ]; <ide> } <ide> <ide> _uiSetSelected: function(val, draw) { <ide> var instance = this; <ide> <del> instance._updateShape(instance.shape, val ? instance.get(SHAPE_SELECTED) : instance.get(SHAPE), draw); <del> instance._updateShape(instance.shapeArrow, val ? instance.get(SHAPE_ARROW_SELECTED) : instance.get(SHAPE_ARROW), draw); <add> instance._updateShape( <add> instance.shape, val ? instance.get(SHAPE_SELECTED) : instance.get(SHAPE), draw); <add> <add> instance._updateShape( <add> instance.shapeArrow, val ? instance.get(SHAPE_ARROW_SELECTED) : instance.get(SHAPE_ARROW), draw); <ide> }, <ide> <ide> /**
Java
apache-2.0
608236c63c0ad4a6558bb988d0f5c3a47d3bdda2
0
NBANDROIDTEAM/NBANDROID-V2,arsi-apli/NBANDROID-V2,arsi-apli/NBANDROID-V2,NBANDROIDTEAM/NBANDROID-V2
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.nbandroid.netbeans.gradle.v2.apk.sign.keystore; import com.android.builder.model.SigningConfig; import java.awt.Component; import java.awt.Frame; import java.awt.KeyboardFocusManager; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.io.File; import java.io.FileInputStream; import java.security.Key; import java.security.KeyStore; import javax.swing.JFileChooser; import javax.swing.filechooser.FileNameExtensionFilter; import org.nbandroid.netbeans.gradle.v2.apk.ApkUtils; import org.netbeans.api.keyring.Keyring; import org.netbeans.api.project.Project; import org.openide.DialogDescriptor; import org.openide.DialogDisplayer; import org.openide.NotifyDescriptor; import org.openide.filesystems.FileChooserBuilder; import org.openide.filesystems.FileObject; import org.openide.util.NbPreferences; /** * * @author arsi */ public class KeystoreSelector extends javax.swing.JPanel implements ActionListener, KeyListener, SigningConfig { private final Project project; private final FileObject toSign; private final String hash; private static final String KEY_STORE_PATH = "_KEY_STORE_PATH"; private 
static final String KEY_STORE_PASSWORD = "_KEY_STORE_PASSWORD"; private static final String KEY_ALIAS = "_KEY_ALIAS"; private static final String KEY_PASSWORD = "_KEY_PASSWORD"; private static final String APK_V1 = "_APK_V1"; private static final String APK_V2 = "_APK_V2"; private static final String APK_RELEASE = "APK_RELEASE"; private static final String APK_DEBUG = "APK_DEBUG"; private static final String REMEMBER_PASSWORDS = "_REMEMBER_PASSWORDS"; private DialogDescriptor descriptor = null; private final KeyEmulatorListener keyEmulatorListener = new KeyEmulatorListener(); /** * Creates new form KeystoreSelector */ public KeystoreSelector(Project project, FileObject toSign) { initComponents(); assert project != null; assert toSign != null; this.project = project; this.toSign = toSign; hash = "ANDROID_" + project.getProjectDirectory().getPath().hashCode(); char[] keystorePasswd = Keyring.read(hash + KEY_STORE_PASSWORD); char[] keyPasswd = Keyring.read(hash + KEY_PASSWORD); if (keystorePasswd != null) { keystorePassword.setText(new String(keystorePasswd)); } if (keyPasswd != null) { keyPassword.setText(new String(keyPasswd)); } path.setText(NbPreferences.forModule(KeystoreSelector.class).get(hash + KEY_STORE_PATH, "")); alias.setText(NbPreferences.forModule(KeystoreSelector.class).get(hash + KEY_ALIAS, "")); v1.setSelected(NbPreferences.forModule(KeystoreSelector.class).getBoolean(hash + APK_V1, true)); v2.setSelected(NbPreferences.forModule(KeystoreSelector.class).getBoolean(hash + APK_V2, true)); release.setSelected(NbPreferences.forModule(KeystoreSelector.class).getBoolean(hash + APK_RELEASE, true)); debug.setSelected(NbPreferences.forModule(KeystoreSelector.class).getBoolean(hash + APK_DEBUG, false)); rememberPasswd.setSelected(NbPreferences.forModule(KeystoreSelector.class).getBoolean(hash + REMEMBER_PASSWORDS, true)); path.addKeyListener(this); alias.addKeyListener(this); keystorePassword.addKeyListener(this); keyPassword.addKeyListener(this); 
v1.addActionListener(keyEmulatorListener); v2.addActionListener(keyEmulatorListener); debug.addActionListener(keyEmulatorListener); release.addActionListener(keyEmulatorListener); keyReleased(null); } private final class KeyEmulatorListener implements ActionListener { @Override public void actionPerformed(ActionEvent e) { keyReleased(null); } } public void setDescriptor(DialogDescriptor descriptor) { this.descriptor = descriptor; keyReleased(null); } public boolean isRelease() { return release.isSelected(); } public boolean isDebug() { return debug.isSelected(); } public void storeSettings() { if (rememberPasswd.isSelected()) { Keyring.save(hash + KEY_STORE_PASSWORD, keystorePassword.getPassword(), "NBANDROID Project Keystore Password"); Keyring.save(hash + KEY_PASSWORD, keyPassword.getPassword(), "NBANDROID Project Keystore Key Password"); } else { Keyring.delete(hash + KEY_STORE_PASSWORD); Keyring.delete(hash + KEY_PASSWORD); } NbPreferences.forModule(KeystoreSelector.class).put(hash + KEY_STORE_PATH, path.getText()); NbPreferences.forModule(KeystoreSelector.class).put(hash + KEY_ALIAS, alias.getText()); NbPreferences.forModule(KeystoreSelector.class).putBoolean(hash + APK_V1, v1.isSelected()); NbPreferences.forModule(KeystoreSelector.class).putBoolean(hash + APK_V2, v2.isSelected()); NbPreferences.forModule(KeystoreSelector.class).putBoolean(hash + APK_RELEASE, release.isSelected()); NbPreferences.forModule(KeystoreSelector.class).putBoolean(hash + APK_DEBUG, debug.isSelected()); NbPreferences.forModule(KeystoreSelector.class).putBoolean(hash + REMEMBER_PASSWORDS, rememberPasswd.isSelected()); } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. 
*/ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { buttonGroup1 = new javax.swing.ButtonGroup(); jLabel1 = new javax.swing.JLabel(); path = new javax.swing.JTextField(); createNew = new javax.swing.JButton(); jLabel2 = new javax.swing.JLabel(); keystorePassword = new javax.swing.JPasswordField(); jLabel3 = new javax.swing.JLabel(); alias = new javax.swing.JTextField(); changeAlias = new javax.swing.JButton(); jLabel4 = new javax.swing.JLabel(); keyPassword = new javax.swing.JPasswordField(); rememberPasswd = new javax.swing.JCheckBox(); jLabel5 = new javax.swing.JLabel(); v1 = new javax.swing.JRadioButton(); v2 = new javax.swing.JRadioButton(); selectPath = new javax.swing.JButton(); jLabel6 = new javax.swing.JLabel(); release = new javax.swing.JRadioButton(); debug = new javax.swing.JRadioButton(); org.openide.awt.Mnemonics.setLocalizedText(jLabel1, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel1.text")); // NOI18N path.setText(org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.path.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(createNew, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.createNew.text")); // NOI18N createNew.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { createNewActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(jLabel2, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel2.text")); // NOI18N keystorePassword.setText(org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.keystorePassword.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(jLabel3, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel3.text")); // NOI18N 
alias.setText(org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.alias.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(changeAlias, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.changeAlias.text")); // NOI18N changeAlias.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { changeAliasActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(jLabel4, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel4.text")); // NOI18N keyPassword.setText(org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.keyPassword.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(rememberPasswd, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.rememberPasswd.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(jLabel5, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel5.text")); // NOI18N v1.setSelected(true); org.openide.awt.Mnemonics.setLocalizedText(v1, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.v1.text")); // NOI18N v1.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { v1ActionPerformed(evt); } }); v2.setSelected(true); org.openide.awt.Mnemonics.setLocalizedText(v2, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.v2.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(selectPath, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.selectPath.text")); // NOI18N selectPath.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { selectPathActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(jLabel6, 
org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel6.text")); // NOI18N release.setSelected(true); org.openide.awt.Mnemonics.setLocalizedText(release, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.release.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(debug, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.debug.text")); // NOI18N javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(12, 12, 12) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(jLabel2) .addComponent(jLabel1) .addComponent(jLabel3) .addComponent(jLabel4) .addComponent(jLabel5) .addComponent(jLabel6)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(keystorePassword) .addGroup(layout.createSequentialGroup() .addComponent(alias) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(changeAlias)) .addComponent(keyPassword) .addGroup(layout.createSequentialGroup() .addComponent(path, javax.swing.GroupLayout.PREFERRED_SIZE, 349, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(selectPath)) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(rememberPasswd) .addGroup(layout.createSequentialGroup() .addComponent(v1) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(v2)) 
.addGroup(layout.createSequentialGroup() .addComponent(release) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(debug))) .addGap(0, 0, Short.MAX_VALUE)))) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(createNew))) .addContainerGap()) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) .addComponent(selectPath, javax.swing.GroupLayout.PREFERRED_SIZE, 17, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(path, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabel1)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(createNew) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) .addComponent(jLabel2) .addComponent(keystorePassword, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) .addComponent(jLabel3) .addComponent(alias, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(changeAlias, javax.swing.GroupLayout.PREFERRED_SIZE, 17, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) .addComponent(jLabel4) .addComponent(keyPassword, javax.swing.GroupLayout.PREFERRED_SIZE, 
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(rememberPasswd) .addGap(5, 5, 5) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel5) .addComponent(v1) .addComponent(v2)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel6) .addComponent(release) .addComponent(debug)) .addContainerGap(12, Short.MAX_VALUE)) ); }// </editor-fold>//GEN-END:initComponents private void createNewActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createNewActionPerformed // TODO add your handling code here: NewKeyStore newKeyStore = new NewKeyStore(); DialogDescriptor dd = new DialogDescriptor(newKeyStore, "New Key Store", true, null); newKeyStore.setDescriptor(dd); Object notify = DialogDisplayer.getDefault().notify(dd); if (DialogDescriptor.OK_OPTION.equals(notify)) { String newPath = newKeyStore.getPath(); char[] password = newKeyStore.getPassword(); ApkUtils.DN dn = newKeyStore.getDN(); boolean createNewStore = ApkUtils.createNewStore(null, new File(newPath), password, dn); if (!createNewStore) { NotifyDescriptor nd = new NotifyDescriptor.Message("Unable to create new key store!", NotifyDescriptor.ERROR_MESSAGE); DialogDisplayer.getDefault().notifyLater(nd); } else { path.setText(newPath); keystorePassword.setText(new String(password)); alias.setText(dn.getAlias()); keyPassword.setText(new String(dn.getPassword())); } keyPressed(null); } }//GEN-LAST:event_createNewActionPerformed private void v1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_v1ActionPerformed // TODO add your handling code here: }//GEN-LAST:event_v1ActionPerformed private void selectPathActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_selectPathActionPerformed FileChooserBuilder 
builder = new FileChooserBuilder(KeystoreSelector.class); builder.setDirectoriesOnly(false); builder.setApproveText("Open"); builder.setControlButtonsAreShown(true); builder.setTitle("Open Key Store..."); builder.setFilesOnly(true); builder.setFileFilter(new FileNameExtensionFilter("Key Store", "jks")); JFileChooser chooser = builder.createFileChooser(); String text = path.getText(); if (!text.isEmpty()) { File f = new File(text); if (f.exists()) { chooser.setSelectedFile(f); } } int resp = chooser.showOpenDialog(findDialogParent()); if (JFileChooser.APPROVE_OPTION == resp) { File f = chooser.getSelectedFile(); path.setText(f.getAbsolutePath()); alias.setText(""); keyPassword.setText(""); keystorePassword.setText(""); keyReleased(null); } }//GEN-LAST:event_selectPathActionPerformed private void changeAliasActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_changeAliasActionPerformed // TODO add your handling code here: try { File f = new File(path.getText()); if (f.exists()) { KeyStore ks = KeyStore.getInstance("jks"); ks.load(new FileInputStream(f), keystorePassword.getPassword()); EditKeyStore editKeyStore = new EditKeyStore(ks, alias.getText()); DialogDescriptor dd = new DialogDescriptor(editKeyStore, "Choose Key", true, null); editKeyStore.setDescriptor(dd); Object notify = DialogDisplayer.getDefault().notify(dd); if (DialogDescriptor.OK_OPTION.equals(notify)) { if (editKeyStore.isNewKey()) { ApkUtils.DN dn = editKeyStore.getNewDN(); boolean addNewKey = ApkUtils.addNewKey(ks, f, keystorePassword.getPassword(), dn); if (!addNewKey) { NotifyDescriptor nd = new NotifyDescriptor.Message("Unable to save new alias to key store!", NotifyDescriptor.ERROR_MESSAGE); DialogDisplayer.getDefault().notifyLater(nd); } else { alias.setText(dn.getAlias()); keyPassword.setText(new String(dn.getPassword())); } keyPressed(null); } else { alias.setText(editKeyStore.getAliasName()); keyPassword.setText(""); } keyReleased(null); } } } catch (Exception ex) { } 
}//GEN-LAST:event_changeAliasActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JTextField alias; private javax.swing.ButtonGroup buttonGroup1; private javax.swing.JButton changeAlias; private javax.swing.JButton createNew; private javax.swing.JRadioButton debug; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel2; private javax.swing.JLabel jLabel3; private javax.swing.JLabel jLabel4; private javax.swing.JLabel jLabel5; private javax.swing.JLabel jLabel6; private javax.swing.JPasswordField keyPassword; private javax.swing.JPasswordField keystorePassword; private javax.swing.JTextField path; private javax.swing.JRadioButton release; private javax.swing.JCheckBox rememberPasswd; private javax.swing.JButton selectPath; private javax.swing.JRadioButton v1; private javax.swing.JRadioButton v2; // End of variables declaration//GEN-END:variables @Override public void actionPerformed(ActionEvent e) { storeSettings(); } private Component findDialogParent() { Component parent = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner(); if (parent == null) { parent = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow(); } if (parent == null) { Frame[] f = Frame.getFrames(); parent = f.length == 0 ? 
null : f[f.length - 1]; } return parent; } @Override public void keyTyped(KeyEvent e) { } @Override public void keyPressed(KeyEvent e) { } @Override public void keyReleased(KeyEvent e) { boolean enableChangeAlias = false; try { File f = new File(path.getText()); if (f.exists()) { KeyStore ks = KeyStore.getInstance("jks"); ks.load(new FileInputStream(f), keystorePassword.getPassword()); enableChangeAlias = true; Key key = ks.getKey(alias.getText(), keyPassword.getPassword()); if (key != null && descriptor != null && (v1.isSelected() || v2.isSelected())) { descriptor.setValid((v1.isSelected() || v2.isSelected()) && (debug.isSelected() || release.isSelected())); changeAlias.setEnabled(enableChangeAlias); return; } } } catch (Exception ex) { } if (descriptor != null) { descriptor.setValid(false); changeAlias.setEnabled(enableChangeAlias); } } @Override public File getStoreFile() { return new File(path.getText()); } @Override public String getStorePassword() { return new String(keystorePassword.getPassword()); } @Override public String getKeyAlias() { return alias.getText(); } @Override public String getKeyPassword() { return new String(keyPassword.getPassword()); } @Override public String getStoreType() { try { File f = new File(path.getText()); if (f.exists()) { KeyStore ks = KeyStore.getInstance("jks"); ks.load(new FileInputStream(f), keystorePassword.getPassword()); return ks.getType(); } } catch (Exception keyStoreException) { } return "jks"; } @Override public boolean isV1SigningEnabled() { return v1.isSelected(); } @Override public boolean isV2SigningEnabled() { return v2.isSelected(); } @Override public boolean isSigningReady() { return true; } }
src/main/java/org/nbandroid/netbeans/gradle/v2/apk/sign/keystore/KeystoreSelector.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.nbandroid.netbeans.gradle.v2.apk.sign.keystore; import com.android.builder.model.SigningConfig; import java.awt.Component; import java.awt.Frame; import java.awt.KeyboardFocusManager; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.io.File; import java.io.FileInputStream; import java.security.Key; import java.security.KeyStore; import javax.swing.JFileChooser; import javax.swing.filechooser.FileNameExtensionFilter; import org.nbandroid.netbeans.gradle.v2.apk.ApkUtils; import org.netbeans.api.keyring.Keyring; import org.netbeans.api.project.Project; import org.openide.DialogDescriptor; import org.openide.DialogDisplayer; import org.openide.NotifyDescriptor; import org.openide.filesystems.FileChooserBuilder; import org.openide.filesystems.FileObject; import org.openide.util.NbPreferences; /** * * @author arsi */ public class KeystoreSelector extends javax.swing.JPanel implements ActionListener, KeyListener, SigningConfig { private final Project project; private final FileObject toSign; private final String hash; private static final String KEY_STORE_PATH = "_KEY_STORE_PATH"; private 
static final String KEY_STORE_PASSWORD = "_KEY_STORE_PASSWORD"; private static final String KEY_ALIAS = "_KEY_ALIAS"; private static final String KEY_PASSWORD = "_KEY_PASSWORD"; private static final String APK_V1 = "_APK_V1"; private static final String APK_V2 = "_APK_V2"; private static final String APK_RELEASE = "APK_RELEASE"; private static final String APK_DEBUG = "APK_DEBUG"; private static final String REMEMBER_PASSWORDS = "_REMEMBER_PASSWORDS"; private DialogDescriptor descriptor = null; private final KeyEmulatorListener keyEmulatorListener = new KeyEmulatorListener(); /** * Creates new form KeystoreSelector */ public KeystoreSelector(Project project, FileObject toSign) { initComponents(); assert project != null; assert toSign != null; this.project = project; this.toSign = toSign; hash = "ANDROID_" + project.getProjectDirectory().getPath().hashCode(); char[] keystorePasswd = Keyring.read(hash + KEY_STORE_PASSWORD); char[] keyPasswd = Keyring.read(hash + KEY_PASSWORD); if (keystorePasswd != null) { keystorePassword.setText(new String(keystorePasswd)); } if (keyPasswd != null) { keyPassword.setText(new String(keyPasswd)); } path.setText(NbPreferences.forModule(KeystoreSelector.class).get(hash + KEY_STORE_PATH, "")); alias.setText(NbPreferences.forModule(KeystoreSelector.class).get(hash + KEY_ALIAS, "")); v1.setSelected(NbPreferences.forModule(KeystoreSelector.class).getBoolean(hash + APK_V1, true)); v2.setSelected(NbPreferences.forModule(KeystoreSelector.class).getBoolean(hash + APK_V2, true)); release.setSelected(NbPreferences.forModule(KeystoreSelector.class).getBoolean(hash + APK_RELEASE, true)); debug.setSelected(NbPreferences.forModule(KeystoreSelector.class).getBoolean(hash + APK_DEBUG, false)); rememberPasswd.setSelected(NbPreferences.forModule(KeystoreSelector.class).getBoolean(hash + REMEMBER_PASSWORDS, true)); path.addKeyListener(this); alias.addKeyListener(this); keystorePassword.addKeyListener(this); keyPassword.addKeyListener(this); 
v1.addActionListener(keyEmulatorListener); v2.addActionListener(keyEmulatorListener); debug.addActionListener(keyEmulatorListener); release.addActionListener(keyEmulatorListener); keyReleased(null); } private final class KeyEmulatorListener implements ActionListener { @Override public void actionPerformed(ActionEvent e) { keyReleased(null); } } public void setDescriptor(DialogDescriptor descriptor) { this.descriptor = descriptor; keyReleased(null); } public boolean isRelease() { return release.isSelected(); } public boolean isDebug() { return debug.isSelected(); } public void storeSettings() { if (rememberPasswd.isSelected()) { Keyring.save(hash + KEY_STORE_PASSWORD, keystorePassword.getPassword(), "NBANDROID Project Keystore Password"); Keyring.save(hash + KEY_PASSWORD, keyPassword.getPassword(), "NBANDROID Project Keystore Key Password"); } NbPreferences.forModule(KeystoreSelector.class).put(hash + KEY_STORE_PATH, path.getText()); NbPreferences.forModule(KeystoreSelector.class).put(hash + KEY_ALIAS, alias.getText()); NbPreferences.forModule(KeystoreSelector.class).putBoolean(hash + APK_V1, v1.isSelected()); NbPreferences.forModule(KeystoreSelector.class).putBoolean(hash + APK_V2, v2.isSelected()); NbPreferences.forModule(KeystoreSelector.class).putBoolean(hash + APK_RELEASE, release.isSelected()); NbPreferences.forModule(KeystoreSelector.class).putBoolean(hash + APK_DEBUG, debug.isSelected()); NbPreferences.forModule(KeystoreSelector.class).putBoolean(hash + REMEMBER_PASSWORDS, rememberPasswd.isSelected()); } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. 
*/ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { buttonGroup1 = new javax.swing.ButtonGroup(); jLabel1 = new javax.swing.JLabel(); path = new javax.swing.JTextField(); createNew = new javax.swing.JButton(); jLabel2 = new javax.swing.JLabel(); keystorePassword = new javax.swing.JPasswordField(); jLabel3 = new javax.swing.JLabel(); alias = new javax.swing.JTextField(); changeAlias = new javax.swing.JButton(); jLabel4 = new javax.swing.JLabel(); keyPassword = new javax.swing.JPasswordField(); rememberPasswd = new javax.swing.JCheckBox(); jLabel5 = new javax.swing.JLabel(); v1 = new javax.swing.JRadioButton(); v2 = new javax.swing.JRadioButton(); selectPath = new javax.swing.JButton(); jLabel6 = new javax.swing.JLabel(); release = new javax.swing.JRadioButton(); debug = new javax.swing.JRadioButton(); org.openide.awt.Mnemonics.setLocalizedText(jLabel1, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel1.text")); // NOI18N path.setText(org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.path.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(createNew, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.createNew.text")); // NOI18N createNew.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { createNewActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(jLabel2, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel2.text")); // NOI18N keystorePassword.setText(org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.keystorePassword.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(jLabel3, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel3.text")); // NOI18N 
alias.setText(org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.alias.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(changeAlias, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.changeAlias.text")); // NOI18N changeAlias.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { changeAliasActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(jLabel4, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel4.text")); // NOI18N keyPassword.setText(org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.keyPassword.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(rememberPasswd, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.rememberPasswd.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(jLabel5, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel5.text")); // NOI18N v1.setSelected(true); org.openide.awt.Mnemonics.setLocalizedText(v1, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.v1.text")); // NOI18N v1.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { v1ActionPerformed(evt); } }); v2.setSelected(true); org.openide.awt.Mnemonics.setLocalizedText(v2, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.v2.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(selectPath, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.selectPath.text")); // NOI18N selectPath.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { selectPathActionPerformed(evt); } }); org.openide.awt.Mnemonics.setLocalizedText(jLabel6, 
org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.jLabel6.text")); // NOI18N release.setSelected(true); org.openide.awt.Mnemonics.setLocalizedText(release, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.release.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(debug, org.openide.util.NbBundle.getMessage(KeystoreSelector.class, "KeystoreSelector.debug.text")); // NOI18N javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(12, 12, 12) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(jLabel2) .addComponent(jLabel1) .addComponent(jLabel3) .addComponent(jLabel4) .addComponent(jLabel5) .addComponent(jLabel6)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(keystorePassword) .addGroup(layout.createSequentialGroup() .addComponent(alias) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(changeAlias)) .addComponent(keyPassword) .addGroup(layout.createSequentialGroup() .addComponent(path, javax.swing.GroupLayout.PREFERRED_SIZE, 349, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(selectPath)) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(rememberPasswd) .addGroup(layout.createSequentialGroup() .addComponent(v1) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(v2)) 
.addGroup(layout.createSequentialGroup() .addComponent(release) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(debug))) .addGap(0, 0, Short.MAX_VALUE)))) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(createNew))) .addContainerGap()) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) .addComponent(selectPath, javax.swing.GroupLayout.PREFERRED_SIZE, 17, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(path, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabel1)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(createNew) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) .addComponent(jLabel2) .addComponent(keystorePassword, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) .addComponent(jLabel3) .addComponent(alias, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(changeAlias, javax.swing.GroupLayout.PREFERRED_SIZE, 17, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER) .addComponent(jLabel4) .addComponent(keyPassword, javax.swing.GroupLayout.PREFERRED_SIZE, 
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(rememberPasswd) .addGap(5, 5, 5) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel5) .addComponent(v1) .addComponent(v2)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel6) .addComponent(release) .addComponent(debug)) .addContainerGap(12, Short.MAX_VALUE)) ); }// </editor-fold>//GEN-END:initComponents private void createNewActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createNewActionPerformed // TODO add your handling code here: NewKeyStore newKeyStore = new NewKeyStore(); DialogDescriptor dd = new DialogDescriptor(newKeyStore, "New Key Store", true, null); newKeyStore.setDescriptor(dd); Object notify = DialogDisplayer.getDefault().notify(dd); if (DialogDescriptor.OK_OPTION.equals(notify)) { String newPath = newKeyStore.getPath(); char[] password = newKeyStore.getPassword(); ApkUtils.DN dn = newKeyStore.getDN(); boolean createNewStore = ApkUtils.createNewStore(null, new File(newPath), password, dn); if (!createNewStore) { NotifyDescriptor nd = new NotifyDescriptor.Message("Unable to create new key store!", NotifyDescriptor.ERROR_MESSAGE); DialogDisplayer.getDefault().notifyLater(nd); } else { path.setText(newPath); keystorePassword.setText(new String(password)); alias.setText(dn.getAlias()); keyPassword.setText(new String(dn.getPassword())); } keyPressed(null); } }//GEN-LAST:event_createNewActionPerformed private void v1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_v1ActionPerformed // TODO add your handling code here: }//GEN-LAST:event_v1ActionPerformed private void selectPathActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_selectPathActionPerformed FileChooserBuilder 
builder = new FileChooserBuilder(KeystoreSelector.class); builder.setDirectoriesOnly(false); builder.setApproveText("Open"); builder.setControlButtonsAreShown(true); builder.setTitle("Open Key Store..."); builder.setFilesOnly(true); builder.setFileFilter(new FileNameExtensionFilter("Key Store", "jks")); JFileChooser chooser = builder.createFileChooser(); String text = path.getText(); if (!text.isEmpty()) { File f = new File(text); if (f.exists()) { chooser.setSelectedFile(f); } } int resp = chooser.showOpenDialog(findDialogParent()); if (JFileChooser.APPROVE_OPTION == resp) { File f = chooser.getSelectedFile(); path.setText(f.getAbsolutePath()); alias.setText(""); keyPassword.setText(""); keystorePassword.setText(""); keyReleased(null); } }//GEN-LAST:event_selectPathActionPerformed private void changeAliasActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_changeAliasActionPerformed // TODO add your handling code here: try { File f = new File(path.getText()); if (f.exists()) { KeyStore ks = KeyStore.getInstance("jks"); ks.load(new FileInputStream(f), keystorePassword.getPassword()); EditKeyStore editKeyStore = new EditKeyStore(ks, alias.getText()); DialogDescriptor dd = new DialogDescriptor(editKeyStore, "Choose Key", true, null); editKeyStore.setDescriptor(dd); Object notify = DialogDisplayer.getDefault().notify(dd); if (DialogDescriptor.OK_OPTION.equals(notify)) { if (editKeyStore.isNewKey()) { ApkUtils.DN dn = editKeyStore.getNewDN(); boolean addNewKey = ApkUtils.addNewKey(ks, f, keystorePassword.getPassword(), dn); if (!addNewKey) { NotifyDescriptor nd = new NotifyDescriptor.Message("Unable to save new alias to key store!", NotifyDescriptor.ERROR_MESSAGE); DialogDisplayer.getDefault().notifyLater(nd); } else { alias.setText(dn.getAlias()); keyPassword.setText(new String(dn.getPassword())); } keyPressed(null); } else { alias.setText(editKeyStore.getAliasName()); keyPassword.setText(""); } keyReleased(null); } } } catch (Exception ex) { } 
}//GEN-LAST:event_changeAliasActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JTextField alias; private javax.swing.ButtonGroup buttonGroup1; private javax.swing.JButton changeAlias; private javax.swing.JButton createNew; private javax.swing.JRadioButton debug; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel2; private javax.swing.JLabel jLabel3; private javax.swing.JLabel jLabel4; private javax.swing.JLabel jLabel5; private javax.swing.JLabel jLabel6; private javax.swing.JPasswordField keyPassword; private javax.swing.JPasswordField keystorePassword; private javax.swing.JTextField path; private javax.swing.JRadioButton release; private javax.swing.JCheckBox rememberPasswd; private javax.swing.JButton selectPath; private javax.swing.JRadioButton v1; private javax.swing.JRadioButton v2; // End of variables declaration//GEN-END:variables @Override public void actionPerformed(ActionEvent e) { storeSettings(); } private Component findDialogParent() { Component parent = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner(); if (parent == null) { parent = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow(); } if (parent == null) { Frame[] f = Frame.getFrames(); parent = f.length == 0 ? 
null : f[f.length - 1]; } return parent; } @Override public void keyTyped(KeyEvent e) { } @Override public void keyPressed(KeyEvent e) { } @Override public void keyReleased(KeyEvent e) { boolean enableChangeAlias = false; try { File f = new File(path.getText()); if (f.exists()) { KeyStore ks = KeyStore.getInstance("jks"); ks.load(new FileInputStream(f), keystorePassword.getPassword()); enableChangeAlias = true; Key key = ks.getKey(alias.getText(), keyPassword.getPassword()); if (key != null && descriptor != null && (v1.isSelected() || v2.isSelected())) { descriptor.setValid((v1.isSelected() || v2.isSelected()) && (debug.isSelected() || release.isSelected())); changeAlias.setEnabled(enableChangeAlias); return; } } } catch (Exception ex) { } if (descriptor != null) { descriptor.setValid(false); changeAlias.setEnabled(enableChangeAlias); } } @Override public File getStoreFile() { return new File(path.getText()); } @Override public String getStorePassword() { return new String(keystorePassword.getPassword()); } @Override public String getKeyAlias() { return alias.getText(); } @Override public String getKeyPassword() { return new String(keyPassword.getPassword()); } @Override public String getStoreType() { try { File f = new File(path.getText()); if (f.exists()) { KeyStore ks = KeyStore.getInstance("jks"); ks.load(new FileInputStream(f), keystorePassword.getPassword()); return ks.getType(); } } catch (Exception keyStoreException) { } return "jks"; } @Override public boolean isV1SigningEnabled() { return v1.isSelected(); } @Override public boolean isV2SigningEnabled() { return v2.isSelected(); } @Override public boolean isSigningReady() { return true; } }
Keystore-Remove password from Keyring if Remember passwords unselected
src/main/java/org/nbandroid/netbeans/gradle/v2/apk/sign/keystore/KeystoreSelector.java
Keystore-Remove password from Keyring if Remember passwords unselected
<ide><path>rc/main/java/org/nbandroid/netbeans/gradle/v2/apk/sign/keystore/KeystoreSelector.java <ide> if (rememberPasswd.isSelected()) { <ide> Keyring.save(hash + KEY_STORE_PASSWORD, keystorePassword.getPassword(), "NBANDROID Project Keystore Password"); <ide> Keyring.save(hash + KEY_PASSWORD, keyPassword.getPassword(), "NBANDROID Project Keystore Key Password"); <add> } else { <add> Keyring.delete(hash + KEY_STORE_PASSWORD); <add> Keyring.delete(hash + KEY_PASSWORD); <ide> } <ide> NbPreferences.forModule(KeystoreSelector.class).put(hash + KEY_STORE_PATH, path.getText()); <ide> NbPreferences.forModule(KeystoreSelector.class).put(hash + KEY_ALIAS, alias.getText());
Java
bsd-3-clause
a3faff1a89cc921ae7378e16abf8fba17f5f6fd6
0
CCM-Modding/Pay2Spawn
/* * The MIT License (MIT) * * Copyright (c) 2013 Dries K. Aka Dries007 and the CCM modding crew. * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package ccm.pay2spawn; import ccm.pay2spawn.network.RedonatePacket; import ccm.pay2spawn.util.EventHandler; import ccm.pay2spawn.util.Helper; import ccm.pay2spawn.util.JsonNBTHelper; import com.google.common.base.Strings; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import net.minecraft.client.Minecraft; import scala.sys.process.processInternal; import java.io.*; import java.net.URL; import java.util.ArrayList; /** * The thread that does the actual checking with nightdevs donationtracker * * @author Dries007 */ public class DonationCheckerThread extends Thread { final int interval; final String channel; final String API_Key; final String URL; boolean firstrun = true; JsonArray latest; public DonationCheckerThread(int interval, String channel, String API_Key) { super(DonationCheckerThread.class.getSimpleName()); this.interval = interval; this.channel = channel; this.API_Key = API_Key; this.URL = "http://donationtrack.nightdev.com/api/poll?channel=" + channel + "&key=" + API_Key; } ArrayList<String> doneIDs = new ArrayList<>(); ArrayList<JsonObject> backlog = new ArrayList<>(); public synchronized JsonObject getLatestById(int id) { return latest.get(id).getAsJsonObject(); } @Override public void run() { while (true) { try { for (JsonObject donation : backlog) process(donation); String input = readUrl(URL); JsonObject root = JsonNBTHelper.PARSER.parse(input).getAsJsonObject(); if (root.get("status").getAsString().equals("success")) { doFileAndHud(root); latest = root.getAsJsonArray("mostRecent"); for (JsonElement donation : root.getAsJsonArray("mostRecent")) process(donation.getAsJsonObject()); } else { throw new IllegalArgumentException("Could not fetch recent donations.\n Message:" + root.get("error").getAsString()); } firstrun = false; doWait(interval); } catch (Exception e) { if (Minecraft.getMinecraft().running) e.printStackTrace(); } } } private void process(JsonObject donation) { if 
(Minecraft.getMinecraft().thePlayer == null || !Pay2Spawn.enable) { if (!backlog.contains(donation)) backlog.add(donation); } else if (Pay2Spawn.debug || !doneIDs.contains(donation.get("transactionID").getAsString())) { doneIDs.add(donation.get("transactionID").getAsString()); if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) return; try { Pay2Spawn.getRewardsDB().process(donation); } catch (Exception e) { Pay2Spawn.getLogger().warning("Error processing a donation."); e.printStackTrace(); } } } private void doWait(int time) { try { synchronized (this) { this.wait(time * 1000); } } catch (InterruptedException e) { e.printStackTrace(); } } @SuppressWarnings("ResultOfMethodCallIgnored") private void doFileAndHud(JsonObject root) { /** * Hud */ { /** * Top */ EventHandler.TOP.clear(); P2SConfig.HudSettings hudSettings = Pay2Spawn.getConfig().hud; if (hudSettings.top != 0) { String header = hudSettings.top_header.trim(); if (!Strings.isNullOrEmpty(header)) EventHandler.TOP.add(header); for (int i = 0; i < hudSettings.top_amount && i < root.getAsJsonArray("top").size(); i++) { JsonObject donation = root.getAsJsonArray("top").get(i).getAsJsonObject(); if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue; EventHandler.TOP.add(Helper.formatText(hudSettings.top_format, donation)); } } /** * Recent */ EventHandler.RECENT.clear(); if (hudSettings.recent != 0) { String header = hudSettings.recent_header.trim(); if (!Strings.isNullOrEmpty(header)) EventHandler.RECENT.add(header); for (int i = 0; i < hudSettings.recent_amount && i < root.getAsJsonArray("mostRecent").size(); i++) { JsonObject donation = root.getAsJsonArray("mostRecent").get(i).getAsJsonObject(); if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue; EventHandler.RECENT.add(Helper.formatText(hudSettings.recent_format, donation)); } } } /** * File */ { P2SConfig.FileSettings fileSettings = Pay2Spawn.getConfig().file; 
/** * Top */ if (fileSettings.top != 0) { try { String end = (fileSettings.top == 1 ? "\n" : ""); File file = new File(Pay2Spawn.getFolder(), "topList.txt"); //file.delete(); file.createNewFile(); PrintWriter pw = new PrintWriter(file); for (int i = 0; i < fileSettings.top_amount; i++) { if (i == fileSettings.top_amount - 1) end = ""; JsonObject donation = root.getAsJsonArray("top").get(i).getAsJsonObject(); if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue; pw.print(Helper.formatText(fileSettings.top_format, donation) + end); } pw.close(); } catch (IOException e) { e.printStackTrace(); } } /** * Recent */ if (fileSettings.recent != 0) { try { String end = (fileSettings.recent == 1 ? "\n" : ""); File file = new File(Pay2Spawn.getFolder(), "recentList.txt"); //file.delete(); file.createNewFile(); PrintWriter pw = new PrintWriter(file); for (int i = 0; i < fileSettings.recent_amount; i++) { if (i == fileSettings.recent_amount - 1) end = ""; JsonObject donation = root.getAsJsonArray("mostRecent").get(i).getAsJsonObject(); if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue; pw.print(Helper.formatText(fileSettings.recent_format, donation) + end); } pw.close(); } catch (IOException e) { e.printStackTrace(); } } } } private String readUrl(String urlString) throws Exception { BufferedReader reader = null; try { URL url = new URL(urlString); reader = new BufferedReader(new InputStreamReader(url.openStream())); StringBuilder buffer = new StringBuilder(); int read; char[] chars = new char[1024]; while ((read = reader.read(chars)) != -1) buffer.append(chars, 0, read); return buffer.toString(); } finally { if (reader != null) reader.close(); } } }
src/main/java/ccm/pay2spawn/DonationCheckerThread.java
/* * The MIT License (MIT) * * Copyright (c) 2013 Dries K. Aka Dries007 and the CCM modding crew. * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package ccm.pay2spawn; import ccm.pay2spawn.network.RedonatePacket; import ccm.pay2spawn.util.EventHandler; import ccm.pay2spawn.util.Helper; import ccm.pay2spawn.util.JsonNBTHelper; import com.google.common.base.Strings; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import net.minecraft.client.Minecraft; import scala.sys.process.processInternal; import java.io.*; import java.net.URL; import java.util.ArrayList; /** * The thread that does the actual checking with nightdevs donationtracker * * @author Dries007 */ public class DonationCheckerThread extends Thread { final int interval; final String channel; final String API_Key; final String URL; boolean firstrun = true; JsonArray latest; public DonationCheckerThread(int interval, String channel, String API_Key) { super(DonationCheckerThread.class.getSimpleName()); this.interval = interval; this.channel = channel; this.API_Key = API_Key; this.URL = "http://donationtrack.nightdev.com/api/poll?channel=" + channel + "&key=" + API_Key; } ArrayList<String> doneIDs = new ArrayList<>(); ArrayList<JsonObject> backlog = new ArrayList<>(); public synchronized JsonObject getLatestById(int id) { return latest.get(id).getAsJsonObject(); } @Override public void run() { while (true) { try { for (JsonObject donation : backlog) process(donation); String input = readUrl(URL); JsonObject root = JsonNBTHelper.PARSER.parse(input).getAsJsonObject(); if (root.get("status").getAsString().equals("success")) { doFileAndHud(root); latest = root.getAsJsonArray("mostRecent"); for (JsonElement donation : root.getAsJsonArray("mostRecent")) process(donation.getAsJsonObject()); } else { throw new IllegalArgumentException("Could not fetch recent donations.\n Message:" + root.get("error").getAsString()); } firstrun = false; doWait(interval); } catch (Exception e) { if (Minecraft.getMinecraft().running) e.printStackTrace(); } } } private void process(JsonObject donation) { if 
(Minecraft.getMinecraft().thePlayer == null || !Pay2Spawn.enable) { if (!backlog.contains(donation)) backlog.add(donation); } else if (Pay2Spawn.debug || !doneIDs.contains(donation.get("transactionID").getAsString())) { doneIDs.add(donation.get("transactionID").getAsString()); if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) return; try { Pay2Spawn.getRewardsDB().process(donation); } catch (Exception e) { Pay2Spawn.getLogger().warning("Error processing a donation."); e.printStackTrace(); } } } private void doWait(int time) { try { synchronized (this) { this.wait(time * 1000); } } catch (InterruptedException e) { e.printStackTrace(); } } @SuppressWarnings("ResultOfMethodCallIgnored") private void doFileAndHud(JsonObject root) { /** * Hud */ { /** * Top */ EventHandler.TOP.clear(); P2SConfig.HudSettings hudSettings = Pay2Spawn.getConfig().hud; if (hudSettings.top != 0) { String header = hudSettings.top_header.trim(); if (!Strings.isNullOrEmpty(header)) EventHandler.TOP.add(header); for (int i = 0; i < hudSettings.top_amount; i++) { JsonObject donation = root.getAsJsonArray("top").get(i).getAsJsonObject(); if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue; EventHandler.TOP.add(Helper.formatText(hudSettings.top_format, donation)); } } /** * Recent */ EventHandler.RECENT.clear(); if (hudSettings.recent != 0) { String header = hudSettings.recent_header.trim(); if (!Strings.isNullOrEmpty(header)) EventHandler.RECENT.add(header); for (int i = 0; i < hudSettings.recent_amount; i++) { JsonObject donation = root.getAsJsonArray("mostRecent").get(i).getAsJsonObject(); if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue; EventHandler.RECENT.add(Helper.formatText(hudSettings.recent_format, donation)); } } } /** * File */ { P2SConfig.FileSettings fileSettings = Pay2Spawn.getConfig().file; /** * Top */ if (fileSettings.top != 0) { try { String end = (fileSettings.top == 1 ? 
"\n" : ""); File file = new File(Pay2Spawn.getFolder(), "topList.txt"); //file.delete(); file.createNewFile(); PrintWriter pw = new PrintWriter(file); for (int i = 0; i < fileSettings.top_amount; i++) { if (i == fileSettings.top_amount - 1) end = ""; JsonObject donation = root.getAsJsonArray("top").get(i).getAsJsonObject(); if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue; pw.print(Helper.formatText(fileSettings.top_format, donation) + end); } pw.close(); } catch (IOException e) { e.printStackTrace(); } } /** * Recent */ if (fileSettings.recent != 0) { try { String end = (fileSettings.recent == 1 ? "\n" : ""); File file = new File(Pay2Spawn.getFolder(), "recentList.txt"); //file.delete(); file.createNewFile(); PrintWriter pw = new PrintWriter(file); for (int i = 0; i < fileSettings.recent_amount; i++) { if (i == fileSettings.recent_amount - 1) end = ""; JsonObject donation = root.getAsJsonArray("mostRecent").get(i).getAsJsonObject(); if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue; pw.print(Helper.formatText(fileSettings.recent_format, donation) + end); } pw.close(); } catch (IOException e) { e.printStackTrace(); } } } } private String readUrl(String urlString) throws Exception { BufferedReader reader = null; try { URL url = new URL(urlString); reader = new BufferedReader(new InputStreamReader(url.openStream())); StringBuilder buffer = new StringBuilder(); int read; char[] chars = new char[1024]; while ((read = reader.read(chars)) != -1) buffer.append(chars, 0, read); return buffer.toString(); } finally { if (reader != null) reader.close(); } } }
Fixed stupid issue, tnx @AzureusNation
src/main/java/ccm/pay2spawn/DonationCheckerThread.java
Fixed stupid issue, tnx @AzureusNation
<ide><path>rc/main/java/ccm/pay2spawn/DonationCheckerThread.java <ide> { <ide> String header = hudSettings.top_header.trim(); <ide> if (!Strings.isNullOrEmpty(header)) EventHandler.TOP.add(header); <del> for (int i = 0; i < hudSettings.top_amount; i++) <add> for (int i = 0; i < hudSettings.top_amount && i < root.getAsJsonArray("top").size(); i++) <ide> { <ide> JsonObject donation = root.getAsJsonArray("top").get(i).getAsJsonObject(); <ide> if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue; <ide> { <ide> String header = hudSettings.recent_header.trim(); <ide> if (!Strings.isNullOrEmpty(header)) EventHandler.RECENT.add(header); <del> for (int i = 0; i < hudSettings.recent_amount; i++) <add> for (int i = 0; i < hudSettings.recent_amount && i < root.getAsJsonArray("mostRecent").size(); i++) <ide> { <ide> JsonObject donation = root.getAsJsonArray("mostRecent").get(i).getAsJsonObject(); <ide> if (donation.get("amount").getAsDouble() < Pay2Spawn.getConfig().min_donation) continue;
Java
apache-2.0
057a80871039a3cc073f7d0af27079494fd1f77f
0
ak-67/osmdroid,dozd/osmdroid,prembasumatary/osmdroid,hyl1987419/osmdroid,DShamaev/osmdroid,Sarfarazsajjad/osmdroid,osmdroid/osmdroid,fpoyer/osmdroid,sibext/osmdroid-1,DT9/osmdroid,beemogmbh/osmdroid,ak-67/osmdroid,Sarfarazsajjad/osmdroid,GeoODK/osmdroid,osmdroid/osmdroid,sibext/osmdroid-1,DT9/osmdroid,beemogmbh/osmdroid,microg/android_external_osmdroid,dozd/osmdroid,1nv4d3r5/osmdroid,mozilla/osmdroid,osmdroid/osmdroid,prembasumatary/osmdroid,hyl1987419/osmdroid,DShamaev/osmdroid,microg/android_external_osmdroid,1nv4d3r5/osmdroid,fpoyer/osmdroid,osmdroid/osmdroid,GeoODK/osmdroid
// Created by plusminus on 17:45:56 - 25.09.2008 package org.osmdroid.views; import java.util.LinkedList; import java.util.List; import net.wigle.wigleandroid.ZoomButtonsController; import net.wigle.wigleandroid.ZoomButtonsController.OnZoomListener; import org.metalev.multitouch.controller.MultiTouchController; import org.metalev.multitouch.controller.MultiTouchController.MultiTouchObjectCanvas; import org.metalev.multitouch.controller.MultiTouchController.PointInfo; import org.metalev.multitouch.controller.MultiTouchController.PositionAndScale; import org.osmdroid.DefaultResourceProxyImpl; import org.osmdroid.ResourceProxy; import org.osmdroid.api.IMapView; import org.osmdroid.api.IProjection; import org.osmdroid.events.MapListener; import org.osmdroid.events.ScrollEvent; import org.osmdroid.events.ZoomEvent; import org.osmdroid.tileprovider.MapTileProviderBase; import org.osmdroid.tileprovider.MapTileProviderBasic; import org.osmdroid.tileprovider.tilesource.IStyledTileSource; import org.osmdroid.tileprovider.tilesource.ITileSource; import org.osmdroid.tileprovider.tilesource.TileSourceFactory; import org.osmdroid.tileprovider.util.SimpleInvalidationHandler; import org.osmdroid.util.BoundingBoxE6; import org.osmdroid.util.GeoPoint; import org.osmdroid.util.constants.GeoConstants; import org.osmdroid.views.overlay.Overlay; import org.osmdroid.views.overlay.Overlay.Snappable; import org.osmdroid.views.overlay.TilesOverlay; import org.osmdroid.views.util.Mercator; import org.osmdroid.views.util.constants.MapViewConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Matrix; import android.graphics.Point; import android.graphics.Rect; import android.os.Bundle; import android.os.Handler; import android.util.AttributeSet; import android.view.GestureDetector; import android.view.GestureDetector.OnGestureListener; import 
android.view.KeyEvent; import android.view.MotionEvent; import android.view.View; import android.view.animation.Animation; import android.view.animation.Animation.AnimationListener; import android.view.animation.ScaleAnimation; import android.widget.Scroller; public class MapView extends View implements IMapView, MapViewConstants, MultiTouchObjectCanvas<Object> { // =========================================================== // Constants // =========================================================== private static final Logger logger = LoggerFactory.getLogger(MapView.class); final static String BUNDLE_TILE_SOURCE = "org.osmdroid.views.MapView.TILE_SOURCE"; final static String BUNDLE_SCROLL_X = "org.osmdroid.views.MapView.SCROLL_X"; final static String BUNDLE_SCROLL_Y = "org.osmdroid.views.MapView.SCROLL_Y"; final static String BUNDLE_ZOOM_LEVEL = "org.osmdroid.views.MapView.ZOOM"; private static final double ZOOM_SENSITIVITY = 1.3; private static final double ZOOM_LOG_BASE_INV = 1.0 / Math.log(2.0 / ZOOM_SENSITIVITY); // =========================================================== // Fields // =========================================================== /** Current zoom level for map tiles. 
*/ private int mZoomLevel = 0; private int mTileSizePixels = 0; private final LinkedList<Overlay> mOverlays = new LinkedList<Overlay>(); private Projection mProjection; private final TilesOverlay mMapOverlay; private final GestureDetector mGestureDetector; /** Handles map scrolling */ private final Scroller mScroller; private final ScaleAnimation mZoomInAnimation; private final ScaleAnimation mZoomOutAnimation; private final MyAnimationListener mAnimationListener = new MyAnimationListener(); private final MapController mController; // XXX we can use android.widget.ZoomButtonsController if we upgrade the // dependency to Android 1.6 private final ZoomButtonsController mZoomController; private boolean mEnableZoomController = false; private ResourceProxy mResourceProxy; private MultiTouchController<Object> mMultiTouchController; private float mMultiTouchScale = 1.0f; protected MapListener mListener; // for speed (avoiding allocations) private final Matrix mMatrix = new Matrix(); private final MapTileProviderBase mTileProvider; private final Handler mTileRequestCompleteHandler; // =========================================================== // Constructors // =========================================================== private MapView(final Context context, final Handler tileRequestCompleteHandler, final AttributeSet attrs, final int tileSizePixels, MapTileProviderBase tileProvider) { super(context, attrs); mResourceProxy = new DefaultResourceProxyImpl(context); this.mController = new MapController(this); this.mScroller = new Scroller(context); this.mTileSizePixels = tileSizePixels; if (tileProvider == null) { final ITileSource tileSource = getTileSourceFromAttributes(attrs); tileProvider = new MapTileProviderBasic(context, tileSource); } mTileRequestCompleteHandler = tileRequestCompleteHandler == null ? 
new SimpleInvalidationHandler( this) : tileRequestCompleteHandler; mTileProvider = tileProvider; mTileProvider.setTileRequestCompleteHandler(mTileRequestCompleteHandler); this.mMapOverlay = new TilesOverlay(mTileProvider, mResourceProxy); mOverlays.add(this.mMapOverlay); this.mZoomController = new ZoomButtonsController(this); this.mZoomController.setOnZoomListener(new MapViewZoomListener()); mZoomInAnimation = new ScaleAnimation(1, 2, 1, 2, Animation.RELATIVE_TO_SELF, 0.5f, Animation.RELATIVE_TO_SELF, 0.5f); mZoomOutAnimation = new ScaleAnimation(1, 0.5f, 1, 0.5f, Animation.RELATIVE_TO_SELF, 0.5f, Animation.RELATIVE_TO_SELF, 0.5f); mZoomInAnimation.setDuration(ANIMATION_DURATION_SHORT); mZoomOutAnimation.setDuration(ANIMATION_DURATION_SHORT); mZoomInAnimation.setAnimationListener(mAnimationListener); mZoomOutAnimation.setAnimationListener(mAnimationListener); mGestureDetector = new GestureDetector(context, new MapViewGestureDetectorListener()); mGestureDetector.setOnDoubleTapListener(new MapViewDoubleClickListener()); } /** * Constructor used by XML layout resource (uses default tile source). */ public MapView(final Context context, final AttributeSet attrs) { this(context, null, attrs, 256, null); } /** * Standard Constructor. 
*/ public MapView(final Context context, final int tileSizePixels, final MapTileProviderBase aTileProvider) { this(context, null, null, tileSizePixels, aTileProvider); } public MapView(final Context context, final int tileSizePixels) { this(context, null, null, tileSizePixels, null); } public MapView(final Context context, final Handler tileRequestCompleteHandler, final int tileSizePixels, final MapTileProviderBase aTileProvider) { this(context, tileRequestCompleteHandler, null, tileSizePixels, aTileProvider); } // =========================================================== // Getter & Setter // =========================================================== @Override public MapController getController() { return this.mController; } /** * You can add/remove/reorder your Overlays using the List of {@link Overlay}. The first (index * 0) Overlay gets drawn first, the one with the highest as the last one. */ public List<Overlay> getOverlays() { return this.mOverlays; } public MapTileProviderBase getTileProvider() { return mTileProvider; } public Scroller getScroller() { return mScroller; } public Handler getTileRequestCompleteHandler() { return mTileRequestCompleteHandler; } @Override public int getLatitudeSpan() { return this.getBoundingBox().getLatitudeSpanE6(); } @Override public int getLongitudeSpan() { return this.getBoundingBox().getLongitudeSpanE6(); } public static int getMapTileZoom(final int tileSizePixels) { if (tileSizePixels <= 0) { return 0; } int pixels = tileSizePixels; int a = 0; while (pixels != 0) { pixels >>= 1; a++; } return a - 1; } public BoundingBoxE6 getBoundingBox() { return getBoundingBox(getWidth(), getHeight()); } public BoundingBoxE6 getBoundingBox(final int pViewWidth, final int pViewHeight) { final int mapTileZoom = getMapTileZoom(mTileSizePixels); final int world_2 = 1 << mZoomLevel + mapTileZoom - 1; final int north = world_2 + getScrollY() - getHeight() / 2; final int south = world_2 + getScrollY() + getHeight() / 2; final int west = 
world_2 + getScrollX() - getWidth() / 2; final int east = world_2 + getScrollX() + getWidth() / 2; return Mercator .getBoundingBoxFromCoords(west, north, east, south, mZoomLevel + mapTileZoom); } /** * This class is only meant to be used during on call of onDraw(). Otherwise it may produce * strange results. * * @return */ @Override public Projection getProjection() { if (mProjection == null) { mProjection = new Projection(); } return mProjection; } void setMapCenter(final GeoPoint aCenter) { this.setMapCenter(aCenter.getLatitudeE6(), aCenter.getLongitudeE6()); } void setMapCenter(final int aLatitudeE6, final int aLongitudeE6) { final Point coords = Mercator.projectGeoPoint(aLatitudeE6, aLongitudeE6, getPixelZoomLevel(), null); final int worldSize_2 = getWorldSizePx() / 2; if (getAnimation() == null || getAnimation().hasEnded()) { logger.debug("StartScroll"); mScroller.startScroll(getScrollX(), getScrollY(), coords.x - worldSize_2 - getScrollX(), coords.y - worldSize_2 - getScrollY(), 500); postInvalidate(); } } public void setTileSource(final ITileSource aTileSource) { mTileProvider.setTileSource(aTileSource); mTileSizePixels = aTileSource.getTileSizePixels(); this.checkZoomButtons(); this.setZoomLevel(mZoomLevel); // revalidate zoom level postInvalidate(); } /** * @param aZoomLevel * the zoom level bound by the tile source */ int setZoomLevel(final int aZoomLevel) { final int minZoomLevel = getMinimumZoomLevel(); final int maxZoomLevel = getMaximumZoomLevel(); final int newZoomLevel = Math.max(minZoomLevel, Math.min(maxZoomLevel, aZoomLevel)); final int curZoomLevel = this.mZoomLevel; this.mZoomLevel = newZoomLevel; this.checkZoomButtons(); if (newZoomLevel > curZoomLevel) { scrollTo(getScrollX() << newZoomLevel - curZoomLevel, getScrollY() << newZoomLevel - curZoomLevel); } else if (newZoomLevel < curZoomLevel) { scrollTo(getScrollX() >> curZoomLevel - newZoomLevel, getScrollY() >> curZoomLevel - newZoomLevel); } // snap for all snappables final Point snapPoint 
= new Point(); // XXX why do we need a new projection here? mProjection = new Projection(); for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i) instanceof Snappable && ((Snappable) mOverlays.get(i)).onSnapToItem(getScrollX(), getScrollY(), snapPoint, this)) { scrollTo(snapPoint.x, snapPoint.y); } // do callback on listener if (newZoomLevel != curZoomLevel && mListener != null) { final ZoomEvent event = new ZoomEvent(this, newZoomLevel); mListener.onZoom(event); } return this.mZoomLevel; } /** * Get the current ZoomLevel for the map tiles. * * @return the current ZoomLevel between 0 (equator) and 18/19(closest), depending on the tile * source chosen. */ @Override public int getZoomLevel() { return getZoomLevel(true); } /** * Get the current ZoomLevel for the map tiles. * * @param aPending * if true and we're animating then return the zoom level that we're animating * towards, otherwise return the current zoom level * @return the zoom level */ public int getZoomLevel(final boolean aPending) { if (aPending && mAnimationListener.animating) { return mAnimationListener.targetZoomLevel; } else { return mZoomLevel; } } /** * Returns the minimum zoom level for the point currently at the center. * * @return The minimum zoom level for the map's current center. */ public int getMinimumZoomLevel() { return mMapOverlay.getMinimumZoomLevel(); } /** * @deprecated Replaced by {@link #getMaxZoomLevel()} */ @Deprecated public int getMaximumZoomLevel() { return mMapOverlay.getMaximumZoomLevel(); } /** * Returns the maximum zoom level for the point currently at the center. * * @return The maximum zoom level for the map's current center. 
*/ @Override public int getMaxZoomLevel() { return mMapOverlay.getMaximumZoomLevel(); } public boolean canZoomIn() { final int maxZoomLevel = getMaxZoomLevel(); if (mZoomLevel >= maxZoomLevel) { return false; } if (mAnimationListener.animating && mAnimationListener.targetZoomLevel >= maxZoomLevel) { return false; } return true; } public boolean canZoomOut() { final int minZoomLevel = getMinimumZoomLevel(); if (mZoomLevel <= minZoomLevel) { return false; } if (mAnimationListener.animating && mAnimationListener.targetZoomLevel <= minZoomLevel) { return false; } return true; } /** * Zoom in by one zoom level. */ boolean zoomIn() { if (canZoomIn()) { if (mAnimationListener.animating) { // TODO extend zoom (and return true) return false; } else { mAnimationListener.targetZoomLevel = mZoomLevel + 1; mAnimationListener.animating = true; startAnimation(mZoomInAnimation); return true; } } else { return false; } } boolean zoomInFixing(final GeoPoint point) { setMapCenter(point); // TODO should fix on point, not center on it return zoomIn(); } boolean zoomInFixing(final int xPixel, final int yPixel) { setMapCenter(xPixel, yPixel); // TODO should fix on point, not center on it return zoomIn(); } /** * Zoom out by one zoom level. 
*/ boolean zoomOut() { if (canZoomOut()) { if (mAnimationListener.animating) { // TODO extend zoom (and return true) return false; } else { mAnimationListener.targetZoomLevel = mZoomLevel - 1; mAnimationListener.animating = true; startAnimation(mZoomOutAnimation); return true; } } else { return false; } } boolean zoomOutFixing(final GeoPoint point) { setMapCenter(point); // TODO should fix on point, not center on it return zoomOut(); } boolean zoomOutFixing(final int xPixel, final int yPixel) { setMapCenter(xPixel, yPixel); // TODO should fix on point, not center on it return zoomOut(); } @Override public GeoPoint getMapCenter() { return new GeoPoint(getMapCenterLatitudeE6(), getMapCenterLongitudeE6()); } public int getMapCenterLatitudeE6() { return (int) (Mercator.tile2lat(getScrollY() + getWorldSizePx() / 2, getPixelZoomLevel()) * 1E6); } public int getMapCenterLongitudeE6() { return (int) (Mercator.tile2lon(getScrollX() + getWorldSizePx() / 2, getPixelZoomLevel()) * 1E6); } public void setResourceProxy(final ResourceProxy pResourceProxy) { mResourceProxy = pResourceProxy; } public void onSaveInstanceState(final Bundle state) { state.putInt(BUNDLE_SCROLL_X, getScrollX()); state.putInt(BUNDLE_SCROLL_Y, getScrollY()); state.putInt(BUNDLE_ZOOM_LEVEL, getZoomLevel()); } public void onRestoreInstanceState(final Bundle state) { setZoomLevel(state.getInt(BUNDLE_ZOOM_LEVEL, 1)); scrollTo(state.getInt(BUNDLE_SCROLL_X, 0), state.getInt(BUNDLE_SCROLL_Y, 0)); } /** * Whether to use the network connection if it's available. */ public boolean useDataConnection() { return mMapOverlay.useDataConnection(); } /** * Set whether to use the network connection if it's available. * * @param aMode * if true use the network connection if it's available. if false don't use the * network connection even if it's available. 
*/ public void setUseDataConnection(final boolean aMode) { mMapOverlay.setUseDataConnection(aMode); } /** * Check mAnimationListener.animating to determine if view is animating. Useful for overlays to * avoid recalculating during an animation sequence. * * @return boolean indicating whether view is animating. */ public boolean isAnimating() { return mAnimationListener.animating; } // =========================================================== // Methods from SuperClass/Interfaces // =========================================================== public void onDetach() { for (int i = mOverlays.size() - 1; i >= 0; i--) mOverlays.get(i).onDetach(this); } public void onLongPress(final MotionEvent e) { for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onLongPress(e, this)) return; } public boolean onSingleTapUp(final MotionEvent e) { for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onSingleTapUp(e, this)) { postInvalidate(); return true; } return false; } @Override public boolean onKeyDown(final int keyCode, final KeyEvent event) { for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onKeyDown(keyCode, event, this)) return true; return super.onKeyDown(keyCode, event); } @Override public boolean onKeyUp(final int keyCode, final KeyEvent event) { for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onKeyUp(keyCode, event, this)) return true; return super.onKeyUp(keyCode, event); } @Override public boolean onTrackballEvent(final MotionEvent event) { for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onTrackballEvent(event, this)) return true; scrollBy((int) (event.getX() * 25), (int) (event.getY() * 25)); return super.onTrackballEvent(event); } @Override public boolean onTouchEvent(final MotionEvent event) { if (DEBUGMODE) { logger.debug("onTouchEvent(" + event + ")"); } for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onTouchEvent(event, this)) { if (DEBUGMODE) { 
logger.debug("overlay handled onTouchEvent"); } return true; } if (mMultiTouchController != null && mMultiTouchController.onTouchEvent(event)) { if (DEBUGMODE) { logger.debug("mMultiTouchController handled onTouchEvent"); } return true; } if (mGestureDetector.onTouchEvent(event)) { if (DEBUGMODE) { logger.debug("mGestureDetector handled onTouchEvent"); } return true; } final boolean r = super.onTouchEvent(event); if (r) { if (DEBUGMODE) { logger.debug("super handled onTouchEvent"); } } else { if (DEBUGMODE) { logger.debug("no-one handled onTouchEvent"); } } return r; } @Override public void computeScroll() { if (mScroller.computeScrollOffset()) { if (mScroller.isFinished()) { // This will facilitate snapping-to any Snappable points. setZoomLevel(mZoomLevel); } else { scrollTo(mScroller.getCurrX(), mScroller.getCurrY()); } postInvalidate(); // Keep on drawing until the animation has // finished. } } @Override public void scrollTo(int x, int y) { final int worldSize = getWorldSizePx(); x %= worldSize; y %= worldSize; super.scrollTo(x, y); // do callback on listener if (mListener != null) { final ScrollEvent event = new ScrollEvent(this, x, y); mListener.onScroll(event); } } @Override public void onDraw(final Canvas c) { final long startMs = System.currentTimeMillis(); mProjection = new Projection(); if (mMultiTouchScale == 1.0f) { c.translate(getWidth() / 2, getHeight() / 2); } else { c.getMatrix(mMatrix); mMatrix.postTranslate(getWidth() / 2, getHeight() / 2); mMatrix.preScale(mMultiTouchScale, mMultiTouchScale, getScrollX(), getScrollY()); c.setMatrix(mMatrix); } /* Draw background */ c.drawColor(Color.LTGRAY); // This is too slow: // final Rect r = c.getClipBounds(); // mPaint.setColor(Color.GRAY); // mPaint.setPathEffect(new DashPathEffect(new float[] {1, 1}, 0)); // for (int x = r.left; x < r.right; x += 20) // c.drawLine(x, r.top, x, r.bottom, mPaint); // for (int y = r.top; y < r.bottom; y += 20) // c.drawLine(r.left, y, r.right, y, mPaint); /* Draw all 
Overlays. Avoid allocation by not doing enhanced loop. */ for (int i = 0; i < mOverlays.size(); i++) { mOverlays.get(i).onManagedDraw(c, this); } final long endMs = System.currentTimeMillis(); if (DEBUGMODE) { logger.debug("Rendering overall: " + (endMs - startMs) + "ms"); } } @Override protected void onDetachedFromWindow() { this.mZoomController.setVisible(false); this.onDetach(); super.onDetachedFromWindow(); } // =========================================================== // Implementation of MultiTouchObjectCanvas // =========================================================== @Override public Object getDraggableObjectAtPoint(final PointInfo pt) { return this; } @Override public void getPositionAndScale(final Object obj, final PositionAndScale objPosAndScaleOut) { objPosAndScaleOut.set(0, 0, true, mMultiTouchScale, false, 0, 0, false, 0); } @Override public void selectObject(final Object obj, final PointInfo pt) { // if obj is null it means we released the pointers // if scale is not 1 it means we pinched if (obj == null && mMultiTouchScale != 1.0f) { final float scaleDiffFloat = (float) (Math.log(mMultiTouchScale) * ZOOM_LOG_BASE_INV); final int scaleDiffInt = Math.round(scaleDiffFloat); setZoomLevel(mZoomLevel + scaleDiffInt); // XXX maybe zoom in/out instead of zooming direct to zoom level // - probably not a good idea because you'll repeat the animation } // reset scale mMultiTouchScale = 1.0f; } @Override public boolean setPositionAndScale(final Object obj, final PositionAndScale aNewObjPosAndScale, final PointInfo aTouchPoint) { mMultiTouchScale = aNewObjPosAndScale.getScale(); invalidate(); // redraw return true; } /* * Set the MapListener for this view */ public void setMapListener(final MapListener ml) { mListener = ml; } // =========================================================== // Package Methods // =========================================================== /** * Get the world size in pixels. 
*/ int getWorldSizePx() { return 1 << getPixelZoomLevel(); } /** * Get the equivalent zoom level on pixel scale */ int getPixelZoomLevel() { return this.mZoomLevel + getMapTileZoom(mTileSizePixels); } // =========================================================== // Methods // =========================================================== private void checkZoomButtons() { this.mZoomController.setZoomInEnabled(canZoomIn()); this.mZoomController.setZoomOutEnabled(canZoomOut()); } /** * @param centerMapTileCoords * @param tileSizePx * @param reuse * just pass null if you do not have a Point to be 'recycled'. */ private Point getUpperLeftCornerOfCenterMapTileInScreen(final Point centerMapTileCoords, final int tileSizePx, final Point reuse) { final Point out = reuse != null ? reuse : new Point(); final int worldTiles_2 = 1 << mZoomLevel - 1; final int centerMapTileScreenLeft = (centerMapTileCoords.x - worldTiles_2) * tileSizePx - tileSizePx / 2; final int centerMapTileScreenTop = (centerMapTileCoords.y - worldTiles_2) * tileSizePx - tileSizePx / 2; out.set(centerMapTileScreenLeft, centerMapTileScreenTop); return out; } public void setBuiltInZoomControls(final boolean on) { this.mEnableZoomController = on; this.checkZoomButtons(); } public void setMultiTouchControls(final boolean on) { mMultiTouchController = on ? 
new MultiTouchController<Object>(this, false) : null; } private ITileSource getTileSourceFromAttributes(final AttributeSet aAttributeSet) { ITileSource tileSource = TileSourceFactory.DEFAULT_TILE_SOURCE; if (aAttributeSet != null) { final String tileSourceAttr = aAttributeSet.getAttributeValue(null, "tilesource"); if (tileSourceAttr != null) { try { final ITileSource r = TileSourceFactory.getTileSource(tileSourceAttr); logger.info("Using tile source specified in layout attributes: " + r); tileSource = r; } catch (final IllegalArgumentException e) { logger.warn("Invalid tile souce specified in layout attributes: " + tileSource); } } } if (aAttributeSet != null && tileSource instanceof IStyledTileSource) { String style = aAttributeSet.getAttributeValue(null, "style"); if (style == null) { // historic - old attribute name style = aAttributeSet.getAttributeValue(null, "cloudmadeStyle"); } if (style == null) { logger.info("Using default style: 1"); } else { logger.info("Using style specified in layout attributes: " + style); ((IStyledTileSource<?>) tileSource).setStyle(style); } } logger.info("Using tile source: " + tileSource); return tileSource; } // =========================================================== // Inner and Anonymous Classes // =========================================================== /** * This class may return valid results until the underlying {@link MapView} gets modified in any * way (i.e. new center). 
*
 * @author Nicolas Gramlich
 * @author Manuel Stahl
 */
public class Projection implements IProjection, GeoConstants {

    // Half the view / world dimensions, captured once for cheap reuse below.
    private final int viewWidth_2 = getWidth() / 2;
    private final int viewHeight_2 = getHeight() / 2;
    private final int worldSize_2 = getWorldSizePx() / 2;

    // Translation from map pixels (origin: north-west corner of the world)
    // to scroll coordinates (origin: centre of the world). Both components
    // are the same value because the world is square.
    private final int offsetX = -worldSize_2;
    private final int offsetY = -worldSize_2;

    // State of the enclosing MapView snapshotted at construction time, so a
    // Projection stays self-consistent even if the map moves afterwards.
    private final BoundingBoxE6 mBoundingBoxProjection;
    private final int mZoomLevelProjection;
    private final int mTileSizePixelsProjection;
    private final int mTileMapZoomProjection;
    private final Point mCenterMapTileCoordsProjection;
    private final Point mUpperLeftCornerOfCenterMapTileProjection;

    private Projection() {
        /*
         * Do some calculations and drag attributes to local variables to save some performance.
         */
        mZoomLevelProjection = mZoomLevel; // TODO Draw to attributes and so make it only 'valid' for a short time.
        mTileSizePixelsProjection = mTileSizePixels;
        mTileMapZoomProjection = getMapTileZoom(getTileSizePixels());

        /*
         * Get the center MapTile which is above this.mLatitudeE6 and this.mLongitudeE6 .
         */
        mCenterMapTileCoordsProjection = calculateCenterMapTileCoords(getTileSizePixels(),
                getZoomLevel());
        mUpperLeftCornerOfCenterMapTileProjection = getUpperLeftCornerOfCenterMapTileInScreen(
                getCenterMapTileCoords(), getTileSizePixels(), null);

        mBoundingBoxProjection = MapView.this.getBoundingBox();
    }

    /** Tile size in pixels, as captured when this projection was created. */
    public int getTileSizePixels() {
        return mTileSizePixelsProjection;
    }

    /** log2 of the tile size, as captured when this projection was created. */
    public int getTileMapZoom() {
        return mTileMapZoomProjection;
    }

    /** Zoom level of the map, as captured when this projection was created. */
    public int getZoomLevel() {
        return mZoomLevelProjection;
    }

    /** Tile coordinates of the map tile at the centre of the view. */
    public Point getCenterMapTileCoords() {
        return mCenterMapTileCoordsProjection;
    }

    /** Screen position of the upper-left corner of the centre map tile. */
    public Point getUpperLeftCornerOfCenterMapTile() {
        return mUpperLeftCornerOfCenterMapTileProjection;
    }

    /** Geographic bounds visible when this projection was created. */
    public BoundingBoxE6 getBoundingBox() {
        return mBoundingBoxProjection;
    }

    /**
     * Computes the tile coordinates of the tile under the view centre.
     * NOTE(review): reads the live getScrollX()/getScrollY() of the MapView,
     * so this is only stable while the map is not moving — it is called once
     * from the constructor, which snapshots the result.
     */
    private Point calculateCenterMapTileCoords(final int tileSizePixels, final int zoomLevel) {
        final int mapTileZoom = getMapTileZoom(tileSizePixels);
        final int worldTiles_2 = 1 << zoomLevel - 1;
        // convert to tile coordinate and make positive
        return new Point((getScrollX() >> mapTileZoom) + worldTiles_2,
                (getScrollY() >> mapTileZoom) + worldTiles_2);
    }

    /**
     * Converts x/y ScreenCoordinates to the underlying GeoPoint.
     * NOTE(review): divides by the live getWidth()/getHeight() rather than the
     * snapshotted view size — confirm this is intended if the view can resize.
     *
     * @param x
     * @param y
     * @return GeoPoint under x/y.
     */
    public GeoPoint fromPixels(final float x, final float y) {
        return getBoundingBox().getGeoPointOfRelativePositionWithLinearInterpolation(
                x / getWidth(), y / getHeight());
    }

    /** Converts view-relative pixels to absolute map pixels. */
    public Point fromMapPixels(final int x, final int y, final Point reuse) {
        final Point out = reuse != null ? reuse : new Point();
        out.set(x - viewWidth_2, y - viewHeight_2);
        out.offset(getScrollX(), getScrollY());
        return out;
    }

    /**
     * Converts a GeoPoint to its ScreenCoordinates. <br/>
     * <br/>
     * <b>CAUTION</b> ! Conversion currently has a large error on <code>zoomLevels <= 7</code>.<br/>
     * The Error on ZoomLevels higher than 7, the error is below <code>1px</code>.<br/>
     * TODO: Add a linear interpolation to minimize this error.
     *
     * <PRE>
     * Zoom 	Error(m) 	Error(px)
     * 11 	6m 	1/12px
     * 10 	24m 	1/6px
     * 8 	384m 	1/2px
     * 6 	6144m 	3px
     * 4 	98304m 	10px
     * </PRE>
     *
     * @param in
     *            the GeoPoint you want the onScreenCoordinates of.
     * @param reuse
     *            just pass null if you do not have a Point to be 'recycled'.
     * @return the Point containing the approximated ScreenCoordinates of the GeoPoint passed.
     */
    public Point toMapPixels(final GeoPoint in, final Point reuse) {
        final Point out = reuse != null ? reuse : new Point();
        final Point coords = Mercator.projectGeoPoint(in.getLatitudeE6(), in.getLongitudeE6(),
                getPixelZoomLevel(), null);
        out.set(coords.x, coords.y);
        out.offset(offsetX, offsetY);
        return out;
    }

    /**
     * Performs only the first computationally heavy part of the projection, needToCall
     * toMapPixelsTranslated to get final position.
     *
     * @param latituteE6
     *            the latitute of the point
     * @param longitudeE6
     *            the longitude of the point
     * @param reuse
     *            just pass null if you do not have a Point to be 'recycled'.
     * @return intermediate value to be stored and passed to toMapPixelsTranslated on paint.
     */
    public Point toMapPixelsProjected(final int latituteE6, final int longitudeE6,
            final Point reuse) {
        final Point out = reuse != null ? reuse : new Point();

        // 28 is the biggest zoomlevel we can project
        // (fixed stale comment: the code has always used 28 here, not 26)
        final Point coords = Mercator.projectGeoPoint(latituteE6, longitudeE6, 28, out);
        out.set(coords.x, coords.y);
        return out;
    }

    /**
     * Performs the second computationally light part of the projection.
     *
     * @param in
     *            the Point calculated by the toMapPixelsProjected
     * @param reuse
     *            just pass null if you do not have a Point to be 'recycled'.
     * @return the Point containing the approximated ScreenCoordinates of the initial GeoPoint
     *         passed to the toMapPixelsProjected.
     */
    public Point toMapPixelsTranslated(final Point in, final Point reuse) {
        final Point out = reuse != null ? reuse : new Point();

        // 28 is the biggest zoomlevel we can project
        final int zoomDifference = 28 - getPixelZoomLevel();
        out.set((in.x >> zoomDifference) + offsetX, (in.y >> zoomDifference) + offsetY);
        return out;
    }

    /**
     * Translates a rectangle from screen coordinates to intermediate coordinates.
     *
     * @param in
     *            the rectangle in screen coordinates
     * @return a rectangle in intermediate coords.
     */
    public Rect fromPixelsToProjected(final Rect in) {
        final Rect result = new Rect();

        // 28 is the biggest zoomlevel we can project
        final int zoomDifference = 28 - getPixelZoomLevel();

        // BUGFIX: the vertical coordinates previously subtracted offsetX
        // (copy-paste); use offsetY. Numerically identical today because
        // offsetX == offsetY, but correct should the offsets ever diverge.
        final int x0 = in.left - offsetX << zoomDifference;
        final int x1 = in.right - offsetX << zoomDifference;
        final int y0 = in.bottom - offsetY << zoomDifference;
        final int y1 = in.top - offsetY << zoomDifference;

        result.set(Math.min(x0, x1), Math.min(y0, y1), Math.max(x0, x1), Math.max(y0, y1));
        return result;
    }

    /** Converts tile coordinates to screen pixels. */
    public Point toPixels(final Point tileCoords, final Point reuse) {
        return toPixels(tileCoords.x, tileCoords.y, reuse);
    }

    /** Converts tile coordinates to screen pixels. */
    public Point toPixels(final int tileX, final int tileY, final Point reuse) {
        final Point out = reuse != null ? reuse : new Point();
        out.set(tileX * getTileSizePixels(), tileY * getTileSizePixels());
        out.offset(offsetX, offsetY);
        return out;
    }

    // not presently used
    public Rect toPixels(final BoundingBoxE6 pBoundingBoxE6) {
        final Rect rect = new Rect();

        final Point reuse = new Point();

        toMapPixels(
                new GeoPoint(pBoundingBoxE6.getLatNorthE6(), pBoundingBoxE6.getLonWestE6()), reuse);
        rect.left = reuse.x;
        rect.top = reuse.y;

        toMapPixels(
                new GeoPoint(pBoundingBoxE6.getLatSouthE6(), pBoundingBoxE6.getLonEastE6()), reuse);
        rect.right = reuse.x;
        rect.bottom = reuse.y;

        return rect;
    }

    @Override
    public float metersToEquatorPixels(final float meters) {
        return meters / EQUATORCIRCUMFENCE * getWorldSizePx();
    }

    @Override
    public Point toPixels(final GeoPoint in, final Point out) {
        return toMapPixels(in, out);
    }

    @Override
    public GeoPoint fromPixels(final int x, final int y) {
        return fromPixels((float) x, (float) y);
    }
}

/** Routes single-pointer gestures (tap, fling, scroll, long-press) to the map. */
private class MapViewGestureDetectorListener implements OnGestureListener {

    @Override
    public boolean onDown(final MotionEvent e) {
        // Reveal the zoom buttons (if enabled) as soon as the user touches the map.
        mZoomController.setVisible(mEnableZoomController);
        return true;
    }

    @Override
    public boolean onFling(final MotionEvent e1, final MotionEvent e2, final float velocityX,
            final float velocityY) {
        final int worldSize = getWorldSizePx();
        // Negative velocity: scrolling moves opposite to the finger motion.
        mScroller.fling(getScrollX(), getScrollY(), (int) -velocityX, (int) -velocityY,
                -worldSize, worldSize, -worldSize, worldSize);
        return true;
    }

    @Override
    public void onLongPress(final MotionEvent e) {
        MapView.this.onLongPress(e);
    }

    @Override
    public boolean onScroll(final MotionEvent e1, final MotionEvent e2, final float distanceX,
            final float distanceY) {
        scrollBy((int) distanceX, (int) distanceY);
        return true;
    }

    @Override
    public void onShowPress(final MotionEvent e) {
    }

    @Override
    public boolean onSingleTapUp(final MotionEvent e) {
        return MapView.this.onSingleTapUp(e);
    }
}

/** Handles double taps: overlays get first refusal, otherwise zoom in on the tap. */
private class MapViewDoubleClickListener implements GestureDetector.OnDoubleTapListener {

    @Override
    public boolean onDoubleTap(final MotionEvent e) {
        // Give overlays (topmost first) a chance to consume the double tap.
        for (int i = mOverlays.size() - 1; i >= 0; i--)
            if (mOverlays.get(i).onDoubleTapUp(e, MapView.this))
                return true;

        final GeoPoint center = getProjection().fromPixels(e.getX(), e.getY());
        return zoomInFixing(center);
    }

    @Override
    public boolean onDoubleTapEvent(final MotionEvent e) {
        return false;
    }

    @Override
    public boolean onSingleTapConfirmed(final MotionEvent e) {
        return false;
    }
}

/** Forwards zoom-button presses to the map controller. */
private class MapViewZoomListener implements OnZoomListener {
    @Override
    public void onZoom(final boolean zoomIn) {
        if (zoomIn) {
            getController().zoomIn();
        } else {
            getController().zoomOut();
        }
    }

    @Override
    public void onVisibilityChanged(final boolean visible) {
    }
}

/**
 * Tracks the zoom animation: records the target level while animating and
 * commits it (via post, on the UI thread) once the animation ends.
 */
private class MyAnimationListener implements AnimationListener {
    private int targetZoomLevel;
    private boolean animating;

    @Override
    public void onAnimationEnd(final Animation aAnimation) {
        animating = false;
        MapView.this.post(new Runnable() {
            @Override
            public void run() {
                setZoomLevel(targetZoomLevel);
            }
        });
    }

    @Override
    public void onAnimationRepeat(final Animation aAnimation) {
    }

    @Override
    public void onAnimationStart(final Animation aAnimation) {
        animating = true;
    }
}

}
osmdroid-android/src/org/osmdroid/views/MapView.java
// Created by plusminus on 17:45:56 - 25.09.2008 package org.osmdroid.views; import java.util.LinkedList; import java.util.List; import net.wigle.wigleandroid.ZoomButtonsController; import net.wigle.wigleandroid.ZoomButtonsController.OnZoomListener; import org.metalev.multitouch.controller.MultiTouchController; import org.metalev.multitouch.controller.MultiTouchController.MultiTouchObjectCanvas; import org.metalev.multitouch.controller.MultiTouchController.PointInfo; import org.metalev.multitouch.controller.MultiTouchController.PositionAndScale; import org.osmdroid.DefaultResourceProxyImpl; import org.osmdroid.ResourceProxy; import org.osmdroid.api.IMapView; import org.osmdroid.api.IProjection; import org.osmdroid.events.MapListener; import org.osmdroid.events.ScrollEvent; import org.osmdroid.events.ZoomEvent; import org.osmdroid.tileprovider.MapTileProviderBase; import org.osmdroid.tileprovider.MapTileProviderBasic; import org.osmdroid.tileprovider.tilesource.IStyledTileSource; import org.osmdroid.tileprovider.tilesource.ITileSource; import org.osmdroid.tileprovider.tilesource.TileSourceFactory; import org.osmdroid.tileprovider.util.SimpleInvalidationHandler; import org.osmdroid.util.BoundingBoxE6; import org.osmdroid.util.GeoPoint; import org.osmdroid.util.constants.GeoConstants; import org.osmdroid.views.overlay.Overlay; import org.osmdroid.views.overlay.Overlay.Snappable; import org.osmdroid.views.overlay.TilesOverlay; import org.osmdroid.views.util.Mercator; import org.osmdroid.views.util.constants.MapViewConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Matrix; import android.graphics.Point; import android.graphics.Rect; import android.os.Bundle; import android.os.Handler; import android.util.AttributeSet; import android.view.GestureDetector; import android.view.GestureDetector.OnGestureListener; import 
android.view.KeyEvent; import android.view.MotionEvent; import android.view.View; import android.view.animation.Animation; import android.view.animation.Animation.AnimationListener; import android.view.animation.ScaleAnimation; import android.widget.Scroller; public class MapView extends View implements IMapView, MapViewConstants, MultiTouchObjectCanvas<Object> { // =========================================================== // Constants // =========================================================== private static final Logger logger = LoggerFactory.getLogger(MapView.class); final static String BUNDLE_TILE_SOURCE = "org.osmdroid.views.MapView.TILE_SOURCE"; final static String BUNDLE_SCROLL_X = "org.osmdroid.views.MapView.SCROLL_X"; final static String BUNDLE_SCROLL_Y = "org.osmdroid.views.MapView.SCROLL_Y"; final static String BUNDLE_ZOOM_LEVEL = "org.osmdroid.views.MapView.ZOOM"; private static final double ZOOM_SENSITIVITY = 1.3; private static final double ZOOM_LOG_BASE_INV = 1.0 / Math.log(2.0 / ZOOM_SENSITIVITY); // =========================================================== // Fields // =========================================================== /** Current zoom level for map tiles. 
*/ private int mZoomLevel = 0; private int mTileSizePixels = 0; private final LinkedList<Overlay> mOverlays = new LinkedList<Overlay>(); private Projection mProjection; private final TilesOverlay mMapOverlay; private final GestureDetector mGestureDetector; /** Handles map scrolling */ private final Scroller mScroller; private final ScaleAnimation mZoomInAnimation; private final ScaleAnimation mZoomOutAnimation; private final MyAnimationListener mAnimationListener = new MyAnimationListener(); private final MapController mController; // XXX we can use android.widget.ZoomButtonsController if we upgrade the // dependency to Android 1.6 private final ZoomButtonsController mZoomController; private boolean mEnableZoomController = false; private ResourceProxy mResourceProxy; private MultiTouchController<Object> mMultiTouchController; private float mMultiTouchScale = 1.0f; protected MapListener mListener; // for speed (avoiding allocations) private final Matrix mMatrix = new Matrix(); private final MapTileProviderBase mTileProvider; private final Handler mTileRequestCompleteHandler; // =========================================================== // Constructors // =========================================================== private MapView(final Context context, final Handler tileRequestCompleteHandler, final AttributeSet attrs, final int tileSizePixels, MapTileProviderBase tileProvider) { super(context, attrs); mResourceProxy = new DefaultResourceProxyImpl(context); this.mController = new MapController(this); this.mScroller = new Scroller(context); this.mTileSizePixels = tileSizePixels; if (tileProvider == null) { final ITileSource tileSource = getTileSourceFromAttributes(attrs); tileProvider = new MapTileProviderBasic(context, tileSource); } mTileRequestCompleteHandler = tileRequestCompleteHandler == null ? 
new SimpleInvalidationHandler( this) : tileRequestCompleteHandler; mTileProvider = tileProvider; mTileProvider.setTileRequestCompleteHandler(mTileRequestCompleteHandler); this.mMapOverlay = new TilesOverlay(mTileProvider, mResourceProxy); mOverlays.add(this.mMapOverlay); this.mZoomController = new ZoomButtonsController(this); this.mZoomController.setOnZoomListener(new MapViewZoomListener()); mZoomInAnimation = new ScaleAnimation(1, 2, 1, 2, Animation.RELATIVE_TO_SELF, 0.5f, Animation.RELATIVE_TO_SELF, 0.5f); mZoomOutAnimation = new ScaleAnimation(1, 0.5f, 1, 0.5f, Animation.RELATIVE_TO_SELF, 0.5f, Animation.RELATIVE_TO_SELF, 0.5f); mZoomInAnimation.setDuration(ANIMATION_DURATION_SHORT); mZoomOutAnimation.setDuration(ANIMATION_DURATION_SHORT); mZoomInAnimation.setAnimationListener(mAnimationListener); mZoomOutAnimation.setAnimationListener(mAnimationListener); mGestureDetector = new GestureDetector(context, new MapViewGestureDetectorListener()); mGestureDetector.setOnDoubleTapListener(new MapViewDoubleClickListener()); } /** * Constructor used by XML layout resource (uses default tile source). */ public MapView(final Context context, final AttributeSet attrs) { this(context, null, attrs, 256, null); } /** * Standard Constructor. 
*/ public MapView(final Context context, final int tileSizePixels, final MapTileProviderBase aTileProvider) { this(context, null, null, tileSizePixels, aTileProvider); } public MapView(final Context context, final int tileSizePixels) { this(context, null, null, tileSizePixels, null); } public MapView(final Context context, final Handler tileRequestCompleteHandler, final int tileSizePixels, final MapTileProviderBase aTileProvider) { this(context, tileRequestCompleteHandler, null, tileSizePixels, aTileProvider); } // =========================================================== // Getter & Setter // =========================================================== @Override public MapController getController() { return this.mController; } /** * You can add/remove/reorder your Overlays using the List of {@link Overlay}. The first (index * 0) Overlay gets drawn first, the one with the highest as the last one. */ public List<Overlay> getOverlays() { return this.mOverlays; } public MapTileProviderBase getTileProvider() { return mTileProvider; } public Scroller getScroller() { return mScroller; } public Handler getTileRequestCompleteHandler() { return mTileRequestCompleteHandler; } @Override public int getLatitudeSpan() { return this.getBoundingBox().getLatitudeSpanE6(); } @Override public int getLongitudeSpan() { return this.getBoundingBox().getLongitudeSpanE6(); } public static int getMapTileZoom(final int tileSizePixels) { if (tileSizePixels <= 0) { return 0; } int pixels = tileSizePixels; int a = 0; while (pixels != 0) { pixels >>= 1; a++; } return a - 1; } public BoundingBoxE6 getBoundingBox() { return getBoundingBox(getWidth(), getHeight()); } public BoundingBoxE6 getBoundingBox(final int pViewWidth, final int pViewHeight) { final int mapTileZoom = getMapTileZoom(mTileSizePixels); final int world_2 = 1 << mZoomLevel + mapTileZoom - 1; final int north = world_2 + getScrollY() - getHeight() / 2; final int south = world_2 + getScrollY() + getHeight() / 2; final int west = 
world_2 + getScrollX() - getWidth() / 2; final int east = world_2 + getScrollX() + getWidth() / 2; return Mercator .getBoundingBoxFromCoords(west, north, east, south, mZoomLevel + mapTileZoom); } /** * This class is only meant to be used during on call of onDraw(). Otherwise it may produce * strange results. * * @return */ @Override public Projection getProjection() { if (mProjection == null) { mProjection = new Projection(); } return mProjection; } void setMapCenter(final GeoPoint aCenter) { this.setMapCenter(aCenter.getLatitudeE6(), aCenter.getLongitudeE6()); } void setMapCenter(final int aLatitudeE6, final int aLongitudeE6) { final Point coords = Mercator.projectGeoPoint(aLatitudeE6, aLongitudeE6, getPixelZoomLevel(), null); final int worldSize_2 = getWorldSizePx() / 2; if (getAnimation() == null || getAnimation().hasEnded()) { logger.debug("StartScroll"); mScroller.startScroll(getScrollX(), getScrollY(), coords.x - worldSize_2 - getScrollX(), coords.y - worldSize_2 - getScrollY(), 500); postInvalidate(); } } public void setTileSource(final ITileSource aTileSource) { mTileProvider.setTileSource(aTileSource); mTileSizePixels = aTileSource.getTileSizePixels(); this.checkZoomButtons(); this.setZoomLevel(mZoomLevel); // revalidate zoom level postInvalidate(); } /** * @param aZoomLevel * the zoom level bound by the tile source */ int setZoomLevel(final int aZoomLevel) { final int minZoomLevel = getMinimumZoomLevel(); final int maxZoomLevel = getMaximumZoomLevel(); final int newZoomLevel = Math.max(minZoomLevel, Math.min(maxZoomLevel, aZoomLevel)); final int curZoomLevel = this.mZoomLevel; this.mZoomLevel = newZoomLevel; this.checkZoomButtons(); if (newZoomLevel > curZoomLevel) { scrollTo(getScrollX() << newZoomLevel - curZoomLevel, getScrollY() << newZoomLevel - curZoomLevel); } else if (newZoomLevel < curZoomLevel) { scrollTo(getScrollX() >> curZoomLevel - newZoomLevel, getScrollY() >> curZoomLevel - newZoomLevel); } // snap for all snappables final Point snapPoint 
= new Point(); // XXX why do we need a new projection here? mProjection = new Projection(); for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i) instanceof Snappable && ((Snappable) mOverlays.get(i)).onSnapToItem(getScrollX(), getScrollY(), snapPoint, this)) { scrollTo(snapPoint.x, snapPoint.y); } // do callback on listener if (newZoomLevel != curZoomLevel && mListener != null) { final ZoomEvent event = new ZoomEvent(this, newZoomLevel); mListener.onZoom(event); } return this.mZoomLevel; } /** * Get the current ZoomLevel for the map tiles. * * @return the current ZoomLevel between 0 (equator) and 18/19(closest), depending on the tile * source chosen. */ @Override public int getZoomLevel() { return getZoomLevel(true); } /** * Get the current ZoomLevel for the map tiles. * * @param aPending * if true and we're animating then return the zoom level that we're animating * towards, otherwise return the current zoom level * @return the zoom level */ public int getZoomLevel(final boolean aPending) { if (aPending && mAnimationListener.animating) { return mAnimationListener.targetZoomLevel; } else { return mZoomLevel; } } /** * Returns the minimum zoom level for the point currently at the center. * * @return The minimum zoom level for the map's current center. */ public int getMinimumZoomLevel() { return mMapOverlay.getMinimumZoomLevel(); } /** * @deprecated Replaced by {@link #getMaxZoomLevel()} */ @Deprecated public int getMaximumZoomLevel() { return mMapOverlay.getMaximumZoomLevel(); } /** * Returns the maximum zoom level for the point currently at the center. * * @return The maximum zoom level for the map's current center. 
*/ @Override public int getMaxZoomLevel() { return mMapOverlay.getMaximumZoomLevel(); } public boolean canZoomIn() { final int maxZoomLevel = getMaxZoomLevel(); if (mZoomLevel >= maxZoomLevel) { return false; } if (mAnimationListener.animating && mAnimationListener.targetZoomLevel >= maxZoomLevel) { return false; } return true; } public boolean canZoomOut() { final int minZoomLevel = getMinimumZoomLevel(); if (mZoomLevel <= minZoomLevel) { return false; } if (mAnimationListener.animating && mAnimationListener.targetZoomLevel <= minZoomLevel) { return false; } return true; } /** * Zoom in by one zoom level. */ boolean zoomIn() { if (canZoomIn()) { if (mAnimationListener.animating) { // TODO extend zoom (and return true) return false; } else { mAnimationListener.targetZoomLevel = mZoomLevel + 1; mAnimationListener.animating = true; startAnimation(mZoomInAnimation); return true; } } else { return false; } } boolean zoomInFixing(final GeoPoint point) { setMapCenter(point); // TODO should fix on point, not center on it return zoomIn(); } boolean zoomInFixing(final int xPixel, final int yPixel) { setMapCenter(xPixel, yPixel); // TODO should fix on point, not center on it return zoomIn(); } /** * Zoom out by one zoom level. 
*/ boolean zoomOut() { if (canZoomOut()) { if (mAnimationListener.animating) { // TODO extend zoom (and return true) return false; } else { mAnimationListener.targetZoomLevel = mZoomLevel - 1; mAnimationListener.animating = true; startAnimation(mZoomOutAnimation); return true; } } else { return false; } } boolean zoomOutFixing(final GeoPoint point) { setMapCenter(point); // TODO should fix on point, not center on it return zoomOut(); } boolean zoomOutFixing(final int xPixel, final int yPixel) { setMapCenter(xPixel, yPixel); // TODO should fix on point, not center on it return zoomOut(); } @Override public GeoPoint getMapCenter() { return new GeoPoint(getMapCenterLatitudeE6(), getMapCenterLongitudeE6()); } public int getMapCenterLatitudeE6() { return (int) (Mercator.tile2lat(getScrollY() + getWorldSizePx() / 2, getPixelZoomLevel()) * 1E6); } public int getMapCenterLongitudeE6() { return (int) (Mercator.tile2lon(getScrollX() + getWorldSizePx() / 2, getPixelZoomLevel()) * 1E6); } public void setResourceProxy(final ResourceProxy pResourceProxy) { mResourceProxy = pResourceProxy; } public void onSaveInstanceState(final Bundle state) { state.putInt(BUNDLE_SCROLL_X, getScrollX()); state.putInt(BUNDLE_SCROLL_Y, getScrollY()); state.putInt(BUNDLE_ZOOM_LEVEL, getZoomLevel()); } public void onRestoreInstanceState(final Bundle state) { setZoomLevel(state.getInt(BUNDLE_ZOOM_LEVEL, 1)); scrollTo(state.getInt(BUNDLE_SCROLL_X, 0), state.getInt(BUNDLE_SCROLL_Y, 0)); } /** * Whether to use the network connection if it's available. */ public boolean useDataConnection() { return mMapOverlay.useDataConnection(); } /** * Set whether to use the network connection if it's available. * * @param aMode * if true use the network connection if it's available. if false don't use the * network connection even if it's available. 
*/ public void setUseDataConnection(final boolean aMode) { mMapOverlay.setUseDataConnection(aMode); } /** * Check mAnimationListener.animating to determine if view is animating. Useful for overlays to * avoid recalculating during an animation sequence. * * @return boolean indicating whether view is animating. */ public boolean isAnimating() { return mAnimationListener.animating; } // =========================================================== // Methods from SuperClass/Interfaces // =========================================================== public void onDetach() { for (int i = mOverlays.size() - 1; i >= 0; i--) mOverlays.get(i).onDetach(this); } public void onLongPress(final MotionEvent e) { for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onLongPress(e, this)) return; } public boolean onSingleTapUp(final MotionEvent e) { for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onSingleTapUp(e, this)) { postInvalidate(); return true; } return false; } @Override public boolean onKeyDown(final int keyCode, final KeyEvent event) { for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onKeyDown(keyCode, event, this)) return true; return super.onKeyDown(keyCode, event); } @Override public boolean onKeyUp(final int keyCode, final KeyEvent event) { for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onKeyUp(keyCode, event, this)) return true; return super.onKeyUp(keyCode, event); } @Override public boolean onTrackballEvent(final MotionEvent event) { for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onTrackballEvent(event, this)) return true; scrollBy((int) (event.getX() * 25), (int) (event.getY() * 25)); return super.onTrackballEvent(event); } @Override public boolean onTouchEvent(final MotionEvent event) { if (DEBUGMODE) { logger.debug("onTouchEvent(" + event + ")"); } for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onTouchEvent(event, this)) { if (DEBUGMODE) { 
logger.debug("overlay handled onTouchEvent"); } return true; } if (mMultiTouchController != null && mMultiTouchController.onTouchEvent(event)) { if (DEBUGMODE) { logger.debug("mMultiTouchController handled onTouchEvent"); } return true; } if (mGestureDetector.onTouchEvent(event)) { if (DEBUGMODE) { logger.debug("mGestureDetector handled onTouchEvent"); } return true; } final boolean r = super.onTouchEvent(event); if (r) { if (DEBUGMODE) { logger.debug("super handled onTouchEvent"); } } else { if (DEBUGMODE) { logger.debug("no-one handled onTouchEvent"); } } return r; } @Override public void computeScroll() { if (mScroller.computeScrollOffset()) { if (mScroller.isFinished()) { setZoomLevel(mZoomLevel); } else { scrollTo(mScroller.getCurrX(), mScroller.getCurrY()); } postInvalidate(); // Keep on drawing until the animation has // finished. } } @Override public void scrollTo(int x, int y) { final int worldSize = getWorldSizePx(); x %= worldSize; y %= worldSize; super.scrollTo(x, y); // do callback on listener if (mListener != null) { final ScrollEvent event = new ScrollEvent(this, x, y); mListener.onScroll(event); } } @Override public void onDraw(final Canvas c) { final long startMs = System.currentTimeMillis(); mProjection = new Projection(); if (mMultiTouchScale == 1.0f) { c.translate(getWidth() / 2, getHeight() / 2); } else { c.getMatrix(mMatrix); mMatrix.postTranslate(getWidth() / 2, getHeight() / 2); mMatrix.preScale(mMultiTouchScale, mMultiTouchScale, getScrollX(), getScrollY()); c.setMatrix(mMatrix); } /* Draw background */ c.drawColor(Color.LTGRAY); // This is too slow: // final Rect r = c.getClipBounds(); // mPaint.setColor(Color.GRAY); // mPaint.setPathEffect(new DashPathEffect(new float[] {1, 1}, 0)); // for (int x = r.left; x < r.right; x += 20) // c.drawLine(x, r.top, x, r.bottom, mPaint); // for (int y = r.top; y < r.bottom; y += 20) // c.drawLine(r.left, y, r.right, y, mPaint); /* Draw all Overlays. Avoid allocation by not doing enhanced loop. 
*/ for (int i = 0; i < mOverlays.size(); i++) { mOverlays.get(i).onManagedDraw(c, this); } final long endMs = System.currentTimeMillis(); if (DEBUGMODE) { logger.debug("Rendering overall: " + (endMs - startMs) + "ms"); } } @Override protected void onDetachedFromWindow() { this.mZoomController.setVisible(false); this.onDetach(); super.onDetachedFromWindow(); } // =========================================================== // Implementation of MultiTouchObjectCanvas // =========================================================== @Override public Object getDraggableObjectAtPoint(final PointInfo pt) { return this; } @Override public void getPositionAndScale(final Object obj, final PositionAndScale objPosAndScaleOut) { objPosAndScaleOut.set(0, 0, true, mMultiTouchScale, false, 0, 0, false, 0); } @Override public void selectObject(final Object obj, final PointInfo pt) { // if obj is null it means we released the pointers // if scale is not 1 it means we pinched if (obj == null && mMultiTouchScale != 1.0f) { final float scaleDiffFloat = (float) (Math.log(mMultiTouchScale) * ZOOM_LOG_BASE_INV); final int scaleDiffInt = Math.round(scaleDiffFloat); setZoomLevel(mZoomLevel + scaleDiffInt); // XXX maybe zoom in/out instead of zooming direct to zoom level // - probably not a good idea because you'll repeat the animation } // reset scale mMultiTouchScale = 1.0f; } @Override public boolean setPositionAndScale(final Object obj, final PositionAndScale aNewObjPosAndScale, final PointInfo aTouchPoint) { mMultiTouchScale = aNewObjPosAndScale.getScale(); invalidate(); // redraw return true; } /* * Set the MapListener for this view */ public void setMapListener(final MapListener ml) { mListener = ml; } // =========================================================== // Package Methods // =========================================================== /** * Get the world size in pixels. 
*/ int getWorldSizePx() { return 1 << getPixelZoomLevel(); } /** * Get the equivalent zoom level on pixel scale */ int getPixelZoomLevel() { return this.mZoomLevel + getMapTileZoom(mTileSizePixels); } // =========================================================== // Methods // =========================================================== private void checkZoomButtons() { this.mZoomController.setZoomInEnabled(canZoomIn()); this.mZoomController.setZoomOutEnabled(canZoomOut()); } /** * @param centerMapTileCoords * @param tileSizePx * @param reuse * just pass null if you do not have a Point to be 'recycled'. */ private Point getUpperLeftCornerOfCenterMapTileInScreen(final Point centerMapTileCoords, final int tileSizePx, final Point reuse) { final Point out = reuse != null ? reuse : new Point(); final int worldTiles_2 = 1 << mZoomLevel - 1; final int centerMapTileScreenLeft = (centerMapTileCoords.x - worldTiles_2) * tileSizePx - tileSizePx / 2; final int centerMapTileScreenTop = (centerMapTileCoords.y - worldTiles_2) * tileSizePx - tileSizePx / 2; out.set(centerMapTileScreenLeft, centerMapTileScreenTop); return out; } public void setBuiltInZoomControls(final boolean on) { this.mEnableZoomController = on; this.checkZoomButtons(); } public void setMultiTouchControls(final boolean on) { mMultiTouchController = on ? 
new MultiTouchController<Object>(this, false) : null; } private ITileSource getTileSourceFromAttributes(final AttributeSet aAttributeSet) { ITileSource tileSource = TileSourceFactory.DEFAULT_TILE_SOURCE; if (aAttributeSet != null) { final String tileSourceAttr = aAttributeSet.getAttributeValue(null, "tilesource"); if (tileSourceAttr != null) { try { final ITileSource r = TileSourceFactory.getTileSource(tileSourceAttr); logger.info("Using tile source specified in layout attributes: " + r); tileSource = r; } catch (final IllegalArgumentException e) { logger.warn("Invalid tile souce specified in layout attributes: " + tileSource); } } } if (aAttributeSet != null && tileSource instanceof IStyledTileSource) { String style = aAttributeSet.getAttributeValue(null, "style"); if (style == null) { // historic - old attribute name style = aAttributeSet.getAttributeValue(null, "cloudmadeStyle"); } if (style == null) { logger.info("Using default style: 1"); } else { logger.info("Using style specified in layout attributes: " + style); ((IStyledTileSource<?>) tileSource).setStyle(style); } } logger.info("Using tile source: " + tileSource); return tileSource; } // =========================================================== // Inner and Anonymous Classes // =========================================================== /** * This class may return valid results until the underlying {@link MapView} gets modified in any * way (i.e. new center). 
* * @author Nicolas Gramlich * @author Manuel Stahl */ public class Projection implements IProjection, GeoConstants { private final int viewWidth_2 = getWidth() / 2; private final int viewHeight_2 = getHeight() / 2; private final int worldSize_2 = getWorldSizePx() / 2; private final int offsetX = -worldSize_2; private final int offsetY = -worldSize_2; private final BoundingBoxE6 mBoundingBoxProjection; private final int mZoomLevelProjection; private final int mTileSizePixelsProjection; private final int mTileMapZoomProjection; private final Point mCenterMapTileCoordsProjection; private final Point mUpperLeftCornerOfCenterMapTileProjection; private Projection() { /* * Do some calculations and drag attributes to local variables to save some performance. */ mZoomLevelProjection = mZoomLevel; // TODO Draw to attributes and so make it only 'valid' for a short time. mTileSizePixelsProjection = mTileSizePixels; mTileMapZoomProjection = getMapTileZoom(getTileSizePixels()); /* * Get the center MapTile which is above this.mLatitudeE6 and this.mLongitudeE6 . 
*/ mCenterMapTileCoordsProjection = calculateCenterMapTileCoords(getTileSizePixels(), getZoomLevel()); mUpperLeftCornerOfCenterMapTileProjection = getUpperLeftCornerOfCenterMapTileInScreen( getCenterMapTileCoords(), getTileSizePixels(), null); mBoundingBoxProjection = MapView.this.getBoundingBox(); } public int getTileSizePixels() { return mTileSizePixelsProjection; } public int getTileMapZoom() { return mTileMapZoomProjection; } public int getZoomLevel() { return mZoomLevelProjection; } public Point getCenterMapTileCoords() { return mCenterMapTileCoordsProjection; } public Point getUpperLeftCornerOfCenterMapTile() { return mUpperLeftCornerOfCenterMapTileProjection; } public BoundingBoxE6 getBoundingBox() { return mBoundingBoxProjection; } private Point calculateCenterMapTileCoords(final int tileSizePixels, final int zoomLevel) { final int mapTileZoom = getMapTileZoom(tileSizePixels); final int worldTiles_2 = 1 << zoomLevel - 1; // convert to tile coordinate and make positive return new Point((getScrollX() >> mapTileZoom) + worldTiles_2, (getScrollY() >> mapTileZoom) + worldTiles_2); } /** * Converts x/y ScreenCoordinates to the underlying GeoPoint. * * @param x * @param y * @return GeoPoint under x/y. */ public GeoPoint fromPixels(final float x, final float y) { return getBoundingBox().getGeoPointOfRelativePositionWithLinearInterpolation( x / getWidth(), y / getHeight()); } public Point fromMapPixels(final int x, final int y, final Point reuse) { final Point out = reuse != null ? reuse : new Point(); out.set(x - viewWidth_2, y - viewHeight_2); out.offset(getScrollX(), getScrollY()); return out; } /** * Converts a GeoPoint to its ScreenCoordinates. <br/> * <br/> * <b>CAUTION</b> ! Conversion currently has a large error on <code>zoomLevels <= 7</code>.<br/> * The Error on ZoomLevels higher than 7, the error is below <code>1px</code>.<br/> * TODO: Add a linear interpolation to minimize this error. 
* * <PRE> * Zoom Error(m) Error(px) * 11 6m 1/12px * 10 24m 1/6px * 8 384m 1/2px * 6 6144m 3px * 4 98304m 10px * </PRE> * * @param in * the GeoPoint you want the onScreenCoordinates of. * @param reuse * just pass null if you do not have a Point to be 'recycled'. * @return the Point containing the approximated ScreenCoordinates of the GeoPoint passed. */ public Point toMapPixels(final GeoPoint in, final Point reuse) { final Point out = reuse != null ? reuse : new Point(); final Point coords = Mercator.projectGeoPoint(in.getLatitudeE6(), in.getLongitudeE6(), getPixelZoomLevel(), null); out.set(coords.x, coords.y); out.offset(offsetX, offsetY); return out; } /** * Performs only the first computationally heavy part of the projection, needToCall * toMapPixelsTranslated to get final position. * * @param latituteE6 * the latitute of the point * @param longitudeE6 * the longitude of the point * @param reuse * just pass null if you do not have a Point to be 'recycled'. * @return intermediate value to be stored and passed to toMapPixelsTranslated on paint. */ public Point toMapPixelsProjected(final int latituteE6, final int longitudeE6, final Point reuse) { final Point out = reuse != null ? reuse : new Point(); // 26 is the biggest zoomlevel we can project final Point coords = Mercator.projectGeoPoint(latituteE6, longitudeE6, 28, out); out.set(coords.x, coords.y); return out; } /** * Performs the second computationally light part of the projection. * * @param in * the Point calculated by the toMapPixelsProjected * @param reuse * just pass null if you do not have a Point to be 'recycled'. * @return the Point containing the approximated ScreenCoordinates of the initial GeoPoint * passed to the toMapPixelsProjected. */ public Point toMapPixelsTranslated(final Point in, final Point reuse) { final Point out = reuse != null ? 
reuse : new Point(); // 26 is the biggest zoomlevel we can project final int zoomDifference = 28 - getPixelZoomLevel(); out.set((in.x >> zoomDifference) + offsetX, (in.y >> zoomDifference) + offsetY); return out; } /** * Translates a rectangle from screen coordinates to intermediate coordinates. * * @param in * the rectangle in screen coordinates * @return a rectangle in intermediate coords. */ public Rect fromPixelsToProjected(final Rect in) { final Rect result = new Rect(); // 26 is the biggest zoomlevel we can project final int zoomDifference = 28 - getPixelZoomLevel(); final int x0 = in.left - offsetX << zoomDifference; final int x1 = in.right - offsetX << zoomDifference; final int y0 = in.bottom - offsetX << zoomDifference; final int y1 = in.top - offsetX << zoomDifference; result.set(Math.min(x0, x1), Math.min(y0, y1), Math.max(x0, x1), Math.max(y0, y1)); return result; } public Point toPixels(final Point tileCoords, final Point reuse) { return toPixels(tileCoords.x, tileCoords.y, reuse); } public Point toPixels(final int tileX, final int tileY, final Point reuse) { final Point out = reuse != null ? 
reuse : new Point(); out.set(tileX * getTileSizePixels(), tileY * getTileSizePixels()); out.offset(offsetX, offsetY); return out; } // not presently used public Rect toPixels(final BoundingBoxE6 pBoundingBoxE6) { final Rect rect = new Rect(); final Point reuse = new Point(); toMapPixels( new GeoPoint(pBoundingBoxE6.getLatNorthE6(), pBoundingBoxE6.getLonWestE6()), reuse); rect.left = reuse.x; rect.top = reuse.y; toMapPixels( new GeoPoint(pBoundingBoxE6.getLatSouthE6(), pBoundingBoxE6.getLonEastE6()), reuse); rect.right = reuse.x; rect.bottom = reuse.y; return rect; } @Override public float metersToEquatorPixels(final float meters) { return meters / EQUATORCIRCUMFENCE * getWorldSizePx(); } @Override public Point toPixels(final GeoPoint in, final Point out) { return toMapPixels(in, out); } @Override public GeoPoint fromPixels(final int x, final int y) { return fromPixels((float) x, (float) y); } } private class MapViewGestureDetectorListener implements OnGestureListener { @Override public boolean onDown(final MotionEvent e) { mZoomController.setVisible(mEnableZoomController); return true; } @Override public boolean onFling(final MotionEvent e1, final MotionEvent e2, final float velocityX, final float velocityY) { final int worldSize = getWorldSizePx(); mScroller.fling(getScrollX(), getScrollY(), (int) -velocityX, (int) -velocityY, -worldSize, worldSize, -worldSize, worldSize); return true; } @Override public void onLongPress(final MotionEvent e) { MapView.this.onLongPress(e); } @Override public boolean onScroll(final MotionEvent e1, final MotionEvent e2, final float distanceX, final float distanceY) { scrollBy((int) distanceX, (int) distanceY); return true; } @Override public void onShowPress(final MotionEvent e) { } @Override public boolean onSingleTapUp(final MotionEvent e) { return MapView.this.onSingleTapUp(e); } } private class MapViewDoubleClickListener implements GestureDetector.OnDoubleTapListener { @Override public boolean onDoubleTap(final MotionEvent e) { 
for (int i = mOverlays.size() - 1; i >= 0; i--) if (mOverlays.get(i).onDoubleTapUp(e, MapView.this)) return true; final GeoPoint center = getProjection().fromPixels(e.getX(), e.getY()); return zoomInFixing(center); } @Override public boolean onDoubleTapEvent(final MotionEvent e) { return false; } @Override public boolean onSingleTapConfirmed(final MotionEvent e) { return false; } } private class MapViewZoomListener implements OnZoomListener { @Override public void onZoom(final boolean zoomIn) { if (zoomIn) { getController().zoomIn(); } else { getController().zoomOut(); } } @Override public void onVisibilityChanged(final boolean visible) { } } private class MyAnimationListener implements AnimationListener { private int targetZoomLevel; private boolean animating; @Override public void onAnimationEnd(final Animation aAnimation) { animating = false; MapView.this.post(new Runnable() { @Override public void run() { setZoomLevel(targetZoomLevel); } }); } @Override public void onAnimationRepeat(final Animation aAnimation) { } @Override public void onAnimationStart(final Animation aAnimation) { animating = true; } } }
- Just a little documentation for an non-obvious call.
osmdroid-android/src/org/osmdroid/views/MapView.java
- Just a little documentation for an non-obvious call.
<ide><path>smdroid-android/src/org/osmdroid/views/MapView.java <ide> public void computeScroll() { <ide> if (mScroller.computeScrollOffset()) { <ide> if (mScroller.isFinished()) { <add> // This will facilitate snapping-to any Snappable points. <ide> setZoomLevel(mZoomLevel); <ide> } else { <ide> scrollTo(mScroller.getCurrX(), mScroller.getCurrY());
JavaScript
mit
e32082f3f9f62aa6900d7025297b7592547ce8d9
0
yfinkelstein/node-zookeeper,yfinkelstein/node-zookeeper,yfinkelstein/node-zookeeper,yfinkelstein/node-zookeeper
/** * ACL * @typedef {Object} acl * @property {number} perms * @property {string} scheme * @property {string} auth */ /** * stat * @typedef {Object} stat * @property {number} czxid * @property {number} mzxid * @property {number} ctime * @property {number} mtime * @property {number} version * @property {number} cversion * @property {number} aversion * @property {string} ephemeralOwner * @property {number} dataLength * @property {number} numChildren * @property {number} pzxid */ /** * Mkdir callback * @callback mkdirCb * @param {Error} error * @param {boolean} [success] */ /** * Connect callback * @callback connectCb * @param {Error} error * @param {ZooKeeper} client */ /** * Path callback * @callback pathCb * @param {number} rc * @param {number} error * @param {string} path */ /** * Stat callback * @callback statCb * @param {number} rc * @param {number} error * @param {stat} stat */ /** * Data callback * @callback dataCb * @param {number} rc * @param {number} error * @param {stat} stat * @param {string|Buffer} data */ /** * Child callback * @callback childCb * @param {number} rc * @param {number} error * @param {Array.<string>} children */ /** * Child2 callback * @callback child2Cb * @param {number} rc * @param {number} error * @param {Array.<string>} children * @param {stat} stat */ /** * Value callback * @callback valueCb * @param {number} rc * @param {number} error * @param {*} value */ /** * Void callback * @callback voidCb * @param {number} rc * @param {number} error */ /** * Watch callback * @callback watchCb * @param {number} type * @param {number} state * @param {string} path */ /** * ACL callback * @callback aclCb * @param {number} rc * @param {number} error * @param {acl} acl * @param {stat} stat */
lib/typedefs.js
/** * ACL * @typedef {Object} acl * @property {number} perms * @property {string} scheme * @property {string} auth */ /** * stat * @typedef {Object} stat * @property {number} czxid * @property {number} mzxid * @property {number} ctime * @property {number} mtime * @property {number} version * @property {number} cversion * @property {number} aversion * @property {string} ephemeralOwner * @property {number} dataLength * @property {number} numChildren * @property {number} pzxid */ /** * Mkdir callback * @typedef {callback} mkdirCb * @param {Error} error * @param {boolean} [success] */ /** * Connect callback * @typedef {callback} connectCb * @param {Error} error * @param {ZooKeeper} client */ /** * Path callback * @typedef {callback} pathCb * @param {number} rc * @param {number} error * @param {string} path */ /** * Stat callback * @typedef {callback} statCb * @param {number} rc * @param {number} error * @param {stat} stat */ /** * Data callback * @typedef {callback} dataCb * @param {number} rc * @param {number} error * @param {stat} stat * @param {string|Buffer} data */ /** * Child callback * @typedef {callback} childCb * @param {number} rc * @param {number} error * @param {Array.<string>} children */ /** * Child2 callback * @typedef {callback} child2Cb * @param {number} rc * @param {number} error * @param {Array.<string>} children * @param {stat} stat */ /** * Value callback * @typedef {callback} valueCb * @param {number} rc * @param {number} error * @param {*} value */ /** * Void callback * @typedef {callback} voidCb * @param {number} rc * @param {number} error */ /** * Watch callback * @typedef {callback} watchCb * @param {number} type * @param {number} state * @param {string} path */ /** * ACL callback * @typedef {callback} aclCb * @param {number} rc * @param {number} error * @param {acl} acl * @param {stat} stat */
fix: use @callback to define types
lib/typedefs.js
fix: use @callback to define types
<ide><path>ib/typedefs.js <ide> <ide> /** <ide> * Mkdir callback <del> * @typedef {callback} mkdirCb <add> * @callback mkdirCb <ide> * @param {Error} error <ide> * @param {boolean} [success] <ide> */ <ide> <ide> /** <ide> * Connect callback <del> * @typedef {callback} connectCb <add> * @callback connectCb <ide> * @param {Error} error <ide> * @param {ZooKeeper} client <ide> */ <ide> <ide> /** <ide> * Path callback <del> * @typedef {callback} pathCb <add> * @callback pathCb <ide> * @param {number} rc <ide> * @param {number} error <ide> * @param {string} path <ide> <ide> /** <ide> * Stat callback <del> * @typedef {callback} statCb <add> * @callback statCb <ide> * @param {number} rc <ide> * @param {number} error <ide> * @param {stat} stat <ide> <ide> /** <ide> * Data callback <del> * @typedef {callback} dataCb <add> * @callback dataCb <ide> * @param {number} rc <ide> * @param {number} error <ide> * @param {stat} stat <ide> <ide> /** <ide> * Child callback <del> * @typedef {callback} childCb <add> * @callback childCb <ide> * @param {number} rc <ide> * @param {number} error <ide> * @param {Array.<string>} children <ide> <ide> /** <ide> * Child2 callback <del> * @typedef {callback} child2Cb <add> * @callback child2Cb <ide> * @param {number} rc <ide> * @param {number} error <ide> * @param {Array.<string>} children <ide> <ide> /** <ide> * Value callback <del> * @typedef {callback} valueCb <add> * @callback valueCb <ide> * @param {number} rc <ide> * @param {number} error <ide> * @param {*} value <ide> <ide> /** <ide> * Void callback <del> * @typedef {callback} voidCb <add> * @callback voidCb <ide> * @param {number} rc <ide> * @param {number} error <ide> */ <ide> <ide> /** <ide> * Watch callback <del> * @typedef {callback} watchCb <add> * @callback watchCb <ide> * @param {number} type <ide> * @param {number} state <ide> * @param {string} path <ide> <ide> /** <ide> * ACL callback <del> * @typedef {callback} aclCb <add> * @callback aclCb <ide> * @param {number} rc <ide> * 
@param {number} error <ide> * @param {acl} acl
Java
apache-2.0
17796e24a84ae5135a5c638c238bbee32e8f9c48
0
kingmook/sakai,lorenamgUMU/sakai,wfuedu/sakai,ouit0408/sakai,ktakacs/sakai,zqian/sakai,introp-software/sakai,kingmook/sakai,whumph/sakai,noondaysun/sakai,pushyamig/sakai,ouit0408/sakai,noondaysun/sakai,kwedoff1/sakai,OpenCollabZA/sakai,liubo404/sakai,lorenamgUMU/sakai,joserabal/sakai,introp-software/sakai,bzhouduke123/sakai,surya-janani/sakai,bkirschn/sakai,wfuedu/sakai,whumph/sakai,whumph/sakai,Fudan-University/sakai,conder/sakai,noondaysun/sakai,puramshetty/sakai,buckett/sakai-gitflow,ktakacs/sakai,duke-compsci290-spring2016/sakai,introp-software/sakai,clhedrick/sakai,introp-software/sakai,liubo404/sakai,OpenCollabZA/sakai,pushyamig/sakai,frasese/sakai,wfuedu/sakai,clhedrick/sakai,colczr/sakai,kwedoff1/sakai,hackbuteer59/sakai,conder/sakai,puramshetty/sakai,joserabal/sakai,frasese/sakai,zqian/sakai,udayg/sakai,Fudan-University/sakai,ktakacs/sakai,bzhouduke123/sakai,bzhouduke123/sakai,lorenamgUMU/sakai,ktakacs/sakai,rodriguezdevera/sakai,lorenamgUMU/sakai,rodriguezdevera/sakai,kwedoff1/sakai,zqian/sakai,conder/sakai,tl-its-umich-edu/sakai,kwedoff1/sakai,whumph/sakai,wfuedu/sakai,rodriguezdevera/sakai,willkara/sakai,willkara/sakai,pushyamig/sakai,bkirschn/sakai,Fudan-University/sakai,ouit0408/sakai,pushyamig/sakai,liubo404/sakai,udayg/sakai,tl-its-umich-edu/sakai,bkirschn/sakai,frasese/sakai,joserabal/sakai,duke-compsci290-spring2016/sakai,willkara/sakai,rodriguezdevera/sakai,zqian/sakai,surya-janani/sakai,puramshetty/sakai,zqian/sakai,willkara/sakai,zqian/sakai,ouit0408/sakai,surya-janani/sakai,conder/sakai,lorenamgUMU/sakai,buckett/sakai-gitflow,ktakacs/sakai,colczr/sakai,ktakacs/sakai,hackbuteer59/sakai,hackbuteer59/sakai,buckett/sakai-gitflow,kingmook/sakai,pushyamig/sakai,rodriguezdevera/sakai,bzhouduke123/sakai,tl-its-umich-edu/sakai,Fudan-University/sakai,surya-janani/sakai,bzhouduke123/sakai,puramshetty/sakai,ktakacs/sakai,buckett/sakai-gitflow,hackbuteer59/sakai,frasese/sakai,puramshetty/sakai,Fudan-University/sakai,frasese/sakai,liubo404/sakai,frasese/saka
i,conder/sakai,wfuedu/sakai,clhedrick/sakai,bkirschn/sakai,duke-compsci290-spring2016/sakai,lorenamgUMU/sakai,zqian/sakai,duke-compsci290-spring2016/sakai,bkirschn/sakai,joserabal/sakai,bkirschn/sakai,introp-software/sakai,ouit0408/sakai,introp-software/sakai,buckett/sakai-gitflow,colczr/sakai,tl-its-umich-edu/sakai,whumph/sakai,joserabal/sakai,hackbuteer59/sakai,clhedrick/sakai,colczr/sakai,udayg/sakai,duke-compsci290-spring2016/sakai,kingmook/sakai,surya-janani/sakai,joserabal/sakai,kwedoff1/sakai,OpenCollabZA/sakai,ktakacs/sakai,pushyamig/sakai,bkirschn/sakai,tl-its-umich-edu/sakai,colczr/sakai,clhedrick/sakai,ouit0408/sakai,hackbuteer59/sakai,pushyamig/sakai,clhedrick/sakai,tl-its-umich-edu/sakai,liubo404/sakai,ouit0408/sakai,buckett/sakai-gitflow,puramshetty/sakai,clhedrick/sakai,bzhouduke123/sakai,Fudan-University/sakai,puramshetty/sakai,willkara/sakai,introp-software/sakai,colczr/sakai,noondaysun/sakai,conder/sakai,kingmook/sakai,Fudan-University/sakai,zqian/sakai,rodriguezdevera/sakai,tl-its-umich-edu/sakai,frasese/sakai,joserabal/sakai,OpenCollabZA/sakai,kingmook/sakai,udayg/sakai,surya-janani/sakai,tl-its-umich-edu/sakai,willkara/sakai,kingmook/sakai,liubo404/sakai,noondaysun/sakai,ouit0408/sakai,buckett/sakai-gitflow,Fudan-University/sakai,OpenCollabZA/sakai,bzhouduke123/sakai,duke-compsci290-spring2016/sakai,conder/sakai,willkara/sakai,udayg/sakai,bkirschn/sakai,wfuedu/sakai,colczr/sakai,liubo404/sakai,hackbuteer59/sakai,willkara/sakai,introp-software/sakai,noondaysun/sakai,kwedoff1/sakai,joserabal/sakai,kingmook/sakai,conder/sakai,surya-janani/sakai,rodriguezdevera/sakai,whumph/sakai,udayg/sakai,udayg/sakai,hackbuteer59/sakai,OpenCollabZA/sakai,kwedoff1/sakai,wfuedu/sakai,lorenamgUMU/sakai,buckett/sakai-gitflow,clhedrick/sakai,noondaysun/sakai,kwedoff1/sakai,frasese/sakai,pushyamig/sakai,liubo404/sakai,duke-compsci290-spring2016/sakai,udayg/sakai,OpenCollabZA/sakai,colczr/sakai,surya-janani/sakai,puramshetty/sakai,wfuedu/sakai,rodriguezdevera/sakai,whum
ph/sakai,bzhouduke123/sakai,lorenamgUMU/sakai,noondaysun/sakai,duke-compsci290-spring2016/sakai,OpenCollabZA/sakai,whumph/sakai
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2006, 2007, 2008, 2009 Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.osedu.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.content.impl; import java.io.PrintWriter; import java.net.URI; import java.util.Collections; import java.util.List; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.component.cover.ServerConfigurationService; import org.sakaiproject.content.api.ContentCollection; import org.sakaiproject.content.api.ContentEntity; import org.sakaiproject.content.cover.ContentHostingService; import org.sakaiproject.entity.api.Entity; import org.sakaiproject.entity.api.Reference; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.exception.IdUnusedException; import org.sakaiproject.site.api.Site; import org.sakaiproject.site.cover.SiteService; import org.sakaiproject.user.api.User; import org.sakaiproject.user.api.UserNotDefinedException; import org.sakaiproject.user.cover.UserDirectoryService; import org.sakaiproject.util.ResourceLoader; import org.sakaiproject.util.Validator; 
/** * <p> * CollectionAccessFormatter is formatter for collection access. * </p> */ @SuppressWarnings("deprecation") public class CollectionAccessFormatter { private static final Log M_log = LogFactory.getLog(CollectionAccessFormatter.class); /** * Format the collection as an HTML display. */ @SuppressWarnings({ "unchecked" }) public static void format(ContentCollection x, Reference ref, HttpServletRequest req, HttpServletResponse res, ResourceLoader rb, String accessPointTrue, String accessPointFalse) { // do not allow directory listings for /attachments and its subfolders if(ContentHostingService.isAttachmentResource(x.getId())) { try { res.sendError(HttpServletResponse.SC_NOT_FOUND); return; } catch ( java.io.IOException e ) { return; } } PrintWriter out = null; // don't set the writer until we verify that // getallresources is going to work. boolean printedHeader = false; boolean printedDiv = false; try { res.setContentType("text/html; charset=UTF-8"); out = res.getWriter(); ResourceProperties pl = x.getProperties(); String webappRoot = ServerConfigurationService.getServerUrl(); String skinRepo = ServerConfigurationService.getString("skin.repo", "/library/skin"); String skinName = "default"; String[] parts= StringUtils.split(x.getId(), Entity.SEPARATOR); // Is this a site folder (Resources or Dropbox)? 
If so, get the site skin if (x.getId().startsWith(org.sakaiproject.content.api.ContentHostingService.COLLECTION_SITE) || x.getId().startsWith(org.sakaiproject.content.api.ContentHostingService.COLLECTION_DROPBOX)) { if (parts.length > 1) { String siteId = parts[1]; try { Site site = SiteService.getSite(siteId); if (site.getSkin() != null) { skinName = site.getSkin(); } } catch (IdUnusedException e) { // Cannot get site - ignore it } } } // Output the headers out.println("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">"); out.println("<html><head>"); out.println("<title>" + rb.getFormattedMessage("colformat.pagetitle", new Object[]{ Validator.escapeHtml(pl.getProperty(ResourceProperties.PROP_DISPLAY_NAME))}) + "</title>"); out.println("<link href=\"" + webappRoot + skinRepo+ "/" + skinName + "/access.css\" type=\"text/css\" rel=\"stylesheet\" media=\"screen\">"); out.println("<script src=\"" + webappRoot + "/library/js/jquery.js\" type=\"text/javascript\">"); out.println("</script>"); out.println("</head><body class=\"specialLink\">"); out.println("<script type=\"text/javascript\">$(document).ready(function(){resizeFrame();function resizeFrame(){if (window.name != \"\") {var frame = parent.document.getElementById(window.name);if (frame) {var clientH = document.body.clientHeight + 10;$(frame).height(clientH);}}}jQuery.fn.fadeToggle = function(speed, easing, callback){return this.animate({opacity: \'toggle\'}, speed, easing, callback);};if ($(\'.textPanel\').size() < 1){$(\'a#toggler\').hide();}$(\'a#toggler\').click(function(){$(\'.textPanel\').fadeToggle(\'1000\', \'\', \'resizeFrame\');});\n$(\'.file a\').each(function (i){\n$(this).addClass(getFileExtension($(this).attr(\'href\')));\n})\nfunction getFileExtension(filename)\n{\nvar ext = /^.+\\.([^.]+)$/.exec(filename);\nreturn ext == null ? 
\"\" : ext[1].toLowerCase();\n}\n});</script>"); out.println("<div class=\"directoryIndex\">"); // for content listing it's best to use a real title out.println("<h3>" + Validator.escapeHtml(pl.getProperty(ResourceProperties.PROP_DISPLAY_NAME)) + "</h3>"); out.println("<p id=\"toggle\"><a id=\"toggler\" href=\"#\">" + rb.getString("colformat.showhide") + "</a></p>"); String folderdesc = pl.getProperty(ResourceProperties.PROP_DESCRIPTION); if (folderdesc != null && !folderdesc.equals("")) out.println("<div class=\"textPanel\">" + folderdesc + "</div>"); out.println("<ul>"); out.println("<li style=\"display:none\">"); out.println("</li>"); printedHeader = true; printedDiv = true; if (parts.length > 2) { // go up a level out.println("<li class=\"upfolder\"><a href=\"../\"><img src=\"/library/image/sakai/folder-up.gif\" alt=\"" + rb.getString("colformat.uplevel.alttext") + "\"/>" + rb.getString("colformat.uplevel") + "</a></li>"); } // Sort the collection items List<ContentEntity> members = x.getMemberResources(); boolean hasCustomSort = false; try { hasCustomSort = x.getProperties().getBooleanProperty(ResourceProperties.PROP_HAS_CUSTOM_SORT); } catch (Exception e) { // use false that's already there } if (hasCustomSort) Collections.sort(members, new ContentHostingComparator(ResourceProperties.PROP_CONTENT_PRIORITY, true)); else Collections.sort(members, new ContentHostingComparator(ResourceProperties.PROP_DISPLAY_NAME, true)); // Iterate through content items URI baseUri = new URI(x.getUrl()); for (ContentEntity content : members) { ResourceProperties properties = content.getProperties(); boolean isCollection = content.isCollection(); String xs = content.getId(); String contentUrl = content.getUrl(); // These both perform the same check in the implementation but we should observe the API. // This also checks to see if a resource is hidden or time limited. 
if ( isCollection) { if (!ContentHostingService.allowGetCollection(xs)) { continue; } } else { if (!ContentHostingService.allowGetResource(xs)) { continue; } } if (isCollection) { xs = xs.substring(0, xs.length() - 1); xs = xs.substring(xs.lastIndexOf('/') + 1) + '/'; } else { xs = xs.substring(xs.lastIndexOf('/') + 1); } try { // Relativize the URL (canonical item URL relative to canonical collection URL). // Inter alias this will preserve alternate access paths via aliases, e.g. /web/ URI contentUri = new URI(contentUrl); URI relativeUri = baseUri.relativize(contentUri); contentUrl = relativeUri.toString(); if (isCollection) { // Folder String desc = properties.getProperty(ResourceProperties.PROP_DESCRIPTION); if ((desc == null) || desc.equals("")) desc = ""; else desc = "<div class=\"textPanel\">" + desc + "</div>"; out.println("<li class=\"folder\"><a href=\"" + contentUrl + "\">" + Validator.escapeHtml(properties.getProperty(ResourceProperties.PROP_DISPLAY_NAME)) + "</a>" + desc + "</li>"); } else { // File /* String createdBy = getUserProperty(properties, ResourceProperties.PROP_CREATOR).getDisplayName(); Time modTime = properties.getTimeProperty(ResourceProperties.PROP_MODIFIED_DATE); String modifiedTime = modTime.toStringLocalShortDate() + " " + modTime.toStringLocalShort(); ContentResource contentResource = (ContentResource) content; long filesize = ((contentResource.getContentLength() - 1) / 1024) + 1; String filetype = contentResource.getContentType(); */ String desc = properties.getProperty(ResourceProperties.PROP_DESCRIPTION); if ((desc == null) || desc.equals("")) desc = ""; else desc = "<div class=\"textPanel\">" + Validator.escapeHtml(desc) + "</div>"; String resourceType = content.getResourceType().replace('.', '_'); out.println("<li class=\"file\"><a href=\"" + contentUrl + "\" target=_blank class=\"" + resourceType+"\">" + Validator.escapeHtml(properties.getProperty(ResourceProperties.PROP_DISPLAY_NAME)) + "</a>" + desc + "</li>"); } } catch 
(Throwable ignore) { // TODO - what types of failures are being caught here? out.println("<li class=\"file\"><a href=\"" + contentUrl + "\" target=_blank>" + Validator.escapeHtml(xs) + "</a></li>"); } } } catch (Throwable e) { M_log.warn("Problem formatting HTML for collection: "+ x.getId(), e); } if (out != null && printedHeader) { out.println("</ul>"); if (printedDiv) out.println("</div>"); out.println("</body></html>"); } } protected static User getUserProperty(ResourceProperties props, String name) { String id = props.getProperty(name); if (id != null) { try { return UserDirectoryService.getUser(id); } catch (UserNotDefinedException e) { } } return null; } }
kernel/kernel-impl/src/main/java/org/sakaiproject/content/impl/CollectionAccessFormatter.java
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2006, 2007, 2008, 2009 Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.osedu.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.content.impl; import java.io.PrintWriter; import java.util.Collections; import java.util.List; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.component.cover.ServerConfigurationService; import org.sakaiproject.content.api.ContentCollection; import org.sakaiproject.content.api.ContentEntity; import org.sakaiproject.content.cover.ContentHostingService; import org.sakaiproject.entity.api.Entity; import org.sakaiproject.entity.api.Reference; import org.sakaiproject.entity.api.ResourceProperties; import org.sakaiproject.exception.IdUnusedException; import org.sakaiproject.site.api.Site; import org.sakaiproject.site.cover.SiteService; import org.sakaiproject.user.api.User; import org.sakaiproject.user.api.UserNotDefinedException; import org.sakaiproject.user.cover.UserDirectoryService; import org.sakaiproject.util.ResourceLoader; import org.sakaiproject.util.Validator; /** * <p> * 
CollectionAccessFormatter is formatter for collection access. * </p> */ @SuppressWarnings("deprecation") public class CollectionAccessFormatter { private static final Log M_log = LogFactory.getLog(CollectionAccessFormatter.class); /** * Format the collection as an HTML display. */ @SuppressWarnings({ "unchecked" }) public static void format(ContentCollection x, Reference ref, HttpServletRequest req, HttpServletResponse res, ResourceLoader rb, String accessPointTrue, String accessPointFalse) { // do not allow directory listings for /attachments and its subfolders if(ContentHostingService.isAttachmentResource(x.getId())) { try { res.sendError(HttpServletResponse.SC_NOT_FOUND); return; } catch ( java.io.IOException e ) { return; } } PrintWriter out = null; // don't set the writer until we verify that // getallresources is going to work. boolean printedHeader = false; boolean printedDiv = false; try { res.setContentType("text/html; charset=UTF-8"); out = res.getWriter(); ResourceProperties pl = x.getProperties(); String webappRoot = ServerConfigurationService.getServerUrl(); String skinRepo = ServerConfigurationService.getString("skin.repo", "/library/skin"); String skinName = "default"; String[] parts= StringUtils.split(x.getId(), Entity.SEPARATOR); // Is this a site folder (Resources or Dropbox)? 
If so, get the site skin if (x.getId().startsWith(org.sakaiproject.content.api.ContentHostingService.COLLECTION_SITE) || x.getId().startsWith(org.sakaiproject.content.api.ContentHostingService.COLLECTION_DROPBOX)) { if (parts.length > 1) { String siteId = parts[1]; try { Site site = SiteService.getSite(siteId); if (site.getSkin() != null) { skinName = site.getSkin(); } } catch (IdUnusedException e) { // Cannot get site - ignore it } } } // Output the headers out.println("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">"); out.println("<html><head>"); out.println("<title>" + rb.getFormattedMessage("colformat.pagetitle", new Object[]{ Validator.escapeHtml(pl.getProperty(ResourceProperties.PROP_DISPLAY_NAME))}) + "</title>"); out.println("<link href=\"" + webappRoot + skinRepo+ "/" + skinName + "/access.css\" type=\"text/css\" rel=\"stylesheet\" media=\"screen\">"); out.println("<script src=\"" + webappRoot + "/library/js/jquery.js\" type=\"text/javascript\">"); out.println("</script>"); out.println("</head><body class=\"specialLink\">"); out.println("<script type=\"text/javascript\">$(document).ready(function(){resizeFrame();function resizeFrame(){if (window.name != \"\") {var frame = parent.document.getElementById(window.name);if (frame) {var clientH = document.body.clientHeight + 10;$(frame).height(clientH);}}}jQuery.fn.fadeToggle = function(speed, easing, callback){return this.animate({opacity: \'toggle\'}, speed, easing, callback);};if ($(\'.textPanel\').size() < 1){$(\'a#toggler\').hide();}$(\'a#toggler\').click(function(){$(\'.textPanel\').fadeToggle(\'1000\', \'\', \'resizeFrame\');});\n$(\'.file a\').each(function (i){\n$(this).addClass(getFileExtension($(this).attr(\'href\')));\n})\nfunction getFileExtension(filename)\n{\nvar ext = /^.+\\.([^.]+)$/.exec(filename);\nreturn ext == null ? 
\"\" : ext[1].toLowerCase();\n}\n});</script>"); out.println("<div class=\"directoryIndex\">"); // for content listing it's best to use a real title out.println("<h3>" + Validator.escapeHtml(pl.getProperty(ResourceProperties.PROP_DISPLAY_NAME)) + "</h3>"); out.println("<p id=\"toggle\"><a id=\"toggler\" href=\"#\">" + rb.getString("colformat.showhide") + "</a></p>"); String folderdesc = pl.getProperty(ResourceProperties.PROP_DESCRIPTION); if (folderdesc != null && !folderdesc.equals("")) out.println("<div class=\"textPanel\">" + folderdesc + "</div>"); out.println("<ul>"); out.println("<li style=\"display:none\">"); out.println("</li>"); printedHeader = true; printedDiv = true; if (parts.length > 2) { // go up a level out.println("<li class=\"upfolder\"><a href=\"../\"><img src=\"/library/image/sakai/folder-up.gif\" alt=\"" + rb.getString("colformat.uplevel.alttext") + "\"/>" + rb.getString("colformat.uplevel") + "</a></span></li>"); } // Sort the collection items List<ContentEntity> members = x.getMemberResources(); boolean hasCustomSort = false; try { hasCustomSort = x.getProperties().getBooleanProperty(ResourceProperties.PROP_HAS_CUSTOM_SORT); } catch (Exception e) { // use false that's already there } if (hasCustomSort) Collections.sort(members, new ContentHostingComparator(ResourceProperties.PROP_CONTENT_PRIORITY, true)); else Collections.sort(members, new ContentHostingComparator(ResourceProperties.PROP_DISPLAY_NAME, true)); // Iterate through content items for (ContentEntity content : members) { ResourceProperties properties = content.getProperties(); boolean isCollection = content.isCollection(); String xs = content.getId(); String contentUrl = content.getUrl(); // These both perform the same check in the implementation but we should observe the API. // This also checks to see if a resource is hidden or time limited. 
if ( isCollection) { if (!ContentHostingService.allowGetCollection(xs)) { continue; } } else { if (!ContentHostingService.allowGetResource(xs)) { continue; } } if (isCollection) { xs = xs.substring(0, xs.length() - 1); xs = xs.substring(xs.lastIndexOf('/') + 1) + '/'; } else { xs = xs.substring(xs.lastIndexOf('/') + 1); } try { if (isCollection) { // Folder String desc = properties.getProperty(ResourceProperties.PROP_DESCRIPTION); if ((desc == null) || desc.equals("")) desc = ""; else desc = "<div class=\"textPanel\">" + desc + "</div>"; out.println("<li class=\"folder\"><a href=\"" + contentUrl + "\">" + Validator.escapeHtml(properties.getProperty(ResourceProperties.PROP_DISPLAY_NAME)) + "</a>" + desc + "</li>"); } else { // File /* String createdBy = getUserProperty(properties, ResourceProperties.PROP_CREATOR).getDisplayName(); Time modTime = properties.getTimeProperty(ResourceProperties.PROP_MODIFIED_DATE); String modifiedTime = modTime.toStringLocalShortDate() + " " + modTime.toStringLocalShort(); ContentResource contentResource = (ContentResource) content; long filesize = ((contentResource.getContentLength() - 1) / 1024) + 1; String filetype = contentResource.getContentType(); */ String desc = properties.getProperty(ResourceProperties.PROP_DESCRIPTION); if ((desc == null) || desc.equals("")) desc = ""; else desc = "<div class=\"textPanel\">" + Validator.escapeHtml(desc) + "</div>"; String resourceType = content.getResourceType().replace('.', '_'); out.println("<li class=\"file\"><a href=\"" + contentUrl + "\" target=_blank class=\"" + resourceType+"\">" + Validator.escapeHtml(properties.getProperty(ResourceProperties.PROP_DISPLAY_NAME)) + "</a>" + desc + "</li>"); } } catch (Throwable ignore) { // TODO - what types of failures are being caught here? 
out.println("<li class=\"file\"><a href=\"" + contentUrl + "\" target=_blank>" + Validator.escapeHtml(xs) + "</a></li>"); } } } catch (Throwable e) { M_log.warn("Problem formatting HTML for collection: "+ x.getId(), e); } if (out != null && printedHeader) { out.println("</ul>"); if (printedDiv) out.println("</div>"); out.println("</body></html>"); } } protected static User getUserProperty(ResourceProperties props, String name) { String id = props.getProperty(name); if (id != null) { try { return UserDirectoryService.getUser(id); } catch (UserNotDefinedException e) { } } return null; } }
KNL-305 Use relative URLs for folders and files (preserve alias paths), fix errant span git-svn-id: 1bc6f63533c24e8bbbfb67ee5594815536d0113b@69259 66ffb92e-73f9-0310-93c1-f5514f145a0a
kernel/kernel-impl/src/main/java/org/sakaiproject/content/impl/CollectionAccessFormatter.java
KNL-305 Use relative URLs for folders and files (preserve alias paths), fix errant span
<ide><path>ernel/kernel-impl/src/main/java/org/sakaiproject/content/impl/CollectionAccessFormatter.java <ide> package org.sakaiproject.content.impl; <ide> <ide> import java.io.PrintWriter; <add>import java.net.URI; <ide> import java.util.Collections; <ide> import java.util.List; <ide> <ide> if (parts.length > 2) <ide> { <ide> // go up a level <del> out.println("<li class=\"upfolder\"><a href=\"../\"><img src=\"/library/image/sakai/folder-up.gif\" alt=\"" + rb.getString("colformat.uplevel.alttext") + "\"/>" + rb.getString("colformat.uplevel") + "</a></span></li>"); <add> out.println("<li class=\"upfolder\"><a href=\"../\"><img src=\"/library/image/sakai/folder-up.gif\" alt=\"" + rb.getString("colformat.uplevel.alttext") + "\"/>" + rb.getString("colformat.uplevel") + "</a></li>"); <ide> } <ide> <ide> // Sort the collection items <ide> <ide> // Iterate through content items <ide> <add> URI baseUri = new URI(x.getUrl()); <add> <ide> for (ContentEntity content : members) { <ide> <ide> ResourceProperties properties = content.getProperties(); <ide> <ide> try <ide> { <del> <add> // Relativize the URL (canonical item URL relative to canonical collection URL). <add> // Inter alias this will preserve alternate access paths via aliases, e.g. /web/ <add> <add> URI contentUri = new URI(contentUrl); <add> URI relativeUri = baseUri.relativize(contentUri); <add> contentUrl = relativeUri.toString(); <add> <ide> if (isCollection) <ide> { <ide> // Folder
JavaScript
mit
5b5017888ba69e28a45fc910b3de04be0f312945
0
VulcanJS/Vulcan,VulcanJS/Vulcan
/* eslint-disable no-console */ import VulcanEmail from '../namespace.js'; import Juice from 'juice'; import htmlToText from 'html-to-text'; import Handlebars from 'handlebars'; import { Utils, getSetting, registerSetting, runQuery, Strings, getString } from 'meteor/vulcan:lib'; // import from vulcan:lib because vulcan:core is not loaded yet import { Email } from 'meteor/email'; /* Get intl string. Usage: {{__ "posts.create"}} */ Handlebars.registerHelper('__', function(id, context) { const s = getString({ id, locale: context.data.root.locale }); return new Handlebars.SafeString(s); }); /* Get intl string, accepts a second variables argument. Usage: {{__ "posts.create" postVariables}} */ Handlebars.registerHelper('___', function(id, variables, context) { const s = getString({ id, variables, locale: context.data.root.locale }); return new Handlebars.SafeString(s); }); registerSetting('secondaryColor', '#444444'); registerSetting('accentColor', '#DD3416'); registerSetting('title', 'My App'); registerSetting('tagline'); registerSetting('emailFooter'); registerSetting('logoUrl'); registerSetting('logoHeight'); registerSetting('logoWidth'); registerSetting('defaultEmail', '[email protected]'); registerSetting('title', 'Vulcan'); registerSetting('enableDevelopmentEmails', false); VulcanEmail.templates = {}; VulcanEmail.addTemplates = templates => { _.extend(VulcanEmail.templates, templates); }; VulcanEmail.getTemplate = templateName => { if (!VulcanEmail.templates[templateName]) { throw new Error(`Couldn't find email template named “${templateName}”`); } return Handlebars.compile(VulcanEmail.templates[templateName], { noEscape: true, strict: true }); }; VulcanEmail.buildTemplate = (htmlContent, data = {}, locale) => { const emailProperties = { secondaryColor: getSetting('secondaryColor', '#444444'), accentColor: getSetting('accentColor', '#DD3416'), siteName: getSetting('title', 'My App'), tagline: getSetting('tagline'), siteUrl: Utils.getSiteUrl(), body: htmlContent, 
unsubscribe: '', accountLink: Utils.getSiteUrl() + 'account', footer: getSetting('emailFooter'), logoUrl: getSetting('logoUrl'), logoHeight: getSetting('logoHeight'), logoWidth: getSetting('logoWidth'), ...data, __: Strings[locale], }; const emailHTML = VulcanEmail.getTemplate('wrapper')(emailProperties); const inlinedHTML = Juice(emailHTML, { preserveMediaQueries: true }); const doctype = '<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">'; return doctype + inlinedHTML; }; VulcanEmail.generateTextVersion = html => { return htmlToText.fromString(html, { wordwrap: 130, }); }; VulcanEmail.send = (to, subject, html, text, throwErrors, cc, bcc, replyTo, headers, attachments) => { // TODO: limit who can send emails // TODO: fix this error: Error: getaddrinfo ENOTFOUND if (typeof to === 'object') { // eslint-disable-next-line no-redeclare var { to, cc, bcc, replyTo, subject, html, text, throwErrors, headers, attachments } = to; } const from = getSetting('defaultEmail', '[email protected]'); const siteName = getSetting('title', 'Vulcan'); subject = subject || '[' + siteName + ']'; if (typeof text === 'undefined') { // Auto-generate text version if it doesn't exist. Has bugs, but should be good enough. text = VulcanEmail.generateTextVersion(html); } const email = { from: from, to, cc, bcc, replyTo, subject, headers, text, html, attachments, }; const shouldSendEmail = process.env.NODE_ENV === 'production' || getSetting('enableDevelopmentEmails', false); console.log(`//////// sending email${shouldSendEmail ? 
'' : ' (simulation)'}…`); // eslint-disable-line console.log('from: ' + from); // eslint-disable-line console.log('to: ' + to); // eslint-disable-line console.log('cc: ' + cc); // eslint-disable-line console.log('bcc: ' + bcc); // eslint-disable-line console.log('replyTo: ' + replyTo); // eslint-disable-line console.log('headers: ' + JSON.stringify(headers)); // eslint-disable-line if (shouldSendEmail) { try { Email.send(email); } catch (error) { console.log('// error while sending email:'); // eslint-disable-line console.log(error); // eslint-disable-line if (throwErrors) throw error; } } return email; }; VulcanEmail.build = async ({ emailName, variables, locale }) => { // execute email's GraphQL query const email = VulcanEmail.emails[emailName]; const result = email.query ? await runQuery(email.query, variables, { locale }) : { data: {} }; // if email has a data() function, merge its return value with results from the query const data = email.data ? { ...result.data, ...email.data({ data: result.data, variables, locale }) } : result.data; const subject = typeof email.subject === 'function' ? email.subject({ data, variables, locale }) : email.subject; data.__ = Strings[locale]; data.locale = locale; const html = VulcanEmail.buildTemplate(VulcanEmail.getTemplate(email.template)(data), data, locale); return { data, subject, html }; }; VulcanEmail.buildAndSend = async ({ to, cc, bcc, replyTo, emailName, variables, locale = getSetting('locale'), headers, attachments }) => { const email = await VulcanEmail.build({ to, emailName, variables, locale }); return VulcanEmail.send({ to, cc, bcc, replyTo, subject: email.subject, html: email.html, headers, attachments }); }; VulcanEmail.buildAndSendHTML = (to, subject, html) => VulcanEmail.send(to, subject, VulcanEmail.buildTemplate(html));
packages/vulcan-email/lib/server/email.js
/* eslint-disable no-console */ import VulcanEmail from '../namespace.js'; import Juice from 'juice'; import htmlToText from 'html-to-text'; import Handlebars from 'handlebars'; import { Utils, getSetting, registerSetting, runQuery, Strings, getString } from 'meteor/vulcan:lib'; // import from vulcan:lib because vulcan:core is not loaded yet /* Get intl string. Usage: {{__ "posts.create"}} */ Handlebars.registerHelper('__', function(id, context) { const s = getString({ id, locale: context.data.root.locale }); return new Handlebars.SafeString(s); }); /* Get intl string, accepts a second variables argument. Usage: {{__ "posts.create" postVariables}} */ Handlebars.registerHelper('___', function(id, variables, context) { const s = getString({ id, variables, locale: context.data.root.locale }); return new Handlebars.SafeString(s); }); registerSetting('secondaryColor', '#444444'); registerSetting('accentColor', '#DD3416'); registerSetting('title', 'My App'); registerSetting('tagline'); registerSetting('emailFooter'); registerSetting('logoUrl'); registerSetting('logoHeight'); registerSetting('logoWidth'); registerSetting('defaultEmail', '[email protected]'); registerSetting('title', 'Vulcan'); registerSetting('enableDevelopmentEmails', false); VulcanEmail.templates = {}; VulcanEmail.addTemplates = templates => { _.extend(VulcanEmail.templates, templates); }; VulcanEmail.getTemplate = templateName => { if (!VulcanEmail.templates[templateName]) { throw new Error(`Couldn't find email template named “${templateName}”`); } return Handlebars.compile(VulcanEmail.templates[templateName], { noEscape: true, strict: true }); }; VulcanEmail.buildTemplate = (htmlContent, data = {}, locale) => { const emailProperties = { secondaryColor: getSetting('secondaryColor', '#444444'), accentColor: getSetting('accentColor', '#DD3416'), siteName: getSetting('title', 'My App'), tagline: getSetting('tagline'), siteUrl: Utils.getSiteUrl(), body: htmlContent, unsubscribe: '', accountLink: 
Utils.getSiteUrl() + 'account', footer: getSetting('emailFooter'), logoUrl: getSetting('logoUrl'), logoHeight: getSetting('logoHeight'), logoWidth: getSetting('logoWidth'), ...data, __: Strings[locale], }; const emailHTML = VulcanEmail.getTemplate('wrapper')(emailProperties); const inlinedHTML = Juice(emailHTML, { preserveMediaQueries: true }); const doctype = '<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">'; return doctype + inlinedHTML; }; VulcanEmail.generateTextVersion = html => { return htmlToText.fromString(html, { wordwrap: 130, }); }; VulcanEmail.send = (to, subject, html, text, throwErrors, cc, bcc, replyTo, headers) => { // TODO: limit who can send emails // TODO: fix this error: Error: getaddrinfo ENOTFOUND if (typeof to === 'object') { // eslint-disable-next-line no-redeclare var { to, cc, bcc, replyTo, subject, html, text, throwErrors, headers } = to; } const from = getSetting('defaultEmail', '[email protected]'); const siteName = getSetting('title', 'Vulcan'); subject = subject || '[' + siteName + ']'; if (typeof text === 'undefined') { // Auto-generate text version if it doesn't exist. Has bugs, but should be good enough. text = VulcanEmail.generateTextVersion(html); } const email = { from: from, to: to, cc: cc, bcc: bcc, replyTo: replyTo, subject: subject, headers: headers, text: text, html: html, }; const shouldSendEmail = process.env.NODE_ENV === 'production' || getSetting('enableDevelopmentEmails', false); console.log(`//////// sending email${shouldSendEmail ? 
'' : ' (simulation)'}…`); // eslint-disable-line console.log('from: ' + from); // eslint-disable-line console.log('to: ' + to); // eslint-disable-line console.log('cc: ' + cc); // eslint-disable-line console.log('bcc: ' + bcc); // eslint-disable-line console.log('replyTo: ' + replyTo); // eslint-disable-line console.log('headers: ' + JSON.stringify(headers)); // eslint-disable-line if (shouldSendEmail) { try { Email.send(email); } catch (error) { console.log('// error while sending email:'); // eslint-disable-line console.log(error); // eslint-disable-line if (throwErrors) throw error; } } return email; }; VulcanEmail.build = async ({ emailName, variables, locale }) => { // execute email's GraphQL query const email = VulcanEmail.emails[emailName]; const result = email.query ? await runQuery(email.query, variables, { locale }) : { data: {} }; // if email has a data() function, merge its return value with results from the query const data = email.data ? { ...result.data, ...email.data({ data: result.data, variables, locale }) } : result.data; const subject = typeof email.subject === 'function' ? email.subject({ data, variables, locale }) : email.subject; data.__ = Strings[locale]; data.locale = locale; const html = VulcanEmail.buildTemplate(VulcanEmail.getTemplate(email.template)(data), data, locale); return { data, subject, html }; }; VulcanEmail.buildAndSend = async ({ to, cc, bcc, replyTo, emailName, variables, locale = getSetting('locale'), headers }) => { const email = await VulcanEmail.build({ to, emailName, variables, locale }); return VulcanEmail.send({ to, cc, bcc, replyTo, subject: email.subject, html: email.html, headers }); }; VulcanEmail.buildAndSendHTML = (to, subject, html) => VulcanEmail.send(to, subject, VulcanEmail.buildTemplate(html));
Add support for attachments to email
packages/vulcan-email/lib/server/email.js
Add support for attachments to email
<ide><path>ackages/vulcan-email/lib/server/email.js <ide> import htmlToText from 'html-to-text'; <ide> import Handlebars from 'handlebars'; <ide> import { Utils, getSetting, registerSetting, runQuery, Strings, getString } from 'meteor/vulcan:lib'; // import from vulcan:lib because vulcan:core is not loaded yet <add>import { Email } from 'meteor/email'; <ide> <ide> /* <ide> <ide> }); <ide> }; <ide> <del>VulcanEmail.send = (to, subject, html, text, throwErrors, cc, bcc, replyTo, headers) => { <add>VulcanEmail.send = (to, subject, html, text, throwErrors, cc, bcc, replyTo, headers, attachments) => { <ide> // TODO: limit who can send emails <ide> // TODO: fix this error: Error: getaddrinfo ENOTFOUND <ide> <ide> if (typeof to === 'object') { <ide> // eslint-disable-next-line no-redeclare <del> var { to, cc, bcc, replyTo, subject, html, text, throwErrors, headers } = to; <add> var { to, cc, bcc, replyTo, subject, html, text, throwErrors, headers, attachments } = to; <ide> } <ide> <ide> const from = getSetting('defaultEmail', '[email protected]'); <ide> <ide> const email = { <ide> from: from, <del> to: to, <del> cc: cc, <del> bcc: bcc, <del> replyTo: replyTo, <del> subject: subject, <del> headers: headers, <del> text: text, <del> html: html, <add> to, <add> cc, <add> bcc, <add> replyTo, <add> subject, <add> headers, <add> text, <add> html, <add> attachments, <ide> }; <ide> <ide> const shouldSendEmail = process.env.NODE_ENV === 'production' || getSetting('enableDevelopmentEmails', false); <ide> return { data, subject, html }; <ide> }; <ide> <del>VulcanEmail.buildAndSend = async ({ to, cc, bcc, replyTo, emailName, variables, locale = getSetting('locale'), headers }) => { <add>VulcanEmail.buildAndSend = async ({ to, cc, bcc, replyTo, emailName, variables, locale = getSetting('locale'), headers, attachments }) => { <ide> const email = await VulcanEmail.build({ to, emailName, variables, locale }); <del> return VulcanEmail.send({ to, cc, bcc, replyTo, subject: email.subject, 
html: email.html, headers }); <add> return VulcanEmail.send({ to, cc, bcc, replyTo, subject: email.subject, html: email.html, headers, attachments }); <ide> }; <ide> <ide> VulcanEmail.buildAndSendHTML = (to, subject, html) => VulcanEmail.send(to, subject, VulcanEmail.buildTemplate(html));
Java
apache-2.0
8ec41e53c436c9b1a475c2c7706407bcc645b6c2
0
fusepool/datalifecycle
package eu.fusepool.datalifecycle; import java.io.BufferedReader; import java.io.DataInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.net.HttpURLConnection; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLConnection; import java.security.AccessController; import java.security.AllPermission; import java.security.Permission; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.concurrent.locks.Lock; import javax.ws.rs.FormParam; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import org.apache.clerezza.jaxrs.utils.RedirectUtil; import org.apache.clerezza.jaxrs.utils.TrailingSlash; import org.apache.clerezza.rdf.core.MGraph; import org.apache.clerezza.rdf.core.NonLiteral; import org.apache.clerezza.rdf.core.Resource; import org.apache.clerezza.rdf.core.Triple; import org.apache.clerezza.rdf.core.TripleCollection; import org.apache.clerezza.rdf.core.UriRef; import org.apache.clerezza.rdf.core.access.EntityAlreadyExistsException; import org.apache.clerezza.rdf.core.access.LockableMGraph; import org.apache.clerezza.rdf.core.access.TcManager; import org.apache.clerezza.rdf.core.access.TcProvider; import org.apache.clerezza.rdf.core.access.security.TcAccessController; import org.apache.clerezza.rdf.core.access.security.TcPermission; import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl; import org.apache.clerezza.rdf.core.impl.SimpleMGraph; import org.apache.clerezza.rdf.core.impl.TripleImpl; import org.apache.clerezza.rdf.core.serializedform.Parser; import 
org.apache.clerezza.rdf.core.serializedform.Serializer; import org.apache.clerezza.rdf.core.serializedform.SupportedFormat; import org.apache.clerezza.rdf.ontologies.DCTERMS; import org.apache.clerezza.rdf.ontologies.OWL; import org.apache.clerezza.rdf.ontologies.RDF; import org.apache.clerezza.rdf.ontologies.RDFS; import org.apache.clerezza.rdf.utils.GraphNode; import org.apache.clerezza.rdf.utils.UnionMGraph; //import org.apache.clerezza.rdf.utils.smushing.SameAsSmusher; import org.apache.commons.io.IOUtils; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.Service; import org.apache.stanbol.commons.indexedgraph.IndexedMGraph; import org.apache.stanbol.commons.web.viewable.RdfViewable; import org.apache.stanbol.entityhub.servicesapi.site.SiteManager; import org.osgi.service.component.ComponentContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This is the controller class of the fusepool data life cycle component. 
The main functionalities provided are * 1) XML2RDF transformation * 2) Indexing and Information Extraction * 3) Reconciliation/Interlinking * 4) Smushing */ @Component @Property(name = "javax.ws.rs", boolValue = true) @Service(Object.class) @Path("sourcing") public class SourcingAdmin { /** * Using slf4j for normal logging */ private static final Logger log = LoggerFactory.getLogger(SourcingAdmin.class); @Reference private Parser parser; @Reference private Serializer serializer; /** * This service allows accessing and creating persistent triple collections */ @Reference private TcManager tcManager; @Reference private Interlinker interlinker; @Reference(target="(extractorType=patent)") private RdfDigester patentDigester; @Reference(target="(extractorType=pubmed)") private RdfDigester pubmedDigester; /** * This is the name of the graph in which we "log" the requests */ //private UriRef REQUEST_LOG_GRAPH_NAME = new UriRef("http://example.org/resource-resolver-log.graph"); /** * Name of the data life cycle graph. 
It is used as a register of other * graphs to manage their life cycle */ public static final UriRef DATA_LIFECYCLE_GRAPH_REFERENCE = new UriRef("urn:x-localinstance:/dlc/meta.graph"); /** * Register graph referencing graphs for life cycle monitoring; */ private final String CONTENT_GRAPH_NAME = "urn:x-localinstance:/content.graph"; private UriRef CONTENT_GRAPH_REF = new UriRef(CONTENT_GRAPH_NAME); // Operation codes private final int RDFIZE = 1; private final int ADD_TRIPLES_OPERATION = 2; private final int TEXT_EXTRACTION = 3; private final int RECONCILE_GRAPH_OPERATION = 4; private final int SMUSH_GRAPH_OPERATION = 5; private final int PUBLISH_DATA = 6; // RDFdigester private final String PUBMED_RDFDIGESTER = "pubmed"; private final String PATENT_RDFDIGESTER = "patent"; // RDFizer private final String PUBMED_RDFIZER = "pubmed"; private final String PATENT_RDFIZER = "patent"; // URI for rewriting from urn scheme to http private String baseURI = "http://fusepool.info"; /** * For each rdf triple collection uploaded 5 graphs are created. * 1) a source graph to store the rdf data * 2) an enhancements graph to store the text extracted for indexing and the * entities extracted from the text by NLP engines in the default enhancement chain * 3) a graph to store the result of the interlinking task * 4) a graph to store the smushed graph * 5) a graph to store the published graph i.e. 
the smushed graph in a coherent state with data in the content graph * The name convention for these graphs is * GRAPH_URN_PREFIX + timestamp + SUFFIX * where SUFFIX can be one of SOURCE_GRAPH_URN_SUFFIX, ENHANCE_GRAPH_URN_SUFFIX, * INTERLINK_GRAPH_URN_SUFFIX, SMUSH_GRAPH_URN_SUFFIX, PUBLISH_GRAPH_URN_SUFFIX */ // base graph uri public static final String GRAPH_URN_PREFIX = "urn:x-localinstance:/dlc/"; // graph suffix public static final String SOURCE_GRAPH_URN_SUFFIX = "/rdf.graph"; // enhancements graph suffix public static final String ENHANCE_GRAPH_URN_SUFFIX = "/enhance.graph"; // interlink graph suffix public static final String INTERLINK_GRAPH_URN_SUFFIX = "/interlink.graph"; // smushed graph suffix public static final String SMUSH_GRAPH_URN_SUFFIX = "/smush.graph"; // published graph suffix public static final String PUBLISH_GRAPH_URN_SUFFIX = "/publish.graph"; private UriRef pipeRef = null; @Activate protected void activate(ComponentContext context) { log.info("The Sourcing Admin Service is being activated"); // Creates the data lifecycle graph if it doesn't exists. This graph contains references to graphs and linksets try { createDlcGraph(); log.info("Created Data Lifecycle Register Graph. This graph will reference all graphs during their lifecycle"); } catch (EntityAlreadyExistsException ex) { log.info("Data Lifecycle Graph already exists."); } } @Deactivate protected void deactivate(ComponentContext context) { log.info("The Sourcing Admin Service is being deactivated"); } /** * This method return an RdfViewable, this is an RDF serviceUri with * associated presentational information. */ @GET public RdfViewable serviceEntry(@Context final UriInfo uriInfo, @QueryParam("url") final UriRef url, @HeaderParam("user-agent") String userAgent) throws Exception { //this maks sure we are nt invoked with a trailing slash which would affect //relative resolution of links (e.g. 
css) TrailingSlash.enforcePresent(uriInfo); final String resourcePath = uriInfo.getAbsolutePath().toString(); if (url != null) { String query = url.toString(); log.info(query); } //The URI at which this service was accessed, this will be the //central serviceUri in the response final UriRef serviceUri = new UriRef(resourcePath); //the in memory graph to which the triples for the response are added //final MGraph responseGraph = new IndexedMGraph(); //This GraphNode represents the service within our result graph //final GraphNode node = new GraphNode(serviceUri, responseGraph); //node.addProperty(Ontology.graph, new UriRef("http://fusepool.com/graphs/patentdata")); //node.addPropertyValue(RDFS.label, "A graph of patent data"); //What we return is the GraphNode we created with a template path final GraphNode node = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph()); return new RdfViewable("SourcingAdmin", node, SourcingAdmin.class); } private void setPipeRef(UriRef pipeRef) { this.pipeRef = pipeRef; } private LockableMGraph getSourceGraph() { return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX)); } private LockableMGraph getEnhanceGraph() { return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX)); } private LockableMGraph getInterlinkGraph() { return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + INTERLINK_GRAPH_URN_SUFFIX)); } private LockableMGraph getSmushGraph() { return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + SMUSH_GRAPH_URN_SUFFIX)); } private LockableMGraph getPublishGraph() { return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + PUBLISH_GRAPH_URN_SUFFIX)); } private LockableMGraph getContentGraph() { return tcManager.getMGraph( CONTENT_GRAPH_REF ); } /** * Creates a new empty graph * * @param uriInfo * @param graphName * @return * @throws Exception */ @POST @Path("create_pipe") @Produces("text/plain") public Response 
createPipeRequest(@Context final UriInfo uriInfo, @FormParam("pipe_label") final String pipeLabel) throws Exception { AccessController.checkPermission(new AllPermission()); //some simplicistic (and too restrictive) validation /* try { new URI(graphName); } catch (URISyntaxException e) { return Response.status(Response.Status.BAD_REQUEST) .entity("Graphname is not a valid URI: " + e.getReason()).build(); } if (!graphName.contains(":")) { return Response.status(Response.Status.BAD_REQUEST) .entity("Graphname is not a valid URI: No colon separating scheme").build(); } */ // Set up the pipe's graphs AccessController.checkPermission(new AllPermission()); if (createPipe(pipeLabel)) { return Response.status(Response.Status.BAD_REQUEST) .entity("Cannot create graph" + pipeLabel).build(); } else { return RedirectUtil.createSeeOtherResponse("./", uriInfo); } } /** * Creates a new pipe with tasks and product graphs and adds its uri and a label to the data life cycle graph. * A graph will contain the RDF data uploaded or sent by a transformation task * that have to be processed (text extraction, NLP processing, reconciliation, smushing). * The following graphs are created to store the results of the processing tasks * enhance.graph * interlink.graph * smush.graph * These graphs will be empty at the beginning. * * * @return */ private boolean createPipe(String pipeLabel) { boolean graphExists = false; try { String timeStamp = String.valueOf(System.currentTimeMillis()); // create a pipe UriRef pipeRef = new UriRef(GRAPH_URN_PREFIX + timeStamp); getDlcGraph().add(new TripleImpl(pipeRef, RDF.type, Ontology.Pipe)); if(pipeLabel != null & ! 
"".equals("")) { getDlcGraph().add(new TripleImpl(pipeRef, RDFS.label, new PlainLiteralImpl(pipeLabel))); } getDlcGraph().add(new TripleImpl(DATA_LIFECYCLE_GRAPH_REFERENCE, Ontology.pipe, pipeRef)); // create tasks //rdf task UriRef rdfTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/rdf"); getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, rdfTaskRef)); getDlcGraph().add(new TripleImpl(rdfTaskRef, RDF.type, Ontology.RdfTask)); // enhance task UriRef enhanceTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/enhance"); getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, enhanceTaskRef)); getDlcGraph().add(new TripleImpl(enhanceTaskRef, RDF.type, Ontology.EnhanceTask)); // interlink task UriRef interlinkTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/interlink"); getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, interlinkTaskRef)); getDlcGraph().add(new TripleImpl(interlinkTaskRef, RDF.type, Ontology.InterlinkTask)); // smush task UriRef smushTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/smush"); getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, smushTaskRef)); getDlcGraph().add(new TripleImpl(smushTaskRef, RDF.type, Ontology.SmushTask)); // publish task UriRef publishTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/publish"); getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, publishTaskRef)); getDlcGraph().add(new TripleImpl(smushTaskRef, RDF.type, Ontology.PublishTask)); // create the source graph for the dataset (result of transformation in RDF) String sourceGraphName = GRAPH_URN_PREFIX + timeStamp + SOURCE_GRAPH_URN_SUFFIX; UriRef sourceGraphRef = new UriRef(sourceGraphName); tcManager.createMGraph(sourceGraphRef); //GraphNode dlcGraphNode = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph()); //dlcGraphNode.addProperty(DCTERMS.hasPart, graphRef); getDlcGraph().add(new TripleImpl(rdfTaskRef, Ontology.deliverable, sourceGraphRef)); getDlcGraph().add(new TripleImpl(sourceGraphRef, 
RDF.type, Ontology.voidDataset)); // create the graph to store text and enhancements String enhancementsGraphName = GRAPH_URN_PREFIX + timeStamp + ENHANCE_GRAPH_URN_SUFFIX; UriRef enhancementsGraphRef = new UriRef(enhancementsGraphName); tcManager.createMGraph(enhancementsGraphRef); getDlcGraph().add(new TripleImpl(enhanceTaskRef, Ontology.deliverable, enhancementsGraphRef)); getDlcGraph().add(new TripleImpl(enhancementsGraphRef, RDFS.label, new PlainLiteralImpl("Contains a sioc:content property with text " + "for indexing and references to entities found in the text by NLP enhancement engines"))); // create the graph to store the result of the interlinking task String interlinkGraphName = GRAPH_URN_PREFIX + timeStamp + INTERLINK_GRAPH_URN_SUFFIX; UriRef interlinkGraphRef = new UriRef(interlinkGraphName); tcManager.createMGraph(interlinkGraphRef); getDlcGraph().add(new TripleImpl(interlinkTaskRef, Ontology.deliverable, interlinkGraphRef)); getDlcGraph().add(new TripleImpl(interlinkGraphRef, RDF.type, Ontology.voidLinkset)); getDlcGraph().add(new TripleImpl(interlinkGraphRef,Ontology.voidSubjectsTarget, sourceGraphRef)); getDlcGraph().add(new TripleImpl(interlinkGraphRef,Ontology.voidLinkPredicate, OWL.sameAs)); getDlcGraph().add(new TripleImpl(interlinkGraphRef, RDFS.label, new PlainLiteralImpl("Contains equivalence links"))); // create the graph to store the result of the smushing task String smushGraphName = GRAPH_URN_PREFIX + timeStamp + SMUSH_GRAPH_URN_SUFFIX; UriRef smushGraphRef = new UriRef(smushGraphName); tcManager.createMGraph(smushGraphRef); getDlcGraph().add(new TripleImpl(smushTaskRef, Ontology.deliverable, smushGraphRef)); // create the graph to store the result of the publishing task String publishGraphName = GRAPH_URN_PREFIX + timeStamp + PUBLISH_GRAPH_URN_SUFFIX; UriRef publishGraphRef = new UriRef(publishGraphName); tcManager.createMGraph(publishGraphRef); getDlcGraph().add(new TripleImpl(publishTaskRef, Ontology.deliverable, publishGraphRef)); 
setPipeRef(pipeRef); } catch (UnsupportedOperationException uoe) { log.error("Error while creating a graph"); } return graphExists; } /** * Applies one of the following operations to a graph: - add triples * (operation code: 1) - remove all triples (operation code: 2) - delete * graph (operation code: 3) - reconcile (operation code: 4) - smush * (operation code: 5) */ @POST @Path("operate") @Produces("text/plain") public String operateOnGraphCommand(@Context final UriInfo uriInfo, @FormParam("pipe") final UriRef pipeRef, @FormParam("operation_code") final int operationCode, @FormParam("data_url") final URL dataUrl, @FormParam("rdfizer") final String rdfizer, @FormParam("rdfdigester") final String rdfdigester, @HeaderParam("Content-Type") String mediaType) throws Exception { AccessController.checkPermission(new AllPermission()); // validate arguments and handle all the connection exceptions return operateOnPipe(pipeRef, operationCode, dataUrl, rdfizer, rdfdigester, mediaType); } private String operateOnPipe(UriRef pipeRef, int operationCode, URL dataUrl, String rdfizer, String rdfdigester, String mediaType) throws Exception { AccessController.checkPermission(new AllPermission()); String message = ""; if (pipeExists(pipeRef)) { setPipeRef(pipeRef); switch (operationCode) { case ADD_TRIPLES_OPERATION: message = addTriples(pipeRef, dataUrl, mediaType); break; case RECONCILE_GRAPH_OPERATION: message = reconcile(pipeRef, null); break; case SMUSH_GRAPH_OPERATION: message = smush(pipeRef); break; case TEXT_EXTRACTION: message = extractText(pipeRef, rdfdigester); break; case RDFIZE: message = transformXml(dataUrl, rdfizer); break; case PUBLISH_DATA: message = publishData(pipeRef); break; } } else { message = "The pipe does not exist."; } return message; } private String transformXml(URL dataUrl, String rdfizer) { String message = ""; if(PUBMED_RDFIZER.equals(rdfizer)){ message = transformPubMedXml(dataUrl); } else if (PATENT_RDFIZER.equals(rdfizer)) { message = 
transformPatentXml(dataUrl); } return message; } private String transformPubMedXml(URL dataUrl) { String message = "PubMed XML->RDF transformation to be implemented."; return message; } private String transformPatentXml(URL dataUrl) { String message = "Marec Patent XML->RDF transformation to be implemented"; return message; } /** * Load RDF data into an existing graph from a URL (schemes: "file://" or "http://"). * The arguments to be passed are: * 1) graph in which the RDF data must be stored * 2) url of the dataset * After the upload the input graph is sent to a digester to extract text for indexing and * adding entities found by NLP components (in the default chain) as subject */ private String addTriples(UriRef pipeRef, URL dataUrl, String mediaType) throws Exception { AccessController.checkPermission(new AllPermission()); String message = ""; // look up the pipe's rdf graph to which add the data UriRef graphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); // add the triples of the temporary graph into the graph selected by the user if (isValidUrl(dataUrl)) { MGraph updatedGraph = addTriplesCommand(graphRef, dataUrl, mediaType); message = "Added " + updatedGraph.size() + " triples to " + graphRef.getUnicodeString() + "\n"; } else { message = "The URL of the data is not a valid one.\n"; } log.info(message); return message; } private MGraph addTriplesCommand(UriRef graphRef, URL dataUrl, String mediaType) throws Exception { AccessController.checkPermission(new AllPermission()); MGraph graph = null; URLConnection connection = dataUrl.openConnection(); connection.addRequestProperty("Accept", "application/rdf+xml; q=.9, text/turte;q=1"); // create a temporary graph to store the data SimpleMGraph tempGraph = new SimpleMGraph(); InputStream data = connection.getInputStream(); if (data != null) { if (mediaType.equals("application/x-www-form-urlencoded")) { mediaType = getContentTypeFromUrl(dataUrl); } parser.parse(tempGraph, data, mediaType); // 
add the triples of the temporary graph into the graph selected by the user if (graphExists(graphRef)) { graph = tcManager.getMGraph(graphRef); graph.addAll(tempGraph); } } return graph; } /** * Removes all the triples from the graph * */ private String emptyGraph(UriRef graphRef) { // removes all the triples from the graph MGraph graph = tcManager.getMGraph(graphRef); graph.clear(); return "Graph " + graphRef.getUnicodeString() + " is now empty."; } /** * Deletes a graph, the reference to it in the DLC graph and deletes all the * derived graphs linked to it by the dcterms:source property. * * @param graphRef * @return */ private String deleteGraph(UriRef graphRef) { tcManager.deleteTripleCollection(graphRef); GraphNode dlcGraphNode = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph()); //remove the relation with the data lifecycle graph and all the information (triples) about the deleted graph (label). dlcGraphNode.deleteProperty(DCTERMS.hasPart, graphRef); return "Graph " + graphRef.getUnicodeString() + " has been deleted."; } /** * Reconciles a graph with a target graph. The result of the reconciliation is an equivalence set * stored in the interlink graph of the pipe. The graph used as source is the source rdf graph * @param sourceGraphRef the URI of the referenced graph, i.e. the graph for which the reconciliation should be performed. * @param targetGraphRef the URI of the target graph. If null the target graph is the same as the source graph. 
* @return * @throws Exception */ private String reconcile(UriRef pipeRef, UriRef targetGraphRef) throws Exception { String message = ""; UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); if (graphExists(sourceGraphRef)) { //if target graph is not provided the reconciliation will be done against the source graph itself if(targetGraphRef == null){ targetGraphRef = sourceGraphRef; } // reconcile the source graph with the target graph UriRef interlinkGraphRef = reconcileCommand(pipeRef, sourceGraphRef, targetGraphRef); TripleCollection interlinkGraph = tcManager.getMGraph(interlinkGraphRef); if (interlinkGraph.size() > 0) { message = "A reconciliation task has been done between " + sourceGraphRef.getUnicodeString() + " and " + targetGraphRef.getUnicodeString() + ".\n" + interlinkGraph.size() + " owl:sameAs statements have been created and stored in " + interlinkGraphRef.getUnicodeString(); } else { message = "A reconciliation task has been done between " + sourceGraphRef.getUnicodeString() + " and " + targetGraphRef.getUnicodeString() + ".\n" + "No equivalent entities have been found."; } } else { message = "The source graph does not exist."; } log.info(message); return message; } private UriRef reconcileCommand(UriRef pipeRef, UriRef sourceGraphRef, UriRef targetGraphRef) throws Exception { TripleCollection owlSameAs = null; // get the pipe's interlink graph to store the result of the reconciliation task UriRef interlinkGraphRef = new UriRef(pipeRef.getUnicodeString() + INTERLINK_GRAPH_URN_SUFFIX); if (graphExists(sourceGraphRef)) { TripleCollection sourceGrah = tcManager.getMGraph(sourceGraphRef); // reconcile the source graph with the target graph owlSameAs = interlinker.interlink(sourceGrah, targetGraphRef); if (owlSameAs.size() > 0) { LockableMGraph sameAsGraph = tcManager.getMGraph(interlinkGraphRef); sameAsGraph.addAll(owlSameAs); // log the result (the equivalence set should be serialized and stored) Lock l = 
sameAsGraph.getLock().readLock(); l.lock(); try { Iterator<Triple> isameas = owlSameAs.iterator(); while (isameas.hasNext()) { Triple t = isameas.next(); NonLiteral s = t.getSubject(); UriRef p = t.getPredicate(); Resource o = t.getObject(); log.info(s.toString() + p.getUnicodeString() + o.toString() + " .\n"); } } finally { l.unlock(); } // add a reference of the equivalence set to the source graph getDlcGraph().add(new TripleImpl(interlinkGraphRef, Ontology.voidSubjectsTarget, sourceGraphRef)); // add a reference of the equivalence set to the target graph getDlcGraph().add(new TripleImpl(interlinkGraphRef, Ontology.voidObjectsTarget, targetGraphRef)); } } return interlinkGraphRef; } /** * Smush the enhanced graph using the interlinking graph. More precisely collates data coming * from different equivalent resources in a single one chosen among them. The triples in the * source graph are copied in the smush graph that is then smushed using the interlinking * graph. * @param graphToSmushRef * @return */ private String smush(UriRef pipeRef) { String message = "Smushing task.\n"; // As the smush.graph must be published it has to contain the sioc.content property and all the subject // extracted during the extraction phase that are stored in the enhance.graph with all the triples from // the rdf UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX); if(getInterlinkGraph().size() > 0) { LockableMGraph smushedGraph = smushCommand(enhanceGraphRef, getInterlinkGraph()); message = "Smushing of " + enhanceGraphRef.getUnicodeString() + " with equivalence set completed. 
" + "Smushed graph size = " + smushedGraph.size() + "\n"; } else { message = "No equivalence links available for " + enhanceGraphRef.getUnicodeString() + "\n" + "Start a reconciliation task before smushing."; } return message; } private LockableMGraph smushCommand(UriRef enhanceGraphRef, LockableMGraph equivalenceSet) { if(getSmushGraph().size() > 0) { getSmushGraph().clear(); } // add triples from source graph to smush graph getSmushGraph().addAll(getEnhanceGraph()); SimpleMGraph tempEquivalenceSet = new SimpleMGraph(); tempEquivalenceSet.addAll(equivalenceSet); // smush and canonicalize uris IriSmusher smusher = new CanonicalizingSameAsSmusher(); smusher.smush(getSmushGraph(), tempEquivalenceSet, true); //serializer.serialize(System.out, getSmushGraph(), SupportedFormat.RDF_XML); return getSmushGraph(); } private String extractText(UriRef pipeRef, String rdfdigester) { String message = ""; if(PATENT_RDFDIGESTER.equals(rdfdigester)){ message = extractTextFromPatent(pipeRef); } else if (PUBMED_RDFDIGESTER.equals(rdfdigester)) { message = extractTextFromPubMed(pipeRef); } return message; } /** * Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content * property with that text in the enhance graph. The text is used by the ECS for indexing. The keywords * will be related to a patent (resource of type pmo:PatentPublication) so that the patent will be retrieved anytime * the keyword is searched. The extractor also takes all the entities extracted by NLP enhancement engines. These entities * and a rdfs:label if available, are added to the patent resource using dcterms:subject property. 
* @param pipeRef * @return */ private String extractTextFromPatent(UriRef pipeRef){ String message = "Extracts text from patents and adds a sioc:content property.\n"; UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX); MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef); UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef); SimpleMGraph tempGraph = new SimpleMGraph(); Lock rl = sourceGraph.getLock().readLock(); rl.lock(); try { tempGraph.addAll(sourceGraph); } finally { rl.unlock(); } enhanceGraph.addAll(tempGraph); patentDigester.extractText(enhanceGraph); message += "Extracted text from " + enhanceGraphRef.getUnicodeString(); return message; } /** * Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content * property with that text in the enhance graph. The text is used by the ECS for indexing. The keywords * will be related to a PubMed article (resource of type bibo:Document) so that the article will be retrieved any time * the keywords are searched. The extractor also takes all the entities extracted by NLP enhancement engines. These entities * and a rdfs:label if available, are added to the article resource using dcterms:subject property. 
* @param pipeRef * @return */ private String extractTextFromPubMed(UriRef pipeRef){ String message = "Extract text from PubMed articles and adding a sioc:content property.\n"; UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX); MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef); UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef); SimpleMGraph tempGraph = new SimpleMGraph(); Lock rl = sourceGraph.getLock().readLock(); rl.lock(); try { tempGraph.addAll(sourceGraph); } finally { rl.unlock(); } enhanceGraph.addAll(tempGraph); pubmedDigester.extractText(enhanceGraph); message += "Extracted text from " + enhanceGraphRef.getUnicodeString(); return message; } /** * Moves data from smush.grah to content.graph. The triples (facts) in the two graphs must be coherent, i.e. the same. * Before publishing the current smushed data must be compared with the last published data. New triples * in the smushed graph not in the published graph must be added while triples in the published graph absent * in the smushed graph must be removed. 
The algorithm is as follows * 1) find triples in smush.graph not in publish.graph (new triples) * 2) find triples in publish.graph not in smush.graph (old triples) * 3) add new triples to content.graph * 4) remove old triples from content.graph * 5) delete all triples in publish.graph * 6) copy triples from smush.graph to publish.graph */ private String publishData(UriRef pipeRef) { String message = ""; // add these triples to the content.graph SimpleMGraph triplesToAdd = new SimpleMGraph(); // remove these triples from the content.graph SimpleMGraph triplesToRemove = new SimpleMGraph(); // triples to add to the content.graph Lock ls = getSmushGraph().getLock().readLock(); ls.lock(); try { Iterator<Triple> ismush = getSmushGraph().iterator(); while (ismush.hasNext()) { Triple smushTriple = ismush.next(); if( ! getPublishGraph().contains(smushTriple) ) { triplesToAdd.add(smushTriple); } } } finally { ls.unlock(); } // triples to remove from the content.graph Lock lp = getPublishGraph().getLock().readLock(); lp.lock(); try { Iterator<Triple> ipublish = getPublishGraph().iterator(); while (ipublish.hasNext()) { Triple publishTriple = ipublish.next(); if( ! 
getSmushGraph().contains(publishTriple) ) { triplesToRemove.add(publishTriple); } } } finally { lp.unlock(); } getContentGraph().removeAll(triplesToRemove); getContentGraph().addAll(triplesToAdd); getPublishGraph().clear(); getPublishGraph().addAll(getSmushGraph()); message = "Copied " + getPublishGraph().size() + " triples from " + pipeRef.getUnicodeString() + " to content-graph"; return message; } /** * Validate URL * A valid URL must start with file:/// or http:// */ private boolean isValidUrl(URL url) { boolean isValidUrl = false; if(url != null) { if( url.toString().startsWith("http://") || url.toString().startsWith("file:/")) { isValidUrl = true; } } return isValidUrl; } /** * Extracts the content type from the file extension * * @param url * @return */ private String getContentTypeFromUrl(URL url) { String contentType = null; if (url.getFile().endsWith("ttl")) { contentType = "text/turtle"; } else if (url.getFile().endsWith("nt")) { contentType = "text/turtle"; } else { contentType = "application/rdf+xml"; } return contentType; } /** * Returns the data life cycle graph containing all the monitored graphs. It * creates it if doesn't exit yet. * * @return */ private LockableMGraph getDlcGraph() { return tcManager.getMGraph(DATA_LIFECYCLE_GRAPH_REFERENCE); } /** * Checks if a graph exists and returns a boolean value. * * @param graph_ref * @return */ private boolean graphExists(UriRef graph_ref) { Set<UriRef> graphs = tcManager.listMGraphs(); Iterator<UriRef> igraphs = graphs.iterator(); while (igraphs.hasNext()) { UriRef graphRef = igraphs.next(); if (graph_ref.toString().equals(graphRef.toString())) { return true; } } return false; } /** * Checks whether a pipe exists */ private boolean pipeExists(UriRef pipeRef) { boolean result = false; if (pipeRef != null) { GraphNode pipeNode = new GraphNode(pipeRef, getDlcGraph()); if(pipeNode != null) { result = true; } } return result; } /** * Creates the data lifecycle graph. 
Must be called at the bundle * activation if the graph doesn't exists yet. */ private MGraph createDlcGraph() { MGraph dlcGraph = tcManager.createMGraph(DATA_LIFECYCLE_GRAPH_REFERENCE); TcAccessController tca = new TcAccessController(tcManager); tca.setRequiredReadPermissions(DATA_LIFECYCLE_GRAPH_REFERENCE, Collections.singleton((Permission) new TcPermission( "urn:x-localinstance:/content.graph", "read"))); return dlcGraph; } /** * Generates a new http URI that will be used as the canonical one in place * of a set of equivalent non-http URIs. An owl:sameAs statement is added to * the interlinking graph stating that the canonical http URI is equivalent * to one of the non-http URI in the set of equivalent URIs. * @param uriRefs * @return */ private UriRef generateNewHttpUri(Set<UriRef> uriRefs) { UriRef bestNonHttp = chooseBest(uriRefs); String nonHttpString = bestNonHttp.getUnicodeString(); if (!nonHttpString.startsWith("urn:x-temp:")) { throw new RuntimeException("Sorry we current assume all non-http " + "URIs to be canonicalized to be urn:x-temp"); } String httpUriString = nonHttpString.replaceFirst("urn:x-temp:", baseURI); UriRef httpUriRef = new UriRef(httpUriString); // add an owl:sameAs statement in the interlinking graph getInterlinkGraph().add(new TripleImpl(bestNonHttp, OWL.sameAs, httpUriRef)); return httpUriRef; } private UriRef chooseBest(Set<UriRef> httpUri) { Iterator<UriRef> iter = httpUri.iterator(); UriRef best = iter.next(); while (iter.hasNext()) { UriRef next = iter.next(); if (next.getUnicodeString().compareTo(best.getUnicodeString()) < 0) { best = next; } } return best; } /** * An inline class to canonicalize URI from urn to http scheme. A http URI is chosen * among the equivalent ones.if no one http URI is available a new one is created. 
*/ private class CanonicalizingSameAsSmusher extends IriSmusher { @Override protected UriRef getPreferedIri(Set<UriRef> uriRefs) { Set<UriRef> httpUri = new HashSet<UriRef>(); for (UriRef uriRef : uriRefs) { if (uriRef.getUnicodeString().startsWith("http")) { httpUri.add(uriRef); } } if (httpUri.size() == 1) { return httpUri.iterator().next(); } // There is no http URI in the set of equivalent resource. The entity was unknown. // A new representation of the entity with http URI will be created. if (httpUri.size() == 0) { return generateNewHttpUri(uriRefs); } if (httpUri.size() > 1) { return chooseBest(httpUri); } throw new Error("Negative size set."); } } }
src/main/java/eu/fusepool/datalifecycle/SourcingAdmin.java
package eu.fusepool.datalifecycle; import java.io.BufferedReader; import java.io.DataInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.net.HttpURLConnection; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLConnection; import java.security.AccessController; import java.security.AllPermission; import java.security.Permission; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.concurrent.locks.Lock; import javax.ws.rs.FormParam; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import org.apache.clerezza.jaxrs.utils.RedirectUtil; import org.apache.clerezza.jaxrs.utils.TrailingSlash; import org.apache.clerezza.rdf.core.MGraph; import org.apache.clerezza.rdf.core.NonLiteral; import org.apache.clerezza.rdf.core.Resource; import org.apache.clerezza.rdf.core.Triple; import org.apache.clerezza.rdf.core.TripleCollection; import org.apache.clerezza.rdf.core.UriRef; import org.apache.clerezza.rdf.core.access.EntityAlreadyExistsException; import org.apache.clerezza.rdf.core.access.LockableMGraph; import org.apache.clerezza.rdf.core.access.TcManager; import org.apache.clerezza.rdf.core.access.security.TcAccessController; import org.apache.clerezza.rdf.core.access.security.TcPermission; import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl; import org.apache.clerezza.rdf.core.impl.SimpleMGraph; import org.apache.clerezza.rdf.core.impl.TripleImpl; import org.apache.clerezza.rdf.core.serializedform.Parser; import org.apache.clerezza.rdf.core.serializedform.Serializer; import 
org.apache.clerezza.rdf.core.serializedform.SupportedFormat; import org.apache.clerezza.rdf.ontologies.DCTERMS; import org.apache.clerezza.rdf.ontologies.OWL; import org.apache.clerezza.rdf.ontologies.RDF; import org.apache.clerezza.rdf.ontologies.RDFS; import org.apache.clerezza.rdf.utils.GraphNode; import org.apache.clerezza.rdf.utils.UnionMGraph; //import org.apache.clerezza.rdf.utils.smushing.SameAsSmusher; import org.apache.commons.io.IOUtils; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.Service; import org.apache.stanbol.commons.indexedgraph.IndexedMGraph; import org.apache.stanbol.commons.web.viewable.RdfViewable; import org.apache.stanbol.entityhub.servicesapi.site.SiteManager; import org.osgi.service.component.ComponentContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This is the controller class of the fusepool data life cycle component. 
The main functionalities provided are * 1) XML2RDF transformation * 2) Indexing and Information Extraction * 3) Reconciliation/Interlinking * 4) Smushing */ @Component @Property(name = "javax.ws.rs", boolValue = true) @Service(Object.class) @Path("sourcing") public class SourcingAdmin { /** * Using slf4j for normal logging */ private static final Logger log = LoggerFactory.getLogger(SourcingAdmin.class); @Reference private Parser parser; @Reference private Serializer serializer; /** * This service allows accessing and creating persistent triple collections */ @Reference private TcManager tcManager; @Reference private Interlinker interlinker; @Reference(target="(extractorType=patent)") private RdfDigester patentDigester; @Reference(target="(extractorType=pubmed)") private RdfDigester pubmedDigester; /** * This is the name of the graph in which we "log" the requests */ //private UriRef REQUEST_LOG_GRAPH_NAME = new UriRef("http://example.org/resource-resolver-log.graph"); /** * Name of the data life cycle graph. 
It is used as a register of other * graphs to manage their life cycle */ public static final UriRef DATA_LIFECYCLE_GRAPH_REFERENCE = new UriRef("urn:x-localinstance:/dlc/meta.graph"); /** * Register graph referencing graphs for life cycle monitoring; */ private final String CONTENT_GRAPH_NAME = "urn:x-localinstance:/content.graph"; private UriRef CONTENT_GRAPH_REF = new UriRef(CONTENT_GRAPH_NAME); // Operation codes private final int RDFIZE = 1; private final int ADD_TRIPLES_OPERATION = 2; private final int TEXT_EXTRACTION = 3; private final int RECONCILE_GRAPH_OPERATION = 4; private final int SMUSH_GRAPH_OPERATION = 5; private final int PUBLISH_DATA = 6; // RDFdigester private final String PUBMED_RDFDIGESTER = "pubmed"; private final String PATENT_RDFDIGESTER = "patent"; // RDFizer private final String PUBMED_RDFIZER = "pubmed"; private final String PATENT_RDFIZER = "patent"; //TODO make this a component parameter // URI for rewriting from urn scheme to http private String baseURI = "http://fusepool.info"; /** * For each rdf triple collection uploaded 5 graphs are created. * 1) a source graph to store the rdf data * 2) an enhancements graph to store the text extracted for indexing and the * entities extracted from the text by NLP engines in the default enhancement chain * 3) a graph to store the result of the interlinking task * 4) a graph to store the smushed graph * 5) a graph to store the published graph i.e. 
the smushed graph in a coerent state with data in the content graph * The name convention for these graphs is * GRAPH_URN_PREFIX + timestamp + SUFFIX * where SUFFIX can be one of BUFFER_GRAPH_URN_SUFFIX, ENHANCE_GRAPH_URN_SUFFIX, * INTERLINK_GRAPH_URN_SUFFIX, SMUSH_GRAPH_URN_SUFFIX */ // base graph uri public static final String GRAPH_URN_PREFIX = "urn:x-localinstance:/dlc/"; // graph suffix public static final String SOURCE_GRAPH_URN_SUFFIX = "/rdf.graph"; // enhancements graph suffix public static final String ENHANCE_GRAPH_URN_SUFFIX = "/enhance.graph"; // interlink graph suffix public static final String INTERLINK_GRAPH_URN_SUFFIX = "/interlink.graph"; // smushed graph suffix public static final String SMUSH_GRAPH_URN_SUFFIX = "/smush.graph"; // published graph suffix public static final String PUBLISH_GRAPH_URN_SUFFIX = "/publish.graph"; private UriRef pipeRef = null; @Activate protected void activate(ComponentContext context) { log.info("The Data Life Cycle service is being activated"); try { if (interlinker != null) { log.info("Silk interlinker service available"); } else { log.info("Silk interlinker service NOT available"); } // Creates the data lifecycle graph if it doesn't exists. This graph contains references to graphs and linksets try { createDlcGraph(); log.info("Created Data Lifecycle Register Graph. This graph will reference all graphs during their lifecycle"); } catch (EntityAlreadyExistsException ex) { log.info("Data Lifecycle Graph already exists."); } } catch (EntityAlreadyExistsException ex) { log.debug("The graph for the request log already exists"); } } @Deactivate protected void deactivate(ComponentContext context) { log.info("The Data Life Cycle service is being deactivated"); } /** * This method return an RdfViewable, this is an RDF serviceUri with * associated presentational information. 
*/ @GET public RdfViewable serviceEntry(@Context final UriInfo uriInfo, @QueryParam("url") final UriRef url, @HeaderParam("user-agent") String userAgent) throws Exception { //this maks sure we are nt invoked with a trailing slash which would affect //relative resolution of links (e.g. css) TrailingSlash.enforcePresent(uriInfo); final String resourcePath = uriInfo.getAbsolutePath().toString(); if (url != null) { String query = url.toString(); log.info(query); } //The URI at which this service was accessed, this will be the //central serviceUri in the response final UriRef serviceUri = new UriRef(resourcePath); //the in memory graph to which the triples for the response are added //final MGraph responseGraph = new IndexedMGraph(); //This GraphNode represents the service within our result graph //final GraphNode node = new GraphNode(serviceUri, responseGraph); //node.addProperty(Ontology.graph, new UriRef("http://fusepool.com/graphs/patentdata")); //node.addPropertyValue(RDFS.label, "A graph of patent data"); //What we return is the GraphNode we created with a template path final GraphNode node = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph()); return new RdfViewable("SourcingAdmin", node, SourcingAdmin.class); } private void setPipeRef(UriRef pipeRef) { this.pipeRef = pipeRef; } private LockableMGraph getSourceGraph() { return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX)); } private LockableMGraph getEnhanceGraph() { return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX)); } private LockableMGraph getInterlinkGraph() { return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + INTERLINK_GRAPH_URN_SUFFIX)); } private LockableMGraph getSmushGraph() { return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + SMUSH_GRAPH_URN_SUFFIX)); } private LockableMGraph getPublishGraph() { return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + 
PUBLISH_GRAPH_URN_SUFFIX)); } private LockableMGraph getContentGraph() { return tcManager.getMGraph( CONTENT_GRAPH_REF ); } /** * Creates a new empty graph * * @param uriInfo * @param graphName * @return * @throws Exception */ @POST @Path("create_pipe") @Produces("text/plain") public Response createPipeRequest(@Context final UriInfo uriInfo, @FormParam("pipe_label") final String pipeLabel) throws Exception { AccessController.checkPermission(new AllPermission()); //some simplicistic (and too restrictive) validation /* try { new URI(graphName); } catch (URISyntaxException e) { return Response.status(Response.Status.BAD_REQUEST) .entity("Graphname is not a valid URI: " + e.getReason()).build(); } if (!graphName.contains(":")) { return Response.status(Response.Status.BAD_REQUEST) .entity("Graphname is not a valid URI: No colon separating scheme").build(); } */ // Set up the pipe's graphs AccessController.checkPermission(new AllPermission()); if (createPipe(pipeLabel)) { return Response.status(Response.Status.BAD_REQUEST) .entity("Cannot create graph" + pipeLabel).build(); } else { return RedirectUtil.createSeeOtherResponse("./", uriInfo); } } /** * Creates a new pipe with tasks and product graphs and adds its uri and a label to the data life cycle graph. * A graph will contain the RDF data uploaded or sent by a transformation task * that have to be processed (text extraction, NLP processing, reconciliation, smushing). * The following graphs are created to store the results of the processing tasks * enhance.graph * interlink.graph * smush.graph * These graphs will be empty at the beginning. 
* * * @return */ private boolean createPipe(String pipeLabel) { boolean graphExists = false; String label = ""; if (pipeLabel != null) { label = pipeLabel; } try { String timeStamp = String.valueOf(System.currentTimeMillis()); // create a pipe UriRef pipeRef = new UriRef(GRAPH_URN_PREFIX + timeStamp); getDlcGraph().add(new TripleImpl(pipeRef, RDF.type, Ontology.Pipe)); getDlcGraph().add(new TripleImpl(DATA_LIFECYCLE_GRAPH_REFERENCE, Ontology.pipe, pipeRef)); // create tasks //rdf task UriRef rdfTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/rdf"); getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, rdfTaskRef)); getDlcGraph().add(new TripleImpl(rdfTaskRef, RDF.type, Ontology.RdfTask)); // enhance task UriRef enhanceTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/enhance"); getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, enhanceTaskRef)); getDlcGraph().add(new TripleImpl(enhanceTaskRef, RDF.type, Ontology.EnhanceTask)); // interlink task UriRef interlinkTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/interlink"); getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, interlinkTaskRef)); getDlcGraph().add(new TripleImpl(interlinkTaskRef, RDF.type, Ontology.InterlinkTask)); // smush task UriRef smushTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/smush"); getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, smushTaskRef)); getDlcGraph().add(new TripleImpl(smushTaskRef, RDF.type, Ontology.SmushTask)); // publish task UriRef publishTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/publish"); getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, publishTaskRef)); getDlcGraph().add(new TripleImpl(smushTaskRef, RDF.type, Ontology.PublishTask)); // create the source graph for the dataset (result of transformation in RDF) String sourceGraphName = GRAPH_URN_PREFIX + timeStamp + SOURCE_GRAPH_URN_SUFFIX; UriRef sourceGraphRef = new UriRef(sourceGraphName); tcManager.createMGraph(sourceGraphRef); //GraphNode 
dlcGraphNode = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph()); //dlcGraphNode.addProperty(DCTERMS.hasPart, graphRef); getDlcGraph().add(new TripleImpl(rdfTaskRef, Ontology.deliverable, sourceGraphRef)); getDlcGraph().add(new TripleImpl(sourceGraphRef, RDF.type, Ontology.voidDataset)); getDlcGraph().add(new TripleImpl(sourceGraphRef, RDFS.label, new PlainLiteralImpl(label))); // create the graph to store text and enhancements String enhancementsGraphName = GRAPH_URN_PREFIX + timeStamp + ENHANCE_GRAPH_URN_SUFFIX; UriRef enhancementsGraphRef = new UriRef(enhancementsGraphName); tcManager.createMGraph(enhancementsGraphRef); getDlcGraph().add(new TripleImpl(enhanceTaskRef, Ontology.deliverable, enhancementsGraphRef)); getDlcGraph().add(new TripleImpl(enhancementsGraphRef, RDFS.label, new PlainLiteralImpl("Contains a sioc:content property with text " + "for indexing and references to entities found in the text by NLP enhancement engines"))); // create the graph to store the result of the interlinking task String interlinkGraphName = GRAPH_URN_PREFIX + timeStamp + INTERLINK_GRAPH_URN_SUFFIX; UriRef interlinkGraphRef = new UriRef(interlinkGraphName); tcManager.createMGraph(interlinkGraphRef); getDlcGraph().add(new TripleImpl(interlinkTaskRef, Ontology.deliverable, interlinkGraphRef)); getDlcGraph().add(new TripleImpl(interlinkGraphRef, RDF.type, Ontology.voidLinkset)); getDlcGraph().add(new TripleImpl(interlinkGraphRef,Ontology.voidSubjectsTarget, sourceGraphRef)); getDlcGraph().add(new TripleImpl(interlinkGraphRef,Ontology.voidLinkPredicate, OWL.sameAs)); getDlcGraph().add(new TripleImpl(interlinkGraphRef, RDFS.label, new PlainLiteralImpl("Contains equivalence links"))); // create the graph to store the result of the smushing task String smushGraphName = GRAPH_URN_PREFIX + timeStamp + SMUSH_GRAPH_URN_SUFFIX; UriRef smushGraphRef = new UriRef(smushGraphName); tcManager.createMGraph(smushGraphRef); getDlcGraph().add(new TripleImpl(smushTaskRef, 
Ontology.deliverable, smushGraphRef)); // create the graph to store the result of the publishing task String publishGraphName = GRAPH_URN_PREFIX + timeStamp + PUBLISH_GRAPH_URN_SUFFIX; UriRef publishGraphRef = new UriRef(publishGraphName); tcManager.createMGraph(publishGraphRef); getDlcGraph().add(new TripleImpl(publishTaskRef, Ontology.deliverable, publishGraphRef)); setPipeRef(pipeRef); } catch (UnsupportedOperationException uoe) { log.error("Error while creating a graph"); } return graphExists; } /** * Applies one of the following operations to a graph: - add triples * (operation code: 1) - remove all triples (operation code: 2) - delete * graph (operation code: 3) - reconcile (operation code: 4) - smush * (operation code: 5) */ @POST @Path("operate") @Produces("text/plain") public String operateOnGraphCommand(@Context final UriInfo uriInfo, @FormParam("pipe") final UriRef pipeRef, @FormParam("operation_code") final int operationCode, @FormParam("data_url") final URL dataUrl, @FormParam("rdfizer") final String rdfizer, @FormParam("rdfdigester") final String rdfdigester, @HeaderParam("Content-Type") String mediaType) throws Exception { AccessController.checkPermission(new AllPermission()); // validate arguments and handle all the connection exceptions return operateOnPipe(pipeRef, operationCode, dataUrl, rdfizer, rdfdigester, mediaType); } private String operateOnPipe(UriRef pipeRef, int operationCode, URL dataUrl, String rdfizer, String rdfdigester, String mediaType) throws Exception { AccessController.checkPermission(new AllPermission()); String message = ""; if (pipeExists(pipeRef)) { setPipeRef(pipeRef); switch (operationCode) { case ADD_TRIPLES_OPERATION: message = addTriples(pipeRef, dataUrl, mediaType); break; case RECONCILE_GRAPH_OPERATION: message = reconcile(pipeRef, null); break; case SMUSH_GRAPH_OPERATION: message = smush(pipeRef); break; case TEXT_EXTRACTION: message = extractText(pipeRef, rdfdigester); break; case RDFIZE: message = 
transformXml(dataUrl, rdfizer); break; case PUBLISH_DATA: message = publishData(pipeRef); break; } } else { message = "The pipe does not exist."; } return message; } private String transformXml(URL dataUrl, String rdfizer) { String message = ""; if(PUBMED_RDFIZER.equals(rdfizer)){ message = transformPubMedXml(dataUrl); } else if (PATENT_RDFIZER.equals(rdfizer)) { message = transformPatentXml(dataUrl); } return message; } private String transformPubMedXml(URL dataUrl) { String message = "PubMed XML->RDF transformation to be implemented."; return message; } private String transformPatentXml(URL dataUrl) { String message = "Marec Patent XML->RDF transformation to be implemented"; return message; } /** * Load RDF data into an existing graph from a URL (schemes: "file://" or "http://"). * The arguments to be passed are: * 1) graph in which the RDF data must be stored * 2) url of the dataset * After the upload the input graph is sent to a digester to extract text for indexing and * adding entities found by NLP components (in the default chain) as subject */ private String addTriples(UriRef pipeRef, URL dataUrl, String mediaType) throws Exception { AccessController.checkPermission(new AllPermission()); String message = ""; // look up the pipe's rdf graph to which add the data UriRef graphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); // add the triples of the temporary graph into the graph selected by the user if (isValidUrl(dataUrl)) { MGraph updatedGraph = addTriplesCommand(graphRef, dataUrl, mediaType); message = "Added " + updatedGraph.size() + " triples to " + graphRef.getUnicodeString() + "\n"; } else { message = "The URL of the data is not a valid one.\n"; } log.info(message); return message; } private MGraph addTriplesCommand(UriRef graphRef, URL dataUrl, String mediaType) throws Exception { AccessController.checkPermission(new AllPermission()); MGraph graph = null; URLConnection connection = dataUrl.openConnection(); 
connection.addRequestProperty("Accept", "application/rdf+xml; q=.9, text/turte;q=1"); // create a temporary graph to store the data SimpleMGraph tempGraph = new SimpleMGraph(); InputStream data = connection.getInputStream(); if (data != null) { if (mediaType.equals("application/x-www-form-urlencoded")) { mediaType = getContentTypeFromUrl(dataUrl); } parser.parse(tempGraph, data, mediaType); // add the triples of the temporary graph into the graph selected by the user if (graphExists(graphRef)) { graph = tcManager.getMGraph(graphRef); graph.addAll(tempGraph); } } return graph; } /** * Removes all the triples from the graph * */ private String emptyGraph(UriRef graphRef) { // removes all the triples from the graph MGraph graph = tcManager.getMGraph(graphRef); graph.clear(); return "Graph " + graphRef.getUnicodeString() + " is now empty."; } /** * Deletes a graph, the reference to it in the DLC graph and deletes all the * derived graphs linked to it by the dcterms:source property. * * @param graphRef * @return */ private String deleteGraph(UriRef graphRef) { tcManager.deleteTripleCollection(graphRef); GraphNode dlcGraphNode = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph()); //remove the relation with the data lifecycle graph and all the information (triples) about the deleted graph (label). dlcGraphNode.deleteProperty(DCTERMS.hasPart, graphRef); return "Graph " + graphRef.getUnicodeString() + " has been deleted."; } /** * Reconciles a source graph with a target graph. The result of the reconciliation is an equivalence link set * stored in the interlink graph of the pipe. * @param sourceGraphRef the URI of the referenced graph, ie. the graph for which the reconciliation should be performed. * @param targetGraphRef the URI of the target graph. If null the target graph is the same as the source graph. 
* @return * @throws Exception */ private String reconcile(UriRef pipeRef, UriRef targetGraphRef) throws Exception { String message = ""; UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); if (graphExists(sourceGraphRef)) { //if target graph is not provided the reconciliation will be done against the source graph itself if(targetGraphRef == null){ targetGraphRef = sourceGraphRef; } // reconcile the source graph with the target graph UriRef interlinkGraphRef = reconcileCommand(pipeRef, sourceGraphRef, targetGraphRef); TripleCollection interlinkGraph = tcManager.getMGraph(interlinkGraphRef); if (interlinkGraph.size() > 0) { message = "A reconciliation task has been done between " + sourceGraphRef.getUnicodeString() + " and " + targetGraphRef.getUnicodeString() + ".\n" + interlinkGraph.size() + " owl:sameAs statements have been created and stored in " + interlinkGraphRef.getUnicodeString(); } else { message = "A reconciliation task has been done between " + sourceGraphRef.getUnicodeString() + " and " + targetGraphRef.getUnicodeString() + ".\n" + "No equivalent entities have been found."; } } else { message = "The source graph does not exist."; } log.info(message); return message; } private UriRef reconcileCommand(UriRef pipeRef, UriRef sourceGraphRef, UriRef targetGraphRef) throws Exception { TripleCollection owlSameAs = null; // get the pipe's interlink graph to store the result of the reconciliation task UriRef interlinkGraphRef = new UriRef(pipeRef.getUnicodeString() + INTERLINK_GRAPH_URN_SUFFIX); if (graphExists(sourceGraphRef)) { TripleCollection sourceGrah = tcManager.getMGraph(sourceGraphRef); // reconcile the source graph with the target graph owlSameAs = interlinker.interlink(sourceGrah, targetGraphRef); if (owlSameAs.size() > 0) { LockableMGraph sameAsGraph = tcManager.getMGraph(interlinkGraphRef); sameAsGraph.addAll(owlSameAs); // log the result (the equivalence set should be serialized and stored) Lock l = 
sameAsGraph.getLock().readLock(); l.lock(); try { Iterator<Triple> isameas = owlSameAs.iterator(); while (isameas.hasNext()) { Triple t = isameas.next(); NonLiteral s = t.getSubject(); UriRef p = t.getPredicate(); Resource o = t.getObject(); log.info(s.toString() + p.getUnicodeString() + o.toString() + " .\n"); } } finally { l.unlock(); } // add a reference of the equivalence set to the source graph getDlcGraph().add(new TripleImpl(interlinkGraphRef, Ontology.voidSubjectsTarget, sourceGraphRef)); // add a reference of the equivalence set to the target graph getDlcGraph().add(new TripleImpl(interlinkGraphRef, Ontology.voidObjectsTarget, targetGraphRef)); } } return interlinkGraphRef; } /** * Smush the source graph using the interlinking graph. More precisely collates data coming * from different equivalent resources in a single one chosen among them. The triples in the * source graph are copied in the smush graph that is then smushed using the interlinking * graph. * @param graphToSmushRef * @return */ private String smush(UriRef pipeRef) { String message = "Smushing task.\n"; UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); if(getInterlinkGraph().size() > 0) { LockableMGraph smushedGraph = smushCommand(sourceGraphRef, getInterlinkGraph()); message = "Smushing of " + sourceGraphRef.getUnicodeString() + " with linkset completed. 
" + "Smushed graph size = " + smushedGraph.size() + "\n"; } else { message = "No equivalence links available for " + sourceGraphRef.getUnicodeString() + "\n" + "Start a reconciliation task before smushing."; } return message; } private LockableMGraph smushCommand(UriRef sourceGraphRef, LockableMGraph equivalenceSet) { if(getSmushGraph().size() > 0) { getSmushGraph().clear(); } // add triples from source graph to smush graph getSmushGraph().addAll(getSourceGraph()); SimpleMGraph tempEquivalenceSet = new SimpleMGraph(); tempEquivalenceSet.addAll(equivalenceSet); // smush and canonicalize uris IriSmusher smusher = new CanonicalizingSameAsSmusher(); smusher.smush(getSmushGraph(), tempEquivalenceSet, true); //serializer.serialize(System.out, getSmushGraph(), SupportedFormat.RDF_XML); return getSmushGraph(); } private String extractText(UriRef pipeRef, String rdfdigester) { String message = ""; if(PATENT_RDFDIGESTER.equals(rdfdigester)){ message = extractTextFromPatent(pipeRef); } else if (PUBMED_RDFDIGESTER.equals(rdfdigester)) { message = extractTextFromPubMed(pipeRef); } return message; } /** * Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content * property with that text in the enhance graph. The text is used by the ECS for indexing. The keywords * will be related to a patent (resource of type pmo:PatentPublication) so that the patent will be retrieved anytime * the keyword is searched. The extractor also takes all the entities extracted by NLP enhancement engines. These entities * and a rdfs:label if available, are added to the patent resource using dcterms:subject property. 
* @param pipeRef * @return */ private String extractTextFromPatent(UriRef pipeRef){ String message = "Extract text from patents and adding a sioc:content property.\n"; UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX); MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef); UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); MGraph sourceGraph = tcManager.getMGraph(sourceGraphRef); enhanceGraph.addAll(sourceGraph); patentDigester.extractText(enhanceGraph); message += "Extracted text from " + enhanceGraphRef.getUnicodeString(); return message; } /** * Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content * property with that text in the enhance graph. The text is used by the ECS for indexing. The keywords * will be related to a PubMed article (resource of type bibo:Document) so that the article will be retrieved any time * the keywords are searched. The extractor also takes all the entities extracted by NLP enhancement engines. These entities * and a rdfs:label if available, are added to the article resource using dcterms:subject property. * @param pipeRef * @return */ private String extractTextFromPubMed(UriRef pipeRef){ String message = "Extract text from PubMed articles and adding a sioc:content property.\n"; UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX); MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef); UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); MGraph sourceGraph = tcManager.getMGraph(sourceGraphRef); enhanceGraph.addAll(sourceGraph); pubmedDigester.extractText(enhanceGraph); message += "Extracted text from " + enhanceGraphRef.getUnicodeString(); return message; } /** * Moves data from smush.grah to content.graph. The triples (facts) in the two graphs must be coherent, i.e. the same. 
* Before publishing the current smushed data must be compared with the last published data. New triples * in the smushed graph not in the published graph must be added while triples in the published graph absent * in the smushed graph must be removed. The algorithm is as follows * 1) find triples in smush.graph not in publish.graph (new triples) * 2) find triples in publish.graph not in smush.graph (old triples) * 3) add new triples to content.graph * 4) remove old triples from content.graph * 5) delete all triples in publish.graph * 6) copy triples from smush.graph to publish.graph */ private String publishData(UriRef pipeRef) { String message = ""; // add these triples to the content.graph SimpleMGraph triplesToAdd = new SimpleMGraph(); // remove these triples from the content.graph SimpleMGraph triplesToRemove = new SimpleMGraph(); // triples to add to the content.graph Lock ls = getSmushGraph().getLock().readLock(); ls.lock(); try { Iterator<Triple> ismush = getSmushGraph().iterator(); while (ismush.hasNext()) { Triple smushTriple = ismush.next(); if( ! getPublishGraph().contains(smushTriple) ) { triplesToAdd.add(smushTriple); } } } finally { ls.unlock(); } // triples to remove from the content.graph Lock lp = getPublishGraph().getLock().readLock(); lp.lock(); try { Iterator<Triple> ipublish = getPublishGraph().iterator(); while (ipublish.hasNext()) { Triple publishTriple = ipublish.next(); if( ! 
getSmushGraph().contains(publishTriple) ) { triplesToRemove.add(publishTriple); } } } finally { lp.unlock(); } getContentGraph().removeAll(triplesToRemove); getContentGraph().addAll(triplesToAdd); getPublishGraph().clear(); getPublishGraph().addAll(getSmushGraph()); message = "Copied " + getPublishGraph().size() + " triples from " + pipeRef.getUnicodeString() + " to content-graph"; return message; } /** * Validate URL * A valid URL must start with file:/// or http:// */ private boolean isValidUrl(URL url) { boolean isValidUrl = false; if(url != null) { if( url.toString().startsWith("http://") || url.toString().startsWith("file:/")) { isValidUrl = true; } } return isValidUrl; } /** * Extracts the content type from the file extension * * @param url * @return */ private String getContentTypeFromUrl(URL url) { String contentType = null; if (url.getFile().endsWith("ttl")) { contentType = "text/turtle"; } else if (url.getFile().endsWith("nt")) { contentType = "text/turtle"; } else { contentType = "application/rdf+xml"; } return contentType; } /** * Returns the data life cycle graph containing all the monitored graphs. It * creates it if doesn't exit yet. * * @return */ private LockableMGraph getDlcGraph() { return tcManager.getMGraph(DATA_LIFECYCLE_GRAPH_REFERENCE); } /** * Checks if a graph exists and returns a boolean value. * * @param graph_ref * @return */ private boolean graphExists(UriRef graph_ref) { Set<UriRef> graphs = tcManager.listMGraphs(); Iterator<UriRef> igraphs = graphs.iterator(); while (igraphs.hasNext()) { UriRef graphRef = igraphs.next(); if (graph_ref.toString().equals(graphRef.toString())) { return true; } } return false; } /** * Checks whether a pipe exists */ private boolean pipeExists(UriRef pipeRef) { boolean result = false; if (pipeRef != null) { GraphNode pipeNode = new GraphNode(pipeRef, getDlcGraph()); if(pipeNode != null) { result = true; } } return result; } /** * Creates the data lifecycle graph. 
Must be called at the bundle * activation if the graph doesn't exists yet. */ private MGraph createDlcGraph() { MGraph dlcGraph = tcManager.createMGraph(DATA_LIFECYCLE_GRAPH_REFERENCE); TcAccessController tca = new TcAccessController(tcManager); tca.setRequiredReadPermissions(DATA_LIFECYCLE_GRAPH_REFERENCE, Collections.singleton((Permission) new TcPermission( "urn:x-localinstance:/content.graph", "read"))); return dlcGraph; } /** * Generates a new http URI that will be used as the canonical one in place * of a set of equivalent non-http URIs. An owl:sameAs statement is added to * the interlinking graph stating that the canonical http URI is equivalent * to one of the non-http URI in the set of equivalent URIs. * @param uriRefs * @return */ private UriRef generateNewHttpUri(Set<UriRef> uriRefs) { UriRef bestNonHttp = chooseBest(uriRefs); String nonHttpString = bestNonHttp.getUnicodeString(); if (!nonHttpString.startsWith("urn:x-temp:")) { throw new RuntimeException("Sorry we current assume all non-http " + "URIs to be canonicalized to be urn:x-temp"); } String httpUriString = nonHttpString.replaceFirst("urn:x-temp:", baseURI); UriRef httpUriRef = new UriRef(httpUriString); // add an owl:sameAs statement in the interlinking graph getInterlinkGraph().add(new TripleImpl(bestNonHttp, OWL.sameAs, httpUriRef)); return httpUriRef; } private UriRef chooseBest(Set<UriRef> httpUri) { Iterator<UriRef> iter = httpUri.iterator(); UriRef best = iter.next(); while (iter.hasNext()) { UriRef next = iter.next(); if (next.getUnicodeString().compareTo(best.getUnicodeString()) < 0) { best = next; } } return best; } /** * An inline class to canonicalize URI from urn to http scheme. A http URI is chosen * among the equivalent ones.if no one http URI is available a new one is created. 
*/ private class CanonicalizingSameAsSmusher extends IriSmusher { @Override protected UriRef getPreferedIri(Set<UriRef> uriRefs) { Set<UriRef> httpUri = new HashSet<UriRef>(); for (UriRef uriRef : uriRefs) { if (uriRef.getUnicodeString().startsWith("http")) { httpUri.add(uriRef); } } if (httpUri.size() == 1) { return httpUri.iterator().next(); } // There is no http URI in the set of equivalent resource. The entity was unknown. // A new representation of the entity with http URI will be created. if (httpUri.size() == 0) { return generateNewHttpUri(uriRefs); } if (httpUri.size() > 1) { return chooseBest(httpUri); } throw new Error("Negative size set."); } } }
use enhancement.graph for smushing instead of rdf.graph
src/main/java/eu/fusepool/datalifecycle/SourcingAdmin.java
use enhancement.graph for smushing instead of rdf.graph
<ide><path>rc/main/java/eu/fusepool/datalifecycle/SourcingAdmin.java <ide> import org.apache.clerezza.rdf.core.access.EntityAlreadyExistsException; <ide> import org.apache.clerezza.rdf.core.access.LockableMGraph; <ide> import org.apache.clerezza.rdf.core.access.TcManager; <add>import org.apache.clerezza.rdf.core.access.TcProvider; <ide> import org.apache.clerezza.rdf.core.access.security.TcAccessController; <ide> import org.apache.clerezza.rdf.core.access.security.TcPermission; <ide> import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl; <ide> private final String PUBMED_RDFIZER = "pubmed"; <ide> private final String PATENT_RDFIZER = "patent"; <ide> <del> //TODO make this a component parameter <add> <ide> // URI for rewriting from urn scheme to http <ide> private String baseURI = "http://fusepool.info"; <ide> <ide> * entities extracted from the text by NLP engines in the default enhancement chain <ide> * 3) a graph to store the result of the interlinking task <ide> * 4) a graph to store the smushed graph <del> * 5) a graph to store the published graph i.e. the smushed graph in a coerent state with data in the content graph <add> * 5) a graph to store the published graph i.e. 
the smushed graph in a coherent state with data in the content graph <ide> * The name convention for these graphs is <ide> * GRAPH_URN_PREFIX + timestamp + SUFFIX <del> * where SUFFIX can be one of BUFFER_GRAPH_URN_SUFFIX, ENHANCE_GRAPH_URN_SUFFIX, <del> * INTERLINK_GRAPH_URN_SUFFIX, SMUSH_GRAPH_URN_SUFFIX <add> * where SUFFIX can be one of SOURCE_GRAPH_URN_SUFFIX, ENHANCE_GRAPH_URN_SUFFIX, <add> * INTERLINK_GRAPH_URN_SUFFIX, SMUSH_GRAPH_URN_SUFFIX, PUBLISH_GRAPH_URN_SUFFIX <ide> */ <ide> // base graph uri <ide> public static final String GRAPH_URN_PREFIX = "urn:x-localinstance:/dlc/"; <ide> @Activate <ide> protected void activate(ComponentContext context) { <ide> <del> log.info("The Data Life Cycle service is being activated"); <add> log.info("The Sourcing Admin Service is being activated"); <add> // Creates the data lifecycle graph if it doesn't exists. This graph contains references to graphs and linksets <ide> try { <del> if (interlinker != null) { <del> log.info("Silk interlinker service available"); <del> } else { <del> log.info("Silk interlinker service NOT available"); <del> } <del> // Creates the data lifecycle graph if it doesn't exists. This graph contains references to graphs and linksets <del> try { <del> createDlcGraph(); <del> log.info("Created Data Lifecycle Register Graph. This graph will reference all graphs during their lifecycle"); <del> } catch (EntityAlreadyExistsException ex) { <del> log.info("Data Lifecycle Graph already exists."); <del> } <del> <del> <add> createDlcGraph(); <add> log.info("Created Data Lifecycle Register Graph. 
This graph will reference all graphs during their lifecycle"); <ide> } catch (EntityAlreadyExistsException ex) { <del> log.debug("The graph for the request log already exists"); <del> } <del> <add> log.info("Data Lifecycle Graph already exists."); <add> } <add> <ide> } <ide> <ide> @Deactivate <ide> protected void deactivate(ComponentContext context) { <del> log.info("The Data Life Cycle service is being deactivated"); <add> log.info("The Sourcing Admin Service is being deactivated"); <ide> } <ide> <ide> /** <ide> */ <ide> private boolean createPipe(String pipeLabel) { <ide> boolean graphExists = false; <del> String label = ""; <del> if (pipeLabel != null) { <del> label = pipeLabel; <del> } <ide> <ide> try { <ide> String timeStamp = String.valueOf(System.currentTimeMillis()); <ide> // create a pipe <ide> UriRef pipeRef = new UriRef(GRAPH_URN_PREFIX + timeStamp); <ide> getDlcGraph().add(new TripleImpl(pipeRef, RDF.type, Ontology.Pipe)); <add> if(pipeLabel != null & ! "".equals("")) { <add> getDlcGraph().add(new TripleImpl(pipeRef, RDFS.label, new PlainLiteralImpl(pipeLabel))); <add> } <ide> getDlcGraph().add(new TripleImpl(DATA_LIFECYCLE_GRAPH_REFERENCE, Ontology.pipe, pipeRef)); <add> <ide> // create tasks <ide> //rdf task <ide> UriRef rdfTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/rdf"); <ide> //dlcGraphNode.addProperty(DCTERMS.hasPart, graphRef); <ide> getDlcGraph().add(new TripleImpl(rdfTaskRef, Ontology.deliverable, sourceGraphRef)); <ide> getDlcGraph().add(new TripleImpl(sourceGraphRef, RDF.type, Ontology.voidDataset)); <del> getDlcGraph().add(new TripleImpl(sourceGraphRef, RDFS.label, new PlainLiteralImpl(label))); <add> <ide> <ide> <ide> // create the graph to store text and enhancements <ide> } <ide> <ide> /** <del> * Reconciles a source graph with a target graph. The result of the reconciliation is an equivalence link set <del> * stored in the interlink graph of the pipe. <del> * @param sourceGraphRef the URI of the referenced graph, ie. 
the graph for which the reconciliation should be performed. <add> * Reconciles a graph with a target graph. The result of the reconciliation is an equivalence set <add> * stored in the interlink graph of the pipe. The graph used as source is the source rdf graph <add> * @param sourceGraphRef the URI of the referenced graph, i.e. the graph for which the reconciliation should be performed. <ide> * @param targetGraphRef the URI of the target graph. If null the target graph is the same as the source graph. <ide> * @return <ide> * @throws Exception <ide> } <ide> <ide> /** <del> * Smush the source graph using the interlinking graph. More precisely collates data coming <add> * Smush the enhanced graph using the interlinking graph. More precisely collates data coming <ide> * from different equivalent resources in a single one chosen among them. The triples in the <ide> * source graph are copied in the smush graph that is then smushed using the interlinking <ide> * graph. <ide> */ <ide> private String smush(UriRef pipeRef) { <ide> String message = "Smushing task.\n"; <del> <del> UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); <add> // As the smush.graph must be published it has to contain the sioc.content property and all the subject <add> // extracted during the extraction phase that are stored in the enhance.graph with all the triples from <add> // the rdf <add> UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX); <ide> <ide> if(getInterlinkGraph().size() > 0) { <ide> <del> LockableMGraph smushedGraph = smushCommand(sourceGraphRef, getInterlinkGraph()); <del> <del> message = "Smushing of " + sourceGraphRef.getUnicodeString() <del> + " with linkset completed. " <add> LockableMGraph smushedGraph = smushCommand(enhanceGraphRef, getInterlinkGraph()); <add> <add> message = "Smushing of " + enhanceGraphRef.getUnicodeString() <add> + " with equivalence set completed. 
" <ide> + "Smushed graph size = " + smushedGraph.size() + "\n"; <ide> } <ide> else { <del> message = "No equivalence links available for " + sourceGraphRef.getUnicodeString() + "\n" <add> message = "No equivalence links available for " + enhanceGraphRef.getUnicodeString() + "\n" <ide> + "Start a reconciliation task before smushing."; <ide> } <ide> <ide> return message; <ide> } <ide> <del> private LockableMGraph smushCommand(UriRef sourceGraphRef, LockableMGraph equivalenceSet) { <add> private LockableMGraph smushCommand(UriRef enhanceGraphRef, LockableMGraph equivalenceSet) { <ide> <ide> if(getSmushGraph().size() > 0) { <ide> getSmushGraph().clear(); <ide> } <ide> <ide> // add triples from source graph to smush graph <del> getSmushGraph().addAll(getSourceGraph()); <add> getSmushGraph().addAll(getEnhanceGraph()); <ide> SimpleMGraph tempEquivalenceSet = new SimpleMGraph(); <ide> tempEquivalenceSet.addAll(equivalenceSet); <ide> <ide> * @return <ide> */ <ide> private String extractTextFromPatent(UriRef pipeRef){ <del> String message = "Extract text from patents and adding a sioc:content property.\n"; <add> String message = "Extracts text from patents and adds a sioc:content property.\n"; <ide> UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX); <ide> MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef); <ide> UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); <del> MGraph sourceGraph = tcManager.getMGraph(sourceGraphRef); <del> <del> enhanceGraph.addAll(sourceGraph); <add> LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef); <add> <add> SimpleMGraph tempGraph = new SimpleMGraph(); <add> Lock rl = sourceGraph.getLock().readLock(); <add> rl.lock(); <add> try { <add> tempGraph.addAll(sourceGraph); <add> } <add> finally { <add> rl.unlock(); <add> } <add> <add> enhanceGraph.addAll(tempGraph); <ide> <ide> patentDigester.extractText(enhanceGraph); <ide> message += "Extracted text 
from " + enhanceGraphRef.getUnicodeString(); <ide> UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX); <ide> MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef); <ide> UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX); <del> MGraph sourceGraph = tcManager.getMGraph(sourceGraphRef); <del> <del> enhanceGraph.addAll(sourceGraph); <add> LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef); <add> <add> SimpleMGraph tempGraph = new SimpleMGraph(); <add> Lock rl = sourceGraph.getLock().readLock(); <add> rl.lock(); <add> try { <add> tempGraph.addAll(sourceGraph); <add> } <add> finally { <add> rl.unlock(); <add> } <add> <add> enhanceGraph.addAll(tempGraph); <ide> <ide> pubmedDigester.extractText(enhanceGraph); <ide> message += "Extracted text from " + enhanceGraphRef.getUnicodeString();
Java
apache-2.0
a8651a35610e5a330093f276c52bad3b2aac14fd
0
wyona/yanel,baszero/yanel,wyona/yanel,wyona/yanel,baszero/yanel,baszero/yanel,baszero/yanel,baszero/yanel,wyona/yanel,wyona/yanel,wyona/yanel,baszero/yanel
/* * Copyright 2006 Wyona * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.wyona.org/licenses/APACHE-LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wyona.yanel.impl.resources; import org.wyona.yanel.core.Path; import org.wyona.yanel.core.Resource; import org.wyona.yanel.core.ResourceConfiguration; import org.wyona.yanel.core.Topic; import org.wyona.yanel.core.api.attributes.CreatableV2; import org.wyona.yanel.core.api.attributes.IntrospectableV1; import org.wyona.yanel.core.api.attributes.ModifiableV2; import org.wyona.yanel.core.api.attributes.VersionableV2; import org.wyona.yanel.core.api.attributes.ViewableV2; import org.wyona.yanel.core.attributes.versionable.RevisionInformation; import org.wyona.yanel.core.attributes.viewable.View; import org.wyona.yanel.core.attributes.viewable.ViewDescriptor; import org.wyona.yanel.core.util.PathUtil; import org.wyona.yarep.core.Node; import org.wyona.yarep.core.Repository; import org.wyona.yarep.core.RepositoryFactory; import org.wyona.yarep.core.Revision; import org.wyona.yarep.util.RepoPath; import javax.servlet.http.HttpServletRequest; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.Reader; import java.io.Writer; import java.util.Date; import org.apache.log4j.Category; /** * Generic Node Resource */ public class NodeResource extends Resource implements ViewableV2, ModifiableV2, VersionableV2, IntrospectableV1 { //public class NodeResource extends Resource implements ViewableV2, ModifiableV2, VersionableV2, CreatableV2 { private 
static Category log = Category.getInstance(NodeResource.class); /** * */ public NodeResource() { } /** * */ public ViewDescriptor[] getViewDescriptors() { return null; } public View getView(String viewId, String revisionName) throws Exception { View defaultView = new View(); defaultView.setInputStream(getRealm().getRepository().getNode(getPath()) .getRevision(revisionName).getInputStream()); defaultView.setMimeType(getMimeType(viewId)); return defaultView; } /** * */ public View getView(String viewId) throws Exception { View defaultView = new View(); defaultView.setInputStream(getRealm().getRepository().getNode(getPath()).getInputStream()); defaultView.setMimeType(getMimeType(viewId)); return defaultView; } /** * */ public String getMimeType(String viewId) throws Exception { String mimeType = null; ResourceConfiguration resConfig = getConfiguration(); if (resConfig != null) { mimeType = resConfig.getProperty("mime-type"); } else { log.warn("Depracted: " + getPath()); mimeType = getRTI().getProperty("mime-type"); } if (mimeType != null) return mimeType; // TODO: Load config mime.types ... 
String suffix = PathUtil.getSuffix(getPath()); if (suffix != null) { log.debug("SUFFIX: " + suffix); if (suffix.equals("html")) { mimeType = "text/html"; } else if (suffix.equals("htm")) { mimeType = "text/html"; } else if (suffix.equals("xhtml")) { mimeType = "application/xhtml+xml"; } else if (suffix.equals("xml")) { mimeType = "application/xml"; } else if (suffix.equals("css")) { mimeType = "text/css"; } else if (suffix.equals("js")) { mimeType = "application/x-javascript"; } else if (suffix.equals("png")) { mimeType = "image/png"; } else if (suffix.equals("jpg")) { mimeType = "image/jpeg"; } else if (suffix.equals("gif")) { mimeType = "image/gif"; } else if (suffix.equals("pdf")) { mimeType = "application/pdf"; } else if (suffix.equals("doc")) { mimeType = "application/msword"; } else if (suffix.equals("odt")) { mimeType = "application/vnd.oasis.opendocument.text"; } else if (suffix.equals("sxc")) { mimeType = "application/vnd.sun.xml.calc"; } else if (suffix.equals("xpi")) { mimeType = "application/x-xpinstall"; } else if (suffix.equals("zip")) { mimeType = "application/zip"; } else if (suffix.equals("txt")) { mimeType = "text/plain"; } else if (suffix.equals("mov")) { mimeType = "video/quicktime"; } else if (suffix.equals("ico")) { mimeType = "image/x-icon"; } else { log.warn("Could not determine mime-type from suffix (suffix: " + suffix + ")!"); mimeType = "application/octet-stream"; } } else { log.warn("mime-type will be set to application/octet-stream, because no suffix for " + getPath()); mimeType = "application/octet-stream"; } return mimeType; } /** * */ public Reader getReader() throws Exception { return new InputStreamReader(getInputStream(), "UTF-8"); } /** * */ public InputStream getInputStream() throws Exception { return getRealm().getRepository().getNode(getPath()).getInputStream(); } /** * */ public Writer getWriter() throws Exception { log.error("Not implemented yet!"); return null; } /** * */ public OutputStream getOutputStream() throws 
Exception { return getRealm().getRepository().getNode(getPath()).getOutputStream(); } /** * */ public void write(InputStream in) throws Exception { log.warn("Not implemented yet!"); } /** * */ public long getLastModified() throws Exception { Node node = getRealm().getRepository().getNode(getPath()); long lastModified; if (node.isResource()) { lastModified = node.getLastModified(); } else { lastModified = 0; } return lastModified; } /** * Delete data of node resource */ public boolean delete() throws Exception { getRealm().getRepository().getNode(getPath()).delete(); return true; } /** * */ public RevisionInformation[] getRevisions() throws Exception { Revision[] revisions = getRealm().getRepository().getNode(getPath()).getRevisions(); RevisionInformation[] revisionInfos = new RevisionInformation[revisions.length]; for (int i=0; i<revisions.length; i++) { revisionInfos[i] = new RevisionInformation(revisions[i]); } return revisionInfos; } public void checkin(String comment) throws Exception { Node node = getRealm().getRepository().getNode(getPath()); node.checkin(comment); /* if (node.isCheckedOut()) { String checkoutUserID = node.getCheckoutUserID(); if (checkoutUserID.equals(userID)) { node.checkin(); } else { throw new Exception("Resource is checked out by another user: " + checkoutUserID); } } else { throw new Exception("Resource is not checked out."); } */ } public void checkout(String userID) throws Exception { Node node = getRealm().getRepository().getNode(getPath()); node.checkout(userID); /* if (node.isCheckedOut()) { String checkoutUserID = node.getCheckoutUserID(); if (checkoutUserID.equals(userID)) { log.warn("Resource " + getPath() + " is already checked out by this user: " + checkoutUserID); } else { throw new Exception("Resource is already checked out by another user: " + checkoutUserID); } } else { node.checkout(userID); } */ } public void restore(String revisionName) throws Exception { 
getRealm().getRepository().getNode(getPath()).restore(revisionName); } public Date getCheckoutDate() throws Exception { Node node = getRealm().getRepository().getNode(getPath()); // return node.getCheckoutDate(); return null; } public String getCheckoutUserID() throws Exception { Node node = getRealm().getRepository().getNode(getPath()); return node.getCheckoutUserID(); } public boolean isCheckedOut() throws Exception { Node node = getRealm().getRepository().getNode(getPath()); return node.isCheckedOut(); } public boolean exists() throws Exception { return getRealm().getRepository().existsNode(getPath()); } /** * */ public long getSize() throws Exception { Node node = getRealm().getRepository().getNode(getPath()); long size; if (node.isResource()) { size = node.getSize(); } else { size = 0; } return size; } /** * */ public Object getProperty(String name) { log.warn("No implemented yet!"); return null; } /** * */ public String[] getPropertyNames() { String[] props = {"data"}; return props; } /** * */ public void setProperty(String name, Object value) { log.warn("No implemented yet!"); } /** * */ public void create(HttpServletRequest request) { log.warn("No implemented yet!"); } /** * */ public java.util.HashMap createRTIProperties(HttpServletRequest request) { log.warn("No implemented yet!"); return null; } /** * */ public String getPropertyType(String name) { return CreatableV2.TYPE_UPLOAD; } public String getIntrospection() throws Exception { String name = PathUtil.getName(getPath()); StringBuffer buf = new StringBuffer(); buf.append("<?xml version=\"1.0\"?>"); buf.append("<introspection xmlns=\"http://www.wyona.org/neutron/1.0\">"); buf.append("<edit mime-type=\"" + this.getMimeType(null) + "\" name=\"" + name + "\">"); buf.append("<checkout url=\"?yanel.resource.usecase=checkout\" method=\"GET\"/>"); buf.append("<checkin url=\"?yanel.resource.usecase=checkin\" method=\"PUT\"/>"); buf.append("</edit>"); buf.append("</introspection>"); return buf.toString(); } }
src/resources/file/src/java/org/wyona/yanel/impl/resources/NodeResource.java
/* * Copyright 2006 Wyona * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.wyona.org/licenses/APACHE-LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wyona.yanel.impl.resources; import org.wyona.yanel.core.Path; import org.wyona.yanel.core.Resource; import org.wyona.yanel.core.ResourceConfiguration; import org.wyona.yanel.core.Topic; import org.wyona.yanel.core.api.attributes.CreatableV2; import org.wyona.yanel.core.api.attributes.IntrospectableV1; import org.wyona.yanel.core.api.attributes.ModifiableV2; import org.wyona.yanel.core.api.attributes.VersionableV2; import org.wyona.yanel.core.api.attributes.ViewableV2; import org.wyona.yanel.core.attributes.versionable.RevisionInformation; import org.wyona.yanel.core.attributes.viewable.View; import org.wyona.yanel.core.attributes.viewable.ViewDescriptor; import org.wyona.yanel.core.util.PathUtil; import org.wyona.yarep.core.Node; import org.wyona.yarep.core.Repository; import org.wyona.yarep.core.RepositoryFactory; import org.wyona.yarep.core.Revision; import org.wyona.yarep.util.RepoPath; import javax.servlet.http.HttpServletRequest; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.Reader; import java.io.Writer; import java.util.Date; import org.apache.log4j.Category; /** * Generic Node Resource */ public class NodeResource extends Resource implements ViewableV2, ModifiableV2, VersionableV2, IntrospectableV1 { //public class NodeResource extends Resource implements ViewableV2, ModifiableV2, VersionableV2, CreatableV2 { private 
static Category log = Category.getInstance(NodeResource.class); /** * */ public NodeResource() { } /** * */ public ViewDescriptor[] getViewDescriptors() { return null; } public View getView(String viewId, String revisionName) throws Exception { View defaultView = new View(); defaultView.setInputStream(getRealm().getRepository().getNode(getPath()) .getRevision(revisionName).getInputStream()); defaultView.setMimeType(getMimeType(viewId)); return defaultView; } /** * */ public View getView(String viewId) throws Exception { View defaultView = new View(); defaultView.setInputStream(getRealm().getRepository().getNode(getPath()).getInputStream()); defaultView.setMimeType(getMimeType(viewId)); return defaultView; } /** * */ public String getMimeType(String viewId) throws Exception { String mimeType = null; ResourceConfiguration resConfig = getConfiguration(); if (resConfig != null) { mimeType = resConfig.getProperty("mime-type"); } else { log.warn("Depracted: " + getPath()); mimeType = getRTI().getProperty("mime-type"); } if (mimeType != null) return mimeType; // TODO: Load config mime.types ... 
String suffix = PathUtil.getSuffix(getPath()); if (suffix != null) { log.debug("SUFFIX: " + suffix); if (suffix.equals("html")) { mimeType = "text/html"; } else if (suffix.equals("htm")) { mimeType = "text/html"; } else if (suffix.equals("xhtml")) { mimeType = "application/xhtml+xml"; } else if (suffix.equals("xml")) { mimeType = "application/xml"; } else if (suffix.equals("css")) { mimeType = "text/css"; } else if (suffix.equals("js")) { mimeType = "application/x-javascript"; } else if (suffix.equals("png")) { mimeType = "image/png"; } else if (suffix.equals("jpg")) { mimeType = "image/jpeg"; } else if (suffix.equals("gif")) { mimeType = "image/gif"; } else if (suffix.equals("pdf")) { mimeType = "application/pdf"; } else if (suffix.equals("doc")) { mimeType = "application/msword"; } else if (suffix.equals("odt")) { mimeType = "application/vnd.oasis.opendocument.text"; } else if (suffix.equals("sxc")) { mimeType = "application/vnd.sun.xml.calc"; } else if (suffix.equals("xpi")) { mimeType = "application/x-xpinstall"; } else if (suffix.equals("zip")) { mimeType = "application/zip"; } else if (suffix.equals("txt")) { mimeType = "text/plain"; } else if (suffix.equals("mov")) { mimeType = "video/quicktime"; } else if (suffix.equals("ico")) { mimeType = "image/x-icon"; } else { log.warn("Could not determine mime-type from suffix (suffix: " + suffix + ")!"); mimeType = "application/octet-stream"; } } else { log.warn("mime-type will be set to application/octet-stream, because no suffix for " + getPath()); mimeType = "application/octet-stream"; } return mimeType; } /** * */ public Reader getReader() throws Exception { return new InputStreamReader(getInputStream(), "UTF-8"); } /** * */ public InputStream getInputStream() throws Exception { return getRealm().getRepository().getNode(getPath()).getInputStream(); } /** * */ public Writer getWriter() throws Exception { log.error("Not implemented yet!"); return null; } /** * */ public OutputStream getOutputStream() throws 
Exception { return getRealm().getRepository().getNode(getPath()).getOutputStream(); } /** * */ public void write(InputStream in) throws Exception { log.warn("Not implemented yet!"); } /** * */ public long getLastModified() throws Exception { Node node = getRealm().getRepository().getNode(getPath()); long lastModified; if (node.isResource()) { lastModified = node.getLastModified(); } else { lastModified = 0; } return lastModified; } /** * Delete data of node resource */ public boolean delete() throws Exception { getRealm().getRepository().getNode(getPath()).delete(); return true; } /** * */ public RevisionInformation[] getRevisions() throws Exception { Revision[] revisions = getRealm().getRepository().getNode(getPath()).getRevisions(); RevisionInformation[] revisionInfos = new RevisionInformation[revisions.length]; for (int i=0; i<revisions.length; i++) { revisionInfos[i] = new RevisionInformation(revisions[i]); } return revisionInfos; } public void checkin(String comment) throws Exception { Node node = getRealm().getRepository().getNode(getPath()); node.checkin(comment); /* if (node.isCheckedOut()) { String checkoutUserID = node.getCheckoutUserID(); if (checkoutUserID.equals(userID)) { node.checkin(); } else { throw new Exception("Resource is checked out by another user: " + checkoutUserID); } } else { throw new Exception("Resource is not checked out."); } */ } public void checkout(String userID) throws Exception { Node node = getRealm().getRepository().getNode(getPath()); node.checkout(userID); /* if (node.isCheckedOut()) { String checkoutUserID = node.getCheckoutUserID(); if (checkoutUserID.equals(userID)) { log.warn("Resource " + getPath() + " is already checked out by this user: " + checkoutUserID); } else { throw new Exception("Resource is already checked out by another user: " + checkoutUserID); } } else { node.checkout(userID); } */ } public void restore(String revisionName) throws Exception { 
getRealm().getRepository().getNode(getPath()).restore(revisionName); } public Date getCheckoutDate() throws Exception { Node node = getRealm().getRepository().getNode(getPath()); // return node.getCheckoutDate(); return null; } public String getCheckoutUserID() throws Exception { Node node = getRealm().getRepository().getNode(getPath()); return node.getCheckoutUserID(); } public boolean isCheckedOut() throws Exception { Node node = getRealm().getRepository().getNode(getPath()); return node.isCheckedOut(); } public boolean exists() throws Exception { log.warn("Not implemented yet!"); return true; } /** * */ public long getSize() throws Exception { Node node = getRealm().getRepository().getNode(getPath()); long size; if (node.isResource()) { size = node.getSize(); } else { size = 0; } return size; } /** * */ public Object getProperty(String name) { log.warn("No implemented yet!"); return null; } /** * */ public String[] getPropertyNames() { String[] props = {"data"}; return props; } /** * */ public void setProperty(String name, Object value) { log.warn("No implemented yet!"); } /** * */ public void create(HttpServletRequest request) { log.warn("No implemented yet!"); } /** * */ public java.util.HashMap createRTIProperties(HttpServletRequest request) { log.warn("No implemented yet!"); return null; } /** * */ public String getPropertyType(String name) { return CreatableV2.TYPE_UPLOAD; } public String getIntrospection() throws Exception { String name = PathUtil.getName(getPath()); StringBuffer buf = new StringBuffer(); buf.append("<?xml version=\"1.0\"?>"); buf.append("<introspection xmlns=\"http://www.wyona.org/neutron/1.0\">"); buf.append("<edit mime-type=\"" + this.getMimeType(null) + "\" name=\"" + name + "\">"); buf.append("<checkout url=\"?yanel.resource.usecase=checkout\" method=\"GET\"/>"); buf.append("<checkin url=\"?yanel.resource.usecase=checkin\" method=\"PUT\"/>"); buf.append("</edit>"); buf.append("</introspection>"); return buf.toString(); } }
implement exists
src/resources/file/src/java/org/wyona/yanel/impl/resources/NodeResource.java
implement exists
<ide><path>rc/resources/file/src/java/org/wyona/yanel/impl/resources/NodeResource.java <ide> } <ide> <ide> public boolean exists() throws Exception { <del> log.warn("Not implemented yet!"); <del> return true; <add> return getRealm().getRepository().existsNode(getPath()); <ide> } <ide> <ide> /**
Java
bsd-2-clause
d60762172302af9baea0864590f3a801116f2829
0
alpha-asp/Alpha,alpha-asp/Alpha,AntoniusW/Alpha
/** * Copyright (c) 2016-2018, the Alpha Team. * All rights reserved. * * Additional changes made by Siemens. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1) Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2) Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package at.ac.tuwien.kr.alpha.solver; import at.ac.tuwien.kr.alpha.common.*; import at.ac.tuwien.kr.alpha.common.atoms.Atom; import at.ac.tuwien.kr.alpha.common.atoms.BasicAtom; import at.ac.tuwien.kr.alpha.common.atoms.ComparisonAtom; import at.ac.tuwien.kr.alpha.common.atoms.Literal; import at.ac.tuwien.kr.alpha.common.terms.ConstantTerm; import at.ac.tuwien.kr.alpha.grounder.Grounder; import at.ac.tuwien.kr.alpha.grounder.NonGroundRule; import at.ac.tuwien.kr.alpha.grounder.ProgramAnalyzingGrounder; import at.ac.tuwien.kr.alpha.grounder.Substitution; import at.ac.tuwien.kr.alpha.grounder.atoms.RuleAtom; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristic; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristicFactory; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristicFactory.Heuristic; import at.ac.tuwien.kr.alpha.solver.heuristics.ChainedBranchingHeuristics; import at.ac.tuwien.kr.alpha.solver.heuristics.NaiveHeuristic; import at.ac.tuwien.kr.alpha.solver.learning.GroundConflictNoGoodLearner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.function.Consumer; import static at.ac.tuwien.kr.alpha.Util.oops; import static at.ac.tuwien.kr.alpha.common.Literals.*; import static at.ac.tuwien.kr.alpha.solver.ThriceTruth.MBT; import static at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristic.DEFAULT_CHOICE_LITERAL; import static at.ac.tuwien.kr.alpha.solver.learning.GroundConflictNoGoodLearner.ConflictAnalysisResult.UNSAT; /** * The new default solver employed in Alpha. * Copyright (c) 2016-2018, the Alpha Team. 
*/ public class DefaultSolver extends AbstractSolver implements SolverMaintainingStatistics { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultSolver.class); private final NoGoodStore store; private final ChoiceManager choiceManager; private final WritableAssignment assignment; private final GroundConflictNoGoodLearner learner; private final BranchingHeuristic branchingHeuristic; private boolean initialize = true; private int mbtAtFixpoint; private int conflictsAfterClosing; private final boolean disableJustifications; private boolean disableJustificationAfterClosing = true; // Keep disabled for now, case not fully worked out yet. public DefaultSolver(AtomStore atomStore, Grounder grounder, NoGoodStore store, WritableAssignment assignment, Random random, Heuristic branchingHeuristic, boolean debugInternalChecks, boolean disableJustifications) { super(atomStore, grounder); this.assignment = assignment; this.store = store; this.choiceManager = new ChoiceManager(assignment, store, debugInternalChecks); this.learner = new GroundConflictNoGoodLearner(assignment); this.branchingHeuristic = ChainedBranchingHeuristics.chainOf( BranchingHeuristicFactory.getInstance(branchingHeuristic, grounder, assignment, choiceManager, random), new NaiveHeuristic(choiceManager)); this.disableJustifications = disableJustifications; } @Override protected boolean tryAdvance(Consumer<? super AnswerSet> action) { boolean didChange = false; long timeOnEntry = System.currentTimeMillis(); long timeLast = timeOnEntry; int decisionsLast = 0; // Initially, get NoGoods from grounder. if (initialize) { Map<Integer, NoGood> obtained = grounder.getNoGoods(assignment); didChange = !obtained.isEmpty(); if (!ingest(obtained)) { logStats(); return false; } initialize = false; } else if (assignment.getDecisionLevel() == 0) { logStats(); return false; } else { // We already found one Answer-Set and are requested to find another one. 
// Create enumeration NoGood to avoid finding the same Answer-Set twice. final NoGood enumerationNoGood = choiceManager.computeEnumeration(); final int backjumpLevel = assignment.minimumConflictLevel(enumerationNoGood); if (backjumpLevel == -1) { throw oops("Enumeration nogood is not violated"); } if (backjumpLevel == 0) { // Search space exhausted (only happens if first choice is for TRUE at decision level 1 for an atom that was MBT at decision level 0 already). return false; } // Backjump instead of backtrackSlow, enumerationNoGood will invert last choice. choiceManager.backjump(backjumpLevel - 1); LOGGER.debug("Adding enumeration nogood: {}", enumerationNoGood); if (!addAndBackjumpIfNecessary(grounder.register(enumerationNoGood), enumerationNoGood)) { return false; } } boolean afterAllAtomsAssigned = false; // Try all assignments until grounder reports no more NoGoods and all of them are satisfied while (true) { long currentTime = System.currentTimeMillis(); int currentNumberOfChoices = getNumberOfChoices(); if (currentTime >= timeLast + 1000) { LOGGER.info("Decisions in {}s: {}", (currentTime - timeLast) / 1000.0f, currentNumberOfChoices - decisionsLast); timeLast = currentTime; decisionsLast = currentNumberOfChoices; float overallTime = (currentTime - timeOnEntry) / 1000.0f; float decisionsPerSec = currentNumberOfChoices / overallTime; LOGGER.info("Overall performance: {} decisions in {}s or {} decisions per sec. Overall replayed assignments: {}.", currentNumberOfChoices, overallTime, decisionsPerSec, ((TrailAssignment)assignment).replayCounter); } ConflictCause conflictCause = store.propagate(); didChange |= store.didPropagate(); LOGGER.trace("Assignment after propagation is: {}", assignment); if (conflictCause != null) { // Learn from conflict. 
NoGood violatedNoGood = conflictCause.getViolatedNoGood(); LOGGER.debug("Violating assignment is: {}", assignment); branchingHeuristic.violatedNoGood(violatedNoGood); if (!afterAllAtomsAssigned) { if (!learnBackjumpAddFromConflict(conflictCause)) { logStats(); return false; } } else { LOGGER.debug("Assignment is violated after all unassigned atoms have been assigned false."); conflictsAfterClosing++; if (!treatConflictAfterClosing(violatedNoGood)) { return false; } afterAllAtomsAssigned = false; } } else if (didChange) { // Ask the grounder for new NoGoods, then propagate (again). LOGGER.trace("Doing propagation step."); grounder.updateAssignment(assignment.getNewPositiveAssignmentsIterator()); Map<Integer, NoGood> obtained = grounder.getNoGoods(assignment); didChange = !obtained.isEmpty(); if (!ingest(obtained)) { logStats(); return false; } } else if (choose()) { LOGGER.debug("Did choice."); didChange = true; } else if (close()) { LOGGER.debug("Closed unassigned known atoms (assigning FALSE)."); afterAllAtomsAssigned = true; } else if (assignment.getMBTCount() == 0) { // NOTE: If we would do optimization, we would now have a guaranteed upper bound. AnswerSet as = translate(assignment.getTrueAssignments()); LOGGER.debug("Answer-Set found: {}", as); action.accept(as); logStats(); return true; } else { LOGGER.debug("Backtracking from wrong choices ({} MBTs).", assignment.getMBTCount()); if (!justifyMbtAndBacktrack()) { return false; } afterAllAtomsAssigned = false; } } } /** * Adds a noGood to the store and in case of out-of-order literals causing another conflict, triggers further backjumping. 
* @param noGoodId * @param noGood */ private boolean addAndBackjumpIfNecessary(int noGoodId, NoGood noGood) { while (store.add(noGoodId, noGood) != null) { LOGGER.debug("Adding noGood (again) caused conflict, computing real backjumping level now."); int backjumpLevel = learner.computeConflictFreeBackjumpingLevel(noGood); if (backjumpLevel < 0) { return false; } choiceManager.backjump(backjumpLevel); if (store.propagate() != null) { throw oops("Violated NoGood after backtracking."); } } return true; } /** * Analyzes the conflict and either learns a new NoGood (causing backjumping and addition to the NoGood store), * or backtracks the choice causing the conflict. * @return false iff the analysis result shows that the set of NoGoods is unsatisfiable. */ private boolean learnBackjumpAddFromConflict(ConflictCause conflictCause) { GroundConflictNoGoodLearner.ConflictAnalysisResult analysisResult = learner.analyzeConflictingNoGood(conflictCause.getViolatedNoGood()); LOGGER.debug("Analysis result: {}", analysisResult); if (analysisResult == UNSAT) { // Halt if unsatisfiable. return false; } branchingHeuristic.analyzedConflict(analysisResult); if (analysisResult.learnedNoGood == null && analysisResult.clearLastChoiceAfterBackjump) { // TODO: Temporarily abort resolution with backtrackFast instead of learning a too large nogood. 
return backtrack(); } if (analysisResult.learnedNoGood != null) { choiceManager.backjump(analysisResult.backjumpLevel); final NoGood learnedNoGood = analysisResult.learnedNoGood; int noGoodId = grounder.register(learnedNoGood); if (!addAndBackjumpIfNecessary(noGoodId, learnedNoGood)) { return false; } return true; } choiceManager.backjump(analysisResult.backjumpLevel); choiceManager.backtrackFast(); if (store.propagate() != null) { throw oops("Violated NoGood after backtracking."); } if (!store.didPropagate()) { throw oops("Nothing to propagate after backtracking from conflict-causing choice"); } return true; } private boolean justifyMbtAndBacktrack() { mbtAtFixpoint++; // Run justification only if enabled and possible. if (disableJustifications || !(grounder instanceof ProgramAnalyzingGrounder)) { if (!backtrack()) { logStats(); return false; } return true; } ProgramAnalyzingGrounder analyzingGrounder = (ProgramAnalyzingGrounder) grounder; // Justify one MBT assigned atom. Integer atomToJustify = assignment.getBasicAtomAssignedMBT(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching for justification of {} / {}", atomToJustify, atomStore.atomToString(atomToJustify)); LOGGER.debug("Assignment is (TRUE part only): {}", translate(assignment.getTrueAssignments())); } Set<Literal> reasonsForUnjustified = analyzingGrounder.justifyAtom(atomToJustify, assignment); NoGood noGood = noGoodFromJustificationReasons(atomToJustify, reasonsForUnjustified); int noGoodID = grounder.register(noGood); Map<Integer, NoGood> obtained = new LinkedHashMap<>(); obtained.put(noGoodID, noGood); LOGGER.debug("Learned NoGood is: {}", atomStore.noGoodToString(noGood)); // Add NoGood and trigger backjumping. if (!ingest(obtained)) { logStats(); return false; } return true; } private NoGood noGoodFromJustificationReasons(int atomToJustify, Set<Literal> reasonsForUnjustified) { // Turn the justification into a NoGood. 
int[] reasons = new int[reasonsForUnjustified.size() + 1]; reasons[0] = atomToLiteral(atomToJustify); int arrpos = 1; for (Literal literal : reasonsForUnjustified) { reasons[arrpos++] = atomToLiteral(atomStore.get(literal.getAtom()), !literal.isNegated()); } return new NoGood(reasons); } private boolean treatConflictAfterClosing(NoGood violatedNoGood) { if (disableJustificationAfterClosing || disableJustifications || !(grounder instanceof ProgramAnalyzingGrounder)) { // Will not learn from violated NoGood, do simple backtrack. LOGGER.debug("NoGood was violated after all unassigned atoms were assigned to false; will not learn from it; skipping."); if (!backtrack()) { logStats(); return false; } return true; } ProgramAnalyzingGrounder analyzingGrounder = (ProgramAnalyzingGrounder) grounder; LOGGER.debug("Justifying atoms in violated nogood."); LinkedHashSet<Integer> toJustify = new LinkedHashSet<>(); // Find those literals in violatedNoGood that were just assigned false. for (Integer literal : violatedNoGood) { if (assignment.getImpliedBy(atomOf(literal)) == TrailAssignment.CLOSING_INDICATOR_NOGOOD) { toJustify.add(literal); } } // Since the violatedNoGood may contain atoms other than BasicAtom, these have to be treated. Map<Integer, NoGood> obtained = new LinkedHashMap<>(); Iterator<Integer> toJustifyIterator = toJustify.iterator(); ArrayList<Integer> ruleAtomReplacements = new ArrayList<>(); while (toJustifyIterator.hasNext()) { Integer literal = toJustifyIterator.next(); Atom atom = atomStore.get(atomOf(literal)); if (atom instanceof BasicAtom) { continue; } if (!(atom instanceof RuleAtom)) { // Ignore atoms other than RuleAtom. toJustifyIterator.remove(); continue; } // For RuleAtoms in toJustify the corresponding ground body contains BasicAtoms that have been assigned FALSE in the closing. // First, translate RuleAtom back to NonGroundRule + Substitution. 
String ruleId = (String) ((ConstantTerm<?>)atom.getTerms().get(0)).getObject(); NonGroundRule nonGroundRule = analyzingGrounder.getNonGroundRule(Integer.parseInt(ruleId)); String substitution = (String) ((ConstantTerm<?>)atom.getTerms().get(1)).getObject(); Substitution groundingSubstitution = Substitution.fromString(substitution); Rule rule = nonGroundRule.getRule(); // Find ground literals in the body that have been assigned false and justify those. for (Literal bodyLiteral : rule.getBody()) { Atom groundAtom = bodyLiteral.getAtom().substitute(groundingSubstitution); if (groundAtom instanceof ComparisonAtom || analyzingGrounder.isFact(groundAtom)) { // Facts and ComparisonAtoms are always true, no justification needed. continue; } int groundAtomId = atomStore.get(groundAtom); Assignment.Entry entry = assignment.get(groundAtomId); // Check if atom was assigned to FALSE during the closing. if (entry.getImpliedBy() == TrailAssignment.CLOSING_INDICATOR_NOGOOD) { ruleAtomReplacements.add(atomToNegatedLiteral(groundAtomId)); } } toJustifyIterator.remove(); } toJustify.addAll(ruleAtomReplacements); for (Integer literalToJustify : toJustify) { LOGGER.debug("Searching for justification(s) of {} / {}", toJustify, atomStore.atomToString(atomOf(literalToJustify))); Set<Literal> reasonsForUnjustified = analyzingGrounder.justifyAtom(atomOf(literalToJustify), assignment); NoGood noGood = noGoodFromJustificationReasons(atomOf(literalToJustify), reasonsForUnjustified); int noGoodID = grounder.register(noGood); obtained.put(noGoodID, noGood); LOGGER.debug("Learned NoGood is: {}", atomStore.noGoodToString(noGood)); } // Backtrack to remove the violation. if (!backtrack()) { logStats(); return false; } // Add newly obtained noGoods. if (!ingest(obtained)) { logStats(); return false; } return true; } private boolean close() { return assignment.closeUnassignedAtoms(); } /** * Iterative implementation of recursive backtracking. 
* * @return {@code true} iff it is possible to backtrack even further, {@code false} otherwise */ private boolean backtrack() { while (assignment.getDecisionLevel() != 0) { final Assignment.Entry choice = choiceManager.backtrackSlow(); store.propagate(); if (choice == null) { LOGGER.debug("Backtracking further, because last choice was already backtracked."); continue; } final int lastChoice = choice.getAtom(); final boolean choiceValue = choice.getTruth().toBoolean(); // Chronological backtracking: choose inverse now. // Choose FALSE if the previous choice was for TRUE and the atom was not already MBT at that time. ThriceTruth lastChoiceTruth = assignment.getTruth(lastChoice); if (choiceValue && MBT.equals(lastChoiceTruth)) { LOGGER.debug("Backtracking further, because last choice was MBT before choosing TRUE."); continue; } // If choice was assigned at lower decision level (due to added NoGoods), no inverted choice should be done. if (choice.getImpliedBy() != null) { LOGGER.debug("Last choice is now implied by {}", choice.getImpliedBy()); //if (choice.getDecisionLevel() == assignment.getDecisionLevel() + 1) { // throw oops("Choice was assigned but not at a lower decision level"); //} LOGGER.debug("Backtracking further, because last choice was assigned at a lower decision level."); continue; } // Choose inverse if it is not yet already assigned TRUE or FALSE. if (lastChoiceTruth == null || (lastChoiceTruth.isMBT() && !choiceValue)) { LOGGER.debug("Choosing inverse."); choiceManager.choose(new Choice(lastChoice, !choiceValue, true)); break; } // Continue backtracking. 
} return assignment.getDecisionLevel() != 0; } private boolean ingest(Map<Integer, NoGood> obtained) { branchingHeuristic.newNoGoods(obtained.values()); assignment.growForMaxAtomId(); store.growForMaxAtomId(atomStore.getMaxAtomId()); LinkedList<Map.Entry<Integer, NoGood>> noGoodsToAdd = new LinkedList<>(obtained.entrySet()); Map.Entry<Integer, NoGood> entry; while ((entry = noGoodsToAdd.poll()) != null) { if (NoGood.UNSAT.equals(entry.getValue())) { // Empty NoGood cannot be satisfied, program is unsatisfiable. return false; } final ConflictCause conflictCause = store.add(entry.getKey(), entry.getValue()); if (conflictCause == null) { // There is no conflict, all is fine. Just skip conflict treatment and carry on. continue; } final NoGood learnedNoGood = fixContradiction(entry, conflictCause); if (learnedNoGood != null) { noGoodsToAdd.addFirst(new AbstractMap.SimpleEntry<>(grounder.register(learnedNoGood), learnedNoGood)); } } return true; } private NoGood fixContradiction(Map.Entry<Integer, NoGood> noGoodEntry, ConflictCause conflictCause) { LOGGER.debug("Attempting to fix violation of {} caused by {}", noGoodEntry.getValue(), conflictCause); if (conflictCause.getViolatedChoice() != null) { choiceManager.backjump(conflictCause.getViolatedChoice().getDecisionLevel()); choiceManager.backtrackFast(); return null; } GroundConflictNoGoodLearner.ConflictAnalysisResult conflictAnalysisResult = learner.analyzeConflictingNoGood(conflictCause.getViolatedNoGood()); if (conflictAnalysisResult == UNSAT) { return NoGood.UNSAT; } branchingHeuristic.analyzedConflict(conflictAnalysisResult); choiceManager.backjump(conflictAnalysisResult.backjumpLevel); if (conflictAnalysisResult.clearLastChoiceAfterBackjump) { choiceManager.backtrackFast(); } // If NoGood was learned, add it to the store. // Note that the learned NoGood may cause further conflicts, since propagation on lower decision levels is lazy, // hence backtracking once might not be enough to remove the real conflict cause. 
if (!addAndBackjumpIfNecessary(noGoodEntry.getKey(), noGoodEntry.getValue())) { return NoGood.UNSAT; } return conflictAnalysisResult.learnedNoGood; } private boolean choose() { choiceManager.addChoiceInformation(grounder.getChoiceAtoms()); choiceManager.updateAssignments(); // Hint: for custom heuristics, evaluate them here and pick a value if the heuristics suggests one. int literal; if ((literal = branchingHeuristic.chooseLiteral()) == DEFAULT_CHOICE_LITERAL) { LOGGER.debug("No choices!"); return false; } else if (LOGGER.isDebugEnabled()) { LOGGER.debug("Branching heuristic chose literal {}", atomStore.literalToString(literal)); } choiceManager.choose(new Choice(literal, false)); return true; } @Override public int getNumberOfChoices() { return choiceManager.getChoices(); } @Override public int getNumberOfBacktracks() { return choiceManager.getBacktracks(); } @Override public int getNumberOfBacktracksWithinBackjumps() { return choiceManager.getBacktracksWithinBackjumps(); } @Override public int getNumberOfBackjumps() { return choiceManager.getBackjumps(); } @Override public int getNumberOfBacktracksDueToRemnantMBTs() { return mbtAtFixpoint; } @Override public int getNumberOfConflictsAfterClosing() { return conflictsAfterClosing; } private void logStats() { LOGGER.debug(getStatisticsString()); } }
src/main/java/at/ac/tuwien/kr/alpha/solver/DefaultSolver.java
/** * Copyright (c) 2016-2018, the Alpha Team. * All rights reserved. * * Additional changes made by Siemens. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1) Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2) Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package at.ac.tuwien.kr.alpha.solver; import at.ac.tuwien.kr.alpha.common.*; import at.ac.tuwien.kr.alpha.common.atoms.Atom; import at.ac.tuwien.kr.alpha.common.atoms.BasicAtom; import at.ac.tuwien.kr.alpha.common.atoms.ComparisonAtom; import at.ac.tuwien.kr.alpha.common.atoms.Literal; import at.ac.tuwien.kr.alpha.common.terms.ConstantTerm; import at.ac.tuwien.kr.alpha.grounder.Grounder; import at.ac.tuwien.kr.alpha.grounder.NonGroundRule; import at.ac.tuwien.kr.alpha.grounder.ProgramAnalyzingGrounder; import at.ac.tuwien.kr.alpha.grounder.Substitution; import at.ac.tuwien.kr.alpha.grounder.atoms.RuleAtom; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristic; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristicFactory; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristicFactory.Heuristic; import at.ac.tuwien.kr.alpha.solver.heuristics.ChainedBranchingHeuristics; import at.ac.tuwien.kr.alpha.solver.heuristics.NaiveHeuristic; import at.ac.tuwien.kr.alpha.solver.learning.GroundConflictNoGoodLearner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.function.Consumer; import static at.ac.tuwien.kr.alpha.Util.oops; import static at.ac.tuwien.kr.alpha.common.Literals.*; import static at.ac.tuwien.kr.alpha.solver.ThriceTruth.MBT; import static at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristic.DEFAULT_CHOICE_LITERAL; import static at.ac.tuwien.kr.alpha.solver.learning.GroundConflictNoGoodLearner.ConflictAnalysisResult.UNSAT; /** * The new default solver employed in Alpha. * Copyright (c) 2016-2018, the Alpha Team. 
*/ public class DefaultSolver extends AbstractSolver implements SolverMaintainingStatistics { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultSolver.class); private final NoGoodStore store; private final ChoiceManager choiceManager; private final WritableAssignment assignment; private final GroundConflictNoGoodLearner learner; private final BranchingHeuristic branchingHeuristic; private boolean initialize = true; private int mbtAtFixpoint; private int conflictsAfterClosing; private final boolean disableJustifications; private boolean disableJustificationAfterClosing = true; // Keep disabled for now, case not fully worked out yet. public DefaultSolver(AtomStore atomStore, Grounder grounder, NoGoodStore store, WritableAssignment assignment, Random random, Heuristic branchingHeuristic, boolean debugInternalChecks, boolean disableJustifications) { super(atomStore, grounder); this.assignment = assignment; this.store = store; this.choiceManager = new ChoiceManager(assignment, store, debugInternalChecks); this.learner = new GroundConflictNoGoodLearner(assignment); this.branchingHeuristic = ChainedBranchingHeuristics.chainOf( BranchingHeuristicFactory.getInstance(branchingHeuristic, grounder, assignment, choiceManager, random), new NaiveHeuristic(choiceManager)); this.disableJustifications = disableJustifications; } @Override protected boolean tryAdvance(Consumer<? super AnswerSet> action) { boolean didChange = false; long timeOnEntry = System.currentTimeMillis(); long timeLast = timeOnEntry; int decisionsLast = 0; // Initially, get NoGoods from grounder. if (initialize) { Map<Integer, NoGood> obtained = grounder.getNoGoods(assignment); didChange = !obtained.isEmpty(); if (!ingest(obtained)) { logStats(); return false; } initialize = false; } else if (assignment.getDecisionLevel() == 0) { logStats(); return false; } else { // We already found one Answer-Set and are requested to find another one. 
// Create enumeration NoGood to avoid finding the same Answer-Set twice. final NoGood enumerationNoGood = choiceManager.computeEnumeration(); final int backjumpLevel = assignment.minimumConflictLevel(enumerationNoGood); if (backjumpLevel == -1) { throw oops("Enumeration nogood is not violated"); } if (backjumpLevel == 0) { // Search space exhausted (only happens if first choice is for TRUE at decision level 1 for an atom that was MBT at decision level 0 already). return false; } // Backjump instead of backtrackSlow, enumerationNoGood will invert last choice. choiceManager.backjump(backjumpLevel - 1); LOGGER.debug("Adding enumeration nogood: {}", enumerationNoGood); if (!addAndBackjumpIfNecessary(grounder.register(enumerationNoGood), enumerationNoGood)) { return false; } } boolean afterAllAtomsAssigned = false; // Try all assignments until grounder reports no more NoGoods and all of them are satisfied while (true) { long currentTime = System.currentTimeMillis(); int currentNumberOfChoices = getNumberOfChoices(); if (currentTime >= timeLast + 1000) { LOGGER.info("Decisions in {}s: {}", (currentTime - timeLast) / 1000.0f, currentNumberOfChoices - decisionsLast); timeLast = currentTime; decisionsLast = currentNumberOfChoices; float overallTime = (currentTime - timeOnEntry) / 1000.0f; float decisionsPerSec = currentNumberOfChoices / overallTime; LOGGER.info("Overall performance: {} decision in {}s or {} decisions per sec. Overall replayed assignments: {}.", currentNumberOfChoices, currentTime - timeOnEntry, decisionsPerSec, ((TrailAssignment)assignment).replayCounter); } ConflictCause conflictCause = store.propagate(); didChange |= store.didPropagate(); LOGGER.trace("Assignment after propagation is: {}", assignment); if (conflictCause != null) { // Learn from conflict. 
NoGood violatedNoGood = conflictCause.getViolatedNoGood(); LOGGER.debug("Violating assignment is: {}", assignment); branchingHeuristic.violatedNoGood(violatedNoGood); if (!afterAllAtomsAssigned) { if (!learnBackjumpAddFromConflict(conflictCause)) { logStats(); return false; } } else { LOGGER.debug("Assignment is violated after all unassigned atoms have been assigned false."); conflictsAfterClosing++; if (!treatConflictAfterClosing(violatedNoGood)) { return false; } afterAllAtomsAssigned = false; } } else if (didChange) { // Ask the grounder for new NoGoods, then propagate (again). LOGGER.trace("Doing propagation step."); grounder.updateAssignment(assignment.getNewPositiveAssignmentsIterator()); Map<Integer, NoGood> obtained = grounder.getNoGoods(assignment); didChange = !obtained.isEmpty(); if (!ingest(obtained)) { logStats(); return false; } } else if (choose()) { LOGGER.debug("Did choice."); didChange = true; } else if (close()) { LOGGER.debug("Closed unassigned known atoms (assigning FALSE)."); afterAllAtomsAssigned = true; } else if (assignment.getMBTCount() == 0) { // NOTE: If we would do optimization, we would now have a guaranteed upper bound. AnswerSet as = translate(assignment.getTrueAssignments()); LOGGER.debug("Answer-Set found: {}", as); action.accept(as); logStats(); return true; } else { LOGGER.debug("Backtracking from wrong choices ({} MBTs).", assignment.getMBTCount()); if (!justifyMbtAndBacktrack()) { return false; } afterAllAtomsAssigned = false; } } } /** * Adds a noGood to the store and in case of out-of-order literals causing another conflict, triggers further backjumping. 
* @param noGoodId * @param noGood */ private boolean addAndBackjumpIfNecessary(int noGoodId, NoGood noGood) { while (store.add(noGoodId, noGood) != null) { LOGGER.debug("Adding noGood (again) caused conflict, computing real backjumping level now."); int backjumpLevel = learner.computeConflictFreeBackjumpingLevel(noGood); if (backjumpLevel < 0) { return false; } choiceManager.backjump(backjumpLevel); if (store.propagate() != null) { throw oops("Violated NoGood after backtracking."); } } return true; } /** * Analyzes the conflict and either learns a new NoGood (causing backjumping and addition to the NoGood store), * or backtracks the choice causing the conflict. * @return false iff the analysis result shows that the set of NoGoods is unsatisfiable. */ private boolean learnBackjumpAddFromConflict(ConflictCause conflictCause) { GroundConflictNoGoodLearner.ConflictAnalysisResult analysisResult = learner.analyzeConflictingNoGood(conflictCause.getViolatedNoGood()); LOGGER.debug("Analysis result: {}", analysisResult); if (analysisResult == UNSAT) { // Halt if unsatisfiable. return false; } branchingHeuristic.analyzedConflict(analysisResult); if (analysisResult.learnedNoGood == null && analysisResult.clearLastChoiceAfterBackjump) { // TODO: Temporarily abort resolution with backtrackFast instead of learning a too large nogood. 
return backtrack(); } if (analysisResult.learnedNoGood != null) { choiceManager.backjump(analysisResult.backjumpLevel); final NoGood learnedNoGood = analysisResult.learnedNoGood; int noGoodId = grounder.register(learnedNoGood); if (!addAndBackjumpIfNecessary(noGoodId, learnedNoGood)) { return false; } return true; } choiceManager.backjump(analysisResult.backjumpLevel); choiceManager.backtrackFast(); if (store.propagate() != null) { throw oops("Violated NoGood after backtracking."); } if (!store.didPropagate()) { throw oops("Nothing to propagate after backtracking from conflict-causing choice"); } return true; } private boolean justifyMbtAndBacktrack() { mbtAtFixpoint++; // Run justification only if enabled and possible. if (disableJustifications || !(grounder instanceof ProgramAnalyzingGrounder)) { if (!backtrack()) { logStats(); return false; } return true; } ProgramAnalyzingGrounder analyzingGrounder = (ProgramAnalyzingGrounder) grounder; // Justify one MBT assigned atom. Integer atomToJustify = assignment.getBasicAtomAssignedMBT(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching for justification of {} / {}", atomToJustify, atomStore.atomToString(atomToJustify)); LOGGER.debug("Assignment is (TRUE part only): {}", translate(assignment.getTrueAssignments())); } Set<Literal> reasonsForUnjustified = analyzingGrounder.justifyAtom(atomToJustify, assignment); NoGood noGood = noGoodFromJustificationReasons(atomToJustify, reasonsForUnjustified); int noGoodID = grounder.register(noGood); Map<Integer, NoGood> obtained = new LinkedHashMap<>(); obtained.put(noGoodID, noGood); LOGGER.debug("Learned NoGood is: {}", atomStore.noGoodToString(noGood)); // Add NoGood and trigger backjumping. if (!ingest(obtained)) { logStats(); return false; } return true; } private NoGood noGoodFromJustificationReasons(int atomToJustify, Set<Literal> reasonsForUnjustified) { // Turn the justification into a NoGood. 
int[] reasons = new int[reasonsForUnjustified.size() + 1]; reasons[0] = atomToLiteral(atomToJustify); int arrpos = 1; for (Literal literal : reasonsForUnjustified) { reasons[arrpos++] = atomToLiteral(atomStore.get(literal.getAtom()), !literal.isNegated()); } return new NoGood(reasons); } private boolean treatConflictAfterClosing(NoGood violatedNoGood) { if (disableJustificationAfterClosing || disableJustifications || !(grounder instanceof ProgramAnalyzingGrounder)) { // Will not learn from violated NoGood, do simple backtrack. LOGGER.debug("NoGood was violated after all unassigned atoms were assigned to false; will not learn from it; skipping."); if (!backtrack()) { logStats(); return false; } return true; } ProgramAnalyzingGrounder analyzingGrounder = (ProgramAnalyzingGrounder) grounder; LOGGER.debug("Justifying atoms in violated nogood."); LinkedHashSet<Integer> toJustify = new LinkedHashSet<>(); // Find those literals in violatedNoGood that were just assigned false. for (Integer literal : violatedNoGood) { if (assignment.getImpliedBy(atomOf(literal)) == TrailAssignment.CLOSING_INDICATOR_NOGOOD) { toJustify.add(literal); } } // Since the violatedNoGood may contain atoms other than BasicAtom, these have to be treated. Map<Integer, NoGood> obtained = new LinkedHashMap<>(); Iterator<Integer> toJustifyIterator = toJustify.iterator(); ArrayList<Integer> ruleAtomReplacements = new ArrayList<>(); while (toJustifyIterator.hasNext()) { Integer literal = toJustifyIterator.next(); Atom atom = atomStore.get(atomOf(literal)); if (atom instanceof BasicAtom) { continue; } if (!(atom instanceof RuleAtom)) { // Ignore atoms other than RuleAtom. toJustifyIterator.remove(); continue; } // For RuleAtoms in toJustify the corresponding ground body contains BasicAtoms that have been assigned FALSE in the closing. // First, translate RuleAtom back to NonGroundRule + Substitution. 
String ruleId = (String) ((ConstantTerm<?>)atom.getTerms().get(0)).getObject(); NonGroundRule nonGroundRule = analyzingGrounder.getNonGroundRule(Integer.parseInt(ruleId)); String substitution = (String) ((ConstantTerm<?>)atom.getTerms().get(1)).getObject(); Substitution groundingSubstitution = Substitution.fromString(substitution); Rule rule = nonGroundRule.getRule(); // Find ground literals in the body that have been assigned false and justify those. for (Literal bodyLiteral : rule.getBody()) { Atom groundAtom = bodyLiteral.getAtom().substitute(groundingSubstitution); if (groundAtom instanceof ComparisonAtom || analyzingGrounder.isFact(groundAtom)) { // Facts and ComparisonAtoms are always true, no justification needed. continue; } int groundAtomId = atomStore.get(groundAtom); Assignment.Entry entry = assignment.get(groundAtomId); // Check if atom was assigned to FALSE during the closing. if (entry.getImpliedBy() == TrailAssignment.CLOSING_INDICATOR_NOGOOD) { ruleAtomReplacements.add(atomToNegatedLiteral(groundAtomId)); } } toJustifyIterator.remove(); } toJustify.addAll(ruleAtomReplacements); for (Integer literalToJustify : toJustify) { LOGGER.debug("Searching for justification(s) of {} / {}", toJustify, atomStore.atomToString(atomOf(literalToJustify))); Set<Literal> reasonsForUnjustified = analyzingGrounder.justifyAtom(atomOf(literalToJustify), assignment); NoGood noGood = noGoodFromJustificationReasons(atomOf(literalToJustify), reasonsForUnjustified); int noGoodID = grounder.register(noGood); obtained.put(noGoodID, noGood); LOGGER.debug("Learned NoGood is: {}", atomStore.noGoodToString(noGood)); } // Backtrack to remove the violation. if (!backtrack()) { logStats(); return false; } // Add newly obtained noGoods. if (!ingest(obtained)) { logStats(); return false; } return true; } private boolean close() { return assignment.closeUnassignedAtoms(); } /** * Iterative implementation of recursive backtracking. 
* * @return {@code true} iff it is possible to backtrack even further, {@code false} otherwise */ private boolean backtrack() { while (assignment.getDecisionLevel() != 0) { final Assignment.Entry choice = choiceManager.backtrackSlow(); store.propagate(); if (choice == null) { LOGGER.debug("Backtracking further, because last choice was already backtracked."); continue; } final int lastChoice = choice.getAtom(); final boolean choiceValue = choice.getTruth().toBoolean(); // Chronological backtracking: choose inverse now. // Choose FALSE if the previous choice was for TRUE and the atom was not already MBT at that time. ThriceTruth lastChoiceTruth = assignment.getTruth(lastChoice); if (choiceValue && MBT.equals(lastChoiceTruth)) { LOGGER.debug("Backtracking further, because last choice was MBT before choosing TRUE."); continue; } // If choice was assigned at lower decision level (due to added NoGoods), no inverted choice should be done. if (choice.getImpliedBy() != null) { LOGGER.debug("Last choice is now implied by {}", choice.getImpliedBy()); //if (choice.getDecisionLevel() == assignment.getDecisionLevel() + 1) { // throw oops("Choice was assigned but not at a lower decision level"); //} LOGGER.debug("Backtracking further, because last choice was assigned at a lower decision level."); continue; } // Choose inverse if it is not yet already assigned TRUE or FALSE. if (lastChoiceTruth == null || (lastChoiceTruth.isMBT() && !choiceValue)) { LOGGER.debug("Choosing inverse."); choiceManager.choose(new Choice(lastChoice, !choiceValue, true)); break; } // Continue backtracking. 
} return assignment.getDecisionLevel() != 0; } private boolean ingest(Map<Integer, NoGood> obtained) { branchingHeuristic.newNoGoods(obtained.values()); assignment.growForMaxAtomId(); store.growForMaxAtomId(atomStore.getMaxAtomId()); LinkedList<Map.Entry<Integer, NoGood>> noGoodsToAdd = new LinkedList<>(obtained.entrySet()); Map.Entry<Integer, NoGood> entry; while ((entry = noGoodsToAdd.poll()) != null) { if (NoGood.UNSAT.equals(entry.getValue())) { // Empty NoGood cannot be satisfied, program is unsatisfiable. return false; } final ConflictCause conflictCause = store.add(entry.getKey(), entry.getValue()); if (conflictCause == null) { // There is no conflict, all is fine. Just skip conflict treatment and carry on. continue; } final NoGood learnedNoGood = fixContradiction(entry, conflictCause); if (learnedNoGood != null) { noGoodsToAdd.addFirst(new AbstractMap.SimpleEntry<>(grounder.register(learnedNoGood), learnedNoGood)); } } return true; } private NoGood fixContradiction(Map.Entry<Integer, NoGood> noGoodEntry, ConflictCause conflictCause) { LOGGER.debug("Attempting to fix violation of {} caused by {}", noGoodEntry.getValue(), conflictCause); if (conflictCause.getViolatedChoice() != null) { choiceManager.backjump(conflictCause.getViolatedChoice().getDecisionLevel()); choiceManager.backtrackFast(); return null; } GroundConflictNoGoodLearner.ConflictAnalysisResult conflictAnalysisResult = learner.analyzeConflictingNoGood(conflictCause.getViolatedNoGood()); if (conflictAnalysisResult == UNSAT) { return NoGood.UNSAT; } branchingHeuristic.analyzedConflict(conflictAnalysisResult); choiceManager.backjump(conflictAnalysisResult.backjumpLevel); if (conflictAnalysisResult.clearLastChoiceAfterBackjump) { choiceManager.backtrackFast(); } // If NoGood was learned, add it to the store. // Note that the learned NoGood may cause further conflicts, since propagation on lower decision levels is lazy, // hence backtracking once might not be enough to remove the real conflict cause. 
if (!addAndBackjumpIfNecessary(noGoodEntry.getKey(), noGoodEntry.getValue())) { return NoGood.UNSAT; } return conflictAnalysisResult.learnedNoGood; } private boolean choose() { choiceManager.addChoiceInformation(grounder.getChoiceAtoms()); choiceManager.updateAssignments(); // Hint: for custom heuristics, evaluate them here and pick a value if the heuristics suggests one. int literal; if ((literal = branchingHeuristic.chooseLiteral()) == DEFAULT_CHOICE_LITERAL) { LOGGER.debug("No choices!"); return false; } else if (LOGGER.isDebugEnabled()) { LOGGER.debug("Branching heuristic chose literal {}", atomStore.literalToString(literal)); } choiceManager.choose(new Choice(literal, false)); return true; } @Override public int getNumberOfChoices() { return choiceManager.getChoices(); } @Override public int getNumberOfBacktracks() { return choiceManager.getBacktracks(); } @Override public int getNumberOfBacktracksWithinBackjumps() { return choiceManager.getBacktracksWithinBackjumps(); } @Override public int getNumberOfBackjumps() { return choiceManager.getBackjumps(); } @Override public int getNumberOfBacktracksDueToRemnantMBTs() { return mbtAtFixpoint; } @Override public int getNumberOfConflictsAfterClosing() { return conflictsAfterClosing; } private void logStats() { LOGGER.debug(getStatisticsString()); } }
Fix error in performance output messages. Solves part of #113
src/main/java/at/ac/tuwien/kr/alpha/solver/DefaultSolver.java
Fix error in performance output messages.
<ide><path>rc/main/java/at/ac/tuwien/kr/alpha/solver/DefaultSolver.java <ide> decisionsLast = currentNumberOfChoices; <ide> float overallTime = (currentTime - timeOnEntry) / 1000.0f; <ide> float decisionsPerSec = currentNumberOfChoices / overallTime; <del> LOGGER.info("Overall performance: {} decision in {}s or {} decisions per sec. Overall replayed assignments: {}.", currentNumberOfChoices, currentTime - timeOnEntry, decisionsPerSec, ((TrailAssignment)assignment).replayCounter); <add> LOGGER.info("Overall performance: {} decisions in {}s or {} decisions per sec. Overall replayed assignments: {}.", currentNumberOfChoices, overallTime, decisionsPerSec, ((TrailAssignment)assignment).replayCounter); <ide> } <ide> ConflictCause conflictCause = store.propagate(); <ide> didChange |= store.didPropagate();
JavaScript
mit
c16fe0784f7f96dfb66c46e05ba2f31074325345
0
Techwraith/ribcage-docs
'use strict'; var marked = require('marked') , wrench = require('wrench') , atomify = require('atomify') , handlebars = require('handlebars') , fs = require('fs') , path = require('path') , http = require('http') , change = require('change-case') , _ = require('lodash') , js = atomify.js , css = atomify.css var renderer = new marked.Renderer(); renderer.heading = function (text, level) { var ret = '' if (level === 1) { ret = '<h' + level + '><a name="' + change.pascal(text) + '">'+text+'</a></h' + level + '>' } else { ret = '<h' + level + '>'+text+'</h'+level+'>' } return ret } module.exports = function (dir) { var exists = function exists(component, file) { var p = path.join(dir, component, file) return !!fs.existsSync(p) } var read = function read(component, file) { var p = path.join(dir, component, file) return fs.readFileSync(p, 'utf8') } var scanComponents = function () { var files = wrench.readdirSyncRecursive(dir) , blackListedComponentNames = ['.DS_Store', 'README.md'] , dirs = _.unique(_.map(files, function (file) { return file.split('/')[0] })) , components = _.map(dirs, function (component) { return { name: component , pascal: change.pascal(component) , readme: exists(component, 'README.md') , example: exists(component, 'example') , test: exists(component, 'test.js') } }) return _.filter(components, function (component){ return blackListedComponentNames.indexOf(component.name) === -1 }) } var sendComponentReadme = function (req, res) { var component = req.url.split('?')[0].split('/')[1] , html if (exits(component, 'README.md')) { html = marked(read(component, 'README.md')) } else { html = '<h1>'+component+'<h1><p>No docs written.</p>' } res.end(html) } var sendComponentExampleJS = function (req, res) { var component = req.url.split('?')[0].split('/')[1] if (!exists(component, path.join('example', 'entry.js'))) { return res.end('no example') } var opts = { entry: path.join(dir, component, 'example', 'entry.js') , debug: true // default: `false` } js(opts, 
responder('css', res)) } var sendComponentExampleCSS = function (req, res) { var component = req.url.split('?')[0].split('/')[1] if (!exists(component, path.join('example', 'entry.css'))) { return res.end('no example') } var opts = { entry: path.join(dir, component, 'example', 'entry.css') , debug: true // default: `false` } css(opts, responder('css', res)) } var sendComponentExampleHTML = function (req, res) { var component = req.url.split('?')[0].split('/')[1] var src = '<!doctype html><html><head>' src += '<meta charset="utf-8">' src += '<meta http-equiv="X-UA-Compatible" content="IE=edge">' src += '<meta name="viewport" content="initial-scale=1,width=device-width,user-scalable=0,minimal-ui">' src += '<title>Example for '+component+'</title>' src += '<link rel="stylesheet" href="/'+component+'/example.css">' src += '</head><body>' src += '<script src="/'+component+'/example.js"></script>' src += '</body></html>' res.setHeader('Content-Type', 'text/html') res.end(src) } var sendDocs = function (req, res) { var components = scanComponents() , iframe = '' for(var i in components) { if (components.hasOwnProperty(i)){ iframe = '' if (components[i].example) { iframe = '<iframe src="/'+components[i].path+'/example.html" width="100%" height="500"></iframe>' } if (components[i].readme) { components[i].htmlReadme = iframe + marked(read(components[i].path, 'README.md'), {renderer: renderer}) } else { components[i].htmlReadme = '<h1><a name="'+components[i].pascal+'">'+components[i].name+'</a></h1>'+iframe+'<p>No docs written.</p>' } } } var template = handlebars.compile(fs.readFileSync(path.join(__dirname, 'index.html.hbs'), 'utf8')) var html = template({ components: components }) res.end(html) } var sendCSS = function (req, res) { var opts = { entry: path.join(__dirname, 'pages', 'docs', 'entry.css') , debug: true // default: `false` } css(opts, responder('css', res)) } var sendJS = function (req, res) { var opts = { entry: path.join(__dirname, 'pages', 'docs', 
'entry.js') , debug: true // default: `false` } js(opts, responder('css', res)) } var responder = function (type, res) { return function (err, src) { if (err) console.error(err); if (!res.headersSent) res.setHeader('Content-Type', 'text/' + type) res.end(src) } } var startServer = function () { http.createServer(function (req, res) { var url = req.url.split('?')[0] if (url === '/docs' || url === '/') { return sendDocs(req, res) } if (url === '/style.css') { return sendCSS(req, res) } if (url === '/site.js') { return sendJS(req, res) } if (url.indexOf('readme.html') > -1) { return sendComponentReadme(req, res) } if (url.indexOf('example.html') > -1) { return sendComponentExampleHTML(req, res) } if (url.indexOf('example.js') > -1) { return sendComponentExampleJS(req, res) } if (url.indexOf('example.css') > -1) { return sendComponentExampleCSS(req, res) } }).listen(9001) console.info('Server running. Visit http://localhost:9001/ to see the docs') } startServer() }
index.js
'use strict'; var marked = require('marked') , wrench = require('wrench') , atomify = require('atomify') , handlebars = require('handlebars') , fs = require('fs') , path = require('path') , http = require('http') , change = require('change-case') , _ = require('lodash') , js = atomify.js , css = atomify.css var renderer = new marked.Renderer(); renderer.heading = function (text, level) { var ret = '' if (level === 1) { ret = '<h' + level + '><a name="' + change.pascal(text) + '">'+text+'</a></h' + level + '>' } else { ret = '<h' + level + '>'+text+'</h'+level+'>' } return ret } module.exports = function (dir) { var exists = function exists(component, file) { var p = path.join(dir, component, file) return !!fs.existsSync(p) } var read = function read(component, file) { var p = path.join(dir, component, file) return fs.readFileSync(p, 'utf8') } var scanComponents = function () { var files = wrench.readdirSyncRecursive(dir) , blackListedComponentNames = ['.DS_Store', 'README.md'] , dirs = _.unique(_.map(files, function (file) { return file.split('/')[0] })) , components = _.map(dirs, function (component) { return { name: component , pascal: change.pascal(component) , readme: exists(component, 'README.md') , example: exists(component, 'example') , test: exists(component, 'test.js') } }) return _.filter(components, function (component){ return blackListedComponentNames.indexOf(component.name) === -1 }) } var sendComponentReadme = function (req, res) { var component = req.url.split('?')[0].split('/')[1] , html if (exits(component, 'README.md')) { html = marked(read(component, 'README.md')) } else { html = '<h1>'+component+'<h1><p>No docs written.</p>' } res.end(html) } var sendComponentExampleJS = function (req, res) { var component = req.url.split('?')[0].split('/')[1] if (!exists(component, path.join('example', 'entry.js'))) { return res.end('no example') } var opts = { entry: path.join(dir, component, 'example', 'entry.js') , debug: true // default: `false` } js(opts, 
responder('css', res)) } var sendComponentExampleCSS = function (req, res) { var component = req.url.split('?')[0].split('/')[1] if (!exists(component, path.join('example', 'entry.css'))) { return res.end('no example') } var opts = { entry: path.join(dir, component, 'example', 'entry.css') , debug: true // default: `false` } css(opts, responder('css', res)) } var sendComponentExampleHTML = function (req, res) { var component = req.url.split('?')[0].split('/')[1] var src = '<!doctype html><html><head>' src += '<meta charset="utf-8">' src += '<meta http-equiv="X-UA-Compatible" content="IE=edge">' src += '<meta name="viewport" content="initial-scale=1,width=device-width,user-scalable=0,minimal-ui">' src += '<title>Example for '+component+'</title>' src += '<link rel="stylesheet" href="/'+component+'/example.css">' src += '</head><body>' src += '<script src="/'+component+'/example.js"></script>' src += '</body></html>' res.setHeader('Content-Type', 'text/html') res.end(src) } var sendDocs = function (req, res) { var components = scanComponents() , iframe = '' for(var i in components) { if (components.hasOwnProperty(i)){ iframe = '' if (components[i].example) { iframe = '<iframe src="/'+components[i].path+'/example.html" width="100%" height="500"></iframe>' } if (components[i].readme) { components[i].htmlReadme = iframe + marked(read(components[i].path, 'README.md'), {renderer: renderer}) } else { components[i].htmlReadme = '<h1><a name="'+components[i].pascal+'">'+components[i].name+'</a></h1>'+iframe+'<p>No docs written.</p>' } } } var template = handlebars.compile(fs.readFileSync(path.join(__dirname, 'index.html.hbs'), 'utf8')) var html = template({ components: components }) res.end(html) } var sendCSS = function (req, res) { var opts = { entry: path.join(__dirname, 'pages', 'docs', 'entry.css') , debug: true // default: `false` } css(opts, responder('css', res)) } var sendJS = function (req, res) { var opts = { entry: path.join(__dirname, 'pages', 'docs', 
'entry.js') , debug: true // default: `false` } js(opts, responder('css', res)) } var responder = function (type, res) { return function (err, src) { if (err) console.log(err); if (!res.headersSent) res.setHeader('Content-Type', 'text/' + type) res.end(src) } } var startServer = function () { http.createServer(function (req, res) { var url = req.url.split('?')[0] if (url === '/docs' || url === '/') { return sendDocs(req, res) } if (url === '/style.css') { return sendCSS(req, res) } if (url === '/site.js') { return sendJS(req, res) } if (url.indexOf('readme.html') > -1) { return sendComponentReadme(req, res) } if (url.indexOf('example.html') > -1) { return sendComponentExampleHTML(req, res) } if (url.indexOf('example.js') > -1) { return sendComponentExampleJS(req, res) } if (url.indexOf('example.css') > -1) { return sendComponentExampleCSS(req, res) } }).listen(9001) console.log('Server running. Visit http://localhost:9001/ to see the docs') } startServer() }
Less generic logging
index.js
Less generic logging
<ide><path>ndex.js <ide> <ide> var responder = function (type, res) { <ide> return function (err, src) { <del> if (err) console.log(err); <add> if (err) console.error(err); <ide> <ide> if (!res.headersSent) res.setHeader('Content-Type', 'text/' + type) <ide> res.end(src) <ide> } <ide> <ide> }).listen(9001) <del> console.log('Server running. Visit http://localhost:9001/ to see the docs') <add> console.info('Server running. Visit http://localhost:9001/ to see the docs') <ide> } <ide> startServer() <ide>
Java
mit
error: pathspec 'QuestionResponseDBTest.java' did not match any file(s) known to git
36285ef8238713d68f8f0872f86e5e94bcc56f77
1
jestjest/quizme,jestjest/quizme,jestjest/quizme
package quizme.tests; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.sql.ResultSet; import java.sql.SQLException; import java.util.LinkedList; import java.util.List; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import quizme.DBConnection; import quizme.database.QuestionResponseTable; public class QuestionResponseDBTest { static DBConnection db; static QuestionResponseTable questionResponseDB; @BeforeClass public static void oneTimeSetUp() { db = new DBConnection(); questionResponseDB = new QuestionResponseTable(db); questionResponseDB.removeQuestion(7, 30); /* keep replacing questions in basic test */ questionResponseDB.removeQuestion(7, 500); questionResponseDB.removeQuestion(500, 500); } @AfterClass public static void oneTimeTearDown() { db.closeConnection(); } @Test public void basictest() { int quizid = 7; int questionOrder = 300; List<String> answers = new LinkedList<String>(); answers.add("1812"); answers.add("19th Century"); questionResponseDB.addQuestion(7, 300, "When was The War of 1812?", answers, 0); int quizidFromDB = questionResponseDB.getQuizID(quizid, questionOrder); assertEquals(quizidFromDB, 7); int questionOrderFromDB = questionResponseDB.getQuestionOrder(quizid, questionOrder); assertEquals(questionOrderFromDB, 300); String question = questionResponseDB.getQuestion(quizid, questionOrder); assertTrue(question.equals("When was The War of 1812?")); List<String> correctAnswer = questionResponseDB.getCorrectAnswers(quizid, questionOrder); assertTrue(correctAnswer.get(0).equals("1812")); assertTrue(correctAnswer.get(1).equals("19th Century")); int preferredAnswer = questionResponseDB.getPreferredAnswer(quizid, questionOrder); assertEquals(preferredAnswer, 0); questionResponseDB.setQuestion(quizid, questionOrder, "When was the American Civil War?"); question = questionResponseDB.getQuestion(quizid, questionOrder); assertTrue(question.equals("When was the American 
Civil War?")); List<String> answers2 = new LinkedList<String>(); answers2.add("19th Century"); answers2.add("1861"); answers2.add("Before the new millenium"); questionResponseDB.setCorrectAnswers(quizid, questionOrder, answers2); correctAnswer = questionResponseDB.getCorrectAnswers(quizid, questionOrder); assertTrue(correctAnswer.get(1).equals("1861")); assertTrue(correctAnswer.get(2).equals("Before the new millenium")); questionResponseDB.setPreferredAnswer(quizid, questionOrder, 1); preferredAnswer = questionResponseDB.getPreferredAnswer(quizid, questionOrder); assertEquals(preferredAnswer, 1); questionResponseDB.setQuestionOrder(quizid, questionOrder, 30); questionOrder = 30; questionOrderFromDB = questionResponseDB.getQuestionOrder(quizid, questionOrder); assertEquals(questionOrderFromDB, 30); List<String> answers3 = new LinkedList<String>(); answers3.add("1775"); questionResponseDB.addQuestion(quizid, 500, "When was the American Revolutionary War?", answers3, 0); List<String> answers4 = new LinkedList<String>(); answers4.add("Maybe"); answers4.add("No"); answers4.add("I don't know"); answers4.add("Yes!"); questionResponseDB.addQuestion(500, 500, "Is 2016 a leap year?", answers4, 3); ResultSet rs = questionResponseDB.getAllQuizEntries(quizid); int count = 0; try { while(rs.next()) { assertEquals(rs.getInt("quizid"), quizid); count++; } assertEquals(count, 2); } catch (SQLException e) { e.printStackTrace(); } } }
QuestionResponseDBTest.java
Updated Question Response Table Test
QuestionResponseDBTest.java
Updated Question Response Table Test
<ide><path>uestionResponseDBTest.java <add>package quizme.tests; <add> <add>import static org.junit.Assert.assertEquals; <add>import static org.junit.Assert.assertTrue; <add> <add>import java.sql.ResultSet; <add>import java.sql.SQLException; <add>import java.util.LinkedList; <add>import java.util.List; <add> <add>import org.junit.AfterClass; <add>import org.junit.BeforeClass; <add>import org.junit.Test; <add> <add>import quizme.DBConnection; <add>import quizme.database.QuestionResponseTable; <add> <add>public class QuestionResponseDBTest { <add> static DBConnection db; <add> static QuestionResponseTable questionResponseDB; <add> <add> @BeforeClass <add> public static void oneTimeSetUp() { <add> db = new DBConnection(); <add> questionResponseDB = new QuestionResponseTable(db); <add> questionResponseDB.removeQuestion(7, 30); /* keep replacing questions in basic test */ <add> questionResponseDB.removeQuestion(7, 500); <add> questionResponseDB.removeQuestion(500, 500); <add> } <add> <add> @AfterClass <add> public static void oneTimeTearDown() { <add> db.closeConnection(); <add> } <add> <add> @Test <add> public void basictest() { <add> int quizid = 7; <add> int questionOrder = 300; <add> List<String> answers = new LinkedList<String>(); <add> answers.add("1812"); <add> answers.add("19th Century"); <add> questionResponseDB.addQuestion(7, 300, "When was The War of 1812?", answers, 0); <add> <add> int quizidFromDB = questionResponseDB.getQuizID(quizid, questionOrder); <add> assertEquals(quizidFromDB, 7); <add> <add> int questionOrderFromDB = questionResponseDB.getQuestionOrder(quizid, questionOrder); <add> assertEquals(questionOrderFromDB, 300); <add> <add> String question = questionResponseDB.getQuestion(quizid, questionOrder); <add> assertTrue(question.equals("When was The War of 1812?")); <add> <add> List<String> correctAnswer = questionResponseDB.getCorrectAnswers(quizid, questionOrder); <add> assertTrue(correctAnswer.get(0).equals("1812")); <add> 
assertTrue(correctAnswer.get(1).equals("19th Century")); <add> <add> int preferredAnswer = questionResponseDB.getPreferredAnswer(quizid, questionOrder); <add> assertEquals(preferredAnswer, 0); <add> <add> questionResponseDB.setQuestion(quizid, questionOrder, "When was the American Civil War?"); <add> question = questionResponseDB.getQuestion(quizid, questionOrder); <add> assertTrue(question.equals("When was the American Civil War?")); <add> <add> List<String> answers2 = new LinkedList<String>(); <add> answers2.add("19th Century"); <add> answers2.add("1861"); <add> answers2.add("Before the new millenium"); <add> questionResponseDB.setCorrectAnswers(quizid, questionOrder, answers2); <add> correctAnswer = questionResponseDB.getCorrectAnswers(quizid, questionOrder); <add> assertTrue(correctAnswer.get(1).equals("1861")); <add> assertTrue(correctAnswer.get(2).equals("Before the new millenium")); <add> <add> questionResponseDB.setPreferredAnswer(quizid, questionOrder, 1); <add> preferredAnswer = questionResponseDB.getPreferredAnswer(quizid, questionOrder); <add> assertEquals(preferredAnswer, 1); <add> <add> questionResponseDB.setQuestionOrder(quizid, questionOrder, 30); <add> questionOrder = 30; <add> questionOrderFromDB = questionResponseDB.getQuestionOrder(quizid, questionOrder); <add> assertEquals(questionOrderFromDB, 30); <add> <add> List<String> answers3 = new LinkedList<String>(); <add> answers3.add("1775"); <add> questionResponseDB.addQuestion(quizid, 500, "When was the American Revolutionary War?", answers3, 0); <add> <add> List<String> answers4 = new LinkedList<String>(); <add> answers4.add("Maybe"); <add> answers4.add("No"); <add> answers4.add("I don't know"); <add> answers4.add("Yes!"); <add> questionResponseDB.addQuestion(500, 500, "Is 2016 a leap year?", answers4, 3); <add> <add> ResultSet rs = questionResponseDB.getAllQuizEntries(quizid); <add> int count = 0; <add> try { <add> while(rs.next()) { <add> assertEquals(rs.getInt("quizid"), quizid); <add> count++; 
<add> } <add> assertEquals(count, 2); <add> } catch (SQLException e) { <add> e.printStackTrace(); <add> } <add> } <add>}
Java
apache-2.0
d107c03a9c391395c834f455455fa8f9b980fbb1
0
1gravity/Android-ContactPicker,1gravity/Android-ContactPicker
/* * Copyright (C) 2015-2016 Emanuel Moecklin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.onegravity.contactpicker.implementation; import android.Manifest; import android.app.Activity; import android.content.Intent; import android.content.pm.ActivityInfo; import android.content.pm.PackageManager; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.provider.ContactsContract; import android.support.design.widget.TabLayout; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentStatePagerAdapter; import android.support.v4.app.LoaderManager; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.support.v4.view.ViewPager; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import com.onegravity.contactpicker.OnContactCheckedListener; import com.onegravity.contactpicker.R; import com.onegravity.contactpicker.contact.Contact; import com.onegravity.contactpicker.contact.ContactDescription; import com.onegravity.contactpicker.contact.ContactFragment; import com.onegravity.contactpicker.contact.ContactsLoaded; import com.onegravity.contactpicker.group.Group; import com.onegravity.contactpicker.group.GroupFragment; import com.onegravity.contactpicker.group.GroupsLoaded; import com.onegravity.contactpicker.picture.ContactPictureType; 
import org.greenrobot.eventbus.EventBus; import org.greenrobot.eventbus.Subscribe; import org.greenrobot.eventbus.ThreadMode; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; public class ContactPickerActivity extends AppCompatActivity implements LoaderManager.LoaderCallbacks<Cursor> { /** * Use this parameter to determine whether the contact picture shows a contact badge and if yes * what type (round, square) * * {@link com.onegravity.contactpicker.picture.ContactPictureType} */ public static final String EXTRA_CONTACT_BADGE_TYPE = "EXTRA_CONTACT_BADGE_TYPE"; /** * Use this to define what contact information is used for the description field (second line) * * {@link com.onegravity.contactpicker.contact.ContactDescription} */ public static final String EXTRA_CONTACT_DESCRIPTION = "EXTRA_CONTACT_DESCRIPTION"; /** * We put the resulting contact list into the Intent as extra data with this key. */ public static final String RESULT_CONTACT_DATA = "RESULT_CONTACT_DATA"; private static ContactPictureType sBadgeType = ContactPictureType.ROUND; public static ContactPictureType getContactBadgeType() { return sBadgeType; } private static ContactDescription sDescription = ContactDescription.EMAIL; public static ContactDescription getContactDescription() { return sDescription; } private PagerAdapter mAdapter; private String mDefaultTitle; // update the adapter after a certain amount of contacts has loaded private static final int BATCH_SIZE = 25; /* * The selected ids are saved in onSaveInstanceState, restored in onCreate and then applied to * the contacts and groups in onLoadFinished. 
*/ private static final String CONTACT_IDS = "CONTACT_IDS"; private HashSet<Long> mSelectedContactIds = new HashSet<>(); private static final String GROUP_IDS = "GROUP_IDS"; private HashSet<Long> mSelectedGroupIds = new HashSet<>(); // ****************************************** Lifecycle Methods ******************************************* @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // check if we have the READ_CONTACTS permission, if not --> terminate try { int pid = android.os.Process.myPid(); PackageManager pckMgr = getPackageManager(); int uid = pckMgr.getApplicationInfo(getComponentName().getPackageName(), PackageManager.GET_META_DATA).uid; enforcePermission(Manifest.permission.READ_CONTACTS, pid, uid, "Contact permission hasn't been granted to this app, terminating."); } catch (PackageManager.NameNotFoundException | SecurityException e) { Log.e(getClass().getSimpleName(), e.getMessage()); finish(); return; } // retrieve default title which is used if no contacts are selected if (savedInstanceState == null) { try { PackageManager pkMgr = getPackageManager(); ActivityInfo activityInfo = pkMgr.getActivityInfo(getComponentName(), PackageManager.GET_META_DATA); mDefaultTitle = activityInfo.loadLabel(pkMgr).toString(); } catch (PackageManager.NameNotFoundException ignore) { mDefaultTitle = getTitle().toString(); } } else { mDefaultTitle = savedInstanceState.getString("mDefaultTitle"); try { mSelectedContactIds = (HashSet<Long>) savedInstanceState.getSerializable(CONTACT_IDS); } catch (ClassCastException ignore) {} try { mSelectedGroupIds = (HashSet<Long>) savedInstanceState.getSerializable(GROUP_IDS); } catch (ClassCastException ignore) {} } // read Activity parameter ContactPictureType sBadgeType = ContactPictureType.ROUND; Intent intent = getIntent(); String tmp = intent.getStringExtra(EXTRA_CONTACT_BADGE_TYPE); if (tmp != null) { try { sBadgeType = ContactPictureType.valueOf(tmp); } catch 
(IllegalArgumentException e) { Log.e(getClass().getSimpleName(), tmp + " is not a legal EXTRA_CONTACT_BADGE_TYPE value, defaulting to ROUND"); } } // read Activity parameter ContactDescription sDescription = ContactDescription.EMAIL; tmp = intent.getStringExtra(EXTRA_CONTACT_DESCRIPTION); if (tmp != null) { try { sDescription = ContactDescription.valueOf(tmp); } catch (IllegalArgumentException e) { Log.e(getClass().getSimpleName(), tmp + " is not a legal EXTRA_CONTACT_DESCRIPTION value, defaulting to EMAIL"); } } setContentView(R.layout.contact_tab_layout); // initialize TabLayout TabLayout tabLayout = (TabLayout)findViewById(R.id.tabContent); tabLayout.setTabMode(TabLayout.MODE_FIXED); tabLayout.setTabGravity(TabLayout.GRAVITY_FILL); TabLayout.Tab tabContacts = tabLayout.newTab(); tabContacts.setText(R.string.contact_tab_title); tabLayout.addTab(tabContacts); TabLayout.Tab tabGroups = tabLayout.newTab(); tabGroups.setText(R.string.group_tab_title); tabLayout.addTab(tabGroups); // initialize ViewPager final ViewPager viewPager = (ViewPager) findViewById(R.id.tabPager); mAdapter = new PagerAdapter(getSupportFragmentManager(), tabLayout.getTabCount()); viewPager.setAdapter(mAdapter); viewPager.addOnPageChangeListener(new TabLayout.TabLayoutOnPageChangeListener(tabLayout)); tabLayout.setOnTabSelectedListener(new TabLayout.OnTabSelectedListener() { @Override public void onTabSelected(TabLayout.Tab tab) { viewPager.setCurrentItem(tab.getPosition()); } @Override public void onTabUnselected(TabLayout.Tab tab) {} @Override public void onTabReselected(TabLayout.Tab tab) {} }); } private static class PagerAdapter extends FragmentStatePagerAdapter { private int mNumOfTabs; private ContactFragment mContactFragment; private GroupFragment mGroupFragment; public PagerAdapter(FragmentManager fm, int numOfTabs) { super(fm); mNumOfTabs = numOfTabs; } @Override public Fragment getItem(int position) { switch (position) { case 0: mContactFragment = ContactFragment.newInstance(); return 
mContactFragment; case 1: mGroupFragment = GroupFragment.newInstance(); return mGroupFragment; default: return null; } } @Override public int getCount() { return mNumOfTabs; } } @Override protected void onResume() { super.onResume(); EventBus.getDefault().register(this); getSupportLoaderManager().initLoader(CONTACTS_LOADER_ID, null, this); getSupportLoaderManager().initLoader(GROUPS_LOADER_ID, null, this); } @Override protected void onPause() { super.onPause(); EventBus.getDefault().unregister(this); } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); outState.putString("mDefaultTitle", mDefaultTitle); mSelectedContactIds.clear();; for (Contact contact : mContacts) { if (contact.isChecked()) { mSelectedContactIds.add( contact.getId() ); } } outState.putSerializable(CONTACT_IDS, mSelectedContactIds); mSelectedGroupIds.clear();; for (Group group : mGroups) { if (group.isChecked()) { mSelectedGroupIds.add( group.getId() ); } } outState.putSerializable(GROUP_IDS, mSelectedGroupIds); } private void updateTitle() { if (mNrOfSelectedContacts == 0) { setTitle(mDefaultTitle); } else { String title = getString(R.string.actionmode_selected, mNrOfSelectedContacts); setTitle(title); } } // ****************************************** Option Menu ******************************************* @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.contact_picker, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if( id == android.R.id.home) { setResult(Activity.RESULT_CANCELED, null); finish(); return true; } else if( id == R.id.action_done) { onDone(); return true; } return super.onOptionsItemSelected(item); } private void onDone() { // return only checked contacts List<Contact> contacts = new ArrayList<>(); if (mContacts != null) { for (Contact contact : mContacts) { if (contact.isChecked()) { contacts.add(contact); } } } Intent data = 
new Intent(); data.putExtra(RESULT_CONTACT_DATA, (Serializable) contacts); setResult(Activity.RESULT_OK, data); finish(); } // ****************************************** Loader Methods ******************************************* /* * Loader configuration contacts */ private static final int CONTACTS_LOADER_ID = 0; private static final Uri CONTACTS_URI = ContactsContract.Contacts.CONTENT_URI; private static final String[] CONTACTS_PROJECTION = new String[] { ContactsContract.Contacts._ID, ContactsContract.Contacts.LOOKUP_KEY, ContactsContract.Contacts.DISPLAY_NAME_PRIMARY, ContactsContract.Contacts.PHOTO_URI}; private static final String CONTACTS_SORT = ContactsContract.Contacts.DISPLAY_NAME_PRIMARY + " COLLATE LOCALIZED ASC"; /* * Loader configuration contacts details */ private static final int CONTACT_DETAILS_LOADER_ID = 1; private static final Uri CONTACT_DETAILS_URI = ContactsContract.Data.CONTENT_URI; private static final String[] CONTACT_DETAILS_PROJECTION = { ContactsContract.Data.LOOKUP_KEY, ContactsContract.Contacts.DISPLAY_NAME_PRIMARY, ContactsContract.Data.MIMETYPE, ContactsContract.CommonDataKinds.StructuredPostal.FORMATTED_ADDRESS, ContactsContract.CommonDataKinds.StructuredPostal.TYPE, ContactsContract.CommonDataKinds.StructuredPostal.STREET, ContactsContract.CommonDataKinds.StructuredPostal.POBOX, ContactsContract.CommonDataKinds.StructuredPostal.CITY, ContactsContract.CommonDataKinds.StructuredPostal.REGION, ContactsContract.CommonDataKinds.StructuredPostal.POSTCODE, ContactsContract.CommonDataKinds.StructuredPostal.COUNTRY, ContactsContract.CommonDataKinds.Phone.NUMBER, ContactsContract.CommonDataKinds.Phone.TYPE, ContactsContract.CommonDataKinds.Email.ADDRESS, ContactsContract.CommonDataKinds.Email.TYPE, ContactsContract.CommonDataKinds.StructuredName.GIVEN_NAME, ContactsContract.CommonDataKinds.StructuredName.FAMILY_NAME, ContactsContract.CommonDataKinds.GroupMembership.GROUP_ROW_ID, }; /* * Loader configuration groups */ private static final 
int GROUPS_LOADER_ID = 2; private static final Uri GROUPS_URI = ContactsContract.Groups.CONTENT_URI; private static final String[] GROUPS_PROJECTION = new String[] { ContactsContract.Groups._ID, ContactsContract.Groups.SOURCE_ID, ContactsContract.Groups.TITLE}; private static final String GROUPS_SELECTION = ContactsContract.Groups.DELETED + " = 0"; private static final String GROUPS_SORT = ContactsContract.Groups.TITLE + " COLLATE LOCALIZED ASC"; @Override public Loader<Cursor> onCreateLoader(int id, Bundle args) { switch(id) { case CONTACTS_LOADER_ID: return new CursorLoader(this, CONTACTS_URI, CONTACTS_PROJECTION, null, null, CONTACTS_SORT); case CONTACT_DETAILS_LOADER_ID: return new CursorLoader(this, CONTACT_DETAILS_URI, CONTACT_DETAILS_PROJECTION, null, null, null); case GROUPS_LOADER_ID: return new CursorLoader(this, GROUPS_URI, GROUPS_PROJECTION, GROUPS_SELECTION, null, GROUPS_SORT); } return null; } @Override public void onLoaderReset(Loader<Cursor> loader) { ContactsLoaded.post(null); GroupsLoaded.post(null); } @Override public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) { switch(loader.getId()) { case CONTACTS_LOADER_ID: readContacts(cursor); // contacts loaded --> load the contact details getSupportLoaderManager().initLoader(CONTACT_DETAILS_LOADER_ID, null, this); break; case CONTACT_DETAILS_LOADER_ID: readContactDetails(cursor); break; case GROUPS_LOADER_ID: { readGroups(cursor); break; } } } // ****************************************** Contact Methods ******************************************* /* * List of all contacts. */ private List<ContactImpl> mContacts = new ArrayList<>(); /* * Map of all contacts by lookup key (ContactsContract.Contacts.LOOKUP_KEY). * We use this to find the contacts when the contact details are loaded. */ private Map<String, ContactImpl> mContactsByLookupKey = new HashMap<>(); /* * Number of selected contacts. * Selected groups are reflected in this too. 
*/ private int mNrOfSelectedContacts = 0; private void readContacts(Cursor cursor) { Log.e("1gravity", "***************************************************************"); Log.e("1gravity", "* CONTACTS *"); Log.e("1gravity", "***************************************************************"); mContacts.clear(); mContactsByLookupKey.clear(); mNrOfSelectedContacts = 0; if (cursor.moveToFirst()) { cursor.moveToPrevious(); int count = 0; while (cursor.moveToNext()) { ContactImpl contact = ContactImpl.fromCursor(cursor); mContacts.add(contact); // LOOKUP_KEY is the one we use to retrieve the contact when the contact details are loaded String lookupKey = cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts.LOOKUP_KEY)); mContactsByLookupKey.put(lookupKey, contact); boolean isChecked = mSelectedContactIds.contains( contact.getId() ); contact.setChecked(isChecked, true); mNrOfSelectedContacts += isChecked ? 1 : 0; contact.addOnContactCheckedListener(mContactListener); Log.e("1gravity", "lookupKey: " + lookupKey); Log.e("1gravity", "id: " + contact.getId()); Log.e("1gravity", "displayName: " + contact.getDisplayName()); Log.e("1gravity", "first name: " + contact.getFirstName()); Log.e("1gravity", "last name: " + contact.getLastName()); Log.e("1gravity", "photoUri: " + contact.getPhotoUri()); // update the ui once some contacts have loaded if (++count >= BATCH_SIZE) { ContactsLoaded.post(mContacts); count = 0; } } } updateTitle(); ContactsLoaded.post(mContacts); } private void readContactDetails(Cursor cursor) { Log.e("1gravity", "***************************************************************"); Log.e("1gravity", "* CONTACTS DETAILS *"); Log.e("1gravity", "***************************************************************"); if (cursor != null && cursor.moveToFirst()) { cursor.moveToPrevious(); while (cursor.moveToNext()) { String lookupKey = cursor.getString(cursor.getColumnIndex(ContactsContract.Data.LOOKUP_KEY)); ContactImpl contact = 
mContactsByLookupKey.get(lookupKey); if (contact != null) { readContactDetails(cursor, contact); } } } ContactsLoaded.post(mContacts); joinContactsAndGroups(mContacts); } private void readContactDetails(Cursor cursor, ContactImpl contact) { String mime = cursor.getString(cursor.getColumnIndex(ContactsContract.Data.MIMETYPE)); if (mime.equals(ContactsContract.CommonDataKinds.Email.CONTENT_ITEM_TYPE)) { String email = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.Email.ADDRESS)); String type = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.Email.TYPE)); if (email != null) contact.setEmail(email); Log.e("1gravity", " email: " + email); Log.e("1gravity", " type: " + type); } else if (mime.equals(ContactsContract.CommonDataKinds.Phone.CONTENT_ITEM_TYPE)) { String phone = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.Phone.NUMBER)); String type = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.Phone.TYPE)); contact.setPhone(phone); Log.e("1gravity", " phone: " + phone); Log.e("1gravity", " type: " + type); } else if (mime.equals(ContactsContract.CommonDataKinds.StructuredPostal.CONTENT_ITEM_TYPE)) { String FORMATTED_ADDRESS = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.FORMATTED_ADDRESS)); String TYPE = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.TYPE)); String STREET = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.STREET)); String POBOX = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.POBOX)); String CITY = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.CITY)); String REGION = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.REGION)); String POSTCODE = 
cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.POSTCODE)); String COUNTRY = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.COUNTRY)); contact.setAddress(FORMATTED_ADDRESS.replaceAll("\\n", ", ")); Log.e("1gravity", " FORMATTED_ADDRESS: " + FORMATTED_ADDRESS); Log.e("1gravity", " TYPE: " + TYPE); Log.e("1gravity", " STREET: " + STREET); Log.e("1gravity", " POBOX: " + POBOX); Log.e("1gravity", " CITY: " + CITY); Log.e("1gravity", " POSTCODE: " + POSTCODE); Log.e("1gravity", " REGION: " + REGION); Log.e("1gravity", " COUNTRY: " + COUNTRY); } else if (mime.equals(ContactsContract.CommonDataKinds.StructuredName.CONTENT_ITEM_TYPE)) { String firstName = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredName.GIVEN_NAME)); String lastName = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredName.FAMILY_NAME)); if (firstName != null) contact.setFirstName(firstName); if (lastName != null) contact.setLastName(lastName); Log.e("1gravity", " first name: " + firstName); Log.e("1gravity", " last name: " + lastName); } else if (mime.equals(ContactsContract.CommonDataKinds.GroupMembership.CONTENT_ITEM_TYPE)) { int groupId = cursor.getInt(cursor.getColumnIndex(ContactsContract.CommonDataKinds.GroupMembership.GROUP_ROW_ID)); Log.e("1gravity", " groupId: " + groupId); contact.addGroupId(groupId); } } // ****************************************** Group Methods ******************************************* /* * List of all groups. */ private List<GroupImpl> mGroups = new ArrayList<>(); /* * Map of all groups by id (ContactsContract.Groups._ID). * We use this to find the group when joining contacts and groups. */ private Map<Long, GroupImpl> mGroupsById = new HashMap<>(); /* * List of all visible groups. * Only groups with contacts will be shown / visible. 
*/ private List<GroupImpl> mVisibleGroups = new ArrayList<>(); private void readGroups(Cursor cursor) { Log.e("1gravity", "***************************************************************"); Log.e("1gravity", "* GROUPS *"); Log.e("1gravity", "***************************************************************"); mGroups.clear(); mGroupsById.clear(); mVisibleGroups.clear(); if (cursor.moveToFirst()) { cursor.moveToPrevious(); while (cursor.moveToNext()) { GroupImpl group = GroupImpl.fromCursor(cursor); mGroups.add(group); mGroupsById.put(group.getId(), group); boolean isChecked = mSelectedGroupIds.contains( group.getId() ); group.setChecked(isChecked, true); group.addOnContactCheckedListener(mGroupListener); Log.e("1gravity", "group " + group.getId() + ": " + group.getDisplayName()); String SOURCE_ID = cursor.getString(cursor.getColumnIndex(ContactsContract.Groups.SOURCE_ID)); Log.e("1gravity", "SOURCE_ID: " + SOURCE_ID); } } GroupsLoaded.post(mVisibleGroups); joinContactsAndGroups(mContacts); } // ****************************************** Process Contacts / Groups ******************************************* /** * Join contacts and groups. * This can happen once the contact details and the groups have loaded. */ private synchronized void joinContactsAndGroups(List<? extends Contact> contacts) { if (contacts == null || contacts.isEmpty()) return; if (mGroupsById == null || mGroupsById.isEmpty()) return; // map contacts to groups for (Contact contact : contacts) { for (Long groupId : contact.getGroupIds()) { GroupImpl group = mGroupsById.get(groupId); if (group != null) { if (! 
group.hasContacts()) { mVisibleGroups.add(group); } group.addContact(contact); } } } Collections.sort(mVisibleGroups, new Comparator<GroupImpl>() { @Override public int compare(GroupImpl lhs, GroupImpl rhs) { return lhs.getDisplayName().compareTo(rhs.getDisplayName()); } }); GroupsLoaded.post(mVisibleGroups); } /** * Listening to onContactChecked for contacts because we need to update the title to reflect * the number of selected contacts and we also want to un-check groups if none of their contacts * are checked any more. */ private OnContactCheckedListener<Contact> mContactListener = new OnContactCheckedListener<Contact>() { @Override public void onContactChecked(Contact contact, boolean wasChecked, boolean isChecked) { if (wasChecked != isChecked) { mNrOfSelectedContacts += isChecked ? 1 : -1; mNrOfSelectedContacts = Math.min(mContacts.size(), Math.max(0, mNrOfSelectedContacts)); updateTitle(); if (! isChecked) { processGroupSelection(); } } } }; /** * Check/un-check a group's contacts if the user checks/un-checks a group. */ private OnContactCheckedListener<Group> mGroupListener = new OnContactCheckedListener<Group>() { @Override public void onContactChecked(Group group, boolean wasChecked, boolean isChecked) { // check/un-check the group's contacts processContactSelection(group, isChecked); // check if we need to deselect some groups processGroupSelection(); } }; @Subscribe(threadMode = ThreadMode.MAIN) public void onEventMainThread(ContactSelectionChanged event) { // all has changed -> calculate the number of selected contacts and update the title calcNrOfSelectedContacts(); // check if we need to deselect some groups processGroupSelection(); } /** * Check/un-check contacts for a group that has been selected/deselected. * Call this when a group has been selected/deselected or after a ContactSelectionChanged event. 
*/ private void processContactSelection(Group group, boolean isChecked) { if (group == null || mContacts == null) return; // check/un-check contacts boolean hasChanged = false; for (Contact contact : group.getContacts()) { if (contact.isChecked() != isChecked) { contact.setChecked(isChecked, true); hasChanged = true; } } if (hasChanged) { ContactsLoaded.post(mContacts); calcNrOfSelectedContacts(); } } /** * Calculate the number or selected contacts. * Call this when a group has been selected/deselected or after a ContactSelectionChanged event. */ private void calcNrOfSelectedContacts() { if (mContacts == null) return; mNrOfSelectedContacts = 0; for (Contact contact : mContacts) { if (contact.isChecked()) { mNrOfSelectedContacts++; } } updateTitle(); } /** * Check if a group needs to be deselected because none of its contacts is selected. * Call this when a contact or group has been deselected or after a ContactSelectionChanged event. */ private void processGroupSelection() { if (mGroups == null) return; boolean hasChanged = false; for (Group theGroup : mGroups) { if (deselectGroup(theGroup)) { hasChanged = true; } } if (hasChanged) { GroupsLoaded.post(mVisibleGroups); } } private boolean deselectGroup(Group group) { if (group == null) return false; // check if the group's contacts are all deselected boolean isSelected = false; for (Contact groupContact : group.getContacts()) { if (groupContact.isChecked()) { isSelected = true; break; } } if (! isSelected && group.isChecked()) { // no contact selected group.setChecked(false, true); return true; } return false; } }
library/src/main/java/com/onegravity/contactpicker/implementation/ContactPickerActivity.java
/* * Copyright (C) 2015-2016 Emanuel Moecklin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.onegravity.contactpicker.implementation; import android.Manifest; import android.app.Activity; import android.content.Intent; import android.content.pm.ActivityInfo; import android.content.pm.PackageManager; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.provider.ContactsContract; import android.support.design.widget.TabLayout; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentStatePagerAdapter; import android.support.v4.app.LoaderManager; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.support.v4.view.ViewPager; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import com.onegravity.contactpicker.OnContactCheckedListener; import com.onegravity.contactpicker.R; import com.onegravity.contactpicker.contact.Contact; import com.onegravity.contactpicker.contact.ContactDescription; import com.onegravity.contactpicker.contact.ContactFragment; import com.onegravity.contactpicker.contact.ContactsLoaded; import com.onegravity.contactpicker.group.Group; import com.onegravity.contactpicker.group.GroupFragment; import com.onegravity.contactpicker.group.GroupsLoaded; import com.onegravity.contactpicker.picture.ContactPictureType; 
import org.greenrobot.eventbus.EventBus; import org.greenrobot.eventbus.Subscribe; import org.greenrobot.eventbus.ThreadMode; import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; public class ContactPickerActivity extends AppCompatActivity implements LoaderManager.LoaderCallbacks<Cursor> { /** * Use this parameter to determine whether the contact picture shows a contact badge and if yes * what type (round, square) * * {@link com.onegravity.contactpicker.picture.ContactPictureType} */ public static final String EXTRA_CONTACT_BADGE_TYPE = "EXTRA_CONTACT_BADGE_TYPE"; /** * Use this to define what contact information is used for the description field (second line) * * {@link com.onegravity.contactpicker.contact.ContactDescription} */ public static final String EXTRA_CONTACT_DESCRIPTION = "EXTRA_CONTACT_DESCRIPTION"; /** * We put the resulting contact list into the Intent as extra data with this key. */ public static final String RESULT_CONTACT_DATA = "RESULT_CONTACT_DATA"; private static ContactPictureType sBadgeType = ContactPictureType.ROUND; public static ContactPictureType getContactBadgeType() { return sBadgeType; } private static ContactDescription sDescription = ContactDescription.EMAIL; public static ContactDescription getContactDescription() { return sDescription; } private PagerAdapter mAdapter; private String mDefaultTitle; // update the adapter after a certain amount of contacts has loaded private static final int BATCH_SIZE = 25; /* * The selected ids are saved in onSaveInstanceState, restored in onCreate and then applied to * the contacts and groups in onLoadFinished. 
*/ private static final String CONTACT_IDS = "CONTACT_IDS"; private HashSet<Long> mSelectedContactIds = new HashSet<>(); private static final String GROUP_IDS = "GROUP_IDS"; private HashSet<Long> mSelectedGroupIds = new HashSet<>(); // ****************************************** Lifecycle Methods ******************************************* @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // check if we have the READ_CONTACTS permission, if not --> terminate try { int pid = android.os.Process.myPid(); PackageManager pckMgr = getPackageManager(); int uid = pckMgr.getApplicationInfo(getComponentName().getPackageName(), PackageManager.GET_META_DATA).uid; enforcePermission(Manifest.permission.READ_CONTACTS, pid, uid, "Contact permission hasn't been granted to this app, terminating."); } catch (PackageManager.NameNotFoundException | SecurityException e) { Log.e(getClass().getSimpleName(), e.getMessage()); finish(); return; } // retrieve default title which is used if no contacts are selected if (savedInstanceState == null) { try { PackageManager pkMgr = getPackageManager(); ActivityInfo activityInfo = pkMgr.getActivityInfo(getComponentName(), PackageManager.GET_META_DATA); mDefaultTitle = activityInfo.loadLabel(pkMgr).toString(); } catch (PackageManager.NameNotFoundException ignore) { mDefaultTitle = getTitle().toString(); } } else { mDefaultTitle = savedInstanceState.getString("mDefaultTitle"); try { mSelectedContactIds = (HashSet<Long>) savedInstanceState.getSerializable(CONTACT_IDS); } catch (ClassCastException ignore) {} try { mSelectedGroupIds = (HashSet<Long>) savedInstanceState.getSerializable(GROUP_IDS); } catch (ClassCastException ignore) {} } // read Activity parameter ContactPictureType sBadgeType = ContactPictureType.ROUND; Intent intent = getIntent(); String tmp = intent.getStringExtra(EXTRA_CONTACT_BADGE_TYPE); if (tmp != null) { try { sBadgeType = ContactPictureType.valueOf(tmp); } catch 
(IllegalArgumentException e) { Log.e(getClass().getSimpleName(), tmp + " is not a legal EXTRA_CONTACT_BADGE_TYPE value, defaulting to ROUND"); } } // read Activity parameter ContactDescription sDescription = ContactDescription.EMAIL; tmp = intent.getStringExtra(EXTRA_CONTACT_DESCRIPTION); if (tmp != null) { try { sDescription = ContactDescription.valueOf(tmp); } catch (IllegalArgumentException e) { Log.e(getClass().getSimpleName(), tmp + " is not a legal EXTRA_CONTACT_DESCRIPTION value, defaulting to EMAIL"); } } setContentView(R.layout.contact_tab_layout); // initialize TabLayout TabLayout tabLayout = (TabLayout)findViewById(R.id.tabContent); tabLayout.setTabMode(TabLayout.MODE_FIXED); tabLayout.setTabGravity(TabLayout.GRAVITY_FILL); TabLayout.Tab tabContacts = tabLayout.newTab(); tabContacts.setText(R.string.contact_tab_title); tabLayout.addTab(tabContacts); TabLayout.Tab tabGroups = tabLayout.newTab(); tabGroups.setText(R.string.group_tab_title); tabLayout.addTab(tabGroups); // initialize ViewPager final ViewPager viewPager = (ViewPager) findViewById(R.id.tabPager); mAdapter = new PagerAdapter(getSupportFragmentManager(), tabLayout.getTabCount()); viewPager.setAdapter(mAdapter); viewPager.addOnPageChangeListener(new TabLayout.TabLayoutOnPageChangeListener(tabLayout)); tabLayout.setOnTabSelectedListener(new TabLayout.OnTabSelectedListener() { @Override public void onTabSelected(TabLayout.Tab tab) { viewPager.setCurrentItem(tab.getPosition()); } @Override public void onTabUnselected(TabLayout.Tab tab) {} @Override public void onTabReselected(TabLayout.Tab tab) {} }); } private static class PagerAdapter extends FragmentStatePagerAdapter { private int mNumOfTabs; private ContactFragment mContactFragment; private GroupFragment mGroupFragment; public PagerAdapter(FragmentManager fm, int numOfTabs) { super(fm); mNumOfTabs = numOfTabs; } @Override public Fragment getItem(int position) { switch (position) { case 0: mContactFragment = ContactFragment.newInstance(); return 
mContactFragment; case 1: mGroupFragment = GroupFragment.newInstance(); return mGroupFragment; default: return null; } } @Override public int getCount() { return mNumOfTabs; } } @Override protected void onResume() { super.onResume(); EventBus.getDefault().register(this); getSupportLoaderManager().initLoader(CONTACTS_LOADER_ID, null, this); getSupportLoaderManager().initLoader(GROUPS_LOADER_ID, null, this); } @Override protected void onPause() { super.onPause(); EventBus.getDefault().unregister(this); } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); outState.putString("mDefaultTitle", mDefaultTitle); mSelectedContactIds.clear();; for (Contact contact : mContacts) { if (contact.isChecked()) { mSelectedContactIds.add( contact.getId() ); } } outState.putSerializable(CONTACT_IDS, mSelectedContactIds); mSelectedGroupIds.clear();; for (Group group : mGroups) { if (group.isChecked()) { mSelectedGroupIds.add( group.getId() ); } } outState.putSerializable(GROUP_IDS, mSelectedGroupIds); } private void updateTitle() { if (mNrOfSelectedContacts == 0) { setTitle(mDefaultTitle); } else { String title = getString(R.string.actionmode_selected, mNrOfSelectedContacts); setTitle(title); } } // ****************************************** Option Menu ******************************************* @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.contact_picker, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if( id == android.R.id.home) { setResult(Activity.RESULT_CANCELED, null); finish(); return true; } else if( id == R.id.action_done) { onDone(); return true; } return super.onOptionsItemSelected(item); } private void onDone() { // return only checked contacts List<Contact> contacts = new ArrayList<>(); if (mContacts != null) { for (Contact contact : mContacts) { if (contact.isChecked()) { contacts.add(contact); } } } Intent data = 
new Intent(); data.putExtra(RESULT_CONTACT_DATA, (Serializable) contacts); setResult(Activity.RESULT_OK, data); finish(); } // ****************************************** Loader Methods ******************************************* /* * Loader configuration contacts */ private static final int CONTACTS_LOADER_ID = 0; private static final Uri CONTACTS_URI = ContactsContract.Contacts.CONTENT_URI; private static final String[] CONTACTS_PROJECTION = new String[] { ContactsContract.Contacts._ID, ContactsContract.Contacts.LOOKUP_KEY, ContactsContract.Contacts.DISPLAY_NAME_PRIMARY, ContactsContract.Contacts.PHOTO_URI}; private static final String CONTACTS_SORT = ContactsContract.Contacts.DISPLAY_NAME_PRIMARY + " COLLATE LOCALIZED ASC"; /* * Loader configuration contacts details */ private static final int CONTACT_DETAILS_LOADER_ID = 1; private static final Uri CONTACT_DETAILS_URI = ContactsContract.Data.CONTENT_URI; private static final String[] CONTACT_DETAILS_PROJECTION = { ContactsContract.Data.LOOKUP_KEY, ContactsContract.Contacts.DISPLAY_NAME_PRIMARY, ContactsContract.Data.MIMETYPE, ContactsContract.CommonDataKinds.StructuredPostal.FORMATTED_ADDRESS, ContactsContract.CommonDataKinds.StructuredPostal.TYPE, ContactsContract.CommonDataKinds.StructuredPostal.STREET, ContactsContract.CommonDataKinds.StructuredPostal.POBOX, ContactsContract.CommonDataKinds.StructuredPostal.CITY, ContactsContract.CommonDataKinds.StructuredPostal.REGION, ContactsContract.CommonDataKinds.StructuredPostal.POSTCODE, ContactsContract.CommonDataKinds.StructuredPostal.COUNTRY, ContactsContract.CommonDataKinds.Phone.NUMBER, ContactsContract.CommonDataKinds.Phone.TYPE, ContactsContract.CommonDataKinds.Email.ADDRESS, ContactsContract.CommonDataKinds.Email.TYPE, ContactsContract.CommonDataKinds.StructuredName.GIVEN_NAME, ContactsContract.CommonDataKinds.StructuredName.FAMILY_NAME, ContactsContract.CommonDataKinds.GroupMembership.GROUP_ROW_ID, }; /* * Loader configuration groups */ private static final 
int GROUPS_LOADER_ID = 2; private static final Uri GROUPS_URI = ContactsContract.Groups.CONTENT_URI; private static final String[] GROUPS_PROJECTION = new String[] { ContactsContract.Groups._ID, ContactsContract.Groups.SOURCE_ID, ContactsContract.Groups.TITLE}; private static final String GROUPS_SELECTION = ContactsContract.Groups.DELETED + " = 0"; private static final String GROUPS_SORT = ContactsContract.Groups.TITLE + " COLLATE LOCALIZED ASC"; @Override public Loader<Cursor> onCreateLoader(int id, Bundle args) { switch(id) { case CONTACTS_LOADER_ID: return new CursorLoader(this, CONTACTS_URI, CONTACTS_PROJECTION, null, null, CONTACTS_SORT); case CONTACT_DETAILS_LOADER_ID: return new CursorLoader(this, CONTACT_DETAILS_URI, CONTACT_DETAILS_PROJECTION, null, null, null); case GROUPS_LOADER_ID: return new CursorLoader(this, GROUPS_URI, GROUPS_PROJECTION, GROUPS_SELECTION, null, GROUPS_SORT); } return null; } @Override public void onLoaderReset(Loader<Cursor> loader) { ContactsLoaded.post(null); GroupsLoaded.post(null); } @Override public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) { switch(loader.getId()) { case CONTACTS_LOADER_ID: readContacts(cursor); // contacts loaded --> load the contact details getSupportLoaderManager().initLoader(CONTACT_DETAILS_LOADER_ID, null, this); break; case CONTACT_DETAILS_LOADER_ID: readContactDetails(cursor); break; case GROUPS_LOADER_ID: { readGroups(cursor); break; } } } // ****************************************** Contact Methods ******************************************* /* * List of all contacts. */ private List<ContactImpl> mContacts = new ArrayList<>(); /* * Map of all contacts by lookup key (ContactsContract.Contacts.LOOKUP_KEY). * We use this to find the contacts when the contact details are loaded. */ private Map<String, ContactImpl> mContactsByLookupKey = new HashMap<>(); /* * Number of selected contacts. * Selected groups are reflected in this too. 
*/ private int mNrOfSelectedContacts = 0; private void readContacts(Cursor cursor) { Log.e("1gravity", "***************************************************************"); Log.e("1gravity", "* CONTACTS *"); Log.e("1gravity", "***************************************************************"); mContacts.clear(); mContactsByLookupKey.clear(); mNrOfSelectedContacts = 0; if (cursor.moveToFirst()) { cursor.moveToPrevious(); int count = 0; while (cursor.moveToNext()) { ContactImpl contact = ContactImpl.fromCursor(cursor); mContacts.add(contact); // LOOKUP_KEY is the one we use to retrieve the contact when the contact details are loaded String lookupKey = cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts.LOOKUP_KEY)); mContactsByLookupKey.put(lookupKey, contact); boolean isChecked = mSelectedContactIds.contains( contact.getId() ); contact.setChecked(isChecked, true); mNrOfSelectedContacts += isChecked ? 1 : 0; contact.addOnContactCheckedListener(mContactListener); Log.e("1gravity", "lookupKey: " + lookupKey); Log.e("1gravity", "id: " + contact.getId()); Log.e("1gravity", "displayName: " + contact.getDisplayName()); Log.e("1gravity", "first name: " + contact.getFirstName()); Log.e("1gravity", "last name: " + contact.getLastName()); Log.e("1gravity", "photoUri: " + contact.getPhotoUri()); // update the ui once some contacts have loaded if (++count >= BATCH_SIZE) { ContactsLoaded.post(mContacts); count = 0; } } } updateTitle(); ContactsLoaded.post(mContacts); } private void readContactDetails(Cursor cursor) { Log.e("1gravity", "***************************************************************"); Log.e("1gravity", "* CONTACTS DETAILS *"); Log.e("1gravity", "***************************************************************"); if (cursor != null && cursor.moveToFirst()) { cursor.moveToPrevious(); while (cursor.moveToNext()) { String lookupKey = cursor.getString(cursor.getColumnIndex(ContactsContract.Data.LOOKUP_KEY)); ContactImpl contact = 
mContactsByLookupKey.get(lookupKey); if (contact != null) { readContactDetails(cursor, contact); } } } ContactsLoaded.post(mContacts); joinContactsAndGroups(mContacts); } private void readContactDetails(Cursor cursor, ContactImpl contact) { String mime = cursor.getString(cursor.getColumnIndex(ContactsContract.Data.MIMETYPE)); if (mime.equals(ContactsContract.CommonDataKinds.Email.CONTENT_ITEM_TYPE)) { String email = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.Email.ADDRESS)); String type = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.Email.TYPE)); if (email != null) contact.setEmail(email); Log.e("1gravity", " email: " + email); Log.e("1gravity", " type: " + type); } else if (mime.equals(ContactsContract.CommonDataKinds.Phone.CONTENT_ITEM_TYPE)) { String phone = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.Phone.NUMBER)); String type = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.Phone.TYPE)); contact.setPhone(phone); Log.e("1gravity", " phone: " + phone); Log.e("1gravity", " type: " + type); } else if (mime.equals(ContactsContract.CommonDataKinds.StructuredPostal.CONTENT_ITEM_TYPE)) { String FORMATTED_ADDRESS = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.FORMATTED_ADDRESS)); String TYPE = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.TYPE)); String STREET = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.STREET)); String POBOX = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.POBOX)); String CITY = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.CITY)); String REGION = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.REGION)); String POSTCODE = 
cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.POSTCODE)); String COUNTRY = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.COUNTRY)); contact.setAddress(FORMATTED_ADDRESS.replaceAll("\\n", ", ")); Log.e("1gravity", " FORMATTED_ADDRESS: " + FORMATTED_ADDRESS); Log.e("1gravity", " TYPE: " + TYPE); Log.e("1gravity", " STREET: " + STREET); Log.e("1gravity", " POBOX: " + POBOX); Log.e("1gravity", " CITY: " + CITY); Log.e("1gravity", " POSTCODE: " + POSTCODE); Log.e("1gravity", " REGION: " + REGION); Log.e("1gravity", " COUNTRY: " + COUNTRY); } else if (mime.equals(ContactsContract.CommonDataKinds.StructuredName.CONTENT_ITEM_TYPE)) { String firstName = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredName.GIVEN_NAME)); String lastName = cursor.getString(cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredName.FAMILY_NAME)); if (firstName != null) contact.setFirstName(firstName); if (lastName != null) contact.setLastName(lastName); Log.e("1gravity", " first name: " + firstName); Log.e("1gravity", " last name: " + lastName); } else if (mime.equals(ContactsContract.CommonDataKinds.GroupMembership.CONTENT_ITEM_TYPE)) { int groupId = cursor.getInt(cursor.getColumnIndex(ContactsContract.CommonDataKinds.GroupMembership.GROUP_ROW_ID)); Log.e("1gravity", " groupId: " + groupId); contact.addGroupId(groupId); } } // ****************************************** Group Methods ******************************************* /* * List of all groups. */ private List<GroupImpl> mGroups = new ArrayList<>(); /* * Map of all groups by id (ContactsContract.Groups._ID). * We use this to find the group when joining contacts and groups. */ private Map<Long, GroupImpl> mGroupsById = new HashMap<>(); /* * List of all visible groups. * Only groups with contacts will be shown / visible. 
*/ private List<GroupImpl> mVisibleGroups = new ArrayList<>(); private void readGroups(Cursor cursor) { Log.e("1gravity", "***************************************************************"); Log.e("1gravity", "* GROUPS *"); Log.e("1gravity", "***************************************************************"); mGroups.clear(); mGroupsById.clear(); mVisibleGroups.clear(); if (cursor.moveToFirst()) { cursor.moveToPrevious(); while (cursor.moveToNext()) { GroupImpl group = GroupImpl.fromCursor(cursor); mGroups.add(group); mGroupsById.put(group.getId(), group); boolean isChecked = mSelectedGroupIds.contains( group.getId() ); group.setChecked(isChecked, true); group.addOnContactCheckedListener(mGroupListener); Log.e("1gravity", "group " + group.getId() + ": " + group.getDisplayName()); String SOURCE_ID = cursor.getString(cursor.getColumnIndex(ContactsContract.Groups.SOURCE_ID)); Log.e("1gravity", "SOURCE_ID: " + SOURCE_ID); } } GroupsLoaded.post(mVisibleGroups); joinContactsAndGroups(mContacts); } // ****************************************** Process Contacts / Groups ******************************************* /** * Join contacts and groups. * This can happen once the contact details and the groups have loaded. */ private synchronized void joinContactsAndGroups(List<? extends Contact> contacts) { if (contacts == null || contacts.isEmpty()) return; if (mGroupsById == null || mGroupsById.isEmpty()) return; // map contacts to groups for (Contact contact : contacts) { for (Long groupId : contact.getGroupIds()) { GroupImpl group = mGroupsById.get(groupId); if (group != null) { if (! group.hasContacts()) { mVisibleGroups.add(group); } group.addContact(contact); } } } GroupsLoaded.post(mVisibleGroups); } /** * Listening to onContactChecked for contacts because we need to update the title to reflect * the number of selected contacts and we also want to un-check groups if none of their contacts * are checked any more. 
*/ private OnContactCheckedListener<Contact> mContactListener = new OnContactCheckedListener<Contact>() { @Override public void onContactChecked(Contact contact, boolean wasChecked, boolean isChecked) { if (wasChecked != isChecked) { mNrOfSelectedContacts += isChecked ? 1 : -1; mNrOfSelectedContacts = Math.min(mContacts.size(), Math.max(0, mNrOfSelectedContacts)); updateTitle(); if (! isChecked) { processGroupSelection(); } } } }; /** * Check/un-check a group's contacts if the user checks/un-checks a group. */ private OnContactCheckedListener<Group> mGroupListener = new OnContactCheckedListener<Group>() { @Override public void onContactChecked(Group group, boolean wasChecked, boolean isChecked) { // check/un-check the group's contacts processContactSelection(group, isChecked); // check if we need to deselect some groups processGroupSelection(); } }; @Subscribe(threadMode = ThreadMode.MAIN) public void onEventMainThread(ContactSelectionChanged event) { // all has changed -> calculate the number of selected contacts and update the title calcNrOfSelectedContacts(); // check if we need to deselect some groups processGroupSelection(); } /** * Check/un-check contacts for a group that has been selected/deselected. * Call this when a group has been selected/deselected or after a ContactSelectionChanged event. */ private void processContactSelection(Group group, boolean isChecked) { if (group == null || mContacts == null) return; // check/un-check contacts boolean hasChanged = false; for (Contact contact : group.getContacts()) { if (contact.isChecked() != isChecked) { contact.setChecked(isChecked, true); hasChanged = true; } } if (hasChanged) { ContactsLoaded.post(mContacts); calcNrOfSelectedContacts(); } } /** * Calculate the number or selected contacts. * Call this when a group has been selected/deselected or after a ContactSelectionChanged event. 
*/ private void calcNrOfSelectedContacts() { if (mContacts == null) return; mNrOfSelectedContacts = 0; for (Contact contact : mContacts) { if (contact.isChecked()) { mNrOfSelectedContacts++; } } updateTitle(); } /** * Check if a group needs to be deselected because none of its contacts is selected. * Call this when a contact or group has been deselected or after a ContactSelectionChanged event. */ private void processGroupSelection() { if (mGroups == null) return; boolean hasChanged = false; for (Group theGroup : mGroups) { if (deselectGroup(theGroup)) { hasChanged = true; } } if (hasChanged) { GroupsLoaded.post(mVisibleGroups); } } private boolean deselectGroup(Group group) { if (group == null) return false; // check if the group's contacts are all deselected boolean isSelected = false; for (Contact groupContact : group.getContacts()) { if (groupContact.isChecked()) { isSelected = true; break; } } if (! isSelected && group.isChecked()) { // no contact selected group.setChecked(false, true); return true; } return false; } }
the join of contacts with groups destroyed the group's sorting order
library/src/main/java/com/onegravity/contactpicker/implementation/ContactPickerActivity.java
the join of contacts with groups destroyed the group's sorting order
<ide><path>ibrary/src/main/java/com/onegravity/contactpicker/implementation/ContactPickerActivity.java <ide> <ide> import java.io.Serializable; <ide> import java.util.ArrayList; <add>import java.util.Collections; <add>import java.util.Comparator; <ide> import java.util.HashMap; <ide> import java.util.HashSet; <ide> import java.util.List; <ide> ContactsContract.Groups.SOURCE_ID, <ide> ContactsContract.Groups.TITLE}; <ide> private static final String GROUPS_SELECTION = ContactsContract.Groups.DELETED + " = 0"; <del> <ide> private static final String GROUPS_SORT = ContactsContract.Groups.TITLE + " COLLATE LOCALIZED ASC"; <ide> <ide> @Override <ide> } <ide> } <ide> <add> Collections.sort(mVisibleGroups, new Comparator<GroupImpl>() { <add> @Override <add> public int compare(GroupImpl lhs, GroupImpl rhs) { <add> return lhs.getDisplayName().compareTo(rhs.getDisplayName()); <add> } <add> }); <add> <ide> GroupsLoaded.post(mVisibleGroups); <ide> } <ide>
JavaScript
mit
bc703e5a62a504f31eb188fed86f31655b0cca23
0
soslan/skjs,soslan/skjs,soslan/skjs
var skui = {} sk.ui = skui; skui.Component = function(){ this.element = sk.div(); this.container = this.element; return this; } skui.component = function(){ return new skui.Component(); } skui.section = function(cArgs){ var container = {}; container.container = sk.c('div', cArgs); container.element = container.container; container.container.classList.add('section'); container.header = sk.c('div', { cls: 'header', parent: container.container, }); container.body = sk.c('div', { parent: container.container, cls: 'body', }); container.footer = sk.c('div', { parent: container.container, cls: 'footer', }); return container; } skui.window = function(){ var args = sk.args(arguments, 'str title, num? level, args?'); var wind = {}; wind.container = sk.div(args); wind.container.classList.add('skui-wind'); wind.element = wind.container; wind.container.classList.add('window'); wind.header = sk.c('div', { cls: 'header skui-wind-header', parent: wind.container, }); wind.actions = sk.c('div', wind.header, 'skui-wind-actions'); wind.title = sk.c('h' + (args.level || 2), wind.header, 'skui-wind-title title', {content: args.title || 'Window'}); wind.toolbar = sk.c('div', wind.element, 'toolbar skui-wind-toolbar'); wind.body = sk.c('div', wind.element, 'body skui-wind-body'); wind.footer = sk.c('div', wind.element, 'footer skui-wind-footer'); return wind; } // skui.labeled = function() // Scene manager skui.scenes = function(cArgs){ var self = {}; self.container = sk.c(cArgs); self.nextId = 1; self.scenes = {}; self.activeScenes = []; self.add = function(args){ self.container.appendChild(args.container); self.scenes[self.nextId] = args; args.container.dataset.sceneId = self.nextId; self.nextId += 1; } self.activate = function(scene){ if(scene.parentNode === self.container){ scene.classList.add('active'); self.activeScenes.push(scene); var onactivated = self.scenes[scene.dataset.sceneId].onactivated; if(typeof onactivated === 'function'){ onactivated.call(); } } } self.deactivate = 
function(scene){ scene.classList.remove('active'); } self.deactivateAll = function(){ for(var i in self.activeScenes){ self.deactivate(self.activeScenes[i]) } self.activeScenes = []; } self.reset = function(scene){ self.deactivateAll(); self.activate(scene); } return self; } sk.ui.field = function(args){ var field = {}; field.element = sk.div(args); field.element.classList.add('skui-field'); field.header = sk.div({ parent: field.element, cls: 'skui-field-header', }); field.body = sk.div({ parent: field.element, cls: 'skui-field-body', }); field.label = sk.html({ tag: 'label', parent: field.header, cls: 'skui-field-label', text: args.label, }); return field; } skui.controlContainer = function(args, label){ var out = { container: sk.c('div', args), }; out.container.classList.add('control-container'); out.container.classList.add('form-group'); out.header = sk.c('div', { parent: out.container, cls: 'header', }) out.label = sk.c('label', { parent: out.header, cls: 'control-label', content: label, }) out.body = sk.c('div', { parent: out.container, cls: 'body', }) return out; } skui.textControl = function(args, label, name, value){ var control = skui.controlContainer(args, label); control.input = sk.c('input', { parent: control.body, content: value, id: true, attr: { name: name, type: 'text', }, cls: 'control text-control form-control', }); control.label.htmlFor = control.input.id; return control; } skui.textareaControl = function(args, label, name, value){ var control = skui.controlContainer(args, label); control.input = sk.c('textarea', { parent: control.body, content: value, id: true, attr: { name: name, }, cls: 'control textarea-control form-control', }); control.label.htmlFor = control.input.id; return control; } skui.numberControl = function(){ var args = sk.args(arguments, 'obj? 
contArgs, str label, str name, args?', { label: 'Number', }); var control = skui.controlContainer(args.contArgs, args.label); control.input = sk.c('input', { parent: control.body, id: true, attr: { name: args.name, type: 'number', }, cls: 'control number-control form-control', }); control.label.htmlFor = control.input.id; return control; } skui.fileControl = function(){ var args = sk.args(arguments, 'obj? contArgs, str label, str name, args?', { label: 'File', }); var control = skui.controlContainer(args.contArgs, args.label); control.input = sk.c('input', { parent: control.body, id: true, attr: { name: args.name, type: 'file', }, cls: 'control file-control form-control', }); control.label.htmlFor = control.input.id; return control; } skui.menu = function(){ var menu = {}; menu.element = sk.c('ul',{ cls: 'menu-list' }); menu.container = menu.element; menu.activator = sk.activator({ activation: function(prev, next){ next.element.classList.add('active'); }, deactivation: function(prev){ prev.element.classList.remove('active'); } }); menu.item = function(content, action){ var item = { action: action, menu: menu, }; item.element = sk.c('li', menu.element, { action: function(){ item.activate(); } }); item.element.appendChild(content); item.activate = function(){ if(typeof item.action === 'function'){ item.action(); } } } return menu; } sk.ui.admin = function(args){ var admin = {}; admin.element = sk.div(args); admin.element.classList.add('skui-admin'); admin.header = sk.div({ parent: admin.element, cls: 'skui-admin-header' }); admin.body = sk.div({ parent: admin.element, cls: 'skui-admin-body', }) admin.navigation = sk.div({ parent: admin.body, cls: 'skui-admin-nav', }); admin.main = sk.ui.scenes({ parent: admin.body, cls: 'skui-admin-main', }); admin.footer = sk.div({ parent: admin.element, cls: 'skui-admin-footer' }); admin.menu = sk.ui.scenes({ parent: admin.navigation, }); admin.brand = sk.span({ parent: admin.header, cls: 'skui-admin-brand', text: args.brandName || 
'', }); return admin; }
src/ui.js
var skui = {} sk.ui = skui; skui.Component = function(){ this.element = sk.div(); this.container = this.element; return this; } skui.component = function(){ return new skui.Component(); } skui.section = function(cArgs){ var container = {}; container.container = sk.c('div', cArgs); container.element = container.container; container.container.classList.add('section'); container.header = sk.c('div', { cls: 'header', parent: container.container, }); container.body = sk.c('div', { parent: container.container, cls: 'body', }); container.footer = sk.c('div', { parent: container.container, cls: 'footer', }); return container; } skui.window = function(){ var args = sk.args(arguments, 'str title, num? level, args?'); var wind = {}; wind.container = sk.div(args); wind.container.classList.add('skui-wind'); wind.element = wind.container; wind.container.classList.add('window'); wind.header = sk.c('div', { cls: 'header skui-wind-header', parent: wind.container, }); wind.actions = sk.c('div', wind.header, 'skui-wind-actions'); wind.title = sk.c('h' + (args.level || 2), wind.header, 'skui-wind-title title', {content: args.title || 'Window'}); wind.toolbar = sk.c('div', wind.element, 'toolbar skui-wind-toolbar'); wind.body = sk.c('div', wind.element, 'body skui-wind-body'); wind.footer = sk.c('div', wind.element, 'footer skui-wind-footer'); return wind; } // skui.labeled = function() // Scene manager skui.scenes = function(cArgs){ var self = {}; self.container = sk.c(cArgs); self.nextId = 1; self.scenes = {}; self.activeScenes = []; self.add = function(args){ self.container.appendChild(args.container); self.scenes[self.nextId] = args; args.container.dataset.sceneId = self.nextId; self.nextId += 1; } self.activate = function(scene){ if(scene.parentNode === self.container){ scene.classList.add('active'); self.activeScenes.push(scene); var onactivated = self.scenes[scene.dataset.sceneId].onactivated; if(typeof onactivated === 'function'){ onactivated.call(); } } } self.deactivate = 
function(scene){ scene.classList.remove('active'); } self.deactivateAll = function(){ for(var i in self.activeScenes){ self.deactivate(self.activeScenes[i]) } self.activeScenes = []; } self.reset = function(scene){ self.deactivateAll(); self.activate(scene); } return self; } skui.controlContainer = function(args, label){ var out = { container: sk.c('div', args), }; out.container.classList.add('control-container'); out.container.classList.add('form-group'); out.header = sk.c('div', { parent: out.container, cls: 'header', }) out.label = sk.c('label', { parent: out.header, cls: 'control-label', content: label, }) out.body = sk.c('div', { parent: out.container, cls: 'body', }) return out; } skui.textControl = function(args, label, name, value){ var control = skui.controlContainer(args, label); control.input = sk.c('input', { parent: control.body, content: value, id: true, attr: { name: name, type: 'text', }, cls: 'control text-control form-control', }); control.label.htmlFor = control.input.id; return control; } skui.textareaControl = function(args, label, name, value){ var control = skui.controlContainer(args, label); control.input = sk.c('textarea', { parent: control.body, content: value, id: true, attr: { name: name, }, cls: 'control textarea-control form-control', }); control.label.htmlFor = control.input.id; return control; } skui.numberControl = function(){ var args = sk.args(arguments, 'obj? contArgs, str label, str name, args?', { label: 'Number', }); var control = skui.controlContainer(args.contArgs, args.label); control.input = sk.c('input', { parent: control.body, id: true, attr: { name: args.name, type: 'number', }, cls: 'control number-control form-control', }); control.label.htmlFor = control.input.id; return control; } skui.fileControl = function(){ var args = sk.args(arguments, 'obj? 
contArgs, str label, str name, args?', { label: 'File', }); var control = skui.controlContainer(args.contArgs, args.label); control.input = sk.c('input', { parent: control.body, id: true, attr: { name: args.name, type: 'file', }, cls: 'control file-control form-control', }); control.label.htmlFor = control.input.id; return control; } skui.menu = function(){ var menu = {}; menu.element = sk.c('ul',{ cls: 'menu-list' }); menu.container = menu.element; menu.activator = sk.activator({ activation: function(prev, next){ next.element.classList.add('active'); }, deactivation: function(prev){ prev.element.classList.remove('active'); } }); menu.item = function(content, action){ var item = { action: action, menu: menu, }; item.element = sk.c('li', menu.element, { action: function(){ item.activate(); } }); item.element.appendChild(content); item.activate = function(){ if(typeof item.action === 'function'){ item.action(); } } } return menu; } sk.ui.admin = function(args){ var admin = {}; admin.element = sk.div(args); admin.element.classList.add('skui-admin'); admin.header = sk.div({ parent: admin.element, cls: 'skui-admin-header' }); admin.body = sk.div({ parent: admin.element, cls: 'skui-admin-body', }) admin.navigation = sk.div({ parent: admin.body, cls: 'skui-admin-nav', }); admin.main = sk.ui.scenes({ parent: admin.body, cls: 'skui-admin-main', }); admin.footer = sk.div({ parent: admin.element, cls: 'skui-admin-footer' }); admin.menu = sk.ui.scenes({ parent: admin.navigation, }); admin.brand = sk.span({ parent: admin.header, cls: 'skui-admin-brand', text: args.brandName || '', }); return admin; }
Add sk.ui.field()
src/ui.js
Add sk.ui.field()
<ide><path>rc/ui.js <ide> self.activate(scene); <ide> } <ide> return self; <add>} <add> <add>sk.ui.field = function(args){ <add> var field = {}; <add> field.element = sk.div(args); <add> field.element.classList.add('skui-field'); <add> <add> field.header = sk.div({ <add> parent: field.element, <add> cls: 'skui-field-header', <add> }); <add> <add> field.body = sk.div({ <add> parent: field.element, <add> cls: 'skui-field-body', <add> }); <add> <add> field.label = sk.html({ <add> tag: 'label', <add> parent: field.header, <add> cls: 'skui-field-label', <add> text: args.label, <add> }); <add> return field; <ide> } <ide> <ide> skui.controlContainer = function(args, label){
Java
mit
2863ad0b96a129e217d7c55db8f8a3610695d5cf
0
faribas/RMG-Java,ReactionMechanismGenerator/RMG-Java,ReactionMechanismGenerator/RMG-Java,nyee/RMG-Java,keceli/RMG-Java,faribas/RMG-Java,nyee/RMG-Java,keceli/RMG-Java,keceli/RMG-Java,ReactionMechanismGenerator/RMG-Java,keceli/RMG-Java,jwallen/RMG-Java,nyee/RMG-Java,enochd/RMG-Java,rwest/RMG-Java,enochd/RMG-Java,ReactionMechanismGenerator/RMG-Java,KEHANG/RMG-Java,KEHANG/RMG-Java,enochd/RMG-Java,faribas/RMG-Java,rwest/RMG-Java,nyee/RMG-Java,rwest/RMG-Java,connie/RMG-Java,nyee/RMG-Java,keceli/RMG-Java,connie/RMG-Java,KEHANG/RMG-Java,enochd/RMG-Java,connie/RMG-Java,nyee/RMG-Java,jwallen/RMG-Java,KEHANG/RMG-Java,rwest/RMG-Java,enochd/RMG-Java,faribas/RMG-Java,jwallen/RMG-Java,jwallen/RMG-Java,enochd/RMG-Java,keceli/RMG-Java,jwallen/RMG-Java,ReactionMechanismGenerator/RMG-Java,KEHANG/RMG-Java,rwest/RMG-Java,jwallen/RMG-Java,KEHANG/RMG-Java,connie/RMG-Java,connie/RMG-Java
//////////////////////////////////////////////////////////////////////////////// // // RMG - Reaction Mechanism Generator // // Copyright (c) 2002-2009 Prof. William H. Green ([email protected]) and the // RMG Team ([email protected]) // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. // //////////////////////////////////////////////////////////////////////////////// package jing.rxn; import java.io.*; import jing.chem.*; import java.util.*; import jing.param.*; import jing.mathTool.*; import jing.chemParser.*; import jing.chem.Species; import jing.param.Temperature; import jing.rxnSys.NegativeConcentrationException; import jing.rxnSys.ReactionModelGenerator; import jing.rxnSys.SystemSnapshot; //## package jing::rxn //---------------------------------------------------------------------------- //jing\rxn\Reaction.java //---------------------------------------------------------------------------- /** Immutable objects. 
*/ //## class Reaction public class Reaction { protected static double TRIMOLECULAR_RATE_UPPER = 1.0E100; protected static double BIMOLECULAR_RATE_UPPER = 1.0E100; //## attribute BIMOLECULAR_RATE_UPPER protected static double UNIMOLECULAR_RATE_UPPER = 1.0E100; //## attribute UNIMOLECULAR_RATE_UPPER protected String comments = "No comment"; //## attribute comments protected Kinetics[] fittedReverseKinetics = null; //## attribute fittedReverseKinetics protected double rateConstant; protected Reaction reverseReaction = null; //## attribute reverseReaction protected Kinetics[] kinetics; protected Structure structure; protected double UpperBoundRate;//svp protected double LowerBoundRate;//svp //protected Kinetics additionalKinetics = null; //This is incase a reaction has two completely different transition states. protected boolean finalized = false; protected String ChemkinString = null; protected boolean ratesForKineticsAndAdditionalKineticsCross = false; //10/29/07 gmagoon: added variable to keep track of whether both rate constants are maximum for some temperature in the temperature range protected boolean kineticsFromPrimaryKineticLibrary = false; protected ReactionTemplate rxnTemplate; // Constructors //## operation Reaction() public Reaction() { //#[ operation Reaction() //#] } //## operation Reaction(Structure,RateConstant) private Reaction(Structure p_structure, Kinetics[] p_kinetics) { //#[ operation Reaction(Structure,RateConstant) structure = p_structure; kinetics = p_kinetics; //rateConstant = calculateTotalRate(Global.temperature); //#] } /*public Reaction(Reaction rxn) { structure = rxn.structure; kinetics = rxn.kinetics; comments = rxn.comments; fittedReverseKinetics = rxn.fittedReverseKinetics; rateConstant = rxn.rateConstant; reverseReaction = rxn.reverseReaction; UpperBoundRate = rxn.UpperBoundRate; LowerBoundRate = rxn.LowerBoundRate; additionalKinetics = rxn.additionalKinetics; finalized = rxn.finalized; ChemkinString = rxn.ChemkinString; 
ratesForKineticsAndAdditionalKineticsCross = rxn.ratesForKineticsAndAdditionalKineticsCross; }*/ //## operation allProductsIncluded(HashSet) public boolean allProductsIncluded(HashSet p_speciesSet) { //#[ operation allProductsIncluded(HashSet) Iterator iter = getProducts(); while (iter.hasNext()) { Species spe = ((Species)iter.next()); if (!p_speciesSet.contains(spe)) return false; } return true; //#] } //## operation allReactantsIncluded(HashSet) public boolean allReactantsIncluded(HashSet p_speciesSet) { //#[ operation allReactantsIncluded(HashSet) if (p_speciesSet == null) throw new NullPointerException(); Iterator iter = getReactants(); while (iter.hasNext()) { Species spe = ((Species)iter.next()); if (!p_speciesSet.contains(spe)) return false; } return true; //#] } /** Calculate this reaction's thermo parameter. Basically, make addition of the thermo parameters of all the reactants and products. */ //## operation calculateHrxn(Temperature) public double calculateHrxn(Temperature p_temperature) { //#[ operation calculateHrxn(Temperature) return structure.calculateHrxn(p_temperature); //#] } //## operation calculateKeq(Temperature) public double calculateKeq(Temperature p_temperature) { //#[ operation calculateKeq(Temperature) return structure.calculateKeq(p_temperature); //#] } //## operation calculateKeqUpperBound(Temperature) //svp public double calculateKeqUpperBound(Temperature p_temperature) { //#[ operation calculateKeqUpperBound(Temperature) return structure.calculateKeqUpperBound(p_temperature); //#] } //## operation calculateKeqLowerBound(Temperature) //svp public double calculateKeqLowerBound(Temperature p_temperature) { //#[ operation calculateKeqLowerBound(Temperature) return structure.calculateKeqLowerBound(p_temperature); //#] } public double calculateTotalRate(Temperature p_temperature){ double rate =0; Temperature stdtemp = new Temperature(298,"K"); double Hrxn = calculateHrxn(stdtemp); /* AJ 12JULY2010: * Added diffusive limits from previous 
RMG version by replacing function calculateTotalRate * Checks the exothermicity and molecularity of reaction to determine the diffusive rate limit */ /* * 29Jun2009-MRH: Added a kinetics from PRL check * If the kinetics for this reaction is from a PRL, use those numbers * to compute the rate. Else, proceed as before. */ /* * MRH 18MAR2010: * Changing the structure of a reaction's kinetics * If the kinetics are from a primary kinetic library, we assume the user * has supplied the total pre-exponential factor for the reaction (and * not the per-event pre-exponential facor). * If the kinetics were estimated by RMG, the pre-exponential factor must * be multiplied by the "redundancy" (# of events) */ if (kineticsFromPrimaryKineticLibrary) { Kinetics[] k_All = kinetics; for (int numKinetics=0; numKinetics<kinetics.length; numKinetics++) { Kinetics k = k_All[numKinetics]; if (k instanceof ArrheniusEPKinetics) rate += k.calculateRate(p_temperature,Hrxn); else rate += k.calculateRate(p_temperature); } return rate; } else if (isForward()){ Kinetics[] k_All = kinetics; for (int numKinetics=0; numKinetics<kinetics.length; numKinetics++) { Kinetics k = k_All[numKinetics].multiply(structure.redundancy); if (k instanceof ArrheniusEPKinetics) rate += k.calculateRate(p_temperature,Hrxn); else rate += k.calculateRate(p_temperature); } /* Diffusion limits added by AJ on July 12, 2010 * Requires correction in the forward direction only, reverse reaction corrects itself * If ReactionModelGenerator.useDiffusion is true (solvation is on) * compute kd and return keff */ if (ReactionModelGenerator.getUseDiffusion()) { int numReacts = structure.getReactantNumber(); int numProds = structure.getProductNumber(); double keff = 0.0; double DiffFactor = 0.0; if (numReacts == 1 && numProds == 1) { keff = rate; setKineticsComments(getComments() + "\t" + "Diffusive limits do not apply " + "\t" + "Keq =" + calculateKeq(p_temperature),0); return keff; } else if (numReacts == 1 && numProds == 2) { 
double k_back = rate / calculateKeq(p_temperature); LinkedList reactantsInBackRxn = structure.products; double k_back_diff= calculatediff(reactantsInBackRxn); double k_back_eff= k_back*k_back_diff/(k_back + k_back_diff); keff = k_back_eff*calculateKeq(p_temperature); //if (keff/k_chem < 0.2){ //if (!getKinetics().getComment().endsWith("Diffusion limited")){ DiffFactor = keff/rate; setKineticsComments(getComments() + "\t" + "Diffusion factor = " + DiffFactor + "\t" + "Keq =" + calculateKeq(p_temperature),0); return keff; } else if (numReacts == 2 && numProds == 1) { double k_forw = rate; LinkedList reactantsInForwRxn = structure.reactants; double k_forw_diff = calculatediff(reactantsInForwRxn); double k_forw_eff = k_forw*k_forw_diff/(k_forw + k_forw_diff); keff = k_forw_eff; //if (keff/k_forw < 0.2){ //if (!getKinetics().getComment().endsWith("Diffusion limited")){ DiffFactor = keff/rate; setKineticsComments(getComments() + "\t" + "Diffusion factor = " + DiffFactor + "\t" + "Keq =" + calculateKeq(p_temperature),0); return keff; } else if (numReacts == 2 && numProds == 2) { //Temperature stdTemp = new Temperature(298, "K"); double deltaHrxn = structure.calculateHrxn(p_temperature); //setKineticsComments(getKinetics().getComment() + "Diffusion limited"); if (deltaHrxn<0){ // Forward reaction is exothermic hence the corresponding diffusion limit applies double k_forw = rate; LinkedList reactantsInForwRxn = structure.reactants; double k_forw_diff = calculatediff(reactantsInForwRxn); double k_forw_eff = k_forw*k_forw_diff/(k_forw + k_forw_diff); keff = k_forw_eff; //if (keff/k_forw < 0.2){ //if (!getKinetics().getComment().endsWith("Diffusion limited")) { DiffFactor = keff/rate; setKineticsComments(getComments() + "\t" + "Diffusion factor = " + DiffFactor + "\t" + "Keq =" + calculateKeq(p_temperature),0); return keff; } else if (deltaHrxn>0){ // Reverse reaction is exothermic and the corresponding diffusion limit should be used double k_back = rate / 
calculateKeq(p_temperature); LinkedList reactantsInBackRxn = structure.products; double k_back_diff= calculatediff(reactantsInBackRxn); double k_back_eff= k_back*k_back_diff/(k_back + k_back_diff); keff = k_back_eff*calculateKeq(p_temperature); //if (keff/k_chem < 0.2){ //if (!getKinetics().getComment().endsWith("Diffusion limited")) { DiffFactor = keff/rate; setKineticsComments(getComments() + "\t" + "Diffusion factor = " + DiffFactor + "\t" + "Keq =" + calculateKeq(p_temperature),0); return keff; } } else if (numReacts == 2 && numProds == 3) { double k_forw = rate; LinkedList reactantsInForwRxn = structure.reactants; double k_forw_diff = calculatediff(reactantsInForwRxn); double k_forw_eff = k_forw*k_forw_diff/(k_forw + k_forw_diff); keff = k_forw_eff; //if (keff/k_forw < 0.2){ //if (!getKinetics().getComment().endsWith("Diffusion limited")){ DiffFactor = keff/rate; setKineticsComments(getComments() + "\t" + "Diffusion factor = " + DiffFactor + "\t" + "Keq =" + calculateKeq(p_temperature),0); return keff; } } setKineticsComments(getComments() + "\t" + "Keq =" + calculateKeq(p_temperature),0); return rate; // Iterator kineticIter = getAllKinetics().iterator(); // while (kineticIter.hasNext()){ // Kinetics k = (Kinetics)kineticIter.next(); // if (k instanceof ArrheniusEPKinetics) // rate = rate + k.calculateRate(p_temperature,Hrxn); // else // rate = rate + k.calculateRate(p_temperature); // } // // return rate; } else if (isBackward()){ Reaction r = getReverseReaction(); rate = r.calculateTotalRate(p_temperature); return rate*calculateKeq(p_temperature); } else { throw new InvalidReactionDirectionException(); } } public double calculatediff(LinkedList p_struct) { if (p_struct.size()!=2){ System.out.println("Cannot compute diffusive limit if number of reactants is not equal to 2"); } // Array containing the radii of the two species passed in p_struct double[] r; double[] d; r = new double[2]; d = new double[2]; int i= 0; for (Iterator iter = p_struct.iterator(); 
iter.hasNext();) { Species sp = (Species)iter.next(); ChemGraph cg = sp.getChemGraph(); r[i] = cg.getRadius(); d[i] = cg.getDiffusivity(); i = i+1; } double kdiff; kdiff = (88/7)*(d[0] + d[1])*(r[0] + r[1]) * 6.023e29; // units of r[i]=m; d[1]=m2/sec; kdiff=cm3/mole sec return kdiff; } //## operation calculateUpperBoundRate(Temperature) //svp public double calculateUpperBoundRate(Temperature p_temperature){ //#[ operation calculateUpperBoundRate(Temperature) if (isForward()){ double A; double E; double n; for (int numKinetics=0; numKinetics<kinetics.length; numKinetics++) { A = kinetics[numKinetics].getA().getUpperBound(); E = kinetics[numKinetics].getE().getLowerBound(); n = kinetics[numKinetics].getN().getUpperBound(); if (A > 1E300) { A = kinetics[numKinetics].getA().getValue()*1.2; } //Kinetics kinetics = getRateConstant().getKinetics(); if (kinetics[numKinetics] instanceof ArrheniusEPKinetics){ ArrheniusEPKinetics arrhenius = (ArrheniusEPKinetics)kinetics[numKinetics]; double H = calculateHrxn(p_temperature); if (H < 0) { if (arrhenius.getAlpha().getValue() > 0){ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha*H; } } else { if (arrhenius.getAlpha().getValue() > 0){ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha*H; } } } if (E < 0){ E = 0; } double indiv_k = 0.0; indiv_k = A*Math.pow(p_temperature.getK(),n)*Math.exp(-E/GasConstant.getKcalMolK()/p_temperature.getK()); indiv_k *= getStructure().getRedundancy(); UpperBoundRate += indiv_k; } return UpperBoundRate; } else if (isBackward()) { Reaction r = getReverseReaction(); if (r == null) throw new NullPointerException("Reverse reaction is null.\n" + structure.toString()); if (!r.isForward()) throw new InvalidReactionDirectionException(); for (int numKinetics=0; numKinetics<kinetics.length; numKinetics++) { double A = 
kinetics[numKinetics].getA().getUpperBound(); double E = kinetics[numKinetics].getE().getLowerBound(); double n = kinetics[numKinetics].getN().getUpperBound(); if (A > 1E300) { A = kinetics[numKinetics].getA().getValue()*1.2; } //Kinetics kinetics = getRateConstant().getKinetics(); if (kinetics[numKinetics] instanceof ArrheniusEPKinetics){ ArrheniusEPKinetics arrhenius = (ArrheniusEPKinetics)kinetics[numKinetics]; double H = calculateHrxn(p_temperature); if (H < 0) { if (arrhenius.getAlpha().getValue() > 0){ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha*H; } } else { if (arrhenius.getAlpha().getValue() > 0){ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha*H; } } } if (E < 0){ E = 0; } double indiv_k = 0.0; indiv_k = A*Math.pow(p_temperature.getK(),n)*Math.exp(-E/GasConstant.getKcalMolK()/p_temperature.getK()); indiv_k *= getStructure().getRedundancy(); UpperBoundRate += indiv_k*calculateKeqUpperBound(p_temperature); } return UpperBoundRate; } else{ throw new InvalidReactionDirectionException(); } //#] } //## operation calculateLowerBoundRate(Temperature) //svp public double calculateLowerBoundRate(Temperature p_temperature){ //#[ operation calculateLowerBoundRate(Temperature) if (isForward()){ for (int numKinetics=0; numKinetics<kinetics.length; ++numKinetics) { double A = kinetics[numKinetics].getA().getLowerBound(); double E = kinetics[numKinetics].getE().getUpperBound(); double n = kinetics[numKinetics].getN().getLowerBound(); if (A > 1E300 || A <= 0) { A = kinetics[numKinetics].getA().getValue()/1.2; } //Kinetics kinetics = getRateConstant().getKinetics(); if (kinetics[numKinetics] instanceof ArrheniusEPKinetics){ ArrheniusEPKinetics arrhenius = (ArrheniusEPKinetics)kinetics[numKinetics]; double H = calculateHrxn(p_temperature); if (H < 0) { if 
(arrhenius.getAlpha().getValue()>0){ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha*H; } } else { if (arrhenius.getAlpha().getValue()>0){ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha*H; } } } double indiv_k = 0.0; indiv_k = A*Math.pow(p_temperature.getK(),n)*Math.exp(-E/GasConstant.getKcalMolK()/p_temperature.getK()); indiv_k *= getStructure().getRedundancy(); LowerBoundRate += indiv_k; } return LowerBoundRate; } else if (isBackward()) { Reaction r = getReverseReaction(); if (r == null) throw new NullPointerException("Reverse reaction is null.\n" + structure.toString()); if (!r.isForward()) throw new InvalidReactionDirectionException(); for (int numKinetics=0; numKinetics<kinetics.length; ++numKinetics) { double A = kinetics[numKinetics].getA().getLowerBound(); double E = kinetics[numKinetics].getE().getUpperBound(); double n = kinetics[numKinetics].getN().getLowerBound(); if (A > 1E300) { A = kinetics[numKinetics].getA().getValue()/1.2; } if (kinetics[numKinetics] instanceof ArrheniusEPKinetics){ ArrheniusEPKinetics arrhenius = (ArrheniusEPKinetics)kinetics[numKinetics]; double H = calculateHrxn(p_temperature); if (H < 0) { if (arrhenius.getAlpha().getValue() > 0){ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha*H; } } else { if (arrhenius.getAlpha().getValue() > 0){ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha*H; } } } double indiv_k = 0.0; indiv_k = A*Math.pow(p_temperature.getK(),n)*Math.exp(-E/GasConstant.getKcalMolK()/p_temperature.getK()); indiv_k *= getStructure().getRedundancy(); LowerBoundRate += indiv_k*calculateKeqLowerBound(p_temperature); } return 
LowerBoundRate; } else{ throw new InvalidReactionDirectionException(); } //#] } //## operation calculateSrxn(Temperature) public double calculateSrxn(Temperature p_temperature) { //#[ operation calculateSrxn(Temperature) return structure.calculateSrxn(p_temperature); //#] } //## operation calculateThirdBodyCoefficient(SystemSnapshot) public double calculateThirdBodyCoefficient(SystemSnapshot p_presentStatus) { //#[ operation calculateThirdBodyCoefficient(SystemSnapshot) if (!(this instanceof ThirdBodyReaction)) return 1; else { return ((ThirdBodyReaction)this).calculateThirdBodyCoefficient(p_presentStatus); } //#] } //## operation checkRateRange() public boolean checkRateRange() { //#[ operation checkRateRange() Temperature t = new Temperature(1500,"K"); double rate = calculateTotalRate(t); if (getReactantNumber() == 2) { if (rate > BIMOLECULAR_RATE_UPPER) return false; } else if (getReactantNumber() == 1) { if (rate > UNIMOLECULAR_RATE_UPPER) return false; } else if (getReactantNumber() == 3) { if (rate > TRIMOLECULAR_RATE_UPPER) return false; } else throw new InvalidReactantNumberException(); return true; //#] } //## operation contains(Species) public boolean contains(Species p_species) { //#[ operation contains(Species) if (containsAsReactant(p_species) || containsAsProduct(p_species)) return true; else return false; //#] } //## operation containsAsProduct(Species) public boolean containsAsProduct(Species p_species) { //#[ operation containsAsProduct(Species) Iterator iter = getProducts(); while (iter.hasNext()) { //ChemGraph cg = (ChemGraph)iter.next(); Species spe = (Species)iter.next(); if (spe.equals(p_species)) return true; } return false; //#] } //## operation containsAsReactant(Species) public boolean containsAsReactant(Species p_species) { //#[ operation containsAsReactant(Species) Iterator iter = getReactants(); while (iter.hasNext()) { //ChemGraph cg = (ChemGraph)iter.next(); Species spe = (Species)iter.next(); if (spe.equals(p_species)) return true; } 
    return false;
    //#]
}

/**
 * Checks if the structure of the reaction is the same. Does not check the rate constant.
 * Two reactions with the same structure but different rate constants will be equal.
 */
//## operation equals(Object)
public boolean equals(Object p_reaction) {
    //#[ operation equals(Object)
    if (this == p_reaction) return true;
    if (!(p_reaction instanceof Reaction)) return false;
    Reaction r = (Reaction)p_reaction;
    if (!getStructure().equals(r.getStructure())) return false;
    return true;
    //#]
}

/*
 * NOTE(review): two large commented-out methods, fitReverseKineticsPrecisely()
 * and getPreciseReverseKinetics(), were condensed away from this spot. Both
 * wrote a rate-vs-temperature table, shelled out to the external "fit3p"
 * executable (bin/fit3pbnd.exe), and parsed fitted Arrhenius parameters
 * (A, n, E) back from fit3p/output.dat. They had been commented out in
 * June 2009 by RWest & JWAllen as unused and unmaintained ("we have no clue
 * what they do"). Recover the full text from version control if ever needed.
 */

/**
 * Rough fit of reverse-rate Arrhenius parameters from the forward kinetics
 * and reaction thermochemistry at 298.15 K. No-op (clears the cache) when
 * this reaction is already the forward direction.
 */
//## operation fitReverseKineticsRoughly()
public void fitReverseKineticsRoughly() {
    //#[ operation fitReverseKineticsRoughly()
    // now is a rough fitting
    if (isForward()) {
        fittedReverseKinetics = null;
    }
    else {
        //double temp = 715;
        // double temp = 298.15; //10/29/07 gmagoon: Sandeep made change to temp = 298 on his computer locally
        // double temp = 1350; //11/6/07 gmagoon:**** changed to actual temperature in my condition file to create agreement with old version; apparently, choice of temp has large effect; //11/9/07 gmagoon: commented out
        double temp = 298.15; //11/9/07 gmagoon: restored use of 298.15 per discussion with Sandeep
        //double temp = Global.temperature.getK();
        Kinetics[] k = getKinetics();
        fittedReverseKinetics = new Kinetics[k.length];
        double doubleAlpha;
        for (int numKinetics=0; numKinetics<k.length; numKinetics++) {
            if (k[numKinetics] instanceof ArrheniusEPKinetics)
                doubleAlpha = ((ArrheniusEPKinetics)k[numKinetics]).getAlphaValue();
            else doubleAlpha = 0;
            double Hrxn = calculateHrxn(new Temperature(temp,"K"));
            double Srxn = calculateSrxn(new Temperature(temp, "K"));
            // for EvansPolyani kinetics (Ea = Eo + alpha * Hrxn) remember that k.getEValue() gets Eo not Ea
            // this Hrxn is for the reverse reaction (ie. -Hrxn_forward)
            double doubleEr = k[numKinetics].getEValue() - (doubleAlpha-1)*Hrxn;
            if (doubleEr < 0) {
                System.err.println("fitted Er < 0: "+Double.toString(doubleEr));
                System.err.println(getStructure().toString());
                //doubleEr = 0;
            }
            UncertainDouble Er = new UncertainDouble(doubleEr, k[numKinetics].getE().getUncertainty(), k[numKinetics].getE().getType());
            UncertainDouble n = new UncertainDouble(0,0, "Adder");
            double doubleA = k[numKinetics].getAValue()* Math.pow(temp, k[numKinetics].getNValue())* Math.exp(Srxn/GasConstant.getCalMolK());
            // assumes Ideal gas law concentration and 1 Atm reference state
            doubleA *= Math.pow(GasConstant.getCCAtmMolK()*temp, -getStructure().getDeltaN());
            fittedReverseKinetics[numKinetics] = new ArrheniusKinetics(new UncertainDouble(doubleA, 0, "Adder"), n , Er, "300-1500", 1, "fitting from forward and thermal",null);
        }
    }
    return;
    //#]
}

/**
 * Generates a reaction whose structure is opposite to that of the present reaction.
 * Just appends the rate constant of this reaction to the reverse reaction.
 *
 */
//## operation generateReverseReaction()
public void generateReverseReaction() {
    //#[ operation generateReverseReaction()
    Structure s = getStructure();
    //Kinetics k = getKinetics();
    Kinetics[] k = kinetics;
    // Throws if this reaction has no kinetics of its own to hand to the reverse.
    if (kinetics == null) throw new NullPointerException();
    Structure newS = s.generateReverseStructure();
    newS.setRedundancy(s.getRedundancy());
    Reaction r = new Reaction(newS, k);
    // if (hasAdditionalKinetics()){
    // r.addAdditionalKinetics(additionalKinetics,1);
    // }
    // Link the two reactions to each other.
    r.setReverseReaction(this);
    this.setReverseReaction(r);
    return;
    //#]
}

public String getComments() {
    return comments;
}

/** Direction flag of the underlying structure. */
//## operation getDirection()
public int getDirection() {
    //#[ operation getDirection()
    return getStructure().getDirection();
    //#]
}

/**
 * Reverse kinetics fitted from forward kinetics + thermo; lazily computed
 * via fitReverseKineticsRoughly() on first access.
 */
//## operation getFittedReverseKinetics()
public Kinetics[] getFittedReverseKinetics() {
    //#[ operation getFittedReverseKinetics()
    if (fittedReverseKinetics == null) fitReverseKineticsRoughly();
    return fittedReverseKinetics;
    //#]
}

/*//## operation getForwardRateConstant()
public Kinetics getForwardRateConstant() {
    //#[ operation getForwardRateConstant()
    if (isForward()) return kinetics;
    else return null;
    //#]
}
*/

//## operation getKinetics()
public Kinetics[] getKinetics() {
    //#[ operation getKinetics()
    // returns the kinetics OF THE FORWARD REACTION
    // ie. if THIS is reverse, it calls this.getReverseReaction().getKinetics()
    /*
     * 29Jun2009-MRH:
     * When getting kinetics, check whether it comes from a PRL or not.
     * If so, return the kinetics. We are not worried about the redundancy
     * because I assume the user inputs the Arrhenius kinetics for the overall
     * reaction A = B + C
     *
     * E.g. CH4 = CH3 + H   A n E
     * The Arrhenius parameters would be for the overall decomposition of CH4,
     * not for each carbon-hydrogen bond fission
     */
    if (isFromPrimaryKineticLibrary()) {
        return kinetics;
    }
    if (isForward()) {
        // Scale each kinetics entry by the structure's redundancy.
        int red = structure.getRedundancy();
        Kinetics[] kinetics2return = new Kinetics[kinetics.length];
        for (int numKinetics=0; numKinetics<kinetics.length; ++numKinetics) {
            kinetics2return[numKinetics] = kinetics[numKinetics].multiply(red);
        }
        return kinetics2return;
    }
    else if (isBackward()) {
        Reaction rr = getReverseReaction();
        // Added by MRH on 7/Sept/2009
        // Required when reading in the restart files
        if (rr == null) {
            generateReverseReaction();
            rr = getReverseReaction();
        }
        if (rr == null) throw new NullPointerException("Reverse reaction is null.\n" + structure.toString());
        if (!rr.isForward()) throw new InvalidReactionDirectionException(structure.toString());
        return rr.getKinetics();
    }
    else throw new InvalidReactionDirectionException(structure.toString());
    //#]
}

public void setKineticsComments(String p_string, int num_k){
    kinetics[num_k].setComments(p_string);
}

// shamel: Added this function 6/10/2010, to get Kinetics Source to identify duplicates
// in Reaction Library, Seed Mech and Template Reaction with Library Reaction feature
public String getKineticsSource(int num_k){
    // The num_k is the number for different kinetics stored for one type of reaction but formed due to different families
    // Check if the kinetics exits
    if (kinetics != null) {
        // Check if the "source" string is not null
        if(kinetics[num_k].getSource() != null){
            return kinetics[num_k].getSource();
        }
        else{
            // This is mostly done for case of H Abstraction where forward kinetic source is null
            //we might need to check if this "source" is also null (Can be source of Bug)
            // NOTE(review): reverseReaction may itself be null here — potential NPE; confirm callers.
            return this.reverseReaction.kinetics[num_k].getSource();
        }
    }
    else
        // Returns Source as null when there are no Kinetics at all!
        return null;
}

public void setKineticsSource(String p_string, int num_k){
    kinetics[num_k].setSource(p_string);
}

/** Upper-bound rate at the given temperature; lazily computed and cached. */
//## operation getUpperBoundRate(Temperature)
public double getUpperBoundRate(Temperature p_temperature){//svp
    //#[ operation getUpperBoundRate(Temperature)
    if (UpperBoundRate == 0.0){
        calculateUpperBoundRate(p_temperature);
    }
    return UpperBoundRate;
    //#]
}

/** Lower-bound rate at the given temperature; lazily computed and cached. */
//## operation getLowerBoundRate(Temperature)
public double getLowerBoundRate(Temperature p_temperature){//svp
    //#[ operation getLowerBoundRate(Temperature)
    if (LowerBoundRate == 0.0){
        calculateLowerBoundRate(p_temperature);
    }
    return LowerBoundRate;
    //#]
}

//## operation getProductList()
public LinkedList getProductList() {
    //#[ operation getProductList()
    return structure.getProductList();
    //#]
}

//10/26/07 gmagoon: changed to have temperature and pressure passed as parameters (part of eliminating use of Global.temperature)
public double getRateConstant(Temperature p_temperature){
    //public double getRateConstant(){
    // Lazily computed; cached in rateConstant after first call.
    if (rateConstant == 0)
        rateConstant = calculateTotalRate(p_temperature);
        // rateConstant = calculateTotalRate(Global.temperature);
    return rateConstant;
}

//## operation getProductNumber()
public int getProductNumber() {
    //#[ operation getProductNumber()
    return getStructure().getProductNumber();
    //#]
}

//## operation getProducts()
public ListIterator getProducts() {
    //#[ operation getProducts()
    return structure.getProducts();
    //#]
}

/*//## operation getRateConstant()
public Kinetics getRateConstant() {
    //#[ operation getRateConstant()
    if (isForward()) {
        return rateConstant;
    }
    else if (isBackward()) {
        Reaction rr = getReverseReaction();
        if (rr == null) throw new NullPointerException("Reverse reaction is null.\n" + structure.toString());
        if (!rr.isForward()) throw new InvalidReactionDirectionException(structure.toString());
        return rr.getRateConstant();
    }
    else throw new InvalidReactionDirectionException(structure.toString());
    //#]
}*/

//## operation getReactantList()
public LinkedList
getReactantList() {
    //#[ operation getReactantList()
    return structure.getReactantList();
    //#]
}

//## operation getReactantNumber()
public int getReactantNumber() {
    //#[ operation getReactantNumber()
    return getStructure().getReactantNumber();
    //#]
}

//## operation getReactants()
public ListIterator getReactants() {
    //#[ operation getReactants()
    return structure.getReactants();
    //#]
}

//## operation getRedundancy()
public int getRedundancy() {
    //#[ operation getRedundancy()
    return getStructure().getRedundancy();
    //#]
}

/** True if any species on either side has resonance isomers. */
public boolean hasResonanceIsomer() {
    //#[ operation hasResonanceIsomer()
    return (hasResonanceIsomerAsReactant() || hasResonanceIsomerAsProduct());
    //#]
}

//## operation hasResonanceIsomerAsProduct()
public boolean hasResonanceIsomerAsProduct() {
    //#[ operation hasResonanceIsomerAsProduct()
    for (Iterator iter = getProducts(); iter.hasNext();) {
        Species spe = ((Species)iter.next());
        if (spe.hasResonanceIsomers()) return true;
    }
    return false;
    //#]
}

//## operation hasResonanceIsomerAsReactant()
public boolean hasResonanceIsomerAsReactant() {
    //#[ operation hasResonanceIsomerAsReactant()
    for (Iterator iter = getReactants(); iter.hasNext();) {
        Species spe = ((Species)iter.next());
        if (spe.hasResonanceIsomers()) return true;
    }
    return false;
    //#]
}

//## operation hasReverseReaction()
public boolean hasReverseReaction() {
    //#[ operation hasReverseReaction()
    return reverseReaction != null;
    //#]
}

//## operation hashCode()
public int hashCode() {
    //#[ operation hashCode()
    // just use the structure's hashcode
    // (consistent with equals(), which compares only structures)
    return structure.hashCode();
    //#]
}

//## operation isDuplicated(Reaction)
/*public boolean isDuplicated(Reaction p_reaction) {
    //#[ operation isDuplicated(Reaction)
    // the same structure, return true
    Structure str1 = getStructure();
    Structure str2 = p_reaction.getStructure();
    //if (str1.isDuplicate(str2)) return true;
    // if not the same structure, check the resonance isomers
    if (!hasResonanceIsomer()) return false;
    if (str1.equals(str2)) return true;
    else return false;
    //#]
}*/

//## operation isBackward()
public boolean isBackward() {
    //#[ operation isBackward()
    return structure.isBackward();
    //#]
}

//## operation isForward()
public boolean isForward() {
    //#[ operation isForward()
    return structure.isForward();
    //#]
}

/** True if every reactant and every product is in the given species set. */
//## operation isIncluded(HashSet)
public boolean isIncluded(HashSet p_speciesSet) {
    //#[ operation isIncluded(HashSet)
    return (allReactantsIncluded(p_speciesSet) && allProductsIncluded(p_speciesSet));
    //#]
}

/**
 * Factory method: validates structure and every kinetics entry, builds the
 * Reaction, and optionally generates and links the reverse reaction.
 */
//## operation makeReaction(Structure,Kinetics,boolean)
public static Reaction makeReaction(Structure p_structure, Kinetics[] p_kinetics, boolean p_generateReverse) {
    //#[ operation makeReaction(Structure,Kinetics,boolean)
    if (!p_structure.repOk()) throw new InvalidStructureException(p_structure.toChemkinString(false).toString());
    for (int numKinetics=0; numKinetics<p_kinetics.length; numKinetics++) {
        if (!p_kinetics[numKinetics].repOk()) throw new InvalidKineticsException(p_kinetics[numKinetics].toString());
    }
    Reaction r = new Reaction(p_structure, p_kinetics);
    if (p_generateReverse) {
        r.generateReverseReaction();
    }
    else {
        r.setReverseReaction(null);
    }
    return r;
    //#]
}

//## operation reactantEqualsProduct()
public boolean reactantEqualsProduct() {
    //#[ operation reactantEqualsProduct()
    return getStructure().reactantEqualsProduct();
    //#]
}

/**
 * Representation invariant: valid structure, a definite direction, a linked
 * reverse reaction when backward, valid kinetics, and a rate within range.
 * Prints a diagnostic and returns false on the first violation found.
 */
//## operation repOk()
public boolean repOk() {
    //#[ operation repOk()
    if (!structure.repOk()) {
        System.out.println("Invalid Reaction Structure:" + structure.toString());
        return false;
    }
    if (!isForward() && !isBackward()) {
        System.out.println("Invalid Reaction Direction: " + String.valueOf(getDirection()));
        return false;
    }
    if (isBackward() && reverseReaction == null) {
        System.out.println("Backward Reaction without a reversed reaction defined!");
        return false;
    }
    /*if (!getRateConstant().repOk()) {
        System.out.println("Invalid Rate Constant: " + getRateConstant().toString());
        return false;
    }*/
    Kinetics[] allKinetics = getKinetics();
    for (int numKinetics=0; numKinetics<allKinetics.length; ++numKinetics) {
        if (!allKinetics[numKinetics].repOk()) {
            System.out.println("Invalid Kinetics: " + allKinetics[numKinetics].toString());
            return false;
        }
    }
    if (!checkRateRange()) {
        System.out.println("reaction rate is higher than the upper rate limit!");
        System.out.println(getStructure().toString());
        Temperature tup = new Temperature(1500,"K");
        if (isForward()) {
            System.out.println("k(T=1500) = " + String.valueOf(calculateTotalRate(tup)));
        }
        else {
            System.out.println("k(T=1500) = " + String.valueOf(calculateTotalRate(tup)));
            System.out.println("Keq(T=1500) = " + String.valueOf(calculateKeq(tup)));
            System.out.println("krev(T=1500) = " + String.valueOf(getReverseReaction().calculateTotalRate(tup)));
        }
        System.out.println(getKinetics());
        return false;
    }
    return true;
    //#]
}

/** Links this reaction and the given one as each other's reverse. */
//## operation setReverseReaction(Reaction)
public void setReverseReaction(Reaction p_reverseReaction) {
    //#[ operation setReverseReaction(Reaction)
    reverseReaction = p_reverseReaction;
    if (p_reverseReaction != null) reverseReaction.reverseReaction = this;
    //#]
}

/**
 * Chemkin-format line(s) for this reaction at the given temperature; the
 * result is cached in ChemkinString after the first call. Multiple kinetics
 * entries are emitted as DUP-separated duplicates.
 */
//## operation toChemkinString()
public String toChemkinString(Temperature p_temperature) {
    //#[ operation toChemkinString()
    if (ChemkinString != null) return ChemkinString;
    StringBuilder result = new StringBuilder();
    StringBuilder strucString = getStructure().toChemkinString(hasReverseReaction());
    Temperature stdtemp = new Temperature(298,"K");
    double Hrxn = calculateHrxn(stdtemp);
    Kinetics[] allKinetics = getKinetics();
    for (int numKinetics=0; numKinetics<allKinetics.length; ++numKinetics) {
        String k = allKinetics[numKinetics].toChemkinString(Hrxn,p_temperature,true);
        if (allKinetics.length == 1)
            result.append(strucString + " " + k);
        else
            result.append(strucString + " " + k + "\nDUP\n");
    }
    ChemkinString = result.toString();
    return result.toString();
}

public String toChemkinString(Temperature p_temperature, Pressure p_pressure) {
    // For certain PDep cases it's helpful to be able to call this with a
temperature and pressure // but usually (and in this case) the pressure is irrelevant, so we just call the above toChemkinString(Temperature) method: return toChemkinString(p_temperature); } public String toRestartString(Temperature p_temperature, boolean pathReaction) { /* * Edited by MRH on 18Jan2010 * * Writing restart files was causing a bug in the RMG-generated chem.inp file * For example, H+CH4=CH3+H2 in input file w/HXD13, CH4, and H2 * RMG would correctly multiply the A factor by the structure's redundancy * when calculating the rate to place in the ODEsolver input file. However, * the A reported in the chem.inp file would be the "per event" A. This was * due to the reaction.toChemkinString() method being called when writing the * Restart coreReactions.txt and edgeReactions.txt files. At the first point of * writing the chemkinString for this reaction (when it is still an edge reaction), * RMG had not yet computed the redundancy of the structure (as H was not a core * species at the time, but CH3 and H2 were). When RMG tried to write the chemkinString * for the above reaction, using the correct redundancy, the chemkinString already existed * and thus the toChemkinString() method was exited immediately. * MRH is replacing the reaction.toChemkinString() call with reaction.toRestartString() * when writing the Restart files, to account for this bug. 
*/ String result = getStructure().toRestartString(hasReverseReaction()).toString(); //+ " "+getStructure().direction + " "+getStructure().redundancy; // MRH 18Jan2010: Restart files do not look for direction/redundancy /* * MRH 14Feb2010: Handle reactions with multiple kinetics */ String totalResult = ""; Kinetics[] allKinetics = getKinetics(); for (int numKinetics=0; numKinetics<allKinetics.length; ++numKinetics) { totalResult += result + " " + allKinetics[numKinetics].toChemkinString(calculateHrxn(p_temperature),p_temperature,true); if (allKinetics.length != 1) { if (pathReaction) { totalResult += "\n"; if (numKinetics != allKinetics.length-1) totalResult += getStructure().direction + "\t"; } else totalResult += "\n\tDUP\n"; } } return totalResult; } /* * MRH 23MAR2010: * Method not used in RMG */ // //## operation toFullString() // public String toFullString() { // //#[ operation toFullString() // return getStructure().toString() + getKinetics().toString() + getComments().toString(); // // // // //#] // } //## operation toString() public String toString(Temperature p_temperature) { //#[ operation toString() String string2return = ""; Kinetics[] k = getKinetics(); for (int numKinetics=0; numKinetics<k.length; ++numKinetics) { string2return += getStructure().toString() + "\t"; string2return += k[numKinetics].toChemkinString(calculateHrxn(p_temperature),p_temperature,false); if (k.length > 1) string2return += "\n"; } return string2return; } /* * MRH 23MAR2010: * This method is redundant to toString() */ //10/26/07 gmagoon: changed to take temperature as parameter (required changing function name from toString to reactionToString // public String reactionToString(Temperature p_temperature) { // // public String toString() { // //#[ operation toString() // // Temperature p_temperature = Global.temperature; // Kinetics k = getKinetics(); // String kString = k.toChemkinString(calculateHrxn(p_temperature),p_temperature,false); // // return getStructure().toString() + 
'\t' + kString; // //#] // } public static double getBIMOLECULAR_RATE_UPPER() { return BIMOLECULAR_RATE_UPPER; } public static double getUNIMOLECULAR_RATE_UPPER() { return UNIMOLECULAR_RATE_UPPER; } public void setComments(String p_comments) { comments = p_comments; } /** * Returns the reverse reaction of this reaction. If there is no reverse reaction present * then a null object is returned. * @return */ public Reaction getReverseReaction() { return reverseReaction; } public void setKinetics(Kinetics p_kinetics, int k_index) { if (p_kinetics == null) { kinetics = null; } else { kinetics[k_index] = p_kinetics; } } public void addAdditionalKinetics(Kinetics p_kinetics, int red) { if (finalized) return; if (p_kinetics == null) return; if (kinetics == null){ kinetics = new Kinetics[1]; kinetics[0] = p_kinetics; structure.redundancy = 1; } else { boolean kineticsAlreadyPresent = false; for (int numKinetics=0; numKinetics<kinetics.length; ++numKinetics) { if (kinetics[numKinetics].equals(p_kinetics)) { structure.increaseRedundancy(red); kineticsAlreadyPresent = true; } } if (!kineticsAlreadyPresent) { Kinetics[] tempKinetics = kinetics; kinetics = new Kinetics[tempKinetics.length+1]; for (int i=0; i<tempKinetics.length; i++) { kinetics[i] = tempKinetics[i]; } kinetics[kinetics.length-1] = p_kinetics; structure.redundancy = 1; } } /* * MRH 24MAR2010: * Commented out. As RMG will be able to handle more than 2 Kinetics * per reaction, the code below is no longer necessary */ //10/29/07 gmagoon: changed to use Global.highTemperature, Global.lowTemperature (versus Global.temperature); apparently this function chooses the top two rates when there are multiple reactions with same reactants and products; the reactions with the top two rates are used; use of high and low temperatures would be less than ideal in cases where temperature of system changes over the course of reaction //10/31/07 gmagoon: it is assumed that two rate constants vs. 
temperature cross each other at most one time over the temperature range of interest //if there at least three different reactions/rates and rate crossings/intersections occur, the two rates used are based on the lowest simulation temperature and a warning is displayed // else if (additionalKinetics == null){ // if (p_kinetics.calculateRate(Global.lowTemperature) > kinetics.calculateRate(Global.lowTemperature)){ // if (p_kinetics.calculateRate(Global.highTemperature) < kinetics.calculateRate(Global.highTemperature)) // ratesForKineticsAndAdditionalKineticsCross = true; // additionalKinetics = kinetics; // kinetics = p_kinetics; // structure.redundancy = 1; // } // else{ // if(p_kinetics.calculateRate(Global.highTemperature) > kinetics.calculateRate(Global.highTemperature)) // ratesForKineticsAndAdditionalKineticsCross = true; // additionalKinetics = p_kinetics; // } // } // else if (additionalKinetics.equals(p_kinetics)) // return; // else { // if(ratesForKineticsAndAdditionalKineticsCross){ // if(p_kinetics.calculateRate(Global.lowTemperature) > kinetics.calculateRate(Global.lowTemperature)){ // if(p_kinetics.calculateRate(Global.highTemperature) > kinetics.calculateRate(Global.highTemperature)) // ratesForKineticsAndAdditionalKineticsCross = false; // System.out.println("WARNING: reaction that may be significant at higher temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // additionalKinetics = kinetics; // kinetics = p_kinetics; // } // else if (p_kinetics.calculateRate(Global.lowTemperature) < additionalKinetics.calculateRate(Global.lowTemperature)){ // if(p_kinetics.calculateRate(Global.highTemperature) > kinetics.calculateRate(Global.highTemperature)) // System.out.println("WARNING: reaction that may be significant at higher 
temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + p_kinetics.toString() ); // } // else{//else p_kinetics @ low temperature is between kinetics and additional kinetics at low temperature // if(p_kinetics.calculateRate(Global.highTemperature) < kinetics.calculateRate(Global.highTemperature)) // ratesForKineticsAndAdditionalKineticsCross = false; // System.out.println("WARNING: reaction that may be significant at higher temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // additionalKinetics = p_kinetics; // } // } // else{ // if ((p_kinetics.calculateRate(Global.lowTemperature) > kinetics.calculateRate(Global.lowTemperature)) && (p_kinetics.calculateRate(Global.highTemperature) > kinetics.calculateRate(Global.highTemperature))){ // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); //10/29/07 gmagoon: note that I have moved this before reassignment of variables; I think this was a minor bug in original code // additionalKinetics = kinetics; // kinetics = p_kinetics; // } // else if ((p_kinetics.calculateRate(Global.lowTemperature) < additionalKinetics.calculateRate(Global.lowTemperature))&&(p_kinetics.calculateRate(Global.highTemperature) < additionalKinetics.calculateRate(Global.highTemperature))){ // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + p_kinetics.toString() ); // } // else if 
((p_kinetics.calculateRate(Global.lowTemperature) > additionalKinetics.calculateRate(Global.lowTemperature))&&(p_kinetics.calculateRate(Global.highTemperature) > additionalKinetics.calculateRate(Global.highTemperature))&&(p_kinetics.calculateRate(Global.lowTemperature) < kinetics.calculateRate(Global.lowTemperature))&&(p_kinetics.calculateRate(Global.highTemperature) < kinetics.calculateRate(Global.highTemperature))){ // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // additionalKinetics = p_kinetics; // } // else //else there is at least one crossing in the temperature range of interest between p_kinetics and either kinetics or additionalKinetics; base which reaction is kept on the lowest temperature // { // if(p_kinetics.calculateRate(Global.lowTemperature) > kinetics.calculateRate(Global.lowTemperature)){ // if(p_kinetics.calculateRate(Global.highTemperature) < additionalKinetics.calculateRate(Global.highTemperature)) // System.out.println("WARNING: reaction that may be significant at higher temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // additionalKinetics = kinetics; // kinetics = p_kinetics; // ratesForKineticsAndAdditionalKineticsCross = true; // } // else if(p_kinetics.calculateRate(Global.lowTemperature) < additionalKinetics.calculateRate(Global.lowTemperature)){ // System.out.println("WARNING: reaction that may be significant at higher temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + 
p_kinetics.toString() ); // } // else //else p_kinetics at low temperature is between kinetics and additional kinetics at low temperature // { // if(p_kinetics.calculateRate(Global.highTemperature) > kinetics.calculateRate(Global.highTemperature)) // ratesForKineticsAndAdditionalKineticsCross = true; // else//else p_kinetics crosses additional kinetics // System.out.println("WARNING: reaction that may be significant at higher temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // additionalKinetics = p_kinetics; // } // } // } // } // else if (additionalKinetics == null){ // if (p_kinetics.calculateRate(Global.temperature) > kinetics.calculateRate(Global.temperature)){ // additionalKinetics = kinetics; // kinetics = p_kinetics; // structure.redundancy = 1; // } // else additionalKinetics = p_kinetics; // } // else if (additionalKinetics.equals(p_kinetics)) // return; // else { // if (p_kinetics.calculateRate(Global.temperature) > kinetics.calculateRate(Global.temperature)){ // additionalKinetics = kinetics; // kinetics = p_kinetics; // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // // } // else if (p_kinetics.calculateRate(Global.temperature) < additionalKinetics.calculateRate(Global.temperature)){ // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + p_kinetics.toString() ); // } // else { // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // 
        additionalKinetics = p_kinetics;
        // }
        // }
    }

    /*
     * MRH 18MAR2010:
     * A reaction's kinetics is now an array. The additionalKinetics is now
     * obsolete and this method will be removed (to ensure nothing calls it)
     */
    // public boolean hasAdditionalKinetics(){
    // return (additionalKinetics != null);
    // }
    //public int totalNumberOfKinetics(){ //7/26/09 gmagoon: this is not used, and appears to incorrectly assume that there are a maximum of two kinetics...I think this was old and I have changed it since
    // if (hasAdditionalKinetics())
    // return 2;
    // else
    // return 1;
    //}
    /*
     * MRH 18MAR2010:
     * Restructuring a reaction's kinetics
     * With a reaction's kinetics now being defined as an array of Kinetics,
     * instead of "kinetics" and "additionalKinetics", the getAllKinetics()
     * method is now obsolete.
     */
    // public HashSet getAllKinetics(){
    // HashSet allKinetics = new HashSet();
    // allKinetics.add(kinetics.multiply(structure.redundancy));
    // if ( hasAdditionalKinetics()){
    // allKinetics.add(additionalKinetics.multiply(structure.redundancy));
    //
    // }
    //
    // return allKinetics;
    // }

    /** Marks this reaction as finalized (or clears the flag). */
    public void setFinalized(boolean p_finalized) {
        finalized = p_finalized;
        return;
    }

    /** Returns the Structure (reactants, products, direction) of this reaction. */
    public Structure getStructure() {
        return structure;
    }

    /** Replaces the Structure of this reaction. */
    public void setStructure(Structure p_Structure) {
        structure = p_Structure;
    }

    /**
     * Returns the reaction as an ASCII string.
     * @return A string representing the reaction equation in ASCII text.
*/ @Override public String toString() { if (getReactantNumber() == 0 || getProductNumber() == 0) return ""; String rxn = ""; Species species = (Species) structure.getReactantList().get(0); rxn = rxn + species.getName() + "(" + Integer.toString(species.getID()) + ")"; for (int i = 1; i < getReactantNumber(); i++) { species = (Species) structure.getReactantList().get(i); rxn += " + " + species.getName() + "(" + Integer.toString(species.getID()) + ")"; } rxn += " --> "; species = (Species) structure.getProductList().get(0); rxn = rxn + species.getName() + "(" + Integer.toString(species.getID()) + ")"; for (int i = 1; i < getProductNumber(); i++) { species = (Species) structure.getProductList().get(i); rxn += " + " + species.getName() + "(" + Integer.toString(species.getID()) + ")"; } return rxn; } public String toInChIString() { if (getReactantNumber() == 0 || getProductNumber() == 0) return ""; String rxn = ""; Species species = (Species) structure.getReactantList().get(0); rxn = rxn + species.getInChI(); for (int i = 1; i < getReactantNumber(); i++) { species = (Species) structure.getReactantList().get(i); rxn += " + " + species.getInChI(); } rxn += " --> "; species = (Species) structure.getProductList().get(0); rxn = rxn + species.getInChI(); for (int i = 1; i < getProductNumber(); i++) { species = (Species) structure.getProductList().get(i); rxn += " + " + species.getInChI(); } return rxn; } /** * Calculates the flux of this reaction given the provided system snapshot. * The system snapshot contains the temperature, pressure, and * concentrations of each core species. * @param ss The system snapshot at which to determine the reaction flux * @return The determined reaction flux */ public double calculateFlux(SystemSnapshot ss) { return calculateForwardFlux(ss) - calculateReverseFlux(ss); } /** * Calculates the forward flux of this reaction given the provided system snapshot. 
     * The system snapshot contains the temperature, pressure, and
     * concentrations of each core species.
     * @param ss The system snapshot at which to determine the reaction flux
     * @return The determined reaction flux
     */
    public double calculateForwardFlux(SystemSnapshot ss) {
        Temperature T = ss.getTemperature();
        // Start from the forward rate constant at this temperature, then multiply
        // in each reactant concentration to form k * [A] * [B] * ...
        double forwardFlux = getRateConstant(T);
        for (ListIterator<Species> iter = getReactants(); iter.hasNext(); ) {
            Species spe = iter.next();
            double conc = 0.0;
            // A species with no SpeciesStatus in the snapshot is treated as having
            // zero concentration, which zeroes the whole forward flux.
            if (ss.getSpeciesStatus(spe) != null)
                conc = ss.getSpeciesStatus(spe).getConcentration();
            if (conc < 0) {
                double aTol = ReactionModelGenerator.getAtol();
                //if (Math.abs(conc) < aTol) conc = 0;
                //else throw new NegativeConcentrationException(spe.getName() + ": " + String.valueOf(conc));
                // Small negative concentrations are tolerated (presumably solver
                // round-off — TODO confirm); only abort when well below -100*atol.
                if (conc < -100.0 * aTol)
                    throw new NegativeConcentrationException("Species " + spe.getName()
                            + " has negative concentration: " + String.valueOf(conc));
            }
            forwardFlux *= conc;
        }
        return forwardFlux;
    }

    /**
     * Calculates the flux of this reaction given the provided system snapshot.
* @param ss The system snapshot at which to determine the reaction flux * @return The determined reaction flux */ public double calculateReverseFlux(SystemSnapshot ss) { if (hasReverseReaction()) return reverseReaction.calculateForwardFlux(ss); else return 0.0; } public boolean isFromPrimaryKineticLibrary() { return kineticsFromPrimaryKineticLibrary; } public void setIsFromPrimaryKineticLibrary(boolean p_boolean) { kineticsFromPrimaryKineticLibrary = p_boolean; } public ReactionTemplate getReactionTemplate() { return rxnTemplate; } public void setReactionTemplate(ReactionTemplate rt) { rxnTemplate = rt; } public boolean hasMultipleKinetics() { if (getKinetics().length > 1) return true; else return false; } } /********************************************************************* File Path : RMG\RMG\jing\rxn\Reaction.java *********************************************************************/
source/RMG/jing/rxn/Reaction.java
//////////////////////////////////////////////////////////////////////////////// // // RMG - Reaction Mechanism Generator // // Copyright (c) 2002-2009 Prof. William H. Green ([email protected]) and the // RMG Team ([email protected]) // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. // //////////////////////////////////////////////////////////////////////////////// package jing.rxn; import java.io.*; import jing.chem.*; import java.util.*; import jing.param.*; import jing.mathTool.*; import jing.chemParser.*; import jing.chem.Species; import jing.param.Temperature; import jing.rxnSys.NegativeConcentrationException; import jing.rxnSys.ReactionModelGenerator; import jing.rxnSys.SystemSnapshot; //## package jing::rxn //---------------------------------------------------------------------------- //jing\rxn\Reaction.java //---------------------------------------------------------------------------- /** Immutable objects. 
 */
//## class Reaction
public class Reaction {
    // Sanity caps on computed rate constants, checked in checkRateRange().
    protected static double TRIMOLECULAR_RATE_UPPER = 1.0E100;
    protected static double BIMOLECULAR_RATE_UPPER = 1.0E100; //## attribute BIMOLECULAR_RATE_UPPER
    protected static double UNIMOLECULAR_RATE_UPPER = 1.0E100; //## attribute UNIMOLECULAR_RATE_UPPER
    // Free-text provenance/annotation string for this reaction's kinetics.
    protected String comments = "No comment"; //## attribute comments
    // Reverse-direction kinetics fitted from forward kinetics plus thermochemistry
    // (see fitReverseKineticsRoughly); null until fitted.
    protected Kinetics[] fittedReverseKinetics = null; //## attribute fittedReverseKinetics
    protected double rateConstant;
    // The reaction with the opposite structure; set by generateReverseReaction().
    protected Reaction reverseReaction = null; //## attribute reverseReaction
    // Forward kinetics; an array because several distinct rate rules may apply.
    protected Kinetics[] kinetics;
    // Reactants, products, and direction of this reaction.
    protected Structure structure;
    protected double UpperBoundRate; //svp
    protected double LowerBoundRate; //svp
    //protected Kinetics additionalKinetics = null; //This is incase a reaction has two completely different transition states.
    protected boolean finalized = false;
    protected String ChemkinString = null;
    protected boolean ratesForKineticsAndAdditionalKineticsCross = false; //10/29/07 gmagoon: added variable to keep track of whether both rate constants are maximum for some temperature in the temperature range
    // True when kinetics came verbatim from a primary kinetic library; in that case
    // the pre-exponential factor is the total (not per-event) value.
    protected boolean kineticsFromPrimaryKineticLibrary = false;
    protected ReactionTemplate rxnTemplate;

    // Constructors
    //## operation Reaction()
    public Reaction() {
        //#[ operation Reaction()
        //#]
    }

    //## operation Reaction(Structure,RateConstant)
    // Private: instances are created internally (e.g. by generateReverseReaction()).
    private Reaction(Structure p_structure, Kinetics[] p_kinetics) {
        //#[ operation Reaction(Structure,RateConstant)
        structure = p_structure;
        kinetics = p_kinetics;
        //rateConstant = calculateTotalRate(Global.temperature);
        //#]
    }

    /*public Reaction(Reaction rxn) {
    structure = rxn.structure;
    kinetics = rxn.kinetics;
    comments = rxn.comments;
    fittedReverseKinetics = rxn.fittedReverseKinetics;
    rateConstant = rxn.rateConstant;
    reverseReaction = rxn.reverseReaction;
    UpperBoundRate = rxn.UpperBoundRate;
    LowerBoundRate = rxn.LowerBoundRate;
    additionalKinetics = rxn.additionalKinetics;
    finalized = rxn.finalized;
    ChemkinString = rxn.ChemkinString;
    ratesForKineticsAndAdditionalKineticsCross = rxn.ratesForKineticsAndAdditionalKineticsCross;
    }*/

    //## operation allProductsIncluded(HashSet)
    /** Returns true iff every product species is contained in p_speciesSet. */
    public boolean allProductsIncluded(HashSet p_speciesSet) {
        //#[ operation allProductsIncluded(HashSet)
        Iterator iter = getProducts();
        while (iter.hasNext()) {
            Species spe = ((Species) iter.next());
            if (!p_speciesSet.contains(spe))
                return false;
        }
        return true;
        //#]
    }

    //## operation allReactantsIncluded(HashSet)
    /** Returns true iff every reactant species is contained in p_speciesSet. */
    public boolean allReactantsIncluded(HashSet p_speciesSet) {
        //#[ operation allReactantsIncluded(HashSet)
        if (p_speciesSet == null)
            throw new NullPointerException();
        Iterator iter = getReactants();
        while (iter.hasNext()) {
            Species spe = ((Species) iter.next());
            if (!p_speciesSet.contains(spe))
                return false;
        }
        return true;
        //#]
    }

    /**
     * Calculate this reaction's thermo parameter. Basically, make addition of the
     * thermo parameters of all the reactants and products.
     */
    //## operation calculateHrxn(Temperature)
    /** Enthalpy of reaction at p_temperature (delegates to Structure). */
    public double calculateHrxn(Temperature p_temperature) {
        //#[ operation calculateHrxn(Temperature)
        return structure.calculateHrxn(p_temperature);
        //#]
    }

    //## operation calculateKeq(Temperature)
    /** Equilibrium constant at p_temperature (delegates to Structure). */
    public double calculateKeq(Temperature p_temperature) {
        //#[ operation calculateKeq(Temperature)
        return structure.calculateKeq(p_temperature);
        //#]
    }

    //## operation calculateKeqUpperBound(Temperature)
    //svp
    /** Upper bound on Keq at p_temperature (delegates to Structure). */
    public double calculateKeqUpperBound(Temperature p_temperature) {
        //#[ operation calculateKeqUpperBound(Temperature)
        return structure.calculateKeqUpperBound(p_temperature);
        //#]
    }

    //## operation calculateKeqLowerBound(Temperature)
    //svp
    /** Lower bound on Keq at p_temperature (delegates to Structure). */
    public double calculateKeqLowerBound(Temperature p_temperature) {
        //#[ operation calculateKeqLowerBound(Temperature)
        return structure.calculateKeqLowerBound(p_temperature);
        //#]
    }

    /**
     * Total rate constant at p_temperature, summed over all Kinetics entries and
     * (when solvation is enabled) corrected by diffusive limits.
     */
    public double calculateTotalRate(Temperature p_temperature){
        double rate =0;
        // Standard-state enthalpy of reaction, needed by Evans-Polanyi kinetics below.
        Temperature stdtemp = new Temperature(298,"K");
        double Hrxn = calculateHrxn(stdtemp);
        /* AJ 12JULY2010:
         * Added diffusive limits from previous
RMG version by replacing function calculateTotalRate * Checks the exothermicity and molecularity of reaction to determine the diffusive rate limit */ /* * 29Jun2009-MRH: Added a kinetics from PRL check * If the kinetics for this reaction is from a PRL, use those numbers * to compute the rate. Else, proceed as before. */ /* * MRH 18MAR2010: * Changing the structure of a reaction's kinetics * If the kinetics are from a primary kinetic library, we assume the user * has supplied the total pre-exponential factor for the reaction (and * not the per-event pre-exponential facor). * If the kinetics were estimated by RMG, the pre-exponential factor must * be multiplied by the "redundancy" (# of events) */ if (kineticsFromPrimaryKineticLibrary) { Kinetics[] k_All = kinetics; for (int numKinetics=0; numKinetics<kinetics.length; numKinetics++) { Kinetics k = k_All[numKinetics]; if (k instanceof ArrheniusEPKinetics) rate += k.calculateRate(p_temperature,Hrxn); else rate += k.calculateRate(p_temperature); } return rate; } else if (isForward()){ Kinetics[] k_All = kinetics; for (int numKinetics=0; numKinetics<kinetics.length; numKinetics++) { Kinetics k = k_All[numKinetics].multiply(structure.redundancy); if (k instanceof ArrheniusEPKinetics) rate += k.calculateRate(p_temperature,Hrxn); else rate += k.calculateRate(p_temperature); } /* Diffusion limits added by AJ on July 12, 2010 * Requires correction in the forward direction only, reverse reaction corrects itself * If ReactionModelGenerator.useDiffusion is true (solvation is on) * compute kd and return keff */ if (ReactionModelGenerator.getUseDiffusion()) { int numReacts = structure.getReactantNumber(); int numProds = structure.getProductNumber(); double keff = 0.0; double DiffFactor = 0.0; if (numReacts == 1 && numProds == 1) { keff = rate; setKineticsComments(getComments() + "\t" + "Diffusive limits do not apply " + "\t" + "Keq =" + calculateKeq(p_temperature),0); return keff; } else if (numReacts == 1 && numProds == 2) { 
double k_back = rate / calculateKeq(p_temperature); LinkedList reactantsInBackRxn = structure.products; double k_back_diff= calculatediff(reactantsInBackRxn); double k_back_eff= k_back*k_back_diff/(k_back + k_back_diff); keff = k_back_eff*calculateKeq(p_temperature); //if (keff/k_chem < 0.2){ //if (!getKinetics().getComment().endsWith("Diffusion limited")){ DiffFactor = keff/rate; setKineticsComments(getComments() + "\t" + "Diffusion factor = " + DiffFactor + "\t" + "Keq =" + calculateKeq(p_temperature),0); return keff; } else if (numReacts == 2 && numProds == 1) { double k_forw = rate; LinkedList reactantsInForwRxn = structure.reactants; double k_forw_diff = calculatediff(reactantsInForwRxn); double k_forw_eff = k_forw*k_forw_diff/(k_forw + k_forw_diff); keff = k_forw_eff; //if (keff/k_forw < 0.2){ //if (!getKinetics().getComment().endsWith("Diffusion limited")){ DiffFactor = keff/rate; setKineticsComments(getComments() + "\t" + "Diffusion factor = " + DiffFactor + "\t" + "Keq =" + calculateKeq(p_temperature),0); return keff; } else if (numReacts == 2 && numProds == 2) { //Temperature stdTemp = new Temperature(298, "K"); double deltaHrxn = structure.calculateHrxn(p_temperature); //setKineticsComments(getKinetics().getComment() + "Diffusion limited"); if (deltaHrxn<0){ // Forward reaction is exothermic hence the corresponding diffusion limit applies double k_forw = rate; LinkedList reactantsInForwRxn = structure.reactants; double k_forw_diff = calculatediff(reactantsInForwRxn); double k_forw_eff = k_forw*k_forw_diff/(k_forw + k_forw_diff); keff = k_forw_eff; //if (keff/k_forw < 0.2){ //if (!getKinetics().getComment().endsWith("Diffusion limited")) { DiffFactor = keff/rate; setKineticsComments(getComments() + "\t" + "Diffusion factor = " + DiffFactor + "\t" + "Keq =" + calculateKeq(p_temperature),0); return keff; } else if (deltaHrxn>0){ // Reverse reaction is exothermic and the corresponding diffusion limit should be used double k_back = rate / 
calculateKeq(p_temperature); LinkedList reactantsInBackRxn = structure.products; double k_back_diff= calculatediff(reactantsInBackRxn); double k_back_eff= k_back*k_back_diff/(k_back + k_back_diff); keff = k_back_eff*calculateKeq(p_temperature); //if (keff/k_chem < 0.2){ //if (!getKinetics().getComment().endsWith("Diffusion limited")) { DiffFactor = keff/rate; setKineticsComments(getComments() + "\t" + "Diffusion factor = " + DiffFactor + "\t" + "Keq =" + calculateKeq(p_temperature),0); return keff; } } } setKineticsComments(getComments() + "\t" + "Keq =" + calculateKeq(p_temperature),0); return rate; // Iterator kineticIter = getAllKinetics().iterator(); // while (kineticIter.hasNext()){ // Kinetics k = (Kinetics)kineticIter.next(); // if (k instanceof ArrheniusEPKinetics) // rate = rate + k.calculateRate(p_temperature,Hrxn); // else // rate = rate + k.calculateRate(p_temperature); // } // // return rate; } else if (isBackward()){ Reaction r = getReverseReaction(); rate = r.calculateTotalRate(p_temperature); return rate*calculateKeq(p_temperature); } else { throw new InvalidReactionDirectionException(); } } public double calculatediff(LinkedList p_struct) { if (p_struct.size()!=2){ System.out.println("Cannot compute diffusive limit if number of reactants is not equal to 2"); } // Array containing the radii of the two species passed in p_struct double[] r; double[] d; r = new double[2]; d = new double[2]; int i= 0; for (Iterator iter = p_struct.iterator(); iter.hasNext();) { Species sp = (Species)iter.next(); ChemGraph cg = sp.getChemGraph(); r[i] = cg.getRadius(); d[i] = cg.getDiffusivity(); i = i+1; } double kdiff; kdiff = (88/7)*(d[0] + d[1])*(r[0] + r[1]) * 6.023e29; // units of r[i]=m; d[1]=m2/sec; kdiff=cm3/mole sec return kdiff; } //## operation calculateUpperBoundRate(Temperature) //svp public double calculateUpperBoundRate(Temperature p_temperature){ //#[ operation calculateUpperBoundRate(Temperature) if (isForward()){ double A; double E; double n; for (int 
numKinetics=0; numKinetics<kinetics.length; numKinetics++) { A = kinetics[numKinetics].getA().getUpperBound(); E = kinetics[numKinetics].getE().getLowerBound(); n = kinetics[numKinetics].getN().getUpperBound(); if (A > 1E300) { A = kinetics[numKinetics].getA().getValue()*1.2; } //Kinetics kinetics = getRateConstant().getKinetics(); if (kinetics[numKinetics] instanceof ArrheniusEPKinetics){ ArrheniusEPKinetics arrhenius = (ArrheniusEPKinetics)kinetics[numKinetics]; double H = calculateHrxn(p_temperature); if (H < 0) { if (arrhenius.getAlpha().getValue() > 0){ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha*H; } } else { if (arrhenius.getAlpha().getValue() > 0){ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha*H; } } } if (E < 0){ E = 0; } double indiv_k = 0.0; indiv_k = A*Math.pow(p_temperature.getK(),n)*Math.exp(-E/GasConstant.getKcalMolK()/p_temperature.getK()); indiv_k *= getStructure().getRedundancy(); UpperBoundRate += indiv_k; } return UpperBoundRate; } else if (isBackward()) { Reaction r = getReverseReaction(); if (r == null) throw new NullPointerException("Reverse reaction is null.\n" + structure.toString()); if (!r.isForward()) throw new InvalidReactionDirectionException(); for (int numKinetics=0; numKinetics<kinetics.length; numKinetics++) { double A = kinetics[numKinetics].getA().getUpperBound(); double E = kinetics[numKinetics].getE().getLowerBound(); double n = kinetics[numKinetics].getN().getUpperBound(); if (A > 1E300) { A = kinetics[numKinetics].getA().getValue()*1.2; } //Kinetics kinetics = getRateConstant().getKinetics(); if (kinetics[numKinetics] instanceof ArrheniusEPKinetics){ ArrheniusEPKinetics arrhenius = (ArrheniusEPKinetics)kinetics[numKinetics]; double H = calculateHrxn(p_temperature); if (H < 0) { if (arrhenius.getAlpha().getValue() > 0){ double 
alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha*H; } } else { if (arrhenius.getAlpha().getValue() > 0){ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha*H; } } } if (E < 0){ E = 0; } double indiv_k = 0.0; indiv_k = A*Math.pow(p_temperature.getK(),n)*Math.exp(-E/GasConstant.getKcalMolK()/p_temperature.getK()); indiv_k *= getStructure().getRedundancy(); UpperBoundRate += indiv_k*calculateKeqUpperBound(p_temperature); } return UpperBoundRate; } else{ throw new InvalidReactionDirectionException(); } //#] } //## operation calculateLowerBoundRate(Temperature) //svp public double calculateLowerBoundRate(Temperature p_temperature){ //#[ operation calculateLowerBoundRate(Temperature) if (isForward()){ for (int numKinetics=0; numKinetics<kinetics.length; ++numKinetics) { double A = kinetics[numKinetics].getA().getLowerBound(); double E = kinetics[numKinetics].getE().getUpperBound(); double n = kinetics[numKinetics].getN().getLowerBound(); if (A > 1E300 || A <= 0) { A = kinetics[numKinetics].getA().getValue()/1.2; } //Kinetics kinetics = getRateConstant().getKinetics(); if (kinetics[numKinetics] instanceof ArrheniusEPKinetics){ ArrheniusEPKinetics arrhenius = (ArrheniusEPKinetics)kinetics[numKinetics]; double H = calculateHrxn(p_temperature); if (H < 0) { if (arrhenius.getAlpha().getValue()>0){ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha*H; } } else { if (arrhenius.getAlpha().getValue()>0){ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha*H; } } } double indiv_k = 0.0; indiv_k = A*Math.pow(p_temperature.getK(),n)*Math.exp(-E/GasConstant.getKcalMolK()/p_temperature.getK()); indiv_k *= 
getStructure().getRedundancy(); LowerBoundRate += indiv_k; } return LowerBoundRate; } else if (isBackward()) { Reaction r = getReverseReaction(); if (r == null) throw new NullPointerException("Reverse reaction is null.\n" + structure.toString()); if (!r.isForward()) throw new InvalidReactionDirectionException(); for (int numKinetics=0; numKinetics<kinetics.length; ++numKinetics) { double A = kinetics[numKinetics].getA().getLowerBound(); double E = kinetics[numKinetics].getE().getUpperBound(); double n = kinetics[numKinetics].getN().getLowerBound(); if (A > 1E300) { A = kinetics[numKinetics].getA().getValue()/1.2; } if (kinetics[numKinetics] instanceof ArrheniusEPKinetics){ ArrheniusEPKinetics arrhenius = (ArrheniusEPKinetics)kinetics[numKinetics]; double H = calculateHrxn(p_temperature); if (H < 0) { if (arrhenius.getAlpha().getValue() > 0){ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha*H; } } else { if (arrhenius.getAlpha().getValue() > 0){ double alpha = arrhenius.getAlpha().getUpperBound(); E = E + alpha * H; } else{ double alpha = arrhenius.getAlpha().getLowerBound(); E = E + alpha*H; } } } double indiv_k = 0.0; indiv_k = A*Math.pow(p_temperature.getK(),n)*Math.exp(-E/GasConstant.getKcalMolK()/p_temperature.getK()); indiv_k *= getStructure().getRedundancy(); LowerBoundRate += indiv_k*calculateKeqLowerBound(p_temperature); } return LowerBoundRate; } else{ throw new InvalidReactionDirectionException(); } //#] } //## operation calculateSrxn(Temperature) public double calculateSrxn(Temperature p_temperature) { //#[ operation calculateSrxn(Temperature) return structure.calculateSrxn(p_temperature); //#] } //## operation calculateThirdBodyCoefficient(SystemSnapshot) public double calculateThirdBodyCoefficient(SystemSnapshot p_presentStatus) { //#[ operation calculateThirdBodyCoefficient(SystemSnapshot) if (!(this instanceof ThirdBodyReaction)) return 1; else { return 
((ThirdBodyReaction)this).calculateThirdBodyCoefficient(p_presentStatus); } //#] } //## operation checkRateRange() public boolean checkRateRange() { //#[ operation checkRateRange() Temperature t = new Temperature(1500,"K"); double rate = calculateTotalRate(t); if (getReactantNumber() == 2) { if (rate > BIMOLECULAR_RATE_UPPER) return false; } else if (getReactantNumber() == 1) { if (rate > UNIMOLECULAR_RATE_UPPER) return false; } else if (getReactantNumber() == 3) { if (rate > TRIMOLECULAR_RATE_UPPER) return false; } else throw new InvalidReactantNumberException(); return true; //#] } //## operation contains(Species) public boolean contains(Species p_species) { //#[ operation contains(Species) if (containsAsReactant(p_species) || containsAsProduct(p_species)) return true; else return false; //#] } //## operation containsAsProduct(Species) public boolean containsAsProduct(Species p_species) { //#[ operation containsAsProduct(Species) Iterator iter = getProducts(); while (iter.hasNext()) { //ChemGraph cg = (ChemGraph)iter.next(); Species spe = (Species)iter.next(); if (spe.equals(p_species)) return true; } return false; //#] } //## operation containsAsReactant(Species) public boolean containsAsReactant(Species p_species) { //#[ operation containsAsReactant(Species) Iterator iter = getReactants(); while (iter.hasNext()) { //ChemGraph cg = (ChemGraph)iter.next(); Species spe = (Species)iter.next(); if (spe.equals(p_species)) return true; } return false; //#] } /** * Checks if the structure of the reaction is the same. Does not check the rate constant. * Two reactions with the same structure but different rate constants will be equal. 
*/ //## operation equals(Object) public boolean equals(Object p_reaction) { //#[ operation equals(Object) if (this == p_reaction) return true; if (!(p_reaction instanceof Reaction)) return false; Reaction r = (Reaction)p_reaction; if (!getStructure().equals(r.getStructure())) return false; return true; //#] } /* // fitReverseKineticsPrecisely and getPreciseReverseKinetics // are not used, not maintained, and we have clue what they do, // so we're commenting them out so we don't keep looking at them. // (oh, and they look pretty similar to each other!) // - RWest & JWAllen, June 2009 //## operation fitReverseKineticsPrecisely() public void fitReverseKineticsPrecisely() { //#[ operation fitReverseKineticsPrecisely() if (isForward()) { fittedReverseKinetics = null; } else { String result = ""; for (double t = 300.0; t<1500.0; t+=50.0) { double rate = calculateTotalRate(new Temperature(t,"K")); result += String.valueOf(t) + '\t' + String.valueOf(rate) + '\n'; } // run fit3p String dir = System.getProperty("RMG.workingDirectory"); File fit3p_input; try { // prepare fit3p input file, "input.dat" is the input file name fit3p_input = new File("fit3p/input.dat"); FileWriter fw = new FileWriter(fit3p_input); fw.write(result); fw.close(); } catch (IOException e) { System.out.println("Wrong input file for fit3p!"); System.out.println(e.getMessage()); System.exit(0); } try { // system call for fit3p String[] command = {dir+ "/bin/fit3pbnd.exe"}; File runningDir = new File("fit3p"); Process fit = Runtime.getRuntime().exec(command, null, runningDir); int exitValue = fit.waitFor(); } catch (Exception e) { System.out.println("Error in run fit3p!"); System.out.println(e.getMessage()); System.exit(0); } // parse the output file from chemdis try { String fit3p_output = "fit3p/output.dat"; FileReader in = new FileReader(fit3p_output); BufferedReader data = new BufferedReader(in); String line = ChemParser.readMeaningfulLine(data); line = line.trim(); StringTokenizer st = new 
StringTokenizer(line); String A = st.nextToken(); String temp = st.nextToken(); temp = st.nextToken(); temp = st.nextToken(); double Ar = Double.parseDouble(temp); line = ChemParser.readMeaningfulLine(data); line = line.trim(); st = new StringTokenizer(line); String n = st.nextToken(); temp = st.nextToken(); temp = st.nextToken(); double nr = Double.parseDouble(temp); line = ChemParser.readMeaningfulLine(data); line = line.trim(); st = new StringTokenizer(line); String E = st.nextToken(); temp = st.nextToken(); temp = st.nextToken(); temp = st.nextToken(); double Er = Double.parseDouble(temp); if (Er < 0) { System.err.println(getStructure().toString()); System.err.println("fitted Er < 0: "+Double.toString(Er)); double increase = Math.exp(-Er/GasConstant.getKcalMolK()/715.0); double deltan = Math.log(increase)/Math.log(715.0); System.err.println("n enlarged by factor of: " + Double.toString(deltan)); nr += deltan; Er = 0; } UncertainDouble udAr = new UncertainDouble(Ar, 0, "Adder"); UncertainDouble udnr = new UncertainDouble(nr, 0, "Adder"); UncertainDouble udEr = new UncertainDouble(Er, 0, "Adder"); fittedReverseKinetics = new ArrheniusKinetics(udAr, udnr , udEr, "300-1500", 1, "fitting from forward and thermal",null); in.close(); } catch (Exception e) { System.out.println("Error in read output.dat from fit3p!"); System.out.println(e.getMessage()); System.exit(0); } } return; //#] } //## operation fitReverseKineticsPrecisely() public Kinetics getPreciseReverseKinetics() { //#[ operation fitReverseKineticsPrecisely() Kinetics fittedReverseKinetics =null; String result = ""; for (double t = 300.0; t<1500.0; t+=50.0) { double rate = calculateTotalRate(new Temperature(t,"K")); result += String.valueOf(t) + '\t' + String.valueOf(rate) + '\n'; } // run fit3p String dir = System.getProperty("RMG.workingDirectory"); File fit3p_input; try { // prepare fit3p input file, "input.dat" is the input file name fit3p_input = new File("fit3p/input.dat"); FileWriter fw = new 
FileWriter(fit3p_input); fw.write(result); fw.close(); } catch (IOException e) { System.out.println("Wrong input file for fit3p!"); System.out.println(e.getMessage()); System.exit(0); } try { // system call for fit3p String[] command = {dir+ "/bin/fit3pbnd.exe"}; File runningDir = new File("fit3p"); Process fit = Runtime.getRuntime().exec(command, null, runningDir); int exitValue = fit.waitFor(); } catch (Exception e) { System.out.println("Error in run fit3p!"); System.out.println(e.getMessage()); System.exit(0); } // parse the output file from chemdis try { String fit3p_output = "fit3p/output.dat"; FileReader in = new FileReader(fit3p_output); BufferedReader data = new BufferedReader(in); String line = ChemParser.readMeaningfulLine(data); line = line.trim(); StringTokenizer st = new StringTokenizer(line); String A = st.nextToken(); String temp = st.nextToken(); temp = st.nextToken(); temp = st.nextToken(); double Ar = Double.parseDouble(temp); line = ChemParser.readMeaningfulLine(data); line = line.trim(); st = new StringTokenizer(line); String n = st.nextToken(); temp = st.nextToken(); temp = st.nextToken(); double nr = Double.parseDouble(temp); line = ChemParser.readMeaningfulLine(data); line = line.trim(); st = new StringTokenizer(line); String E = st.nextToken(); temp = st.nextToken(); temp = st.nextToken(); temp = st.nextToken(); double Er = Double.parseDouble(temp); if (Er < 0) { System.err.println(getStructure().toString()); System.err.println("fitted Er < 0: "+Double.toString(Er)); double increase = Math.exp(-Er/GasConstant.getKcalMolK()/715.0); double deltan = Math.log(increase)/Math.log(715.0); System.err.println("n enlarged by factor of: " + Double.toString(deltan)); nr += deltan; Er = 0; } UncertainDouble udAr = new UncertainDouble(Ar, 0, "Adder"); UncertainDouble udnr = new UncertainDouble(nr, 0, "Adder"); UncertainDouble udEr = new UncertainDouble(Er, 0, "Adder"); fittedReverseKinetics = new ArrheniusKinetics(udAr, udnr , udEr, "300-1500", 1, 
"fitting from forward and thermal",null); in.close(); } catch (Exception e) { System.out.println("Error in read output.dat from fit3p!"); System.out.println(e.getMessage()); System.exit(0); } return fittedReverseKinetics; //#] } // */ //## operation fitReverseKineticsRoughly() public void fitReverseKineticsRoughly() { //#[ operation fitReverseKineticsRoughly() // now is a rough fitting if (isForward()) { fittedReverseKinetics = null; } else { //double temp = 715; // double temp = 298.15; //10/29/07 gmagoon: Sandeep made change to temp = 298 on his computer locally // double temp = 1350; //11/6/07 gmagoon:**** changed to actual temperature in my condition file to create agreement with old version; apparently, choice of temp has large effect; //11/9/07 gmagoon: commented out double temp = 298.15; //11/9/07 gmagoon: restored use of 298.15 per discussion with Sandeep //double temp = Global.temperature.getK(); Kinetics[] k = getKinetics(); fittedReverseKinetics = new Kinetics[k.length]; double doubleAlpha; for (int numKinetics=0; numKinetics<k.length; numKinetics++) { if (k[numKinetics] instanceof ArrheniusEPKinetics) doubleAlpha = ((ArrheniusEPKinetics)k[numKinetics]).getAlphaValue(); else doubleAlpha = 0; double Hrxn = calculateHrxn(new Temperature(temp,"K")); double Srxn = calculateSrxn(new Temperature(temp, "K")); // for EvansPolyani kinetics (Ea = Eo + alpha * Hrxn) remember that k.getEValue() gets Eo not Ea // this Hrxn is for the reverse reaction (ie. 
-Hrxn_forward) double doubleEr = k[numKinetics].getEValue() - (doubleAlpha-1)*Hrxn; if (doubleEr < 0) { System.err.println("fitted Er < 0: "+Double.toString(doubleEr)); System.err.println(getStructure().toString()); //doubleEr = 0; } UncertainDouble Er = new UncertainDouble(doubleEr, k[numKinetics].getE().getUncertainty(), k[numKinetics].getE().getType()); UncertainDouble n = new UncertainDouble(0,0, "Adder"); double doubleA = k[numKinetics].getAValue()* Math.pow(temp, k[numKinetics].getNValue())* Math.exp(Srxn/GasConstant.getCalMolK()); doubleA *= Math.pow(GasConstant.getCCAtmMolK()*temp, -getStructure().getDeltaN()); // assumes Ideal gas law concentration and 1 Atm reference state fittedReverseKinetics[numKinetics] = new ArrheniusKinetics(new UncertainDouble(doubleA, 0, "Adder"), n , Er, "300-1500", 1, "fitting from forward and thermal",null); } } return; //#] } /** * Generates a reaction whose structure is opposite to that of the present reaction. * Just appends the rate constant of this reaction to the reverse reaction. 
* */ //## operation generateReverseReaction() public void generateReverseReaction() { //#[ operation generateReverseReaction() Structure s = getStructure(); //Kinetics k = getKinetics(); Kinetics[] k = kinetics; if (kinetics == null) throw new NullPointerException(); Structure newS = s.generateReverseStructure(); newS.setRedundancy(s.getRedundancy()); Reaction r = new Reaction(newS, k); // if (hasAdditionalKinetics()){ // r.addAdditionalKinetics(additionalKinetics,1); // } r.setReverseReaction(this); this.setReverseReaction(r); return; //#] } public String getComments() { return comments; } //## operation getDirection() public int getDirection() { //#[ operation getDirection() return getStructure().getDirection(); //#] } //## operation getFittedReverseKinetics() public Kinetics[] getFittedReverseKinetics() { //#[ operation getFittedReverseKinetics() if (fittedReverseKinetics == null) fitReverseKineticsRoughly(); return fittedReverseKinetics; //#] } /*//## operation getForwardRateConstant() public Kinetics getForwardRateConstant() { //#[ operation getForwardRateConstant() if (isForward()) return kinetics; else return null; //#] } */ //## operation getKinetics() public Kinetics[] getKinetics() { //#[ operation getKinetics() // returns the kinetics OF THE FORWARD REACTION // ie. if THIS is reverse, it calls this.getReverseReaction().getKinetics() /* * 29Jun2009-MRH: * When getting kinetics, check whether it comes from a PRL or not. * If so, return the kinetics. We are not worried about the redundancy * because I assume the user inputs the Arrhenius kinetics for the overall * reaction A = B + C * * E.g. 
CH4 = CH3 + H A n E * The Arrhenius parameters would be for the overall decomposition of CH4, * not for each carbon-hydrogen bond fission */ if (isFromPrimaryKineticLibrary()) { return kinetics; } if (isForward()) { int red = structure.getRedundancy(); Kinetics[] kinetics2return = new Kinetics[kinetics.length]; for (int numKinetics=0; numKinetics<kinetics.length; ++numKinetics) { kinetics2return[numKinetics] = kinetics[numKinetics].multiply(red); } return kinetics2return; } else if (isBackward()) { Reaction rr = getReverseReaction(); // Added by MRH on 7/Sept/2009 // Required when reading in the restart files if (rr == null) { generateReverseReaction(); rr = getReverseReaction(); } if (rr == null) throw new NullPointerException("Reverse reaction is null.\n" + structure.toString()); if (!rr.isForward()) throw new InvalidReactionDirectionException(structure.toString()); return rr.getKinetics(); } else throw new InvalidReactionDirectionException(structure.toString()); //#] } public void setKineticsComments(String p_string, int num_k){ kinetics[num_k].setComments(p_string); } // shamel: Added this function 6/10/2010, to get Kinetics Source to identify duplicates // in Reaction Library, Seed Mech and Template Reaction with Library Reaction feature public String getKineticsSource(int num_k){ // The num_k is the number for different kinetics stored for one type of reaction but formed due to different families // Check if the kinetics exits if (kinetics != null) { // Check if the "source" string is not null if(kinetics[num_k].getSource() != null){ return kinetics[num_k].getSource(); } else{ // This is mostly done for case of H Abstraction where forward kinetic source is null //we might need to check if this "source" is also null (Can be source of Bug) return this.reverseReaction.kinetics[num_k].getSource(); } } else // Returns Source as null when there are no Kinetics at all! 
return null; } public void setKineticsSource(String p_string, int num_k){ kinetics[num_k].setSource(p_string); } //## operation getUpperBoundRate(Temperature) public double getUpperBoundRate(Temperature p_temperature){//svp //#[ operation getUpperBoundRate(Temperature) if (UpperBoundRate == 0.0){ calculateUpperBoundRate(p_temperature); } return UpperBoundRate; //#] } //## operation getLowerBoundRate(Temperature) public double getLowerBoundRate(Temperature p_temperature){//svp //#[ operation getLowerBoundRate(Temperature) if (LowerBoundRate == 0.0){ calculateLowerBoundRate(p_temperature); } return LowerBoundRate; //#] } //## operation getProductList() public LinkedList getProductList() { //#[ operation getProductList() return structure.getProductList(); //#] } //10/26/07 gmagoon: changed to have temperature and pressure passed as parameters (part of eliminating use of Global.temperature) public double getRateConstant(Temperature p_temperature){ //public double getRateConstant(){ if (rateConstant == 0) rateConstant = calculateTotalRate(p_temperature); // rateConstant = calculateTotalRate(Global.temperature); return rateConstant; } //## operation getProductNumber() public int getProductNumber() { //#[ operation getProductNumber() return getStructure().getProductNumber(); //#] } //## operation getProducts() public ListIterator getProducts() { //#[ operation getProducts() return structure.getProducts(); //#] } /*//## operation getRateConstant() public Kinetics getRateConstant() { //#[ operation getRateConstant() if (isForward()) { return rateConstant; } else if (isBackward()) { Reaction rr = getReverseReaction(); if (rr == null) throw new NullPointerException("Reverse reaction is null.\n" + structure.toString()); if (!rr.isForward()) throw new InvalidReactionDirectionException(structure.toString()); return rr.getRateConstant(); } else throw new InvalidReactionDirectionException(structure.toString()); //#] }*/ //## operation getReactantList() public LinkedList 
getReactantList() { //#[ operation getReactantList() return structure.getReactantList(); //#] } //## operation getReactantNumber() public int getReactantNumber() { //#[ operation getReactantNumber() return getStructure().getReactantNumber(); //#] } //## operation getReactants() public ListIterator getReactants() { //#[ operation getReactants() return structure.getReactants(); //#] } //## operation getRedundancy() public int getRedundancy() { //#[ operation getRedundancy() return getStructure().getRedundancy(); //#] } public boolean hasResonanceIsomer() { //#[ operation hasResonanceIsomer() return (hasResonanceIsomerAsReactant() || hasResonanceIsomerAsProduct()); //#] } //## operation hasResonanceIsomerAsProduct() public boolean hasResonanceIsomerAsProduct() { //#[ operation hasResonanceIsomerAsProduct() for (Iterator iter = getProducts(); iter.hasNext();) { Species spe = ((Species)iter.next()); if (spe.hasResonanceIsomers()) return true; } return false; //#] } //## operation hasResonanceIsomerAsReactant() public boolean hasResonanceIsomerAsReactant() { //#[ operation hasResonanceIsomerAsReactant() for (Iterator iter = getReactants(); iter.hasNext();) { Species spe = ((Species)iter.next()); if (spe.hasResonanceIsomers()) return true; } return false; //#] } //## operation hasReverseReaction() public boolean hasReverseReaction() { //#[ operation hasReverseReaction() return reverseReaction != null; //#] } //## operation hashCode() public int hashCode() { //#[ operation hashCode() // just use the structure's hashcode return structure.hashCode(); //#] } //## operation isDuplicated(Reaction) /*public boolean isDuplicated(Reaction p_reaction) { //#[ operation isDuplicated(Reaction) // the same structure, return true Structure str1 = getStructure(); Structure str2 = p_reaction.getStructure(); //if (str1.isDuplicate(str2)) return true; // if not the same structure, check the resonance isomers if (!hasResonanceIsomer()) return false; if (str1.equals(str2)) return true; else 
return false; //#] }*/ //## operation isBackward() public boolean isBackward() { //#[ operation isBackward() return structure.isBackward(); //#] } //## operation isForward() public boolean isForward() { //#[ operation isForward() return structure.isForward(); //#] } //## operation isIncluded(HashSet) public boolean isIncluded(HashSet p_speciesSet) { //#[ operation isIncluded(HashSet) return (allReactantsIncluded(p_speciesSet) && allProductsIncluded(p_speciesSet)); //#] } //## operation makeReaction(Structure,Kinetics,boolean) public static Reaction makeReaction(Structure p_structure, Kinetics[] p_kinetics, boolean p_generateReverse) { //#[ operation makeReaction(Structure,Kinetics,boolean) if (!p_structure.repOk()) throw new InvalidStructureException(p_structure.toChemkinString(false).toString()); for (int numKinetics=0; numKinetics<p_kinetics.length; numKinetics++) { if (!p_kinetics[numKinetics].repOk()) throw new InvalidKineticsException(p_kinetics[numKinetics].toString()); } Reaction r = new Reaction(p_structure, p_kinetics); if (p_generateReverse) { r.generateReverseReaction(); } else { r.setReverseReaction(null); } return r; //#] } //## operation reactantEqualsProduct() public boolean reactantEqualsProduct() { //#[ operation reactantEqualsProduct() return getStructure().reactantEqualsProduct(); //#] } //## operation repOk() public boolean repOk() { //#[ operation repOk() if (!structure.repOk()) { System.out.println("Invalid Reaction Structure:" + structure.toString()); return false; } if (!isForward() && !isBackward()) { System.out.println("Invalid Reaction Direction: " + String.valueOf(getDirection())); return false; } if (isBackward() && reverseReaction == null) { System.out.println("Backward Reaction without a reversed reaction defined!"); return false; } /*if (!getRateConstant().repOk()) { System.out.println("Invalid Rate Constant: " + getRateConstant().toString()); return false; }*/ Kinetics[] allKinetics = getKinetics(); for (int numKinetics=0; 
numKinetics<allKinetics.length; ++numKinetics) { if (!allKinetics[numKinetics].repOk()) { System.out.println("Invalid Kinetics: " + allKinetics[numKinetics].toString()); return false; } } if (!checkRateRange()) { System.out.println("reaction rate is higher than the upper rate limit!"); System.out.println(getStructure().toString()); Temperature tup = new Temperature(1500,"K"); if (isForward()) { System.out.println("k(T=1500) = " + String.valueOf(calculateTotalRate(tup))); } else { System.out.println("k(T=1500) = " + String.valueOf(calculateTotalRate(tup))); System.out.println("Keq(T=1500) = " + String.valueOf(calculateKeq(tup))); System.out.println("krev(T=1500) = " + String.valueOf(getReverseReaction().calculateTotalRate(tup))); } System.out.println(getKinetics()); return false; } return true; //#] } //## operation setReverseReaction(Reaction) public void setReverseReaction(Reaction p_reverseReaction) { //#[ operation setReverseReaction(Reaction) reverseReaction = p_reverseReaction; if (p_reverseReaction != null) reverseReaction.reverseReaction = this; //#] } //## operation toChemkinString() public String toChemkinString(Temperature p_temperature) { //#[ operation toChemkinString() if (ChemkinString != null) return ChemkinString; StringBuilder result = new StringBuilder(); StringBuilder strucString = getStructure().toChemkinString(hasReverseReaction()); Temperature stdtemp = new Temperature(298,"K"); double Hrxn = calculateHrxn(stdtemp); Kinetics[] allKinetics = getKinetics(); for (int numKinetics=0; numKinetics<allKinetics.length; ++numKinetics) { String k = allKinetics[numKinetics].toChemkinString(Hrxn,p_temperature,true); if (allKinetics.length == 1) result.append(strucString + " " + k); else result.append(strucString + " " + k + "\nDUP\n"); } ChemkinString = result.toString(); return result.toString(); } public String toChemkinString(Temperature p_temperature, Pressure p_pressure) { // For certain PDep cases it's helpful to be able to call this with a 
temperature and pressure // but usually (and in this case) the pressure is irrelevant, so we just call the above toChemkinString(Temperature) method: return toChemkinString(p_temperature); } public String toRestartString(Temperature p_temperature, boolean pathReaction) { /* * Edited by MRH on 18Jan2010 * * Writing restart files was causing a bug in the RMG-generated chem.inp file * For example, H+CH4=CH3+H2 in input file w/HXD13, CH4, and H2 * RMG would correctly multiply the A factor by the structure's redundancy * when calculating the rate to place in the ODEsolver input file. However, * the A reported in the chem.inp file would be the "per event" A. This was * due to the reaction.toChemkinString() method being called when writing the * Restart coreReactions.txt and edgeReactions.txt files. At the first point of * writing the chemkinString for this reaction (when it is still an edge reaction), * RMG had not yet computed the redundancy of the structure (as H was not a core * species at the time, but CH3 and H2 were). When RMG tried to write the chemkinString * for the above reaction, using the correct redundancy, the chemkinString already existed * and thus the toChemkinString() method was exited immediately. * MRH is replacing the reaction.toChemkinString() call with reaction.toRestartString() * when writing the Restart files, to account for this bug. 
*/ String result = getStructure().toRestartString(hasReverseReaction()).toString(); //+ " "+getStructure().direction + " "+getStructure().redundancy; // MRH 18Jan2010: Restart files do not look for direction/redundancy /* * MRH 14Feb2010: Handle reactions with multiple kinetics */ String totalResult = ""; Kinetics[] allKinetics = getKinetics(); for (int numKinetics=0; numKinetics<allKinetics.length; ++numKinetics) { totalResult += result + " " + allKinetics[numKinetics].toChemkinString(calculateHrxn(p_temperature),p_temperature,true); if (allKinetics.length != 1) { if (pathReaction) { totalResult += "\n"; if (numKinetics != allKinetics.length-1) totalResult += getStructure().direction + "\t"; } else totalResult += "\n\tDUP\n"; } } return totalResult; } /* * MRH 23MAR2010: * Method not used in RMG */ // //## operation toFullString() // public String toFullString() { // //#[ operation toFullString() // return getStructure().toString() + getKinetics().toString() + getComments().toString(); // // // // //#] // } //## operation toString() public String toString(Temperature p_temperature) { //#[ operation toString() String string2return = ""; Kinetics[] k = getKinetics(); for (int numKinetics=0; numKinetics<k.length; ++numKinetics) { string2return += getStructure().toString() + "\t"; string2return += k[numKinetics].toChemkinString(calculateHrxn(p_temperature),p_temperature,false); if (k.length > 1) string2return += "\n"; } return string2return; } /* * MRH 23MAR2010: * This method is redundant to toString() */ //10/26/07 gmagoon: changed to take temperature as parameter (required changing function name from toString to reactionToString // public String reactionToString(Temperature p_temperature) { // // public String toString() { // //#[ operation toString() // // Temperature p_temperature = Global.temperature; // Kinetics k = getKinetics(); // String kString = k.toChemkinString(calculateHrxn(p_temperature),p_temperature,false); // // return getStructure().toString() + 
'\t' + kString; // //#] // } public static double getBIMOLECULAR_RATE_UPPER() { return BIMOLECULAR_RATE_UPPER; } public static double getUNIMOLECULAR_RATE_UPPER() { return UNIMOLECULAR_RATE_UPPER; } public void setComments(String p_comments) { comments = p_comments; } /** * Returns the reverse reaction of this reaction. If there is no reverse reaction present * then a null object is returned. * @return */ public Reaction getReverseReaction() { return reverseReaction; } public void setKinetics(Kinetics p_kinetics, int k_index) { if (p_kinetics == null) { kinetics = null; } else { kinetics[k_index] = p_kinetics; } } public void addAdditionalKinetics(Kinetics p_kinetics, int red) { if (finalized) return; if (p_kinetics == null) return; if (kinetics == null){ kinetics = new Kinetics[1]; kinetics[0] = p_kinetics; structure.redundancy = 1; } else { boolean kineticsAlreadyPresent = false; for (int numKinetics=0; numKinetics<kinetics.length; ++numKinetics) { if (kinetics[numKinetics].equals(p_kinetics)) { structure.increaseRedundancy(red); kineticsAlreadyPresent = true; } } if (!kineticsAlreadyPresent) { Kinetics[] tempKinetics = kinetics; kinetics = new Kinetics[tempKinetics.length+1]; for (int i=0; i<tempKinetics.length; i++) { kinetics[i] = tempKinetics[i]; } kinetics[kinetics.length-1] = p_kinetics; structure.redundancy = 1; } } /* * MRH 24MAR2010: * Commented out. As RMG will be able to handle more than 2 Kinetics * per reaction, the code below is no longer necessary */ //10/29/07 gmagoon: changed to use Global.highTemperature, Global.lowTemperature (versus Global.temperature); apparently this function chooses the top two rates when there are multiple reactions with same reactants and products; the reactions with the top two rates are used; use of high and low temperatures would be less than ideal in cases where temperature of system changes over the course of reaction //10/31/07 gmagoon: it is assumed that two rate constants vs. 
temperature cross each other at most one time over the temperature range of interest //if there at least three different reactions/rates and rate crossings/intersections occur, the two rates used are based on the lowest simulation temperature and a warning is displayed // else if (additionalKinetics == null){ // if (p_kinetics.calculateRate(Global.lowTemperature) > kinetics.calculateRate(Global.lowTemperature)){ // if (p_kinetics.calculateRate(Global.highTemperature) < kinetics.calculateRate(Global.highTemperature)) // ratesForKineticsAndAdditionalKineticsCross = true; // additionalKinetics = kinetics; // kinetics = p_kinetics; // structure.redundancy = 1; // } // else{ // if(p_kinetics.calculateRate(Global.highTemperature) > kinetics.calculateRate(Global.highTemperature)) // ratesForKineticsAndAdditionalKineticsCross = true; // additionalKinetics = p_kinetics; // } // } // else if (additionalKinetics.equals(p_kinetics)) // return; // else { // if(ratesForKineticsAndAdditionalKineticsCross){ // if(p_kinetics.calculateRate(Global.lowTemperature) > kinetics.calculateRate(Global.lowTemperature)){ // if(p_kinetics.calculateRate(Global.highTemperature) > kinetics.calculateRate(Global.highTemperature)) // ratesForKineticsAndAdditionalKineticsCross = false; // System.out.println("WARNING: reaction that may be significant at higher temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // additionalKinetics = kinetics; // kinetics = p_kinetics; // } // else if (p_kinetics.calculateRate(Global.lowTemperature) < additionalKinetics.calculateRate(Global.lowTemperature)){ // if(p_kinetics.calculateRate(Global.highTemperature) > kinetics.calculateRate(Global.highTemperature)) // System.out.println("WARNING: reaction that may be significant at higher 
temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + p_kinetics.toString() ); // } // else{//else p_kinetics @ low temperature is between kinetics and additional kinetics at low temperature // if(p_kinetics.calculateRate(Global.highTemperature) < kinetics.calculateRate(Global.highTemperature)) // ratesForKineticsAndAdditionalKineticsCross = false; // System.out.println("WARNING: reaction that may be significant at higher temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // additionalKinetics = p_kinetics; // } // } // else{ // if ((p_kinetics.calculateRate(Global.lowTemperature) > kinetics.calculateRate(Global.lowTemperature)) && (p_kinetics.calculateRate(Global.highTemperature) > kinetics.calculateRate(Global.highTemperature))){ // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); //10/29/07 gmagoon: note that I have moved this before reassignment of variables; I think this was a minor bug in original code // additionalKinetics = kinetics; // kinetics = p_kinetics; // } // else if ((p_kinetics.calculateRate(Global.lowTemperature) < additionalKinetics.calculateRate(Global.lowTemperature))&&(p_kinetics.calculateRate(Global.highTemperature) < additionalKinetics.calculateRate(Global.highTemperature))){ // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + p_kinetics.toString() ); // } // else if 
((p_kinetics.calculateRate(Global.lowTemperature) > additionalKinetics.calculateRate(Global.lowTemperature))&&(p_kinetics.calculateRate(Global.highTemperature) > additionalKinetics.calculateRate(Global.highTemperature))&&(p_kinetics.calculateRate(Global.lowTemperature) < kinetics.calculateRate(Global.lowTemperature))&&(p_kinetics.calculateRate(Global.highTemperature) < kinetics.calculateRate(Global.highTemperature))){ // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // additionalKinetics = p_kinetics; // } // else //else there is at least one crossing in the temperature range of interest between p_kinetics and either kinetics or additionalKinetics; base which reaction is kept on the lowest temperature // { // if(p_kinetics.calculateRate(Global.lowTemperature) > kinetics.calculateRate(Global.lowTemperature)){ // if(p_kinetics.calculateRate(Global.highTemperature) < additionalKinetics.calculateRate(Global.highTemperature)) // System.out.println("WARNING: reaction that may be significant at higher temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // additionalKinetics = kinetics; // kinetics = p_kinetics; // ratesForKineticsAndAdditionalKineticsCross = true; // } // else if(p_kinetics.calculateRate(Global.lowTemperature) < additionalKinetics.calculateRate(Global.lowTemperature)){ // System.out.println("WARNING: reaction that may be significant at higher temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + 
p_kinetics.toString() ); // } // else //else p_kinetics at low temperature is between kinetics and additional kinetics at low temperature // { // if(p_kinetics.calculateRate(Global.highTemperature) > kinetics.calculateRate(Global.highTemperature)) // ratesForKineticsAndAdditionalKineticsCross = true; // else//else p_kinetics crosses additional kinetics // System.out.println("WARNING: reaction that may be significant at higher temperatures within the provided range is being neglected; see following for details:"); // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // additionalKinetics = p_kinetics; // } // } // } // } // else if (additionalKinetics == null){ // if (p_kinetics.calculateRate(Global.temperature) > kinetics.calculateRate(Global.temperature)){ // additionalKinetics = kinetics; // kinetics = p_kinetics; // structure.redundancy = 1; // } // else additionalKinetics = p_kinetics; // } // else if (additionalKinetics.equals(p_kinetics)) // return; // else { // if (p_kinetics.calculateRate(Global.temperature) > kinetics.calculateRate(Global.temperature)){ // additionalKinetics = kinetics; // kinetics = p_kinetics; // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // // } // else if (p_kinetics.calculateRate(Global.temperature) < additionalKinetics.calculateRate(Global.temperature)){ // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + p_kinetics.toString() ); // } // else { // System.out.println("More than 2 kinetics provided for reaction " + structure.toChemkinString(true)); // System.out.println("Ignoring the rate constant " + additionalKinetics.toString() ); // 
additionalKinetics = p_kinetics; // } // } } /* * MRH 18MAR2010: * A reaction's kinetics is now an array. The additionalKinetics is now * obsolete and this method will be removed (to ensure nothing calls it) */ // public boolean hasAdditionalKinetics(){ // return (additionalKinetics != null); // } //public int totalNumberOfKinetics(){ //7/26/09 gmagoon: this is not used, and appears to incorrectly assume that there are a maximum of two kinetics...I think this was old and I have changed it since // if (hasAdditionalKinetics()) // return 2; // else // return 1; //} /* * MRH 18MAR2010: * Restructuring a reaction's kinetics * With a reaction's kinetics now being defined as an array of Kinetics, * instead of "kinetics" and "additionalKinetics", the getAllKinetics() * method is now obsolete. */ // public HashSet getAllKinetics(){ // HashSet allKinetics = new HashSet(); // allKinetics.add(kinetics.multiply(structure.redundancy)); // if ( hasAdditionalKinetics()){ // allKinetics.add(additionalKinetics.multiply(structure.redundancy)); // // } // // return allKinetics; // } public void setFinalized(boolean p_finalized) { finalized = p_finalized; return; } public Structure getStructure() { return structure; } public void setStructure(Structure p_Structure) { structure = p_Structure; } /** * Returns the reaction as an ASCII string. * @return A string representing the reaction equation in ASCII test. 
*/ @Override public String toString() { if (getReactantNumber() == 0 || getProductNumber() == 0) return ""; String rxn = ""; Species species = (Species) structure.getReactantList().get(0); rxn = rxn + species.getName() + "(" + Integer.toString(species.getID()) + ")"; for (int i = 1; i < getReactantNumber(); i++) { species = (Species) structure.getReactantList().get(i); rxn += " + " + species.getName() + "(" + Integer.toString(species.getID()) + ")"; } rxn += " --> "; species = (Species) structure.getProductList().get(0); rxn = rxn + species.getName() + "(" + Integer.toString(species.getID()) + ")"; for (int i = 1; i < getProductNumber(); i++) { species = (Species) structure.getProductList().get(i); rxn += " + " + species.getName() + "(" + Integer.toString(species.getID()) + ")"; } return rxn; } public String toInChIString() { if (getReactantNumber() == 0 || getProductNumber() == 0) return ""; String rxn = ""; Species species = (Species) structure.getReactantList().get(0); rxn = rxn + species.getInChI(); for (int i = 1; i < getReactantNumber(); i++) { species = (Species) structure.getReactantList().get(i); rxn += " + " + species.getInChI(); } rxn += " --> "; species = (Species) structure.getProductList().get(0); rxn = rxn + species.getInChI(); for (int i = 1; i < getProductNumber(); i++) { species = (Species) structure.getProductList().get(i); rxn += " + " + species.getInChI(); } return rxn; } /** * Calculates the flux of this reaction given the provided system snapshot. * The system snapshot contains the temperature, pressure, and * concentrations of each core species. * @param ss The system snapshot at which to determine the reaction flux * @return The determined reaction flux */ public double calculateFlux(SystemSnapshot ss) { return calculateForwardFlux(ss) - calculateReverseFlux(ss); } /** * Calculates the forward flux of this reaction given the provided system snapshot. 
* The system snapshot contains the temperature, pressure, and * concentrations of each core species. * @param ss The system snapshot at which to determine the reaction flux * @return The determined reaction flux */ public double calculateForwardFlux(SystemSnapshot ss) { Temperature T = ss.getTemperature(); double forwardFlux = getRateConstant(T); for (ListIterator<Species> iter = getReactants(); iter.hasNext(); ) { Species spe = iter.next(); double conc = 0.0; if (ss.getSpeciesStatus(spe) != null) conc = ss.getSpeciesStatus(spe).getConcentration(); if (conc < 0) { double aTol = ReactionModelGenerator.getAtol(); //if (Math.abs(conc) < aTol) conc = 0; //else throw new NegativeConcentrationException(spe.getName() + ": " + String.valueOf(conc)); if (conc < -100.0 * aTol) throw new NegativeConcentrationException("Species " + spe.getName() + " has negative concentration: " + String.valueOf(conc)); } forwardFlux *= conc; } return forwardFlux; } /** * Calculates the flux of this reaction given the provided system snapshot. * The system snapshot contains the temperature, pressure, and * concentrations of each core species. 
* @param ss The system snapshot at which to determine the reaction flux * @return The determined reaction flux */ public double calculateReverseFlux(SystemSnapshot ss) { if (hasReverseReaction()) return reverseReaction.calculateForwardFlux(ss); else return 0.0; } public boolean isFromPrimaryKineticLibrary() { return kineticsFromPrimaryKineticLibrary; } public void setIsFromPrimaryKineticLibrary(boolean p_boolean) { kineticsFromPrimaryKineticLibrary = p_boolean; } public ReactionTemplate getReactionTemplate() { return rxnTemplate; } public void setReactionTemplate(ReactionTemplate rt) { rxnTemplate = rt; } public boolean hasMultipleKinetics() { if (getKinetics().length > 1) return true; else return false; } } /********************************************************************* File Path : RMG\RMG\jing\rxn\Reaction.java *********************************************************************/
Diffusive limits apply for A+B=C+D+E reactions. If there are 2 reactants an 3 p;roducts, diffusive limits apply to the forward direction.
source/RMG/jing/rxn/Reaction.java
Diffusive limits apply for A+B=C+D+E reactions.
<ide><path>ource/RMG/jing/rxn/Reaction.java <ide> <ide> return keff; <ide> } <add> <add> } <add> else if (numReacts == 2 && numProds == 3) { <add> <add> double k_forw = rate; <add> LinkedList reactantsInForwRxn = structure.reactants; <add> double k_forw_diff = calculatediff(reactantsInForwRxn); <add> double k_forw_eff = k_forw*k_forw_diff/(k_forw + k_forw_diff); <add> <add> keff = k_forw_eff; <add> //if (keff/k_forw < 0.2){ <add> //if (!getKinetics().getComment().endsWith("Diffusion limited")){ <add> DiffFactor = keff/rate; <add> <add> setKineticsComments(getComments() + "\t" + "Diffusion factor = " + DiffFactor + "\t" + "Keq =" + calculateKeq(p_temperature),0); <add> <add> return keff; <ide> <ide> } <ide>
Java
apache-2.0
a4d10478dd27102d3ef1bfa42113f414fa176146
0
chetanmeh/jackrabbit-oak,chetanmeh/jackrabbit-oak,chetanmeh/jackrabbit-oak,chetanmeh/jackrabbit-oak,chetanmeh/jackrabbit-oak
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.segment.file; import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newLinkedHashMap; import static com.google.common.collect.Sets.newHashSet; import static java.lang.Integer.getInteger; import static java.lang.String.format; import static java.lang.System.currentTimeMillis; import static java.lang.Thread.currentThread; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MINUTES; import static java.util.concurrent.TimeUnit.SECONDS; import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount; import static org.apache.jackrabbit.oak.commons.PathUtils.elements; import static org.apache.jackrabbit.oak.commons.PathUtils.getName; import static org.apache.jackrabbit.oak.commons.PathUtils.getParentPath; import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE; import static org.apache.jackrabbit.oak.segment.DefaultSegmentWriterBuilder.defaultSegmentWriterBuilder; import static org.apache.jackrabbit.oak.segment.SegmentId.isDataSegmentId; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.CLEANUP; import 
static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.COMPACTION; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.COMPACTION_FORCE_COMPACT; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.COMPACTION_RETRY; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.ESTIMATION; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.IDLE; import static org.apache.jackrabbit.oak.segment.file.TarRevisions.EXPEDITE_OPTION; import static org.apache.jackrabbit.oak.segment.file.TarRevisions.timeout; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileLock; import java.nio.channels.OverlappingFileLockException; import java.util.Collection; import java.util.LinkedHashMap; import java.util.List; import java.util.Map.Entry; import java.util.Set; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import javax.annotation.Nullable; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Predicate; import com.google.common.base.Stopwatch; import com.google.common.base.Supplier; import com.google.common.io.Closer; import org.apache.jackrabbit.oak.segment.Compactor; import org.apache.jackrabbit.oak.segment.RecordId; import org.apache.jackrabbit.oak.segment.Segment; import org.apache.jackrabbit.oak.segment.SegmentId; import org.apache.jackrabbit.oak.segment.SegmentNodeBuilder; import org.apache.jackrabbit.oak.segment.SegmentNodeState; import org.apache.jackrabbit.oak.segment.SegmentNotFoundException; import 
org.apache.jackrabbit.oak.segment.SegmentNotFoundExceptionListener; import org.apache.jackrabbit.oak.segment.SegmentWriter; import org.apache.jackrabbit.oak.segment.WriterCacheManager; import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions; import org.apache.jackrabbit.oak.segment.file.GCJournal.GCJournalEntry; import org.apache.jackrabbit.oak.segment.file.tar.CleanupContext; import org.apache.jackrabbit.oak.segment.file.tar.GCGeneration; import org.apache.jackrabbit.oak.segment.file.tar.TarFiles; import org.apache.jackrabbit.oak.segment.file.tar.TarFiles.CleanupResult; import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.jackrabbit.oak.stats.StatisticsProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The storage implementation for tar files. */ public class FileStore extends AbstractFileStore { private static final Logger log = LoggerFactory.getLogger(FileStore.class); /** * Minimal interval in milli seconds between subsequent garbage collection cycles. * Garbage collection invoked via {@link #fullGC()} will be skipped unless at least * the specified time has passed since its last successful invocation. 
*/ private static final long GC_BACKOFF = getInteger("oak.gc.backoff", 10*3600*1000); private static final int MB = 1024 * 1024; static final String LOCK_FILE_NAME = "repo.lock"; /** * GC counter for logging purposes */ private static final AtomicLong GC_COUNT = new AtomicLong(0); @Nonnull private final SegmentWriter segmentWriter; @Nonnull private final GarbageCollector garbageCollector; private final TarFiles tarFiles; private final RandomAccessFile lockFile; private final FileLock lock; private TarRevisions revisions; /** * Scheduler for running <em>short</em> background operations */ private final Scheduler fileStoreScheduler = new Scheduler("FileStore background tasks"); /** * List of old tar file generations that are waiting to be removed. They can * not be removed immediately, because they first need to be closed, and the * JVM needs to release the memory mapped file references. */ private final FileReaper fileReaper = new FileReaper(); /** * This flag is periodically updated by calling the {@code SegmentGCOptions} * at regular intervals. */ private final AtomicBoolean sufficientDiskSpace = new AtomicBoolean(true); /** * This flag is raised whenever the available memory falls under a specified * threshold. 
See {@link GCMemoryBarrier} */ private final AtomicBoolean sufficientMemory = new AtomicBoolean(true); /** * Flag signalling shutdown of the file store */ private volatile boolean shutdown; private final FileStoreStats stats; @Nonnull private final SegmentNotFoundExceptionListener snfeListener; FileStore(final FileStoreBuilder builder) throws InvalidFileStoreVersionException, IOException { super(builder); lockFile = new RandomAccessFile(new File(directory, LOCK_FILE_NAME), "rw"); try { lock = lockFile.getChannel().lock(); } catch (OverlappingFileLockException ex) { throw new IllegalStateException(directory.getAbsolutePath() + " is in use by another store.", ex); } this.segmentWriter = defaultSegmentWriterBuilder("sys") .withGeneration(() -> getGcGeneration().nonGC()) .withWriterPool() .with(builder.getCacheManager() .withAccessTracking("WRITE", builder.getStatsProvider())) .build(this); this.garbageCollector = new GarbageCollector( builder.getGcOptions(), builder.getGcListener(), new GCJournal(directory), builder.getCacheManager(), builder.getStatsProvider()); newManifestChecker(directory, builder.getStrictVersionCheck()).checkAndUpdateManifest(); this.stats = new FileStoreStats(builder.getStatsProvider(), this, 0); this.tarFiles = TarFiles.builder() .withDirectory(directory) .withMemoryMapping(memoryMapping) .withTarRecovery(recovery) .withIOMonitor(ioMonitor) .withFileStoreMonitor(stats) .withMaxFileSize(builder.getMaxFileSize() * MB) .build(); this.stats.init(this.tarFiles.size()); this.snfeListener = builder.getSnfeListener(); fileStoreScheduler.scheduleAtFixedRate( format("TarMK flush [%s]", directory), 5, SECONDS, new Runnable() { @Override public void run() { if (shutdown) { return; } try { flush(); } catch (IOException e) { log.warn("Failed to flush the TarMK at {}", directory, e); } } }); fileStoreScheduler.scheduleAtFixedRate( format("TarMK filer reaper [%s]", directory), 5, SECONDS, new Runnable() { @Override public void run() { fileReaper.reap(); } }); 
fileStoreScheduler.scheduleAtFixedRate( format("TarMK disk space check [%s]", directory), 1, MINUTES, new Runnable() { final SegmentGCOptions gcOptions = builder.getGcOptions(); @Override public void run() { checkDiskSpace(gcOptions); } }); log.info("TarMK opened: {} (mmap={})", directory, memoryMapping); log.debug("TAR files: {}", tarFiles); } FileStore bind(TarRevisions revisions) throws IOException { this.revisions = revisions; this.revisions.bind(this, tracker, initialNode()); return this; } @Nonnull private Supplier<RecordId> initialNode() { return new Supplier<RecordId>() { @Override public RecordId get() { try { SegmentWriter writer = defaultSegmentWriterBuilder("init").build(FileStore.this); NodeBuilder builder = EMPTY_NODE.builder(); builder.setChildNode("root", EMPTY_NODE); SegmentNodeState node = new SegmentNodeState(segmentReader, writer, getBlobStore(), writer.writeNode(builder.getNodeState())); writer.flush(); return node.getRecordId(); } catch (IOException e) { String msg = "Failed to write initial node"; log.error(msg, e); throw new IllegalStateException(msg, e); } } }; } @Nonnull private GCGeneration getGcGeneration() { return revisions.getHead().getSegmentId().getGcGeneration(); } /** * @return a runnable for running garbage collection */ public Runnable getGCRunner() { return new SafeRunnable(format("TarMK revision gc [%s]", directory), () -> { try { garbageCollector.run(); } catch (IOException e) { log.error("Error running revision garbage collection", e); } }); } /** * @return the currently active gc write monitor */ public GCNodeWriteMonitor getGCNodeWriteMonitor() { return garbageCollector.getGCNodeWriteMonitor(); } /** * @return the size of this store. 
*/ private long size() { return tarFiles.size(); } public int readerCount(){ return tarFiles.readerCount(); } public FileStoreStats getStats() { return stats; } public void flush() throws IOException { if (revisions == null) { return; } revisions.flush(new Callable<Void>() { @Override public Void call() throws Exception { segmentWriter.flush(); tarFiles.flush(); stats.flushed(); return null; } }); } /** * Run full garbage collection: estimation, compaction, cleanup. */ public void fullGC() throws IOException { garbageCollector.runFull(); } /** * Run tail garbage collection. */ public void tailGC() throws IOException { garbageCollector.runTail(); } /** * Run the compaction gain estimation process. * @return */ public GCEstimation estimateCompactionGain() { return garbageCollector.estimateCompactionGain(); } /** * Copy every referenced record in data (non-bulk) segments. Bulk segments * are fully kept (they are only removed in cleanup, if there is no * reference to them). * @return {@code true} on success, {@code false} otherwise. */ public boolean compactFull() { return garbageCollector.compactFull().isSuccess(); } public boolean compactTail() { return garbageCollector.compactTail().isSuccess(); } /** * Run garbage collection on the segment level: reclaim those data segments * that are from an old segment generation and those bulk segments that are not * reachable anymore. * Those tar files that shrink by at least 25% are rewritten to a new tar generation * skipping the reclaimed segments. */ public void cleanup() throws IOException { CompactionResult compactionResult = CompactionResult.skipped( getGcGeneration(), garbageCollector.gcOptions, revisions.getHead()); fileReaper.add(garbageCollector.cleanup(compactionResult)); } /** * Finds all external blob references that are currently accessible * in this repository and adds them to the given collector. Useful * for collecting garbage in an external data store. 
* <p> * Note that this method only collects blob references that are already * stored in the repository (at the time when this method is called), so * the garbage collector will need some other mechanism for tracking * in-memory references and references stored while this method is * running. * @param collector reference collector called back for each blob reference found */ public void collectBlobReferences(Consumer<String> collector) throws IOException { garbageCollector.collectBlobReferences(collector); } /** * Cancel a running revision garbage collection compaction process as soon as possible. * Does nothing if gc is not running. */ public void cancelGC() { garbageCollector.cancel(); } @Override @Nonnull public SegmentWriter getWriter() { return segmentWriter; } @Override @Nonnull public TarRevisions getRevisions() { return revisions; } @Override public void close() { // Flag the store as shutting / shut down shutdown = true; // avoid deadlocks by closing (and joining) the background // thread before acquiring the synchronization lock fileStoreScheduler.close(); try { flush(); } catch (IOException e) { log.warn("Unable to flush the store", e); } Closer closer = Closer.create(); closer.register(revisions); if (lock != null) { try { lock.release(); } catch (IOException e) { log.warn("Unable to release the file lock", e); } } closer.register(lockFile); closer.register(tarFiles); closeAndLogOnFail(closer); // Try removing pending files in case the scheduler didn't have a chance to run yet fileReaper.reap(); System.gc(); // for any memory-mappings that are no longer used log.info("TarMK closed: {}", directory); } @Override public boolean containsSegment(SegmentId id) { return tarFiles.containsSegment(id.getMostSignificantBits(), id.getLeastSignificantBits()); } @Override @Nonnull public Segment readSegment(final SegmentId id) { try { return segmentCache.getSegment(id, new Callable<Segment>() { @Override public Segment call() throws Exception { return 
readSegmentUncached(tarFiles, id); } }); } catch (ExecutionException e) { SegmentNotFoundException snfe = asSegmentNotFoundException(e, id); snfeListener.notify(id, snfe); throw snfe; } } @Override public void writeSegment(SegmentId id, byte[] buffer, int offset, int length) throws IOException { Segment segment = null; // If the segment is a data segment, create a new instance of Segment to // access some internal information stored in the segment and to store // in an in-memory cache for later use. GCGeneration generation = GCGeneration.NULL; Set<UUID> references = null; Set<String> binaryReferences = null; if (id.isDataSegmentId()) { ByteBuffer data; if (offset > 4096) { data = ByteBuffer.allocate(length); data.put(buffer, offset, length); data.rewind(); } else { data = ByteBuffer.wrap(buffer, offset, length); } segment = new Segment(tracker, segmentReader, id, data); generation = segment.getGcGeneration(); references = readReferences(segment); binaryReferences = readBinaryReferences(segment); } tarFiles.writeSegment( id.asUUID(), buffer, offset, length, generation, references, binaryReferences ); // Keep this data segment in memory as it's likely to be accessed soon. if (segment != null) { segmentCache.putSegment(segment); } } private void checkDiskSpace(SegmentGCOptions gcOptions) { long repositoryDiskSpace = size(); long availableDiskSpace = directory.getFreeSpace(); boolean updated = SegmentGCOptions.isDiskSpaceSufficient(repositoryDiskSpace, availableDiskSpace); boolean previous = sufficientDiskSpace.getAndSet(updated); if (previous && !updated) { log.warn("Available disk space ({}) is too low, current repository size is approx. {}", humanReadableByteCount(availableDiskSpace), humanReadableByteCount(repositoryDiskSpace)); } if (updated && !previous) { log.info("Available disk space ({}) is sufficient again for repository operations, current repository size is approx. 
{}", humanReadableByteCount(availableDiskSpace), humanReadableByteCount(repositoryDiskSpace)); } } private class GarbageCollector { @Nonnull private final SegmentGCOptions gcOptions; /** * {@code GcListener} listening to this instance's gc progress */ @Nonnull private final GCListener gcListener; @Nonnull private final GCJournal gcJournal; @Nonnull private final WriterCacheManager cacheManager; @Nonnull private final StatisticsProvider statisticsProvider; @Nonnull private GCNodeWriteMonitor compactionMonitor = GCNodeWriteMonitor.EMPTY; private volatile boolean cancelled; /** * Timestamp of the last time {@link #fullGC()} or {@link #tailGC()} was * successfully invoked. 0 if never. */ private long lastSuccessfullGC; GarbageCollector( @Nonnull SegmentGCOptions gcOptions, @Nonnull GCListener gcListener, @Nonnull GCJournal gcJournal, @Nonnull WriterCacheManager cacheManager, @Nonnull StatisticsProvider statisticsProvider) { this.gcOptions = gcOptions; this.gcListener = gcListener; this.gcJournal = gcJournal; this.cacheManager = cacheManager; this.statisticsProvider = statisticsProvider; } GCNodeWriteMonitor getGCNodeWriteMonitor() { return compactionMonitor; } synchronized void run() throws IOException { switch (gcOptions.getGCType()) { case FULL: runFull(); break; case TAIL: runTail(); break; default: throw new IllegalStateException("Invalid GC type"); } } synchronized void runFull() throws IOException { run(this::compactFull); } synchronized void runTail() throws IOException { run(this::compactTail); } private void run(Supplier<CompactionResult> compact) throws IOException { try { gcListener.info("TarMK GC #{}: started", GC_COUNT.incrementAndGet()); long dt = System.currentTimeMillis() - lastSuccessfullGC; if (dt < GC_BACKOFF) { gcListener.skipped("TarMK GC #{}: skipping garbage collection as it already ran " + "less than {} hours ago ({} s).", GC_COUNT, GC_BACKOFF/3600000, dt/1000); return; } boolean sufficientEstimatedGain = true; if 
(gcOptions.isEstimationDisabled()) { gcListener.info("TarMK GC #{}: estimation skipped because it was explicitly disabled", GC_COUNT); } else if (gcOptions.isPaused()) { gcListener.info("TarMK GC #{}: estimation skipped because compaction is paused", GC_COUNT); } else { gcListener.info("TarMK GC #{}: estimation started", GC_COUNT); gcListener.updateStatus(ESTIMATION.message()); Stopwatch watch = Stopwatch.createStarted(); GCEstimation estimate = estimateCompactionGain(); sufficientEstimatedGain = estimate.gcNeeded(); String gcLog = estimate.gcLog(); if (sufficientEstimatedGain) { gcListener.info( "TarMK GC #{}: estimation completed in {} ({} ms). {}", GC_COUNT, watch, watch.elapsed(MILLISECONDS), gcLog); } else { gcListener.skipped( "TarMK GC #{}: estimation completed in {} ({} ms). {}", GC_COUNT, watch, watch.elapsed(MILLISECONDS), gcLog); } } if (sufficientEstimatedGain) { if (!gcOptions.isPaused()) { try (GCMemoryBarrier gcMemoryBarrier = new GCMemoryBarrier( sufficientMemory, gcListener, GC_COUNT.get(), gcOptions)) { CompactionResult compactionResult = compact.get(); if (compactionResult.isSuccess()) { lastSuccessfullGC = System.currentTimeMillis(); } else { gcListener.info("TarMK GC #{}: cleaning up after failed compaction", GC_COUNT); } fileReaper.add(cleanup(compactionResult)); } } else { gcListener.skipped("TarMK GC #{}: compaction paused", GC_COUNT); } } } finally { compactionMonitor.finished(); gcListener.updateStatus(IDLE.message()); } } /** * Estimated compaction gain. The result will be undefined if stopped through * the passed {@code stop} signal. 
* @return compaction gain estimate */ synchronized GCEstimation estimateCompactionGain() { return new SizeDeltaGcEstimation(gcOptions, gcJournal, stats.getApproximateSize()); } @Nonnull private CompactionResult compactionAborted(@Nonnull GCGeneration generation) { gcListener.compactionFailed(generation); return CompactionResult.aborted(getGcGeneration(), generation); } @Nonnull private CompactionResult compactionSucceeded(@Nonnull GCGeneration generation, @Nonnull RecordId compactedRootId) { gcListener.compactionSucceeded(generation); return CompactionResult.succeeded(generation, gcOptions, compactedRootId); } @CheckForNull private SegmentNodeState getBase() { String root = gcJournal.read().getRoot(); RecordId rootId = RecordId.fromString(tracker, root); if (RecordId.NULL.equals(rootId)) { return null; } try { SegmentNodeState node = segmentReader.readNode(rootId); node.getPropertyCount(); // Resilience: fail early with a SNFE if the segment is not there return node; } catch (SegmentNotFoundException snfe) { gcListener.error("TarMK GC #" + GC_COUNT + ": Base state " + rootId + " is not accessible", snfe); return null; } } synchronized CompactionResult compactFull() { gcListener.info("TarMK GC #{}: running full compaction", GC_COUNT); return compact(null, getGcGeneration().nextFull()); } synchronized CompactionResult compactTail() { gcListener.info("TarMK GC #{}: running tail compaction", GC_COUNT); SegmentNodeState base = getBase(); if (base != null) { return compact(base, getGcGeneration().nextTail()); } gcListener.info("TarMK GC #{}: no base state available, running full compaction instead", GC_COUNT); return compact(null, getGcGeneration().nextFull()); } private CompactionResult compact(SegmentNodeState base, GCGeneration newGeneration) { try { Stopwatch watch = Stopwatch.createStarted(); gcListener.info("TarMK GC #{}: compaction started, gc options={}", GC_COUNT, gcOptions); gcListener.updateStatus(COMPACTION.message()); GCJournalEntry gcEntry = 
gcJournal.read(); long initialSize = size(); compactionMonitor = new GCNodeWriteMonitor(gcOptions.getGcLogInterval(), gcListener); compactionMonitor.init(GC_COUNT.get(), gcEntry.getRepoSize(), gcEntry.getNodes(), initialSize); SegmentNodeState before = getHead(); CancelCompactionSupplier cancel = new CancelCompactionSupplier(FileStore.this); SegmentWriter writer = defaultSegmentWriterBuilder("c") .with(cacheManager .withAccessTracking("COMPACT", statisticsProvider)) .withGeneration(newGeneration) .withoutWriterPool() .build(FileStore.this); Compactor compactor = new Compactor( segmentReader, writer, getBlobStore(), cancel, compactionMonitor); SegmentNodeState after = compact(base, before, compactor, writer); if (after == null) { gcListener.warn("TarMK GC #{}: compaction cancelled: {}.", GC_COUNT, cancel); return compactionAborted(newGeneration); } gcListener.info("TarMK GC #{}: compaction cycle 0 completed in {} ({} ms). Compacted {} to {}", GC_COUNT, watch, watch.elapsed(MILLISECONDS), before.getRecordId(), after.getRecordId()); int cycles = 0; boolean success = false; while (cycles < gcOptions.getRetryCount() && !(success = revisions.setHead(before.getRecordId(), after.getRecordId(), EXPEDITE_OPTION))) { // Some other concurrent changes have been made. // Rebase (and compact) those changes on top of the // compacted state before retrying to set the head. cycles++; gcListener.info("TarMK GC #{}: compaction detected concurrent commits while compacting. " + "Compacting these commits. Cycle {} of {}", GC_COUNT, cycles, gcOptions.getRetryCount()); gcListener.updateStatus(COMPACTION_RETRY.message() + cycles); Stopwatch cycleWatch = Stopwatch.createStarted(); SegmentNodeState head = getHead(); after = compact(after, head, compactor, writer); if (after == null) { gcListener.warn("TarMK GC #{}: compaction cancelled: {}.", GC_COUNT, cancel); return compactionAborted(newGeneration); } gcListener.info("TarMK GC #{}: compaction cycle {} completed in {} ({} ms). 
Compacted {} against {} to {}", GC_COUNT, cycles, cycleWatch, cycleWatch.elapsed(MILLISECONDS), head.getRecordId(), before.getRecordId(), after.getRecordId()); before = head; } if (!success) { gcListener.info("TarMK GC #{}: compaction gave up compacting concurrent commits after {} cycles.", GC_COUNT, cycles); int forceTimeout = gcOptions.getForceTimeout(); if (forceTimeout > 0) { gcListener.info("TarMK GC #{}: trying to force compact remaining commits for {} seconds. " + "Concurrent commits to the store will be blocked.", GC_COUNT, forceTimeout); gcListener.updateStatus(COMPACTION_FORCE_COMPACT.message()); Stopwatch forceWatch = Stopwatch.createStarted(); cycles++; cancel.timeOutAfter(forceTimeout, SECONDS); after = forceCompact(after, compactor, writer); success = after != null; if (success) { gcListener.info("TarMK GC #{}: compaction succeeded to force compact remaining commits " + "after {} ({} ms).", GC_COUNT, forceWatch, forceWatch.elapsed(MILLISECONDS)); } else { if (cancel.get()) { gcListener.warn("TarMK GC #{}: compaction failed to force compact remaining commits " + "after {} ({} ms). Compaction was cancelled: {}.", GC_COUNT, forceWatch, forceWatch.elapsed(MILLISECONDS), cancel); } else { gcListener.warn("TarMK GC #{}: compaction failed to force compact remaining commits. " + "after {} ({} ms). 
Most likely compaction didn't get exclusive access to the store.", GC_COUNT, forceWatch, forceWatch.elapsed(MILLISECONDS)); } } } } if (success) { writer.flush(); gcListener.info("TarMK GC #{}: compaction succeeded in {} ({} ms), after {} cycles", GC_COUNT, watch, watch.elapsed(MILLISECONDS), cycles); return compactionSucceeded(newGeneration, after.getRecordId()); } else { gcListener.info("TarMK GC #{}: compaction failed after {} ({} ms), and {} cycles", GC_COUNT, watch, watch.elapsed(MILLISECONDS), cycles); return compactionAborted(newGeneration); } } catch (InterruptedException e) { gcListener.error("TarMK GC #" + GC_COUNT + ": compaction interrupted", e); currentThread().interrupt(); return compactionAborted(newGeneration); } catch (IOException e) { gcListener.error("TarMK GC #" + GC_COUNT + ": compaction encountered an error", e); return compactionAborted(newGeneration); } } /** * Compact {@code uncompacted} on top of an optional {@code base}. * @param base the base state to compact onto or {@code null} for an empty state. * @param uncompacted the uncompacted state to compact * @param compactor the compactor for creating the new generation of the * uncompacted state. * @param writer the segment writer used by {@code compactor} for writing to the * new generation. * @return compacted clone of {@code uncompacted} or null if cancelled. * @throws IOException */ @CheckForNull private SegmentNodeState compact( @Nullable SegmentNodeState base, @Nonnull SegmentNodeState uncompacted, @Nonnull Compactor compactor, @Nonnull SegmentWriter writer) throws IOException { // Collect a chronologically ordered list of roots for the base and the uncompacted // state. This list consists of all checkpoints followed by the root. LinkedHashMap<String, NodeState> baseRoots = collectRoots(base); LinkedHashMap<String, NodeState> uncompactedRoots = collectRoots(uncompacted); // Compact the list of uncompacted roots to a list of compacted roots. 
LinkedHashMap<String, NodeState> compactedRoots = compact(baseRoots, uncompactedRoots, compactor); if (compactedRoots == null) { return null; } // Build a compacted super root by replacing the uncompacted roots with // the compacted ones in the original node. SegmentNodeBuilder builder = uncompacted.builder(); for (Entry<String, NodeState> compactedRoot : compactedRoots.entrySet()) { String path = compactedRoot.getKey(); NodeState state = compactedRoot.getValue(); NodeBuilder childBuilder = getChild(builder, getParentPath(path)); childBuilder.setChildNode(getName(path), state); } // Use the segment writer of the *new generation* to persist the compacted super root. RecordId nodeId = writer.writeNode(builder.getNodeState(), uncompacted.getStableIdBytes()); return new SegmentNodeState(segmentReader, segmentWriter, getBlobStore(), nodeId); } /** * Compact a list of uncompacted roots on top of base roots of the same key or * an empty node if none. */ @CheckForNull private LinkedHashMap<String, NodeState> compact( @Nonnull LinkedHashMap<String, NodeState> baseRoots, @Nonnull LinkedHashMap<String, NodeState> uncompactedRoots, @Nonnull Compactor compactor) throws IOException { NodeState onto = baseRoots.get("root"); NodeState previous = onto; LinkedHashMap<String, NodeState> compactedRoots = newLinkedHashMap(); for (Entry<String, NodeState> uncompactedRoot : uncompactedRoots.entrySet()) { String path = uncompactedRoot.getKey(); NodeState state = uncompactedRoot.getValue(); NodeState compacted; if (onto == null) { compacted = compactor.compact(state); } else { compacted = compactor.compact(previous, state, onto); } if (compacted == null) { return null; } previous = state; onto = compacted; compactedRoots.put(path, compacted); } return compactedRoots; } /** * Collect a chronologically ordered list of roots for the base and the uncompacted * state from a {@code superRoot} . This list consists of all checkpoints followed by * the root. 
*/ @Nonnull private LinkedHashMap<String, NodeState> collectRoots(@Nullable SegmentNodeState superRoot) { LinkedHashMap<String, NodeState> roots = newLinkedHashMap(); if (superRoot != null) { List<ChildNodeEntry> checkpoints = newArrayList( superRoot.getChildNode("checkpoints").getChildNodeEntries()); checkpoints.sort((cne1, cne2) -> { long c1 = cne1.getNodeState().getLong("created"); long c2 = cne2.getNodeState().getLong("created"); return Long.compare(c1, c2); }); for (ChildNodeEntry checkpoint : checkpoints) { roots.put("checkpoints/" + checkpoint.getName() + "/root", checkpoint.getNodeState().getChildNode("root")); } roots.put("root", superRoot.getChildNode("root")); } return roots; } @Nonnull private NodeBuilder getChild(NodeBuilder builder, String path) { for (String name : elements(path)) { builder = builder.getChildNode(name); } return builder; } private SegmentNodeState forceCompact( @Nonnull final SegmentNodeState base, @Nonnull final Compactor compactor, @Nonnull SegmentWriter writer) throws InterruptedException { RecordId compactedId = revisions.setHead(new Function<RecordId, RecordId>() { @Nullable @Override public RecordId apply(RecordId headId) { try { long t0 = currentTimeMillis(); SegmentNodeState after = compact( base, segmentReader.readNode(headId), compactor, writer); if (after == null) { gcListener.info("TarMK GC #{}: compaction cancelled after {} seconds", GC_COUNT, (currentTimeMillis() - t0) / 1000); return null; } else { return after.getRecordId(); } } catch (IOException e) { gcListener.error("TarMK GC #{" + GC_COUNT + "}: Error during forced compaction.", e); return null; } } }, timeout(gcOptions.getForceTimeout(), SECONDS)); return compactedId != null ? 
segmentReader.readNode(compactedId) : null; } private CleanupContext newCleanupContext(Predicate<GCGeneration> old) { return new CleanupContext() { private boolean isUnreferencedBulkSegment(UUID id, boolean referenced) { return !isDataSegmentId(id.getLeastSignificantBits()) && !referenced; } private boolean isOldDataSegment(UUID id, GCGeneration generation) { return isDataSegmentId(id.getLeastSignificantBits()) && old.apply(generation); } @Override public Collection<UUID> initialReferences() { Set<UUID> references = newHashSet(); for (SegmentId id : tracker.getReferencedSegmentIds()) { if (id.isBulkSegmentId()) { references.add(id.asUUID()); } } return references; } @Override public boolean shouldReclaim(UUID id, GCGeneration generation, boolean referenced) { return isUnreferencedBulkSegment(id, referenced) || isOldDataSegment(id, generation); } @Override public boolean shouldFollow(UUID from, UUID to) { return !isDataSegmentId(to.getLeastSignificantBits()); } }; } /** * Cleanup segments whose generation matches the {@link CompactionResult#reclaimer()} predicate. 
* @return list of files to be removed * @throws IOException */ @Nonnull private List<File> cleanup(@Nonnull CompactionResult compactionResult) throws IOException { Stopwatch watch = Stopwatch.createStarted(); gcListener.info("TarMK GC #{}: cleanup started.", GC_COUNT); gcListener.updateStatus(CLEANUP.message()); segmentCache.clear(); // Suggest to the JVM that now would be a good time // to clear stale weak references in the SegmentTracker System.gc(); CleanupResult cleanupResult = tarFiles.cleanup(newCleanupContext(compactionResult.reclaimer())); if (cleanupResult.isInterrupted()) { gcListener.info("TarMK GC #{}: cleanup interrupted", GC_COUNT); } tracker.clearSegmentIdTables(cleanupResult.getReclaimedSegmentIds(), compactionResult.gcInfo()); gcListener.info("TarMK GC #{}: cleanup marking files for deletion: {}", GC_COUNT, toFileNames(cleanupResult.getRemovableFiles())); long finalSize = size(); long reclaimedSize = cleanupResult.getReclaimedSize(); stats.reclaimed(reclaimedSize); gcJournal.persist(reclaimedSize, finalSize, getGcGeneration(), compactionMonitor.getCompactedNodes(), compactionResult.getCompactedRootId().toString10()); gcListener.cleaned(reclaimedSize, finalSize); gcListener.info("TarMK GC #{}: cleanup completed in {} ({} ms). Post cleanup size is {} ({} bytes)" + " and space reclaimed {} ({} bytes).", GC_COUNT, watch, watch.elapsed(MILLISECONDS), humanReadableByteCount(finalSize), finalSize, humanReadableByteCount(reclaimedSize), reclaimedSize); return cleanupResult.getRemovableFiles(); } private String toFileNames(@Nonnull List<File> files) { if (files.isEmpty()) { return "none"; } else { return Joiner.on(",").join(files); } } /** * Finds all external blob references that are currently accessible * in this repository and adds them to the given collector. Useful * for collecting garbage in an external data store. 
* <p> * Note that this method only collects blob references that are already * stored in the repository (at the time when this method is called), so * the garbage collector will need some other mechanism for tracking * in-memory references and references stored while this method is * running. * @param collector reference collector called back for each blob reference found */ synchronized void collectBlobReferences(Consumer<String> collector) throws IOException { segmentWriter.flush(); tarFiles.collectBlobReferences(collector, Reclaimers.newOldReclaimer(getGcGeneration(), gcOptions.getRetainedGenerations())); } void cancel() { cancelled = true; } /** * Represents the cancellation policy for the compaction phase. If the disk * space was considered insufficient at least once during compaction (or if * the space was never sufficient to begin with), compaction is considered * canceled. Furthermore when the file store is shutting down, compaction is * considered canceled. * Finally the cancellation can be triggered by a timeout that can be set * at any time. */ private class CancelCompactionSupplier implements Supplier<Boolean> { private final FileStore store; private String reason; private volatile long deadline; public CancelCompactionSupplier(@Nonnull FileStore store) { cancelled = false; this.store = store; } /** * Set a timeout for cancellation. Setting a different timeout cancels * a previous one that did not yet elapse. Setting a timeout after * cancellation took place has no effect. */ public void timeOutAfter(final long duration, @Nonnull final TimeUnit unit) { deadline = currentTimeMillis() + MILLISECONDS.convert(duration, unit); } @Override public Boolean get() { // The outOfDiskSpace and shutdown flags can only transition from // false (their initial values), to true. Once true, there should // be no way to go back. 
if (!store.sufficientDiskSpace.get()) { reason = "Not enough disk space"; return true; } if (!store.sufficientMemory.get()) { reason = "Not enough memory"; return true; } if (store.shutdown) { reason = "The FileStore is shutting down"; return true; } if (cancelled) { reason = "Cancelled by user"; return true; } if (deadline > 0 && currentTimeMillis() > deadline) { reason = "Timeout after " + deadline/1000 + " seconds"; return true; } return false; } @Override public String toString() { return reason; } } } /** * Instances of this class represent the result from a compaction. * Either {@link #succeeded(GCGeneration, SegmentGCOptions, RecordId) succeeded}, * {@link #aborted(GCGeneration, GCGeneration) aborted} or {@link #skipped(GCGeneration, SegmentGCOptions) skipped}. */ private abstract static class CompactionResult { @Nonnull private final GCGeneration currentGeneration; protected CompactionResult(@Nonnull GCGeneration currentGeneration) { this.currentGeneration = currentGeneration; } /** * Result of a succeeded compaction. * @param newGeneration the generation successfully created by compaction * @param gcOptions the current GC options used by compaction * @param compactedRootId the record id of the root created by compaction */ static CompactionResult succeeded( @Nonnull GCGeneration newGeneration, @Nonnull final SegmentGCOptions gcOptions, @Nonnull final RecordId compactedRootId) { return new CompactionResult(newGeneration) { @Override Predicate<GCGeneration> reclaimer() { return Reclaimers.newOldReclaimer(newGeneration, gcOptions.getRetainedGenerations()); } @Override boolean isSuccess() { return true; } @Override RecordId getCompactedRootId() { return compactedRootId; } }; } /** * Result of an aborted compaction. 
* @param currentGeneration the current generation of the store * @param failedGeneration the generation that compaction attempted to create */ static CompactionResult aborted( @Nonnull GCGeneration currentGeneration, @Nonnull final GCGeneration failedGeneration) { return new CompactionResult(currentGeneration) { @Override Predicate<GCGeneration> reclaimer() { return Reclaimers.newExactReclaimer(failedGeneration); } @Override boolean isSuccess() { return false; } }; } /** * Result serving as a placeholder for a compaction that was skipped. * @param currentGeneration the current generation of the store * @param gcOptions the current GC options used by compaction */ static CompactionResult skipped( @Nonnull GCGeneration currentGeneration, @Nonnull final SegmentGCOptions gcOptions, @Nonnull final RecordId compactedRootId) { return new CompactionResult(currentGeneration) { @Override Predicate<GCGeneration> reclaimer() { return Reclaimers.newOldReclaimer(currentGeneration, gcOptions.getRetainedGenerations()); } @Override boolean isSuccess() { return true; } @Override RecordId getCompactedRootId() { return compactedRootId; } }; } /** * @return a predicate determining which segments to * {@link GarbageCollector#cleanup(CompactionResult) clean up} for * the given compaction result. */ abstract Predicate<GCGeneration> reclaimer(); /** * @return {@code true} for {@link #succeeded(GCGeneration, SegmentGCOptions, RecordId) succeeded} * and {@link #skipped(GCGeneration, SegmentGCOptions, RecordId) skipped}, {@code false} otherwise. */ abstract boolean isSuccess(); /** * @return the record id of the compacted root on {@link #isSuccess() success}, * {@link RecordId#NULL} otherwise. */ RecordId getCompactedRootId() { return RecordId.NULL; } /** * @return a diagnostic message describing the outcome of this compaction. */ String gcInfo() { return "gc-count=" + GC_COUNT + ",gc-status=" + (isSuccess() ? 
"success" : "failed") + ",store-generation=" + currentGeneration + ",reclaim-predicate=" + reclaimer(); } } }
oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStore.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.segment.file; import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newLinkedHashMap; import static com.google.common.collect.Sets.newHashSet; import static java.lang.Integer.getInteger; import static java.lang.String.format; import static java.lang.System.currentTimeMillis; import static java.lang.Thread.currentThread; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MINUTES; import static java.util.concurrent.TimeUnit.SECONDS; import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount; import static org.apache.jackrabbit.oak.commons.PathUtils.elements; import static org.apache.jackrabbit.oak.commons.PathUtils.getName; import static org.apache.jackrabbit.oak.commons.PathUtils.getParentPath; import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE; import static org.apache.jackrabbit.oak.segment.DefaultSegmentWriterBuilder.defaultSegmentWriterBuilder; import static org.apache.jackrabbit.oak.segment.SegmentId.isDataSegmentId; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.CLEANUP; import 
static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.COMPACTION; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.COMPACTION_FORCE_COMPACT; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.COMPACTION_RETRY; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.ESTIMATION; import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCStatus.IDLE; import static org.apache.jackrabbit.oak.segment.file.TarRevisions.EXPEDITE_OPTION; import static org.apache.jackrabbit.oak.segment.file.TarRevisions.timeout; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileLock; import java.nio.channels.OverlappingFileLockException; import java.util.Collection; import java.util.LinkedHashMap; import java.util.List; import java.util.Map.Entry; import java.util.Set; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import javax.annotation.Nullable; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Predicate; import com.google.common.base.Stopwatch; import com.google.common.base.Supplier; import com.google.common.io.Closer; import org.apache.jackrabbit.oak.segment.Compactor; import org.apache.jackrabbit.oak.segment.RecordId; import org.apache.jackrabbit.oak.segment.Segment; import org.apache.jackrabbit.oak.segment.SegmentId; import org.apache.jackrabbit.oak.segment.SegmentNodeBuilder; import org.apache.jackrabbit.oak.segment.SegmentNodeState; import org.apache.jackrabbit.oak.segment.SegmentNotFoundException; import 
org.apache.jackrabbit.oak.segment.SegmentNotFoundExceptionListener; import org.apache.jackrabbit.oak.segment.SegmentWriter; import org.apache.jackrabbit.oak.segment.WriterCacheManager; import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions; import org.apache.jackrabbit.oak.segment.file.GCJournal.GCJournalEntry; import org.apache.jackrabbit.oak.segment.file.tar.CleanupContext; import org.apache.jackrabbit.oak.segment.file.tar.GCGeneration; import org.apache.jackrabbit.oak.segment.file.tar.TarFiles; import org.apache.jackrabbit.oak.segment.file.tar.TarFiles.CleanupResult; import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.jackrabbit.oak.stats.StatisticsProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The storage implementation for tar files. */ public class FileStore extends AbstractFileStore { private static final Logger log = LoggerFactory.getLogger(FileStore.class); /** * Minimal interval in milli seconds between subsequent garbage collection cycles. * Garbage collection invoked via {@link #fullGC()} will be skipped unless at least * the specified time has passed since its last successful invocation. 
*/ private static final long GC_BACKOFF = getInteger("oak.gc.backoff", 10*3600*1000); private static final int MB = 1024 * 1024; static final String LOCK_FILE_NAME = "repo.lock"; /** * GC counter for logging purposes */ private static final AtomicLong GC_COUNT = new AtomicLong(0); @Nonnull private final SegmentWriter segmentWriter; @Nonnull private final GarbageCollector garbageCollector; private final TarFiles tarFiles; private final RandomAccessFile lockFile; private final FileLock lock; private TarRevisions revisions; /** * Scheduler for running <em>short</em> background operations */ private final Scheduler fileStoreScheduler = new Scheduler("FileStore background tasks"); /** * List of old tar file generations that are waiting to be removed. They can * not be removed immediately, because they first need to be closed, and the * JVM needs to release the memory mapped file references. */ private final FileReaper fileReaper = new FileReaper(); /** * This flag is periodically updated by calling the {@code SegmentGCOptions} * at regular intervals. */ private final AtomicBoolean sufficientDiskSpace = new AtomicBoolean(true); /** * This flag is raised whenever the available memory falls under a specified * threshold. 
See {@link GCMemoryBarrier} */ private final AtomicBoolean sufficientMemory = new AtomicBoolean(true); /** * Flag signalling shutdown of the file store */ private volatile boolean shutdown; private final FileStoreStats stats; @Nonnull private final SegmentNotFoundExceptionListener snfeListener; FileStore(final FileStoreBuilder builder) throws InvalidFileStoreVersionException, IOException { super(builder); lockFile = new RandomAccessFile(new File(directory, LOCK_FILE_NAME), "rw"); try { lock = lockFile.getChannel().lock(); } catch (OverlappingFileLockException ex) { throw new IllegalStateException(directory.getAbsolutePath() + " is in use by another store.", ex); } this.segmentWriter = defaultSegmentWriterBuilder("sys") .withGeneration(() -> getGcGeneration().nonGC()) .withWriterPool() .with(builder.getCacheManager() .withAccessTracking("WRITE", builder.getStatsProvider())) .build(this); this.garbageCollector = new GarbageCollector( builder.getGcOptions(), builder.getGcListener(), new GCJournal(directory), builder.getCacheManager(), builder.getStatsProvider()); newManifestChecker(directory, builder.getStrictVersionCheck()).checkAndUpdateManifest(); this.stats = new FileStoreStats(builder.getStatsProvider(), this, 0); this.tarFiles = TarFiles.builder() .withDirectory(directory) .withMemoryMapping(memoryMapping) .withTarRecovery(recovery) .withIOMonitor(ioMonitor) .withFileStoreMonitor(stats) .withMaxFileSize(builder.getMaxFileSize() * MB) .build(); this.stats.init(this.tarFiles.size()); this.snfeListener = builder.getSnfeListener(); fileStoreScheduler.scheduleAtFixedRate( format("TarMK flush [%s]", directory), 5, SECONDS, new Runnable() { @Override public void run() { if (shutdown) { return; } try { flush(); } catch (IOException e) { log.warn("Failed to flush the TarMK at {}", directory, e); } } }); fileStoreScheduler.scheduleAtFixedRate( format("TarMK filer reaper [%s]", directory), 5, SECONDS, new Runnable() { @Override public void run() { fileReaper.reap(); } }); 
fileStoreScheduler.scheduleAtFixedRate( format("TarMK disk space check [%s]", directory), 1, MINUTES, new Runnable() { final SegmentGCOptions gcOptions = builder.getGcOptions(); @Override public void run() { checkDiskSpace(gcOptions); } }); log.info("TarMK opened: {} (mmap={})", directory, memoryMapping); log.debug("TAR files: {}", tarFiles); } FileStore bind(TarRevisions revisions) throws IOException { this.revisions = revisions; this.revisions.bind(this, tracker, initialNode()); return this; } @Nonnull private Supplier<RecordId> initialNode() { return new Supplier<RecordId>() { @Override public RecordId get() { try { SegmentWriter writer = defaultSegmentWriterBuilder("init").build(FileStore.this); NodeBuilder builder = EMPTY_NODE.builder(); builder.setChildNode("root", EMPTY_NODE); SegmentNodeState node = new SegmentNodeState(segmentReader, writer, getBlobStore(), writer.writeNode(builder.getNodeState())); writer.flush(); return node.getRecordId(); } catch (IOException e) { String msg = "Failed to write initial node"; log.error(msg, e); throw new IllegalStateException(msg, e); } } }; } @Nonnull private GCGeneration getGcGeneration() { return revisions.getHead().getSegmentId().getGcGeneration(); } /** * @return a runnable for running garbage collection */ public Runnable getGCRunner() { return new SafeRunnable(format("TarMK revision gc [%s]", directory), () -> { try { garbageCollector.run(); } catch (IOException e) { log.error("Error running revision garbage collection", e); } }); } /** * @return the currently active gc write monitor */ public GCNodeWriteMonitor getGCNodeWriteMonitor() { return garbageCollector.getGCNodeWriteMonitor(); } /** * @return the size of this store. 
*/ private long size() { return tarFiles.size(); } public int readerCount(){ return tarFiles.readerCount(); } public FileStoreStats getStats() { return stats; } public void flush() throws IOException { if (revisions == null) { return; } revisions.flush(new Callable<Void>() { @Override public Void call() throws Exception { segmentWriter.flush(); tarFiles.flush(); stats.flushed(); return null; } }); } /** * Run full garbage collection: estimation, compaction, cleanup. */ public void fullGC() throws IOException { garbageCollector.runFull(); } /** * Run tail garbage collection. */ public void tailGC() throws IOException { garbageCollector.runTail(); } /** * Run the compaction gain estimation process. * @return */ public GCEstimation estimateCompactionGain() { return garbageCollector.estimateCompactionGain(); } /** * Copy every referenced record in data (non-bulk) segments. Bulk segments * are fully kept (they are only removed in cleanup, if there is no * reference to them). * @return {@code true} on success, {@code false} otherwise. */ public boolean compactFull() { return garbageCollector.compactFull().isSuccess(); } public boolean compactTail() { return garbageCollector.compactTail().isSuccess(); } /** * Run garbage collection on the segment level: reclaim those data segments * that are from an old segment generation and those bulk segments that are not * reachable anymore. * Those tar files that shrink by at least 25% are rewritten to a new tar generation * skipping the reclaimed segments. */ public void cleanup() throws IOException { CompactionResult compactionResult = CompactionResult.skipped( getGcGeneration(), garbageCollector.gcOptions, revisions.getHead()); fileReaper.add(garbageCollector.cleanup(compactionResult)); } /** * Finds all external blob references that are currently accessible * in this repository and adds them to the given collector. Useful * for collecting garbage in an external data store. 
* <p> * Note that this method only collects blob references that are already * stored in the repository (at the time when this method is called), so * the garbage collector will need some other mechanism for tracking * in-memory references and references stored while this method is * running. * @param collector reference collector called back for each blob reference found */ public void collectBlobReferences(Consumer<String> collector) throws IOException { garbageCollector.collectBlobReferences(collector); } /** * Cancel a running revision garbage collection compaction process as soon as possible. * Does nothing if gc is not running. */ public void cancelGC() { garbageCollector.cancel(); } @Override @Nonnull public SegmentWriter getWriter() { return segmentWriter; } @Override @Nonnull public TarRevisions getRevisions() { return revisions; } @Override public void close() { // Flag the store as shutting / shut down shutdown = true; // avoid deadlocks by closing (and joining) the background // thread before acquiring the synchronization lock fileStoreScheduler.close(); try { flush(); } catch (IOException e) { log.warn("Unable to flush the store", e); } Closer closer = Closer.create(); closer.register(revisions); if (lock != null) { try { lock.release(); } catch (IOException e) { log.warn("Unable to release the file lock", e); } } closer.register(lockFile); closer.register(tarFiles); closeAndLogOnFail(closer); // Try removing pending files in case the scheduler didn't have a chance to run yet fileReaper.reap(); System.gc(); // for any memory-mappings that are no longer used log.info("TarMK closed: {}", directory); } @Override public boolean containsSegment(SegmentId id) { return tarFiles.containsSegment(id.getMostSignificantBits(), id.getLeastSignificantBits()); } @Override @Nonnull public Segment readSegment(final SegmentId id) { try { return segmentCache.getSegment(id, new Callable<Segment>() { @Override public Segment call() throws Exception { return 
readSegmentUncached(tarFiles, id); } }); } catch (ExecutionException e) { SegmentNotFoundException snfe = asSegmentNotFoundException(e, id); snfeListener.notify(id, snfe); throw snfe; } } @Override public void writeSegment(SegmentId id, byte[] buffer, int offset, int length) throws IOException { Segment segment = null; // If the segment is a data segment, create a new instance of Segment to // access some internal information stored in the segment and to store // in an in-memory cache for later use. GCGeneration generation = GCGeneration.NULL; Set<UUID> references = null; Set<String> binaryReferences = null; if (id.isDataSegmentId()) { ByteBuffer data; if (offset > 4096) { data = ByteBuffer.allocate(length); data.put(buffer, offset, length); data.rewind(); } else { data = ByteBuffer.wrap(buffer, offset, length); } segment = new Segment(tracker, segmentReader, id, data); generation = segment.getGcGeneration(); references = readReferences(segment); binaryReferences = readBinaryReferences(segment); } tarFiles.writeSegment( id.asUUID(), buffer, offset, length, generation, references, binaryReferences ); // Keep this data segment in memory as it's likely to be accessed soon. if (segment != null) { segmentCache.putSegment(segment); } } private void checkDiskSpace(SegmentGCOptions gcOptions) { long repositoryDiskSpace = size(); long availableDiskSpace = directory.getFreeSpace(); boolean updated = SegmentGCOptions.isDiskSpaceSufficient(repositoryDiskSpace, availableDiskSpace); boolean previous = sufficientDiskSpace.getAndSet(updated); if (previous && !updated) { log.warn("Available disk space ({}) is too low, current repository size is approx. {}", humanReadableByteCount(availableDiskSpace), humanReadableByteCount(repositoryDiskSpace)); } if (updated && !previous) { log.info("Available disk space ({}) is sufficient again for repository operations, current repository size is approx. 
{}", humanReadableByteCount(availableDiskSpace), humanReadableByteCount(repositoryDiskSpace)); } } private class GarbageCollector { @Nonnull private final SegmentGCOptions gcOptions; /** * {@code GcListener} listening to this instance's gc progress */ @Nonnull private final GCListener gcListener; @Nonnull private final GCJournal gcJournal; @Nonnull private final WriterCacheManager cacheManager; @Nonnull private final StatisticsProvider statisticsProvider; @Nonnull private GCNodeWriteMonitor compactionMonitor = GCNodeWriteMonitor.EMPTY; private volatile boolean cancelled; /** * Timestamp of the last time {@link #fullGC()} or {@link #tailGC()} was * successfully invoked. 0 if never. */ private long lastSuccessfullGC; GarbageCollector( @Nonnull SegmentGCOptions gcOptions, @Nonnull GCListener gcListener, @Nonnull GCJournal gcJournal, @Nonnull WriterCacheManager cacheManager, @Nonnull StatisticsProvider statisticsProvider) { this.gcOptions = gcOptions; this.gcListener = gcListener; this.gcJournal = gcJournal; this.cacheManager = cacheManager; this.statisticsProvider = statisticsProvider; } GCNodeWriteMonitor getGCNodeWriteMonitor() { return compactionMonitor; } synchronized void run() throws IOException { switch (gcOptions.getGCType()) { case FULL: runFull(); break; case TAIL: runTail(); break; default: throw new IllegalStateException("Invalid GC type"); } } synchronized void runFull() throws IOException { run(this::compactFull); } synchronized void runTail() throws IOException { run(this::compactTail); } private void run(Supplier<CompactionResult> compact) throws IOException { try { gcListener.info("TarMK GC #{}: started", GC_COUNT.incrementAndGet()); long dt = System.currentTimeMillis() - lastSuccessfullGC; if (dt < GC_BACKOFF) { gcListener.skipped("TarMK GC #{}: skipping garbage collection as it already ran " + "less than {} hours ago ({} s).", GC_COUNT, GC_BACKOFF/3600000, dt/1000); return; } GCMemoryBarrier gcMemoryBarrier = new GCMemoryBarrier( sufficientMemory, 
gcListener, GC_COUNT.get(), gcOptions); boolean sufficientEstimatedGain = true; if (gcOptions.isEstimationDisabled()) { gcListener.info("TarMK GC #{}: estimation skipped because it was explicitly disabled", GC_COUNT); } else if (gcOptions.isPaused()) { gcListener.info("TarMK GC #{}: estimation skipped because compaction is paused", GC_COUNT); } else { gcListener.info("TarMK GC #{}: estimation started", GC_COUNT); gcListener.updateStatus(ESTIMATION.message()); Stopwatch watch = Stopwatch.createStarted(); GCEstimation estimate = estimateCompactionGain(); sufficientEstimatedGain = estimate.gcNeeded(); String gcLog = estimate.gcLog(); if (sufficientEstimatedGain) { gcListener.info( "TarMK GC #{}: estimation completed in {} ({} ms). {}", GC_COUNT, watch, watch.elapsed(MILLISECONDS), gcLog); } else { gcListener.skipped( "TarMK GC #{}: estimation completed in {} ({} ms). {}", GC_COUNT, watch, watch.elapsed(MILLISECONDS), gcLog); } } if (sufficientEstimatedGain) { if (!gcOptions.isPaused()) { CompactionResult compactionResult = compact.get(); if (compactionResult.isSuccess()) { lastSuccessfullGC = System.currentTimeMillis(); } else { gcListener.info("TarMK GC #{}: cleaning up after failed compaction", GC_COUNT); } fileReaper.add(cleanup(compactionResult)); } else { gcListener.skipped("TarMK GC #{}: compaction paused", GC_COUNT); } } gcMemoryBarrier.close(); } finally { compactionMonitor.finished(); gcListener.updateStatus(IDLE.message()); } } /** * Estimated compaction gain. The result will be undefined if stopped through * the passed {@code stop} signal. 
* @return compaction gain estimate */ synchronized GCEstimation estimateCompactionGain() { return new SizeDeltaGcEstimation(gcOptions, gcJournal, stats.getApproximateSize()); } @Nonnull private CompactionResult compactionAborted(@Nonnull GCGeneration generation) { gcListener.compactionFailed(generation); return CompactionResult.aborted(getGcGeneration(), generation); } @Nonnull private CompactionResult compactionSucceeded(@Nonnull GCGeneration generation, @Nonnull RecordId compactedRootId) { gcListener.compactionSucceeded(generation); return CompactionResult.succeeded(generation, gcOptions, compactedRootId); } @CheckForNull private SegmentNodeState getBase() { String root = gcJournal.read().getRoot(); RecordId rootId = RecordId.fromString(tracker, root); if (RecordId.NULL.equals(rootId)) { return null; } try { SegmentNodeState node = segmentReader.readNode(rootId); node.getPropertyCount(); // Resilience: fail early with a SNFE if the segment is not there return node; } catch (SegmentNotFoundException snfe) { gcListener.error("TarMK GC #" + GC_COUNT + ": Base state " + rootId + " is not accessible", snfe); return null; } } synchronized CompactionResult compactFull() { gcListener.info("TarMK GC #{}: running full compaction", GC_COUNT); return compact(null, getGcGeneration().nextFull()); } synchronized CompactionResult compactTail() { gcListener.info("TarMK GC #{}: running tail compaction", GC_COUNT); SegmentNodeState base = getBase(); if (base != null) { return compact(base, getGcGeneration().nextTail()); } gcListener.info("TarMK GC #{}: no base state available, running full compaction instead", GC_COUNT); return compact(null, getGcGeneration().nextFull()); } private CompactionResult compact(SegmentNodeState base, GCGeneration newGeneration) { try { Stopwatch watch = Stopwatch.createStarted(); gcListener.info("TarMK GC #{}: compaction started, gc options={}", GC_COUNT, gcOptions); gcListener.updateStatus(COMPACTION.message()); GCJournalEntry gcEntry = 
gcJournal.read(); long initialSize = size(); compactionMonitor = new GCNodeWriteMonitor(gcOptions.getGcLogInterval(), gcListener); compactionMonitor.init(GC_COUNT.get(), gcEntry.getRepoSize(), gcEntry.getNodes(), initialSize); SegmentNodeState before = getHead(); CancelCompactionSupplier cancel = new CancelCompactionSupplier(FileStore.this); SegmentWriter writer = defaultSegmentWriterBuilder("c") .with(cacheManager .withAccessTracking("COMPACT", statisticsProvider)) .withGeneration(newGeneration) .withoutWriterPool() .build(FileStore.this); Compactor compactor = new Compactor( segmentReader, writer, getBlobStore(), cancel, compactionMonitor); SegmentNodeState after = compact(base, before, compactor, writer); if (after == null) { gcListener.warn("TarMK GC #{}: compaction cancelled: {}.", GC_COUNT, cancel); return compactionAborted(newGeneration); } gcListener.info("TarMK GC #{}: compaction cycle 0 completed in {} ({} ms). Compacted {} to {}", GC_COUNT, watch, watch.elapsed(MILLISECONDS), before.getRecordId(), after.getRecordId()); int cycles = 0; boolean success = false; while (cycles < gcOptions.getRetryCount() && !(success = revisions.setHead(before.getRecordId(), after.getRecordId(), EXPEDITE_OPTION))) { // Some other concurrent changes have been made. // Rebase (and compact) those changes on top of the // compacted state before retrying to set the head. cycles++; gcListener.info("TarMK GC #{}: compaction detected concurrent commits while compacting. " + "Compacting these commits. Cycle {} of {}", GC_COUNT, cycles, gcOptions.getRetryCount()); gcListener.updateStatus(COMPACTION_RETRY.message() + cycles); Stopwatch cycleWatch = Stopwatch.createStarted(); SegmentNodeState head = getHead(); after = compact(after, head, compactor, writer); if (after == null) { gcListener.warn("TarMK GC #{}: compaction cancelled: {}.", GC_COUNT, cancel); return compactionAborted(newGeneration); } gcListener.info("TarMK GC #{}: compaction cycle {} completed in {} ({} ms). 
Compacted {} against {} to {}", GC_COUNT, cycles, cycleWatch, cycleWatch.elapsed(MILLISECONDS), head.getRecordId(), before.getRecordId(), after.getRecordId()); before = head; } if (!success) { gcListener.info("TarMK GC #{}: compaction gave up compacting concurrent commits after {} cycles.", GC_COUNT, cycles); int forceTimeout = gcOptions.getForceTimeout(); if (forceTimeout > 0) { gcListener.info("TarMK GC #{}: trying to force compact remaining commits for {} seconds. " + "Concurrent commits to the store will be blocked.", GC_COUNT, forceTimeout); gcListener.updateStatus(COMPACTION_FORCE_COMPACT.message()); Stopwatch forceWatch = Stopwatch.createStarted(); cycles++; cancel.timeOutAfter(forceTimeout, SECONDS); after = forceCompact(after, compactor, writer); success = after != null; if (success) { gcListener.info("TarMK GC #{}: compaction succeeded to force compact remaining commits " + "after {} ({} ms).", GC_COUNT, forceWatch, forceWatch.elapsed(MILLISECONDS)); } else { if (cancel.get()) { gcListener.warn("TarMK GC #{}: compaction failed to force compact remaining commits " + "after {} ({} ms). Compaction was cancelled: {}.", GC_COUNT, forceWatch, forceWatch.elapsed(MILLISECONDS), cancel); } else { gcListener.warn("TarMK GC #{}: compaction failed to force compact remaining commits. " + "after {} ({} ms). 
Most likely compaction didn't get exclusive access to the store.", GC_COUNT, forceWatch, forceWatch.elapsed(MILLISECONDS)); } } } } if (success) { writer.flush(); gcListener.info("TarMK GC #{}: compaction succeeded in {} ({} ms), after {} cycles", GC_COUNT, watch, watch.elapsed(MILLISECONDS), cycles); return compactionSucceeded(newGeneration, after.getRecordId()); } else { gcListener.info("TarMK GC #{}: compaction failed after {} ({} ms), and {} cycles", GC_COUNT, watch, watch.elapsed(MILLISECONDS), cycles); return compactionAborted(newGeneration); } } catch (InterruptedException e) { gcListener.error("TarMK GC #" + GC_COUNT + ": compaction interrupted", e); currentThread().interrupt(); return compactionAborted(newGeneration); } catch (IOException e) { gcListener.error("TarMK GC #" + GC_COUNT + ": compaction encountered an error", e); return compactionAborted(newGeneration); } } /** * Compact {@code uncompacted} on top of an optional {@code base}. * @param base the base state to compact onto or {@code null} for an empty state. * @param uncompacted the uncompacted state to compact * @param compactor the compactor for creating the new generation of the * uncompacted state. * @param writer the segment writer used by {@code compactor} for writing to the * new generation. * @return compacted clone of {@code uncompacted} or null if cancelled. * @throws IOException */ @CheckForNull private SegmentNodeState compact( @Nullable SegmentNodeState base, @Nonnull SegmentNodeState uncompacted, @Nonnull Compactor compactor, @Nonnull SegmentWriter writer) throws IOException { // Collect a chronologically ordered list of roots for the base and the uncompacted // state. This list consists of all checkpoints followed by the root. LinkedHashMap<String, NodeState> baseRoots = collectRoots(base); LinkedHashMap<String, NodeState> uncompactedRoots = collectRoots(uncompacted); // Compact the list of uncompacted roots to a list of compacted roots. 
LinkedHashMap<String, NodeState> compactedRoots = compact(baseRoots, uncompactedRoots, compactor); if (compactedRoots == null) { return null; } // Build a compacted super root by replacing the uncompacted roots with // the compacted ones in the original node. SegmentNodeBuilder builder = uncompacted.builder(); for (Entry<String, NodeState> compactedRoot : compactedRoots.entrySet()) { String path = compactedRoot.getKey(); NodeState state = compactedRoot.getValue(); NodeBuilder childBuilder = getChild(builder, getParentPath(path)); childBuilder.setChildNode(getName(path), state); } // Use the segment writer of the *new generation* to persist the compacted super root. RecordId nodeId = writer.writeNode(builder.getNodeState(), uncompacted.getStableIdBytes()); return new SegmentNodeState(segmentReader, segmentWriter, getBlobStore(), nodeId); } /** * Compact a list of uncompacted roots on top of base roots of the same key or * an empty node if none. */ @CheckForNull private LinkedHashMap<String, NodeState> compact( @Nonnull LinkedHashMap<String, NodeState> baseRoots, @Nonnull LinkedHashMap<String, NodeState> uncompactedRoots, @Nonnull Compactor compactor) throws IOException { NodeState onto = baseRoots.get("root"); NodeState previous = onto; LinkedHashMap<String, NodeState> compactedRoots = newLinkedHashMap(); for (Entry<String, NodeState> uncompactedRoot : uncompactedRoots.entrySet()) { String path = uncompactedRoot.getKey(); NodeState state = uncompactedRoot.getValue(); NodeState compacted; if (onto == null) { compacted = compactor.compact(state); } else { compacted = compactor.compact(previous, state, onto); } if (compacted == null) { return null; } previous = state; onto = compacted; compactedRoots.put(path, compacted); } return compactedRoots; } /** * Collect a chronologically ordered list of roots for the base and the uncompacted * state from a {@code superRoot} . This list consists of all checkpoints followed by * the root. 
*/ @Nonnull private LinkedHashMap<String, NodeState> collectRoots(@Nullable SegmentNodeState superRoot) { LinkedHashMap<String, NodeState> roots = newLinkedHashMap(); if (superRoot != null) { List<ChildNodeEntry> checkpoints = newArrayList( superRoot.getChildNode("checkpoints").getChildNodeEntries()); checkpoints.sort((cne1, cne2) -> { long c1 = cne1.getNodeState().getLong("created"); long c2 = cne2.getNodeState().getLong("created"); return Long.compare(c1, c2); }); for (ChildNodeEntry checkpoint : checkpoints) { roots.put("checkpoints/" + checkpoint.getName() + "/root", checkpoint.getNodeState().getChildNode("root")); } roots.put("root", superRoot.getChildNode("root")); } return roots; } @Nonnull private NodeBuilder getChild(NodeBuilder builder, String path) { for (String name : elements(path)) { builder = builder.getChildNode(name); } return builder; } private SegmentNodeState forceCompact( @Nonnull final SegmentNodeState base, @Nonnull final Compactor compactor, @Nonnull SegmentWriter writer) throws InterruptedException { RecordId compactedId = revisions.setHead(new Function<RecordId, RecordId>() { @Nullable @Override public RecordId apply(RecordId headId) { try { long t0 = currentTimeMillis(); SegmentNodeState after = compact( base, segmentReader.readNode(headId), compactor, writer); if (after == null) { gcListener.info("TarMK GC #{}: compaction cancelled after {} seconds", GC_COUNT, (currentTimeMillis() - t0) / 1000); return null; } else { return after.getRecordId(); } } catch (IOException e) { gcListener.error("TarMK GC #{" + GC_COUNT + "}: Error during forced compaction.", e); return null; } } }, timeout(gcOptions.getForceTimeout(), SECONDS)); return compactedId != null ? 
segmentReader.readNode(compactedId) : null; } private CleanupContext newCleanupContext(Predicate<GCGeneration> old) { return new CleanupContext() { private boolean isUnreferencedBulkSegment(UUID id, boolean referenced) { return !isDataSegmentId(id.getLeastSignificantBits()) && !referenced; } private boolean isOldDataSegment(UUID id, GCGeneration generation) { return isDataSegmentId(id.getLeastSignificantBits()) && old.apply(generation); } @Override public Collection<UUID> initialReferences() { Set<UUID> references = newHashSet(); for (SegmentId id : tracker.getReferencedSegmentIds()) { if (id.isBulkSegmentId()) { references.add(id.asUUID()); } } return references; } @Override public boolean shouldReclaim(UUID id, GCGeneration generation, boolean referenced) { return isUnreferencedBulkSegment(id, referenced) || isOldDataSegment(id, generation); } @Override public boolean shouldFollow(UUID from, UUID to) { return !isDataSegmentId(to.getLeastSignificantBits()); } }; } /** * Cleanup segments whose generation matches the {@link CompactionResult#reclaimer()} predicate. 
* @return list of files to be removed * @throws IOException */ @Nonnull private List<File> cleanup(@Nonnull CompactionResult compactionResult) throws IOException { Stopwatch watch = Stopwatch.createStarted(); gcListener.info("TarMK GC #{}: cleanup started.", GC_COUNT); gcListener.updateStatus(CLEANUP.message()); segmentCache.clear(); // Suggest to the JVM that now would be a good time // to clear stale weak references in the SegmentTracker System.gc(); CleanupResult cleanupResult = tarFiles.cleanup(newCleanupContext(compactionResult.reclaimer())); if (cleanupResult.isInterrupted()) { gcListener.info("TarMK GC #{}: cleanup interrupted", GC_COUNT); } tracker.clearSegmentIdTables(cleanupResult.getReclaimedSegmentIds(), compactionResult.gcInfo()); gcListener.info("TarMK GC #{}: cleanup marking files for deletion: {}", GC_COUNT, toFileNames(cleanupResult.getRemovableFiles())); long finalSize = size(); long reclaimedSize = cleanupResult.getReclaimedSize(); stats.reclaimed(reclaimedSize); gcJournal.persist(reclaimedSize, finalSize, getGcGeneration(), compactionMonitor.getCompactedNodes(), compactionResult.getCompactedRootId().toString10()); gcListener.cleaned(reclaimedSize, finalSize); gcListener.info("TarMK GC #{}: cleanup completed in {} ({} ms). Post cleanup size is {} ({} bytes)" + " and space reclaimed {} ({} bytes).", GC_COUNT, watch, watch.elapsed(MILLISECONDS), humanReadableByteCount(finalSize), finalSize, humanReadableByteCount(reclaimedSize), reclaimedSize); return cleanupResult.getRemovableFiles(); } private String toFileNames(@Nonnull List<File> files) { if (files.isEmpty()) { return "none"; } else { return Joiner.on(",").join(files); } } /** * Finds all external blob references that are currently accessible * in this repository and adds them to the given collector. Useful * for collecting garbage in an external data store. 
* <p> * Note that this method only collects blob references that are already * stored in the repository (at the time when this method is called), so * the garbage collector will need some other mechanism for tracking * in-memory references and references stored while this method is * running. * @param collector reference collector called back for each blob reference found */ synchronized void collectBlobReferences(Consumer<String> collector) throws IOException { segmentWriter.flush(); tarFiles.collectBlobReferences(collector, Reclaimers.newOldReclaimer(getGcGeneration(), gcOptions.getRetainedGenerations())); } void cancel() { cancelled = true; } /** * Represents the cancellation policy for the compaction phase. If the disk * space was considered insufficient at least once during compaction (or if * the space was never sufficient to begin with), compaction is considered * canceled. Furthermore when the file store is shutting down, compaction is * considered canceled. * Finally the cancellation can be triggered by a timeout that can be set * at any time. */ private class CancelCompactionSupplier implements Supplier<Boolean> { private final FileStore store; private String reason; private volatile long deadline; public CancelCompactionSupplier(@Nonnull FileStore store) { cancelled = false; this.store = store; } /** * Set a timeout for cancellation. Setting a different timeout cancels * a previous one that did not yet elapse. Setting a timeout after * cancellation took place has no effect. */ public void timeOutAfter(final long duration, @Nonnull final TimeUnit unit) { deadline = currentTimeMillis() + MILLISECONDS.convert(duration, unit); } @Override public Boolean get() { // The outOfDiskSpace and shutdown flags can only transition from // false (their initial values), to true. Once true, there should // be no way to go back. 
if (!store.sufficientDiskSpace.get()) { reason = "Not enough disk space"; return true; } if (!store.sufficientMemory.get()) { reason = "Not enough memory"; return true; } if (store.shutdown) { reason = "The FileStore is shutting down"; return true; } if (cancelled) { reason = "Cancelled by user"; return true; } if (deadline > 0 && currentTimeMillis() > deadline) { reason = "Timeout after " + deadline/1000 + " seconds"; return true; } return false; } @Override public String toString() { return reason; } } } /** * Instances of this class represent the result from a compaction. * Either {@link #succeeded(GCGeneration, SegmentGCOptions, RecordId) succeeded}, * {@link #aborted(GCGeneration, GCGeneration) aborted} or {@link #skipped(GCGeneration, SegmentGCOptions) skipped}. */ private abstract static class CompactionResult { @Nonnull private final GCGeneration currentGeneration; protected CompactionResult(@Nonnull GCGeneration currentGeneration) { this.currentGeneration = currentGeneration; } /** * Result of a succeeded compaction. * @param newGeneration the generation successfully created by compaction * @param gcOptions the current GC options used by compaction * @param compactedRootId the record id of the root created by compaction */ static CompactionResult succeeded( @Nonnull GCGeneration newGeneration, @Nonnull final SegmentGCOptions gcOptions, @Nonnull final RecordId compactedRootId) { return new CompactionResult(newGeneration) { @Override Predicate<GCGeneration> reclaimer() { return Reclaimers.newOldReclaimer(newGeneration, gcOptions.getRetainedGenerations()); } @Override boolean isSuccess() { return true; } @Override RecordId getCompactedRootId() { return compactedRootId; } }; } /** * Result of an aborted compaction. 
* @param currentGeneration the current generation of the store * @param failedGeneration the generation that compaction attempted to create */ static CompactionResult aborted( @Nonnull GCGeneration currentGeneration, @Nonnull final GCGeneration failedGeneration) { return new CompactionResult(currentGeneration) { @Override Predicate<GCGeneration> reclaimer() { return Reclaimers.newExactReclaimer(failedGeneration); } @Override boolean isSuccess() { return false; } }; } /** * Result serving as a placeholder for a compaction that was skipped. * @param currentGeneration the current generation of the store * @param gcOptions the current GC options used by compaction */ static CompactionResult skipped( @Nonnull GCGeneration currentGeneration, @Nonnull final SegmentGCOptions gcOptions, @Nonnull final RecordId compactedRootId) { return new CompactionResult(currentGeneration) { @Override Predicate<GCGeneration> reclaimer() { return Reclaimers.newOldReclaimer(currentGeneration, gcOptions.getRetainedGenerations()); } @Override boolean isSuccess() { return true; } @Override RecordId getCompactedRootId() { return compactedRootId; } }; } /** * @return a predicate determining which segments to * {@link GarbageCollector#cleanup(CompactionResult) clean up} for * the given compaction result. */ abstract Predicate<GCGeneration> reclaimer(); /** * @return {@code true} for {@link #succeeded(GCGeneration, SegmentGCOptions, RecordId) succeeded} * and {@link #skipped(GCGeneration, SegmentGCOptions, RecordId) skipped}, {@code false} otherwise. */ abstract boolean isSuccess(); /** * @return the record id of the compacted root on {@link #isSuccess() success}, * {@link RecordId#NULL} otherwise. */ RecordId getCompactedRootId() { return RecordId.NULL; } /** * @return a diagnostic message describing the outcome of this compaction. */ String gcInfo() { return "gc-count=" + GC_COUNT + ",gc-status=" + (isSuccess() ? 
"success" : "failed") + ",store-generation=" + currentGeneration + ",reclaim-predicate=" + reclaimer(); } } }
OAK-6634: Confusing log entries when memory requirements are not met at start of OnRC Protect only the compaction phase with the memory barrier. git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1807600 13f79535-47bb-0310-9956-ffa450edef68
oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStore.java
OAK-6634: Confusing log entries when memory requirements are not met at start of OnRC Protect only the compaction phase with the memory barrier.
<ide><path>ak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/file/FileStore.java <ide> return; <ide> } <ide> <del> GCMemoryBarrier gcMemoryBarrier = new GCMemoryBarrier( <del> sufficientMemory, gcListener, GC_COUNT.get(), gcOptions); <del> <ide> boolean sufficientEstimatedGain = true; <ide> if (gcOptions.isEstimationDisabled()) { <ide> gcListener.info("TarMK GC #{}: estimation skipped because it was explicitly disabled", GC_COUNT); <ide> <ide> if (sufficientEstimatedGain) { <ide> if (!gcOptions.isPaused()) { <del> CompactionResult compactionResult = compact.get(); <del> if (compactionResult.isSuccess()) { <del> lastSuccessfullGC = System.currentTimeMillis(); <del> } else { <del> gcListener.info("TarMK GC #{}: cleaning up after failed compaction", GC_COUNT); <add> try (GCMemoryBarrier gcMemoryBarrier = new GCMemoryBarrier( <add> sufficientMemory, gcListener, GC_COUNT.get(), gcOptions)) <add> { <add> CompactionResult compactionResult = compact.get(); <add> if (compactionResult.isSuccess()) { <add> lastSuccessfullGC = System.currentTimeMillis(); <add> } else { <add> gcListener.info("TarMK GC #{}: cleaning up after failed compaction", GC_COUNT); <add> } <add> fileReaper.add(cleanup(compactionResult)); <ide> } <del> fileReaper.add(cleanup(compactionResult)); <ide> } else { <ide> gcListener.skipped("TarMK GC #{}: compaction paused", GC_COUNT); <ide> } <ide> } <del> gcMemoryBarrier.close(); <ide> } finally { <ide> compactionMonitor.finished(); <ide> gcListener.updateStatus(IDLE.message());
Java
apache-2.0
9c846025572f520325e9eb934b08c4426bd0fc24
0
retomerz/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,retomerz/intellij-community,xfournet/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,FHannes/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,ibinti/intellij-community,semonte/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,da1z/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,hurricup/intellij-community,retomerz/intellij-community,FHannes/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,retomerz/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,fitermay/intellij-community,ibinti/intellij-community,apixandru/intellij-community,semonte/intellij-community,suncycheng/intellij-community,semonte/intellij-community,asedunov/intellij-community,apixandru/intellij-community,asedunov/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,retomerz/intellij-community,ibinti/intellij-community,asedunov
/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,fitermay/intellij-community,semonte/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,signed/intellij-community,allotria/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,da1z/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,ibinti/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,allotria/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,signed/intellij-community,FHannes/intellij-community,allotria/intellij-community,vvv1559/intellij-community,da1z/intellij-community,fitermay/intellij-community,signed/intellij-community,asedunov/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,hurricup/intellij-community,retomerz/intellij-community,da1z/intellij-community,fitermay/intellij-community,xfournet/intellij-community,signed/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,
fitermay/intellij-community,allotria/intellij-community,da1z/intellij-community,hurricup/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,hurricup/intellij-community,allotria/intellij-community,hurricup/intellij-community,signed/intellij-community,semonte/intellij-community,signed/intellij-community,da1z/intellij-community,FHannes/intellij-community,FHannes/intellij-community,xfournet/intellij-community,ibinti/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,suncycheng/intellij-community,semonte/intellij-community,youdonghai/intellij-community,signed/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,allotria/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,semonte/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,signed/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,FHannes/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-communit
y,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,signed/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,semonte/intellij-community,semonte/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,allotria/intellij-community,signed/intellij-community,fitermay/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,asedunov/intellij-community,da1z/intellij-community,ibinti/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,hurricup/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,signed/intellij-community,signed/intellij-community,ibinti/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.wm.impl; import com.intellij.ide.IdeEventQueue; import com.intellij.ide.IdeTooltipManager; import com.intellij.ide.dnd.DnDAware; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.ui.Divider; import com.intellij.openapi.ui.Painter; import com.intellij.openapi.ui.impl.GlassPaneDialogWrapperPeer; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Weighted; import com.intellij.openapi.wm.IdeGlassPane; import com.intellij.openapi.wm.IdeGlassPaneUtil; import com.intellij.util.containers.FactoryMap; import com.intellij.util.ui.MouseEventAdapter; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.text.html.HTMLEditorKit; import java.awt.*; import java.awt.event.AWTEventListener; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.awt.event.MouseMotionListener; import java.util.*; import java.util.List; public class IdeGlassPaneImpl extends JPanel implements IdeGlassPaneEx, IdeEventQueue.EventDispatcher { private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.wm.impl.IdeGlassPaneImpl"); private static final String PREPROCESSED_CURSOR_KEY = "SuperCursor"; private 
final List<EventListener> myMouseListeners = new ArrayList<EventListener>(); private final Set<EventListener> mySortedMouseListeners = new TreeSet<EventListener>(new Comparator<EventListener>() { @Override public int compare(EventListener o1, EventListener o2) { double weight1 = 0; double weight2 = 0; if (o1 instanceof Weighted) { weight1 = ((Weighted)o1).getWeight(); } if (o2 instanceof Weighted) { weight2 = ((Weighted)o2).getWeight(); } return weight1 > weight2 ? 1 : weight1 < weight2 ? -1 : myMouseListeners.indexOf(o1) - myMouseListeners.indexOf(o2); } }); private final JRootPane myRootPane; private final Map<String, PaintersHelper> myNamedPainters = new FactoryMap<String, PaintersHelper>() { @Nullable @Override protected PaintersHelper create(String key) { return new PaintersHelper(IdeGlassPaneImpl.this); } }; private boolean myPreprocessorActive; private final Map<Object, Cursor> myListener2Cursor = new LinkedHashMap<Object, Cursor>(); private Component myLastCursorComponent; private Cursor myLastOriginalCursor; private MouseEvent myPrevPressEvent; public IdeGlassPaneImpl(JRootPane rootPane) { myRootPane = rootPane; setOpaque(false); setVisible(false); setLayout(null); if (myRootPane instanceof IdeRootPane) { IdeBackgroundUtil.initFramePainters(getNamedPainters("ide")); IdeBackgroundUtil.initEditorPainters(getNamedPainters("editor")); } } @Override public void addNotify() { super.addNotify(); } public boolean dispatch(final AWTEvent e) { JRootPane eventRootPane = myRootPane; if (e instanceof MouseEvent) { MouseEvent me = (MouseEvent)e; Window eventWindow = UIUtil.getWindow(me.getComponent()); if (isContextMenu(eventWindow)) return false; final Window thisGlassWindow = SwingUtilities.getWindowAncestor(myRootPane); if (eventWindow instanceof JWindow) { eventRootPane = ((JWindow)eventWindow).getRootPane(); if (eventRootPane != null) { if (!(eventRootPane.getGlassPane() instanceof IdeGlassPane)) { final Container parentWindow = eventWindow.getParent(); if 
(parentWindow instanceof Window) { eventWindow = (Window)parentWindow; } } } } if (eventWindow != thisGlassWindow) return false; } if (e.getID() == MouseEvent.MOUSE_DRAGGED) { if (ApplicationManager.getApplication() != null) { IdeTooltipManager.getInstance().hideCurrent((MouseEvent)e); } } boolean dispatched; if (e.getID() == MouseEvent.MOUSE_PRESSED || e.getID() == MouseEvent.MOUSE_RELEASED || e.getID() == MouseEvent.MOUSE_CLICKED) { dispatched = preprocess((MouseEvent)e, false, eventRootPane); } else if (e.getID() == MouseEvent.MOUSE_MOVED || e.getID() == MouseEvent.MOUSE_DRAGGED) { dispatched = preprocess((MouseEvent)e, true, eventRootPane); } else if (e.getID() == MouseEvent.MOUSE_EXITED || e.getID() == MouseEvent.MOUSE_ENTERED) { dispatched = preprocess((MouseEvent)e, false, eventRootPane); } else { return false; } MouseEvent me = (MouseEvent)e; final Component meComponent = me.getComponent(); if (!dispatched && meComponent != null) { final Window eventWindow = UIUtil.getWindow(meComponent); if (eventWindow != SwingUtilities.getWindowAncestor(myRootPane)) { return false; } int button1 = MouseEvent.BUTTON1_MASK | MouseEvent.BUTTON1_DOWN_MASK; final boolean pureMouse1Event = (me.getModifiersEx() | button1) == button1; if (pureMouse1Event && me.getClickCount() <= 1 && !me.isPopupTrigger()) { final Point point = SwingUtilities.convertPoint(meComponent, me.getPoint(), myRootPane.getContentPane()); JMenuBar menuBar = myRootPane.getJMenuBar(); point.y += menuBar != null ? 
menuBar.getHeight() : 0; final Component target = SwingUtilities.getDeepestComponentAt(myRootPane.getContentPane().getParent(), point.x, point.y); if (target instanceof DnDAware) { final Point targetPoint = SwingUtilities.convertPoint(myRootPane.getContentPane().getParent(), point.x, point.y, target); final boolean overSelection = ((DnDAware)target).isOverSelection(targetPoint); if (overSelection) { final MouseListener[] listeners = target.getListeners(MouseListener.class); final MouseEvent mouseEvent = convertEvent(me, target); switch (me.getID()) { case MouseEvent.MOUSE_PRESSED: boolean consumed = false; if (target.isFocusable()) target.requestFocus(); for (final MouseListener listener : listeners) { final String className = listener.getClass().getName(); if (className.indexOf("BasicTreeUI$") >= 0 || className.indexOf("MacTreeUI$") >= 0) continue; fireMouseEvent(listener, mouseEvent); if (mouseEvent.isConsumed()) { consumed = true; break; } } if (!mouseEvent.isConsumed()) { final AWTEventListener[] eventListeners = Toolkit.getDefaultToolkit().getAWTEventListeners(MouseEvent.MOUSE_EVENT_MASK); if (eventListeners != null && eventListeners.length > 0) { for (final AWTEventListener eventListener : eventListeners) { eventListener.eventDispatched(me); if (me.isConsumed()) break; } if (me.isConsumed()) { consumed = true; break; } } } if (!consumed) { myPrevPressEvent = mouseEvent; } else { me.consume(); } dispatched = true; break; case MouseEvent.MOUSE_RELEASED: if (myPrevPressEvent != null && myPrevPressEvent.getComponent() == target) { for (final MouseListener listener : listeners) { final String className = listener.getClass().getName(); if (className.indexOf("BasicTreeUI$") >= 0 || className.indexOf("MacTreeUI$") >= 0) { fireMouseEvent(listener, myPrevPressEvent); fireMouseEvent(listener, mouseEvent); if (mouseEvent.isConsumed()) { break; } } fireMouseEvent(listener, mouseEvent); if (mouseEvent.isConsumed()) { break; } } if (mouseEvent.isConsumed()) { me.consume(); 
} myPrevPressEvent = null; dispatched = true; } break; default: myPrevPressEvent = null; break; } } } } } if (isVisible() && getComponentCount() == 0) { boolean cursorSet = false; if (meComponent != null) { final Point point = SwingUtilities.convertPoint(meComponent, me.getPoint(), myRootPane.getContentPane()); if (myRootPane.getMenuBar() != null && myRootPane.getMenuBar().isVisible()) { point.y += myRootPane.getMenuBar().getHeight(); } final Component target = SwingUtilities.getDeepestComponentAt(myRootPane.getContentPane().getParent(), point.x, point.y); if (target != null) { setCursor(target.getCursor()); cursorSet = true; } } if (!cursorSet) { setCursor(Cursor.getDefaultCursor()); } } return dispatched; } private static boolean isContextMenu(Window window) { if (window != null) { for (Component component : window.getComponents()) { if (component instanceof JComponent && UIUtil.findComponentOfType((JComponent)component, JPopupMenu.class) != null) { return true; } } } return false; } private boolean preprocess(final MouseEvent e, final boolean motion, JRootPane eventRootPane) { try { if (UIUtil.getWindow(this) != UIUtil.getWindow(e.getComponent())) return false; final MouseEvent event = convertEvent(e, eventRootPane); if (!IdeGlassPaneUtil.canBePreprocessed(e)) { return false; } for (EventListener each : mySortedMouseListeners) { if (motion && each instanceof MouseMotionListener) { fireMouseMotion((MouseMotionListener)each, event); } else if (!motion && each instanceof MouseListener) { fireMouseEvent((MouseListener)each, event); } if (event.isConsumed()) { e.consume(); return true; } } return false; } finally { if (eventRootPane == myRootPane) { Cursor cursor; if (!myListener2Cursor.isEmpty()) { cursor = myListener2Cursor.values().iterator().next(); final Point point = SwingUtilities.convertPoint(e.getComponent(), e.getPoint(), myRootPane.getContentPane()); Component target = SwingUtilities.getDeepestComponentAt(myRootPane.getContentPane().getParent(), point.x, 
point.y); if (canProcessCursorFor(target)) { target = getCompWithCursor(target); restoreLastComponent(target); if (target != null) { if (myLastCursorComponent != target) { myLastCursorComponent = target; myLastOriginalCursor = target.getCursor(); } if (cursor != null && !cursor.equals(target.getCursor())) { if (target instanceof JComponent) { ((JComponent)target).putClientProperty(PREPROCESSED_CURSOR_KEY, Boolean.TRUE); } target.setCursor(cursor); } } getRootPane().setCursor(cursor); } } else if (!e.isConsumed() && e.getID() != MouseEvent.MOUSE_DRAGGED) { cursor = Cursor.getDefaultCursor(); JRootPane rootPane = getRootPane(); if (rootPane != null) { rootPane.setCursor(cursor); } else { LOG.warn("Root pane is null. Event: " + e); } restoreLastComponent(null); myLastOriginalCursor = null; myLastCursorComponent = null; } myListener2Cursor.clear(); } } } private boolean canProcessCursorFor(Component target) { if (target instanceof JMenu || target instanceof JMenuItem || target instanceof Divider || target instanceof JSeparator || (target instanceof JEditorPane && ((JEditorPane)target).getEditorKit() instanceof HTMLEditorKit)) { return false; } return true; } private Component getCompWithCursor(Component c) { Component eachParentWithCursor = c; while (eachParentWithCursor != null) { if (eachParentWithCursor.isCursorSet()) return eachParentWithCursor; eachParentWithCursor = eachParentWithCursor.getParent(); } return null; } private void restoreLastComponent(Component newC) { if (myLastCursorComponent != null && myLastCursorComponent != newC) { myLastCursorComponent.setCursor(myLastOriginalCursor); if (myLastCursorComponent instanceof JComponent) { ((JComponent)myLastCursorComponent).putClientProperty(PREPROCESSED_CURSOR_KEY, null); } } } public static boolean hasPreProcessedCursor(@NotNull JComponent component) { return component.getClientProperty(PREPROCESSED_CURSOR_KEY) != null; } public void setCursor(Cursor cursor, @NotNull Object requestor) { if (cursor == null) { 
myListener2Cursor.remove(requestor); } else { myListener2Cursor.put(requestor, cursor); } } private static MouseEvent convertEvent(final MouseEvent e, final Component target) { final Point point = SwingUtilities.convertPoint(e.getComponent(), e.getPoint(), target); return MouseEventAdapter.convert(e, target, point.x, point.y); } private static void fireMouseEvent(final MouseListener listener, final MouseEvent event) { switch (event.getID()) { case MouseEvent.MOUSE_PRESSED: listener.mousePressed(event); break; case MouseEvent.MOUSE_RELEASED: listener.mouseReleased(event); break; case MouseEvent.MOUSE_ENTERED: listener.mouseEntered(event); break; case MouseEvent.MOUSE_EXITED: listener.mouseExited(event); break; case MouseEvent.MOUSE_CLICKED: listener.mouseClicked(event); break; } } private static void fireMouseMotion(MouseMotionListener listener, final MouseEvent event) { switch (event.getID()) { case MouseEvent.MOUSE_DRAGGED: listener.mouseDragged(event); case MouseEvent.MOUSE_MOVED: listener.mouseMoved(event); } } public void addMousePreprocessor(final MouseListener listener, Disposable parent) { _addListener(listener, parent); } public void addMouseMotionPreprocessor(final MouseMotionListener listener, final Disposable parent) { _addListener(listener, parent); } private void _addListener(final EventListener listener, final Disposable parent) { if (!myMouseListeners.contains(listener)) { myMouseListeners.add(listener); updateSortedList(); } activateIfNeeded(); Disposer.register(parent, new Disposable() { public void dispose() { UIUtil.invokeLaterIfNeeded(new Runnable() { public void run() { removeListener(listener); } }); } }); } public void removeMousePreprocessor(final MouseListener listener) { removeListener(listener); } public void removeMouseMotionPreprocessor(final MouseMotionListener listener) { removeListener(listener); } private void removeListener(final EventListener listener) { if (myMouseListeners.remove(listener)) { updateSortedList(); } 
deactivateIfNeeded(); } private void updateSortedList() { mySortedMouseListeners.clear(); mySortedMouseListeners.addAll(myMouseListeners); } private void deactivateIfNeeded() { if (myPreprocessorActive && myMouseListeners.isEmpty()) { myPreprocessorActive = false; } applyActivationState(); } private void activateIfNeeded() { if (!myPreprocessorActive && !myMouseListeners.isEmpty()) { myPreprocessorActive = true; } applyActivationState(); } private void applyActivationState() { boolean wasVisible = isVisible(); boolean hasWork = getPainters().hasPainters() || getComponentCount() > 0; if (wasVisible != hasWork) { setVisible(hasWork); } IdeEventQueue queue = IdeEventQueue.getInstance(); if (!queue.containsDispatcher(this) && (myPreprocessorActive || isVisible())) { queue.addDispatcher(this, null); } else if (queue.containsDispatcher(this) && !myPreprocessorActive && !isVisible()) { queue.removeDispatcher(this); } if (wasVisible != isVisible()) { revalidate(); repaint(); } } @NotNull PaintersHelper getNamedPainters(@NotNull String name) { return myNamedPainters.get(name); } @NotNull private PaintersHelper getPainters() { return getNamedPainters("glass"); } public void addPainter(final Component component, final Painter painter, final Disposable parent) { getPainters().addPainter(painter, component); activateIfNeeded(); Disposer.register(parent, new Disposable() { public void dispose() { SwingUtilities.invokeLater(new Runnable() { public void run() { removePainter(painter); } }); } }); } public void removePainter(final Painter painter) { getPainters().removePainter(painter); deactivateIfNeeded(); } @Override protected void addImpl(Component comp, Object constraints, int index) { super.addImpl(comp, constraints, index); SwingUtilities.invokeLater(new Runnable() { @Override public void run() { activateIfNeeded(); } }); } @Override public void remove(final Component comp) { super.remove(comp); SwingUtilities.invokeLater(new Runnable() { @Override public void run() { 
deactivateIfNeeded(); } }); } public boolean isInModalContext() { final Component[] components = getComponents(); for (Component component : components) { if (component instanceof GlassPaneDialogWrapperPeer.TransparentLayeredPane) { return true; } } return false; } protected void paintComponent(final Graphics g) { getPainters().paint(g); } @Override protected void paintChildren(Graphics g) { super.paintChildren(g); } public Component getTargetComponentFor(MouseEvent e) { Component candidate = findComponent(e, myRootPane.getLayeredPane()); if (candidate != null) return candidate; candidate = findComponent(e, myRootPane.getContentPane()); if (candidate != null) return candidate; return e.getComponent(); } private static Component findComponent(final MouseEvent e, final Container container) { final Point lpPoint = SwingUtilities.convertPoint(e.getComponent(), e.getPoint(), container); return SwingUtilities.getDeepestComponentAt(container, lpPoint.x, lpPoint.y); } @Override public boolean isOptimizedDrawingEnabled() { return !getPainters().hasPainters() && super.isOptimizedDrawingEnabled(); } }
platform/platform-impl/src/com/intellij/openapi/wm/impl/IdeGlassPaneImpl.java
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.wm.impl; import com.intellij.ide.IdeEventQueue; import com.intellij.ide.IdeTooltipManager; import com.intellij.ide.dnd.DnDAware; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.ui.Divider; import com.intellij.openapi.ui.Painter; import com.intellij.openapi.ui.impl.GlassPaneDialogWrapperPeer; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Weighted; import com.intellij.openapi.wm.IdeGlassPane; import com.intellij.openapi.wm.IdeGlassPaneUtil; import com.intellij.util.containers.FactoryMap; import com.intellij.util.ui.MouseEventAdapter; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.text.html.HTMLEditorKit; import java.awt.*; import java.awt.event.AWTEventListener; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.awt.event.MouseMotionListener; import java.util.*; import java.util.List; public class IdeGlassPaneImpl extends JPanel implements IdeGlassPaneEx, IdeEventQueue.EventDispatcher { private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.wm.impl.IdeGlassPaneImpl"); private static final String PREPROCESSED_CURSOR_KEY = "SuperCursor"; private 
final List<EventListener> myMouseListeners = new ArrayList<EventListener>(); private final Set<EventListener> mySortedMouseListeners = new TreeSet<EventListener>(new Comparator<EventListener>() { @Override public int compare(EventListener o1, EventListener o2) { double weight1 = 0; double weight2 = 0; if (o1 instanceof Weighted) { weight1 = ((Weighted)o1).getWeight(); } if (o2 instanceof Weighted) { weight2 = ((Weighted)o2).getWeight(); } return weight1 > weight2 ? 1 : weight1 < weight2 ? -1 : myMouseListeners.indexOf(o1) - myMouseListeners.indexOf(o2); } }); private final JRootPane myRootPane; private final Map<String, PaintersHelper> myNamedPainters = new FactoryMap<String, PaintersHelper>() { @Nullable @Override protected PaintersHelper create(String key) { return new PaintersHelper(IdeGlassPaneImpl.this); } }; private boolean myPreprocessorActive; private final Map<Object, Cursor> myListener2Cursor = new LinkedHashMap<Object, Cursor>(); private Component myLastCursorComponent; private Cursor myLastOriginalCursor; private MouseEvent myPrevPressEvent; public IdeGlassPaneImpl(JRootPane rootPane) { myRootPane = rootPane; setOpaque(false); setVisible(false); setLayout(null); if (myRootPane instanceof IdeRootPane) { IdeBackgroundUtil.initFramePainters(getNamedPainters("ide")); IdeBackgroundUtil.initEditorPainters(getNamedPainters("editor")); } } @Override public void addNotify() { super.addNotify(); } public boolean dispatch(final AWTEvent e) { JRootPane eventRootPane = myRootPane; if (e instanceof MouseEvent) { MouseEvent me = (MouseEvent)e; Window eventWindow = UIUtil.getWindow(me.getComponent()); if (isContextMenu(eventWindow)) return false; final Window thisGlassWindow = SwingUtilities.getWindowAncestor(myRootPane); if (eventWindow instanceof JWindow) { eventRootPane = ((JWindow)eventWindow).getRootPane(); if (eventRootPane != null) { if (!(eventRootPane.getGlassPane() instanceof IdeGlassPane)) { final Container parentWindow = eventWindow.getParent(); if 
(parentWindow instanceof Window) { eventWindow = (Window)parentWindow; } } } } if (eventWindow != thisGlassWindow) return false; } if (e.getID() == MouseEvent.MOUSE_DRAGGED) { if (ApplicationManager.getApplication() != null) { IdeTooltipManager.getInstance().hideCurrent((MouseEvent)e); } } boolean dispatched; if (e.getID() == MouseEvent.MOUSE_PRESSED || e.getID() == MouseEvent.MOUSE_RELEASED || e.getID() == MouseEvent.MOUSE_CLICKED) { dispatched = preprocess((MouseEvent)e, false, eventRootPane); } else if (e.getID() == MouseEvent.MOUSE_MOVED || e.getID() == MouseEvent.MOUSE_DRAGGED) { dispatched = preprocess((MouseEvent)e, true, eventRootPane); } else if (e.getID() == MouseEvent.MOUSE_EXITED || e.getID() == MouseEvent.MOUSE_ENTERED) { dispatched = preprocess((MouseEvent)e, false, eventRootPane); } else { return false; } MouseEvent me = (MouseEvent)e; final Component meComponent = me.getComponent(); if (!dispatched && meComponent != null) { final Window eventWindow = UIUtil.getWindow(meComponent); if (eventWindow != SwingUtilities.getWindowAncestor(myRootPane)) { return false; } int button1 = MouseEvent.BUTTON1_MASK | MouseEvent.BUTTON1_DOWN_MASK; final boolean pureMouse1Event = (me.getModifiersEx() | button1) == button1; if (pureMouse1Event && me.getClickCount() <= 1 && !me.isPopupTrigger()) { final Point point = SwingUtilities.convertPoint(meComponent, me.getPoint(), myRootPane.getContentPane()); JMenuBar menuBar = myRootPane.getJMenuBar(); point.y += menuBar != null ? 
menuBar.getHeight() : 0; final Component target = SwingUtilities.getDeepestComponentAt(myRootPane.getContentPane().getParent(), point.x, point.y); if (target instanceof DnDAware) { final Point targetPoint = SwingUtilities.convertPoint(myRootPane.getContentPane().getParent(), point.x, point.y, target); final boolean overSelection = ((DnDAware)target).isOverSelection(targetPoint); if (overSelection) { final MouseListener[] listeners = target.getListeners(MouseListener.class); final MouseEvent mouseEvent = convertEvent(me, target); switch (me.getID()) { case MouseEvent.MOUSE_PRESSED: boolean consumed = false; if (target.isFocusable()) target.requestFocus(); for (final MouseListener listener : listeners) { final String className = listener.getClass().getName(); if (className.indexOf("BasicTreeUI$") >= 0 || className.indexOf("MacTreeUI$") >= 0) continue; fireMouseEvent(listener, mouseEvent); if (mouseEvent.isConsumed()) { consumed = true; break; } } if (!mouseEvent.isConsumed()) { final AWTEventListener[] eventListeners = Toolkit.getDefaultToolkit().getAWTEventListeners(MouseEvent.MOUSE_EVENT_MASK); if (eventListeners != null && eventListeners.length > 0) { for (final AWTEventListener eventListener : eventListeners) { eventListener.eventDispatched(me); if (me.isConsumed()) break; } if (me.isConsumed()) { consumed = true; break; } } } if (!consumed) { myPrevPressEvent = mouseEvent; } else { me.consume(); } dispatched = true; break; case MouseEvent.MOUSE_RELEASED: if (myPrevPressEvent != null && myPrevPressEvent.getComponent() == target) { for (final MouseListener listener : listeners) { final String className = listener.getClass().getName(); if (className.indexOf("BasicTreeUI$") >= 0 || className.indexOf("MacTreeUI$") >= 0) { fireMouseEvent(listener, myPrevPressEvent); fireMouseEvent(listener, mouseEvent); if (mouseEvent.isConsumed()) { break; } } fireMouseEvent(listener, mouseEvent); if (mouseEvent.isConsumed()) { break; } } if (mouseEvent.isConsumed()) { me.consume(); 
} myPrevPressEvent = null; dispatched = true; } break; default: myPrevPressEvent = null; break; } } } } } if (isVisible() && getComponentCount() == 0) { boolean cursorSet = false; if (meComponent != null) { final Point point = SwingUtilities.convertPoint(meComponent, me.getPoint(), myRootPane.getContentPane()); if (myRootPane.getMenuBar() != null && myRootPane.getMenuBar().isVisible()) { point.y += myRootPane.getMenuBar().getHeight(); } final Component target = SwingUtilities.getDeepestComponentAt(myRootPane.getContentPane().getParent(), point.x, point.y); if (target != null) { setCursor(target.getCursor()); cursorSet = true; } } if (!cursorSet) { setCursor(Cursor.getDefaultCursor()); } } return dispatched; } private static boolean isContextMenu(Window window) { if (window != null) { for (Component component : window.getComponents()) { if (component instanceof JComponent && UIUtil.findComponentOfType((JComponent)component, JPopupMenu.class) != null) { return true; } } } return false; } private boolean preprocess(final MouseEvent e, final boolean motion, JRootPane eventRootPane) { try { final MouseEvent event = convertEvent(e, eventRootPane); if (!IdeGlassPaneUtil.canBePreprocessed(e)) { return false; } for (EventListener each : mySortedMouseListeners) { if (motion && each instanceof MouseMotionListener) { fireMouseMotion((MouseMotionListener)each, event); } else if (!motion && each instanceof MouseListener) { fireMouseEvent((MouseListener)each, event); } if (event.isConsumed()) { e.consume(); return true; } } return false; } finally { if (eventRootPane == myRootPane) { Cursor cursor; if (!myListener2Cursor.isEmpty()) { cursor = myListener2Cursor.values().iterator().next(); final Point point = SwingUtilities.convertPoint(e.getComponent(), e.getPoint(), myRootPane.getContentPane()); Component target = SwingUtilities.getDeepestComponentAt(myRootPane.getContentPane().getParent(), point.x, point.y); if (canProcessCursorFor(target)) { target = getCompWithCursor(target); 
restoreLastComponent(target); if (target != null) { if (myLastCursorComponent != target) { myLastCursorComponent = target; myLastOriginalCursor = target.getCursor(); } if (cursor != null && !cursor.equals(target.getCursor())) { if (target instanceof JComponent) { ((JComponent)target).putClientProperty(PREPROCESSED_CURSOR_KEY, Boolean.TRUE); } target.setCursor(cursor); } } getRootPane().setCursor(cursor); } } else if (!e.isConsumed() && e.getID() != MouseEvent.MOUSE_DRAGGED) { cursor = Cursor.getDefaultCursor(); JRootPane rootPane = getRootPane(); if (rootPane != null) { rootPane.setCursor(cursor); } else { LOG.warn("Root pane is null. Event: " + e); } restoreLastComponent(null); myLastOriginalCursor = null; myLastCursorComponent = null; } myListener2Cursor.clear(); } } } private boolean canProcessCursorFor(Component target) { if (target instanceof JMenu || target instanceof JMenuItem || target instanceof Divider || target instanceof JSeparator || (target instanceof JEditorPane && ((JEditorPane)target).getEditorKit() instanceof HTMLEditorKit)) { return false; } return true; } private Component getCompWithCursor(Component c) { Component eachParentWithCursor = c; while (eachParentWithCursor != null) { if (eachParentWithCursor.isCursorSet()) return eachParentWithCursor; eachParentWithCursor = eachParentWithCursor.getParent(); } return null; } private void restoreLastComponent(Component newC) { if (myLastCursorComponent != null && myLastCursorComponent != newC) { myLastCursorComponent.setCursor(myLastOriginalCursor); if (myLastCursorComponent instanceof JComponent) { ((JComponent)myLastCursorComponent).putClientProperty(PREPROCESSED_CURSOR_KEY, null); } } } public static boolean hasPreProcessedCursor(@NotNull JComponent component) { return component.getClientProperty(PREPROCESSED_CURSOR_KEY) != null; } public void setCursor(Cursor cursor, @NotNull Object requestor) { if (cursor == null) { myListener2Cursor.remove(requestor); } else { myListener2Cursor.put(requestor, 
cursor); } } private static MouseEvent convertEvent(final MouseEvent e, final Component target) { final Point point = SwingUtilities.convertPoint(e.getComponent(), e.getPoint(), target); return MouseEventAdapter.convert(e, target, point.x, point.y); } private static void fireMouseEvent(final MouseListener listener, final MouseEvent event) { switch (event.getID()) { case MouseEvent.MOUSE_PRESSED: listener.mousePressed(event); break; case MouseEvent.MOUSE_RELEASED: listener.mouseReleased(event); break; case MouseEvent.MOUSE_ENTERED: listener.mouseEntered(event); break; case MouseEvent.MOUSE_EXITED: listener.mouseExited(event); break; case MouseEvent.MOUSE_CLICKED: listener.mouseClicked(event); break; } } private static void fireMouseMotion(MouseMotionListener listener, final MouseEvent event) { switch (event.getID()) { case MouseEvent.MOUSE_DRAGGED: listener.mouseDragged(event); case MouseEvent.MOUSE_MOVED: listener.mouseMoved(event); } } public void addMousePreprocessor(final MouseListener listener, Disposable parent) { _addListener(listener, parent); } public void addMouseMotionPreprocessor(final MouseMotionListener listener, final Disposable parent) { _addListener(listener, parent); } private void _addListener(final EventListener listener, final Disposable parent) { if (!myMouseListeners.contains(listener)) { myMouseListeners.add(listener); updateSortedList(); } activateIfNeeded(); Disposer.register(parent, new Disposable() { public void dispose() { UIUtil.invokeLaterIfNeeded(new Runnable() { public void run() { removeListener(listener); } }); } }); } public void removeMousePreprocessor(final MouseListener listener) { removeListener(listener); } public void removeMouseMotionPreprocessor(final MouseMotionListener listener) { removeListener(listener); } private void removeListener(final EventListener listener) { if (myMouseListeners.remove(listener)) { updateSortedList(); } deactivateIfNeeded(); } private void updateSortedList() { mySortedMouseListeners.clear(); 
mySortedMouseListeners.addAll(myMouseListeners); } private void deactivateIfNeeded() { if (myPreprocessorActive && myMouseListeners.isEmpty()) { myPreprocessorActive = false; } applyActivationState(); } private void activateIfNeeded() { if (!myPreprocessorActive && !myMouseListeners.isEmpty()) { myPreprocessorActive = true; } applyActivationState(); } private void applyActivationState() { boolean wasVisible = isVisible(); boolean hasWork = getPainters().hasPainters() || getComponentCount() > 0; if (wasVisible != hasWork) { setVisible(hasWork); } IdeEventQueue queue = IdeEventQueue.getInstance(); if (!queue.containsDispatcher(this) && (myPreprocessorActive || isVisible())) { queue.addDispatcher(this, null); } else if (queue.containsDispatcher(this) && !myPreprocessorActive && !isVisible()) { queue.removeDispatcher(this); } if (wasVisible != isVisible()) { revalidate(); repaint(); } } @NotNull PaintersHelper getNamedPainters(@NotNull String name) { return myNamedPainters.get(name); } @NotNull private PaintersHelper getPainters() { return getNamedPainters("glass"); } public void addPainter(final Component component, final Painter painter, final Disposable parent) { getPainters().addPainter(painter, component); activateIfNeeded(); Disposer.register(parent, new Disposable() { public void dispose() { SwingUtilities.invokeLater(new Runnable() { public void run() { removePainter(painter); } }); } }); } public void removePainter(final Painter painter) { getPainters().removePainter(painter); deactivateIfNeeded(); } @Override protected void addImpl(Component comp, Object constraints, int index) { super.addImpl(comp, constraints, index); SwingUtilities.invokeLater(new Runnable() { @Override public void run() { activateIfNeeded(); } }); } @Override public void remove(final Component comp) { super.remove(comp); SwingUtilities.invokeLater(new Runnable() { @Override public void run() { deactivateIfNeeded(); } }); } public boolean isInModalContext() { final Component[] components = 
getComponents(); for (Component component : components) { if (component instanceof GlassPaneDialogWrapperPeer.TransparentLayeredPane) { return true; } } return false; } protected void paintComponent(final Graphics g) { getPainters().paint(g); } @Override protected void paintChildren(Graphics g) { super.paintChildren(g); } public Component getTargetComponentFor(MouseEvent e) { Component candidate = findComponent(e, myRootPane.getLayeredPane()); if (candidate != null) return candidate; candidate = findComponent(e, myRootPane.getContentPane()); if (candidate != null) return candidate; return e.getComponent(); } private static Component findComponent(final MouseEvent e, final Container container) { final Point lpPoint = SwingUtilities.convertPoint(e.getComponent(), e.getPoint(), container); return SwingUtilities.getDeepestComponentAt(container, lpPoint.x, lpPoint.y); } @Override public boolean isOptimizedDrawingEnabled() { return !getPainters().hasPainters() && super.isOptimizedDrawingEnabled(); } }
IDEA-154247 Able to drag docked frame's border through ListPopup do not preprocess mouse event from a child window on a main frame
platform/platform-impl/src/com/intellij/openapi/wm/impl/IdeGlassPaneImpl.java
IDEA-154247 Able to drag docked frame's border through ListPopup do not preprocess mouse event from a child window on a main frame
<ide><path>latform/platform-impl/src/com/intellij/openapi/wm/impl/IdeGlassPaneImpl.java <ide> <ide> private boolean preprocess(final MouseEvent e, final boolean motion, JRootPane eventRootPane) { <ide> try { <add> if (UIUtil.getWindow(this) != UIUtil.getWindow(e.getComponent())) return false; <add> <ide> final MouseEvent event = convertEvent(e, eventRootPane); <ide> <ide> if (!IdeGlassPaneUtil.canBePreprocessed(e)) {
Java
mit
8f84c40deb26a4bdb31654534d5ff2fe3fd41979
0
sodash/open-code,sodash/open-code,sodash/open-code,sodash/open-code,sodash/open-code,sodash/open-code
package com.winterwell.web.data; import java.io.File; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import com.winterwell.utils.StrUtils; import com.winterwell.utils.io.FileUtils; import com.winterwell.utils.log.Log; import com.winterwell.utils.web.IHasJson; import com.winterwell.utils.web.SimpleJson; import com.winterwell.utils.web.WebUtils2; import com.winterwell.web.LoginDetails; /** * An id for an external service. * * @see DBLogin (database backed) * @see LoginDetails (this is lighter and a bit different) * @author daniel * @testedby XIdTest */ public final class XId implements Serializable, IHasJson, CharSequence, Comparable<XId> { private static final long serialVersionUID = 1L; /** * Group 1 is the name, group 2 is the service */ public static final Pattern XID_PATTERN = Pattern.compile( "(\\S+)@([A-Za-z\\.]+)?"); /** * Company-type. Added to the start of the XId name for SOME data-sources, * to avoid any overlap with other types. */ public static final String WART_C = "c_"; /** * Person-type. Added to the start of the XId name for SOME data-sources, * to avoid any overlap with other types. */ public static final String WART_P = "p_"; /** * Video-type. Added to the start of the XId name for SOME data-sources, * to avoid any overlap with other types. */ public static final String WART_V = "v_"; /** * Group-type. Added to the start of the XId name for SOME data-sources, * to avoid any overlap with other types. 
*/ public static final String WART_G = "g_"; /** * XId for unknown person + unspecified service */ public static final XId ANON = new XId(WART_P+"anon@unspecified", false); public final String name; public final String service; /** * @param name Canonicalises via {@link IPlugin#canonical(XId, KKind)} * @param plugin */ public XId(String name, String service, IDoCanonical plugin) { this(name, null, service, plugin); } /** * * @param name * @param kind Can be null * @param service * @param plugin */ public XId(String name, Object kind, String service, IDoCanonical plugin) { if (plugin != null) name = plugin.canonical(name, kind); this.name = name; this.service = service; // null@twitter is a real user :( c.f. bug #14109 assert notNullNameCheck() : name; assert name != null; assert ! service.contains("@") : service; } static Map<String,IDoCanonical> service2canonical = IDoCanonical.DUMMY_CANONICALISER; /** * Use with {@link IDoCanonical#DUMMY_CANONICALISER} to allow XIds to be used _without_ initialising Creole. * @param service2canonical */ public static void setService2canonical( Map<String, IDoCanonical> service2canonical) { XId.service2canonical = service2canonical; } /** * @param name * @param service */ public XId(String name, String service) { this(name, service, service2canonical.get(service)); } /** * Usage: to bypass canonicalisation and syntax checks on name. * This is handy where the plugin canonicalises for people, but XIds * are used for both people and messages (e.g. Email). * * @param name * @param service * @param checkName Must be false to switch off the syntax checks performed by * {@link #XId(String, String)}. */ public XId(String name, String service, boolean checkName) { this.service = service; this.name = name; assert notNullNameCheck() : name+"@"+service; assert ! checkName; assert ! service.contains("@") : service; return; } /** * Convert a name@service String (as produced by this class) into * a XId object. 
* @throws IllegalArgumentException if id cannot be parsed */ public XId(String id) { this(id, (Object)null); } /** * * @param id e.g. "alice@twitter" * @param kind e.g. KKind.Person */ public XId(String id, Object kind) { int i = id.lastIndexOf('@'); if (i <= 0) { throw new IllegalArgumentException("Invalid XId " + id); } this.service = id.substring(i+1); // Text for XStream badness assert ! id.startsWith("<xid>") : id; // HACK: canonicalise here for main service (helps with boot-strapping) if (isMainService()) { this.name = id.substring(0, i).toLowerCase(); assert notNullNameCheck() : id; return; } // a database object? if (service.startsWith("DB")) { // try { // commented out to cut creole dependency // assert Fields.CLASS.fromString(service.substring(2)) != null : service; // } catch (ClassNotFoundException e) { // throw Utils.runtime(e); // } this.name = id.substring(0, i); assert notNullNameCheck() : id; return; } IDoCanonical plugin = service2canonical.get(service); String _name = id.substring(0, i); this.name = plugin==null? _name : plugin.canonical(_name, kind); assert notNullNameCheck() : id; } private boolean notNullNameCheck() { if (name==null || name.length()==0) return false; if (name.equals("null") && ! "twitter".equals(service)) return false; return true; } /** * Convert a name@service String (as produced by this class) into * a XId object. * @param canonicaliseName Must be false, to switch off using plugins to canonicalise * the name. */ public XId(String id, boolean canonicaliseName) { assert ! 
canonicaliseName; int i = id.lastIndexOf('@'); // handle unescaped web inputs -- with some log noise 'cos we don't want this if (i==-1 && id.contains("%40")) { Log.e("XId", "(handling smoothly) Unescaped url id: "+id); id = WebUtils2.urlDecode(id); i = id.lastIndexOf('@'); } assert i>0 : "no @ in XId: "+id; this.service = id.substring(i+1); this.name = id.substring(0, i); assert notNullNameCheck() : id; } public XId(String name, IDoCanonical plugin) { this(name, null, plugin.getService(), plugin); } public XId(String name, Object kind, IDoCanonical plugin) { this(name, kind, plugin.getService(), plugin); } /** * name@service * This is the inverse of the String constructor, i.e. * xid equals new Xid(xid.toString()). So you can use it for storage. */ @Override public String toString() { return name+"@"+service; } @Override public int hashCode() { final int prime = 31; int result = name.hashCode(); result = prime * result + service.hashCode(); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) { // Probably a bug! 
// Log.d("xid", "XId.equals() "+this+" to "+obj.getClass()+" "+ReflectionUtils.getSomeStack(8)); return false; } XId other = (XId) obj; if (!name.equals(other.name)) return false; if (!service.equals(other.service)) return false; return true; } /** * Never null */ public String getName() { return name; } public String getService() { return service; } /** * TODO poke this value on JVM start-up */ static String MAIN_SERVICE = initMAIN_SERVICE(); public boolean isMainService() { return MAIN_SERVICE.equals(service); } private static String initMAIN_SERVICE() { // NB: This property gets set by AWebsiteConfig String s = System.getProperty("XId.MAIN_SERVICE"); if (s!=null) return s; // HACK -- known WW instances File dir = FileUtils.getWorkingDirectory(); if (dir.getName().equals("creole")) { return "platypusinnovation.com"; } return "soda.sh"; } /** * @return true for rubbish XIds of the form "[email protected]" or "foo@temp" */ public boolean isTemporary() { return isService("temp") || (isMainService() && StrUtils.isNumber(name)); } /** * Convenience method * @param other * @return true if the services match */ public boolean isService(String _service) { return this.service.equals(_service); } /** * @return The name, minus any Hungarian warts SoDash has added * to ensure uniqueness between types. */ public String dewart() { // person? if (name.startsWith(WART_P)) return name.substring(WART_P.length()); if (name.startsWith(WART_G)) return name.substring(WART_G.length()); if (name.startsWith(WART_V)) return name.substring(WART_V.length()); if (name.startsWith(WART_C)) return name.substring(WART_C.length()); // TODO do we use any others? return name; } public boolean hasWart(String wart) { return name.startsWith(wart); } /** * Convenience for ensuring a List contains XId objects. * @param xids May be Strings or XIds or IHasXIds (or a mix). Must not be null. * Note: Strings are NOT run through canonicalisation -- they are assumed to be OK! 
* @return a copy of xids, can be modified */ public static ArrayList<XId> xids(Collection xids) { return xids(xids, false); } /** * Convenience for ensuring a List contains XId objects. Uses {@link #xid(Object, boolean)} * @param xids May be Strings or XIds (or a mix). * @return a copy of xids, can be modified */ public static ArrayList<XId> xids(Collection xids, boolean canonicalise) { final ArrayList _xids = new ArrayList(xids.size()); for (Object x : xids) { if (x==null) continue; XId xid = xid(x, canonicalise); _xids.add(xid); } return _xids; } /** * Flexible type coercion / constructor convenience. * @param xid Can be String (actually any CharSequence) or XId or IHasXId or null (returns null). Does NOT canonicalise * */ public static XId xid(Object xid) { return xid(xid, false); } public static XId xid(Object xid, boolean canon) { if (xid==null) return null; if (xid instanceof XId) return (XId) xid; if (xid instanceof CharSequence) { return new XId(xid.toString(), canon); } IHasXId hasxid = (IHasXId) xid; return hasxid.getXId(); } @Override public String toJSONString() { return new SimpleJson().toJson(toString()); } @Override public Object toJson2() throws UnsupportedOperationException { return toString(); } @Override public int length() { return toString().length(); } @Override public char charAt(int index) { return toString().charAt(index); } @Override public CharSequence subSequence(int start, int end) { return toString().subSequence(start, end); } @Override public int compareTo(XId o) { return toString().compareTo(o.toString()); } }
winterwell.web/src/com/winterwell/web/data/XId.java
package com.winterwell.web.data; import java.io.File; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import com.winterwell.utils.StrUtils; import com.winterwell.utils.io.FileUtils; import com.winterwell.utils.log.Log; import com.winterwell.utils.web.IHasJson; import com.winterwell.utils.web.SimpleJson; import com.winterwell.utils.web.WebUtils2; import com.winterwell.web.LoginDetails; /** * An id for an external service. * * @see DBLogin (database backed) * @see LoginDetails (this is lighter and a bit different) * @author daniel * @testedby XIdTest */ public final class XId implements Serializable, IHasJson, CharSequence, Comparable<XId> { private static final long serialVersionUID = 1L; /** * Group 1 is the name, group 2 is the service */ public static final Pattern XID_PATTERN = Pattern.compile( "(\\S+)@([A-Za-z\\.]+)?"); /** * Company-type. Added to the start of the XId name for SOME data-sources, * to avoid any overlap with other types. */ public static final String WART_C = "c_"; /** * Person-type. Added to the start of the XId name for SOME data-sources, * to avoid any overlap with other types. */ public static final String WART_P = "p_"; /** * Video-type. Added to the start of the XId name for SOME data-sources, * to avoid any overlap with other types. */ public static final String WART_V = "v_"; /** * Group-type. Added to the start of the XId name for SOME data-sources, * to avoid any overlap with other types. 
*/ public static final String WART_G = "g_"; /** * XId for unknown person + unspecified service */ public static final XId ANON = new XId(WART_P+"anon@unspecified", false); public final String name; public final String service; /** * @param name Canonicalises via {@link IPlugin#canonical(XId, KKind)} * @param plugin */ public XId(String name, String service, IDoCanonical plugin) { this(name, null, service, plugin); } /** * * @param name * @param kind Can be null * @param service * @param plugin */ public XId(String name, Object kind, String service, IDoCanonical plugin) { if (plugin != null) name = plugin.canonical(name, kind); this.name = name; this.service = service; // null@twitter is a real user :( c.f. bug #14109 assert notNullNameCheck() : name; assert name != null; assert ! service.contains("@") : service; } static Map<String,IDoCanonical> service2canonical = IDoCanonical.DUMMY_CANONICALISER; /** * Use with {@link IDoCanonical#DUMMY_CANONICALISER} to allow XIds to be used _without_ initialising Creole. * @param service2canonical */ public static void setService2canonical( Map<String, IDoCanonical> service2canonical) { XId.service2canonical = service2canonical; } /** * @param name * @param service */ public XId(String name, String service) { this(name, service, service2canonical.get(service)); } /** * Usage: to bypass canonicalisation and syntax checks on name. * This is handy where the plugin canonicalises for people, but XIds * are used for both people and messages (e.g. Email). * * @param name * @param service * @param checkName Must be false to switch off the syntax checks performed by * {@link #XId(String, String)}. */ public XId(String name, String service, boolean checkName) { this.service = service; this.name = name; assert notNullNameCheck() : name+"@"+service; assert ! checkName; assert ! service.contains("@") : service; return; } /** * Convert a name@service String (as produced by this class) into * a XId object. 
* @throws IllegalArgumentException if id cannot be parsed */ public XId(String id) { this(id, (Object)null); } /** * * @param id e.g. "alice@twitter" * @param kind e.g. KKind.Person */ public XId(String id, Object kind) { int i = id.lastIndexOf('@'); if (i <= 0) { throw new IllegalArgumentException("Invalid XId " + id); } this.service = id.substring(i+1); // Text for XStream badness assert ! id.startsWith("<xid>") : id; // HACK: canonicalise here for main service (helps with boot-strapping) if (isMainService()) { this.name = id.substring(0, i).toLowerCase(); assert notNullNameCheck() : id; return; } // a database object? if (service.startsWith("DB")) { // try { // commented out to cut creole dependency // assert Fields.CLASS.fromString(service.substring(2)) != null : service; // } catch (ClassNotFoundException e) { // throw Utils.runtime(e); // } this.name = id.substring(0, i); assert notNullNameCheck() : id; return; } IDoCanonical plugin = service2canonical.get(service); String _name = id.substring(0, i); this.name = plugin==null? _name : plugin.canonical(_name, kind); assert notNullNameCheck() : id; } private boolean notNullNameCheck() { if (name==null || name.length()==0) return false; if (name.equals("null") && ! "twitter".equals(service)) return false; return true; } /** * Convert a name@service String (as produced by this class) into * a XId object. * @param canonicaliseName Must be false, to switch off using plugins to canonicalise * the name. */ public XId(String id, boolean canonicaliseName) { assert ! 
canonicaliseName; int i = id.lastIndexOf('@'); // handle unescaped web inputs -- with some log noise 'cos we don't want this if (i==-1 && id.contains("%40")) { Log.e("XId", "(handling smoothly) Unescaped url id: "+id); id = WebUtils2.urlDecode(id); i = id.lastIndexOf('@'); } assert i>0 : id; this.service = id.substring(i+1); this.name = id.substring(0, i); assert notNullNameCheck() : id; } public XId(String name, IDoCanonical plugin) { this(name, null, plugin.getService(), plugin); } public XId(String name, Object kind, IDoCanonical plugin) { this(name, kind, plugin.getService(), plugin); } /** * name@service * This is the inverse of the String constructor, i.e. * xid equals new Xid(xid.toString()). So you can use it for storage. */ @Override public String toString() { return name+"@"+service; } @Override public int hashCode() { final int prime = 31; int result = name.hashCode(); result = prime * result + service.hashCode(); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) { // Probably a bug! 
// Log.d("xid", "XId.equals() "+this+" to "+obj.getClass()+" "+ReflectionUtils.getSomeStack(8)); return false; } XId other = (XId) obj; if (!name.equals(other.name)) return false; if (!service.equals(other.service)) return false; return true; } /** * Never null */ public String getName() { return name; } public String getService() { return service; } /** * TODO poke this value on JVM start-up */ static String MAIN_SERVICE = initMAIN_SERVICE(); public boolean isMainService() { return MAIN_SERVICE.equals(service); } private static String initMAIN_SERVICE() { // NB: This property gets set by AWebsiteConfig String s = System.getProperty("XId.MAIN_SERVICE"); if (s!=null) return s; // HACK -- known WW instances File dir = FileUtils.getWorkingDirectory(); if (dir.getName().equals("creole")) { return "platypusinnovation.com"; } return "soda.sh"; } /** * @return true for rubbish XIds of the form "[email protected]" or "foo@temp" */ public boolean isTemporary() { return isService("temp") || (isMainService() && StrUtils.isNumber(name)); } /** * Convenience method * @param other * @return true if the services match */ public boolean isService(String _service) { return this.service.equals(_service); } /** * @return The name, minus any Hungarian warts SoDash has added * to ensure uniqueness between types. */ public String dewart() { // person? if (name.startsWith(WART_P)) return name.substring(WART_P.length()); if (name.startsWith(WART_G)) return name.substring(WART_G.length()); if (name.startsWith(WART_V)) return name.substring(WART_V.length()); if (name.startsWith(WART_C)) return name.substring(WART_C.length()); // TODO do we use any others? return name; } public boolean hasWart(String wart) { return name.startsWith(wart); } /** * Convenience for ensuring a List contains XId objects. * @param xids May be Strings or XIds or IHasXIds (or a mix). Must not be null. * Note: Strings are NOT run through canonicalisation -- they are assumed to be OK! 
* @return a copy of xids, can be modified */ public static ArrayList<XId> xids(Collection xids) { return xids(xids, false); } /** * Convenience for ensuring a List contains XId objects. Uses {@link #xid(Object, boolean)} * @param xids May be Strings or XIds (or a mix). * @return a copy of xids, can be modified */ public static ArrayList<XId> xids(Collection xids, boolean canonicalise) { final ArrayList _xids = new ArrayList(xids.size()); for (Object x : xids) { if (x==null) continue; XId xid = xid(x, canonicalise); _xids.add(xid); } return _xids; } /** * Flexible type coercion / constructor convenience. * @param xid Can be String (actually any CharSequence) or XId or IHasXId or null (returns null). Does NOT canonicalise * */ public static XId xid(Object xid) { return xid(xid, false); } public static XId xid(Object xid, boolean canon) { if (xid==null) return null; if (xid instanceof XId) return (XId) xid; if (xid instanceof CharSequence) { return new XId(xid.toString(), canon); } IHasXId hasxid = (IHasXId) xid; return hasxid.getXId(); } @Override public String toJSONString() { return new SimpleJson().toJson(toString()); } @Override public Object toJson2() throws UnsupportedOperationException { return toString(); } @Override public int length() { return toString().length(); } @Override public char charAt(int index) { return toString().charAt(index); } @Override public CharSequence subSequence(int start, int end) { return toString().subSequence(start, end); } @Override public int compareTo(XId o) { return toString().compareTo(o.toString()); } }
assertion-error-safety-code
winterwell.web/src/com/winterwell/web/data/XId.java
assertion-error-safety-code
<ide><path>interwell.web/src/com/winterwell/web/data/XId.java <ide> id = WebUtils2.urlDecode(id); <ide> i = id.lastIndexOf('@'); <ide> } <del> assert i>0 : id; <add> assert i>0 : "no @ in XId: "+id; <ide> this.service = id.substring(i+1); <ide> this.name = id.substring(0, i); <ide> assert notNullNameCheck() : id;
Java
apache-2.0
a277433d5bc0d374bf9552aa6762d5365e271d2f
0
madankb/cgeo,brok85/cgeo,auricgoldfinger/cgeo,cgeo/cgeo,marco-dev/c-geo-opensource,Huertix/cgeo,KublaikhanGeek/cgeo,Huertix/cgeo,tobiasge/cgeo,rsudev/c-geo-opensource,lewurm/cgeo,lewurm/cgeo,Huertix/cgeo,ThibaultR/cgeo,cgeo/cgeo,mucek4/cgeo,marco-dev/c-geo-opensource,superspindel/cgeo,xiaoyanit/cgeo,pstorch/cgeo,ThibaultR/cgeo,rsudev/c-geo-opensource,brok85/cgeo,mucek4/cgeo,tobiasge/cgeo,KublaikhanGeek/cgeo,rsudev/c-geo-opensource,madankb/cgeo,auricgoldfinger/cgeo,schwabe/cgeo,pstorch/cgeo,marco-dev/c-geo-opensource,S-Bartfast/cgeo,vishwakulkarni/cgeo,kumy/cgeo,kumy/cgeo,Bananeweizen/cgeo,kumy/cgeo,superspindel/cgeo,S-Bartfast/cgeo,yummy222/cgeo,matej116/cgeo,brok85/cgeo,madankb/cgeo,cgeo/cgeo,pstorch/cgeo,matej116/cgeo,superspindel/cgeo,xiaoyanit/cgeo,SammysHP/cgeo,tobiasge/cgeo,schwabe/cgeo,vishwakulkarni/cgeo,yummy222/cgeo,mucek4/cgeo,schwabe/cgeo,Bananeweizen/cgeo,samueltardieu/cgeo,Bananeweizen/cgeo,vishwakulkarni/cgeo,samueltardieu/cgeo,matej116/cgeo,SammysHP/cgeo,auricgoldfinger/cgeo,yummy222/cgeo,SammysHP/cgeo,S-Bartfast/cgeo,lewurm/cgeo,samueltardieu/cgeo,ThibaultR/cgeo,cgeo/cgeo,KublaikhanGeek/cgeo,schwabe/cgeo,xiaoyanit/cgeo
package cgeo.geocaching.geopoint; import cgeo.geocaching.cgBase; import cgeo.geocaching.cgSettings; import java.util.regex.Matcher; import java.util.regex.Pattern; public final class DistanceParser { private static final Pattern pattern = Pattern.compile("^([0-9\\.,]+)[ ]*(m|km|ft|yd|mi|)?$", Pattern.CASE_INSENSITIVE); /** * Parse a distance string composed by a number and an optional suffix * (such as "1.2km"). * * @param distanceText * the string to analyze * @return the distance in kilometers * * @throws NumberFormatException * if the given number is invalid */ public static float parseDistance(String distanceText, final int defaultUnit) { final Matcher matcher = pattern.matcher(distanceText); if (!matcher.find()) { throw new NumberFormatException(distanceText); } final float value = Float.parseFloat(matcher.group(1).replace(',', '.')); final String unit = matcher.group(2).toLowerCase(); if (unit.equals("m") || (unit.length() == 0 && defaultUnit == cgSettings.unitsMetric)) { return value / 1000; } if (unit.equals("km")) { return value; } if (unit.equals("yd")) { return value * cgBase.yards2km; } if (unit.equals("mi")) { return value * cgBase.miles2km; } return value * cgBase.feet2km; } }
main/src/cgeo/geocaching/geopoint/DistanceParser.java
package cgeo.geocaching.geopoint; import cgeo.geocaching.cgBase; import cgeo.geocaching.cgSettings; import java.util.regex.Matcher; import java.util.regex.Pattern; public final class DistanceParser { private static final Pattern pattern = Pattern.compile("^([0-9\\.\\,]+)[ ]*(m|km|ft|yd|mi|)?$", Pattern.CASE_INSENSITIVE); /** * Parse a distance string composed by a number and an optional suffix * (such as "1.2km"). * * @param distanceText * the string to analyze * @return the distance in kilometers * * @throws NumberFormatException * if the given number is invalid */ public static float parseDistance(String distanceText, final int defaultUnit) { final Matcher matcher = pattern.matcher(distanceText); if (!matcher.find()) { throw new NumberFormatException(distanceText); } final float value = Float.parseFloat(matcher.group(1)); final String unit = matcher.group(2).toLowerCase(); if (unit.equals("m") || (unit.length() == 0 && defaultUnit == cgSettings.unitsMetric)) { return value / 1000; } if (unit.equals("km")) { return value; } if (unit.equals("yd")) { return value * cgBase.yards2km; } if (unit.equals("mi")) { return value * cgBase.miles2km; } return value * cgBase.feet2km; } }
Correctly parse distance using commas
main/src/cgeo/geocaching/geopoint/DistanceParser.java
Correctly parse distance using commas
<ide><path>ain/src/cgeo/geocaching/geopoint/DistanceParser.java <ide> <ide> public final class DistanceParser { <ide> <del> private static final Pattern pattern = Pattern.compile("^([0-9\\.\\,]+)[ ]*(m|km|ft|yd|mi|)?$", Pattern.CASE_INSENSITIVE); <add> private static final Pattern pattern = Pattern.compile("^([0-9\\.,]+)[ ]*(m|km|ft|yd|mi|)?$", Pattern.CASE_INSENSITIVE); <ide> <ide> /** <ide> * Parse a distance string composed by a number and an optional suffix <ide> throw new NumberFormatException(distanceText); <ide> } <ide> <del> final float value = Float.parseFloat(matcher.group(1)); <add> final float value = Float.parseFloat(matcher.group(1).replace(',', '.')); <ide> final String unit = matcher.group(2).toLowerCase(); <ide> <ide> if (unit.equals("m") || (unit.length() == 0 && defaultUnit == cgSettings.unitsMetric)) {
Java
apache-2.0
4208407bbc5b965273f9b23d5ec2ad9ab023d4b9
0
theanuradha/cubeon
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package org.netbeans.cubeon.ui.query; import java.awt.BorderLayout; import javax.swing.JPanel; import javax.swing.event.ChangeListener; import org.netbeans.cubeon.tasks.spi.query.TaskQuerySupportProvider; import org.netbeans.cubeon.tasks.spi.query.TaskQuerySupportProvider.ConfigurationHandler; import org.netbeans.cubeon.ui.query.NewQueryWizardAction.WizardObject; import org.openide.util.NbBundle; final class TaskQueryAttributes extends JPanel { private ConfigurationHandler handler; private final ChangeListener wizard; /** Creates new form TaskQueryAttributes */ TaskQueryAttributes(final ChangeListener wizard) { initComponents(); this.wizard = wizard; } @Override public String getName() { return "Attributes"; } void setWizardObject(WizardObject object) { TaskQuerySupportProvider tqsp = object.getRepository().getLookup().lookup(TaskQuerySupportProvider.class); handler = tqsp.createConfigurationHandler(object.getQuery()); handler.addChangeListener(wizard); lblMainHeader.setText(NbBundle.getMessage(TaskQueryAttributes.class, "LBL_Create_New", object.getRepository().getName())); lblSubHeader.setText(NbBundle.getMessage(TaskQueryAttributes.class, "LBL_Create_New_Dec")); pnlHolder.removeAll(); pnlHolder.add(handler.getComponent(), BorderLayout.CENTER); pnlHolder.repaint(); pnlHolder.updateUI(); } ConfigurationHandler getHandler() { return handler; } /** This method is called from within the constructor to * initialize the form. * WARNING: Do NOT modify this code. The content of this method is * always regenerated by the Form Editor. 
*/ // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { pnlHolder = new javax.swing.JPanel(); jPanel1 = new javax.swing.JPanel(); pnlHeader = new javax.swing.JPanel(); lblMainHeader = new javax.swing.JLabel(); lblSubHeader = new javax.swing.JLabel(); lblIcon = new javax.swing.JLabel(); jSeparator1 = new javax.swing.JSeparator(); pnlHolder.setLayout(new java.awt.BorderLayout()); jPanel1.setLayout(new java.awt.BorderLayout()); pnlHeader.setBackground(new java.awt.Color(255, 255, 255)); lblMainHeader.setFont(new java.awt.Font("Tahoma", 1, 11)); org.openide.awt.Mnemonics.setLocalizedText(lblMainHeader, "_MAIN_HEADING_"); org.openide.awt.Mnemonics.setLocalizedText(lblSubHeader, "_SUB_DECRIPTION_"); // NOI18N lblIcon.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/netbeans/cubeon/ui/new_task.png"))); // NOI18N org.jdesktop.layout.GroupLayout pnlHeaderLayout = new org.jdesktop.layout.GroupLayout(pnlHeader); pnlHeader.setLayout(pnlHeaderLayout); pnlHeaderLayout.setHorizontalGroup( pnlHeaderLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(pnlHeaderLayout.createSequentialGroup() .addContainerGap() .add(pnlHeaderLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING) .add(pnlHeaderLayout.createSequentialGroup() .add(10, 10, 10) .add(lblSubHeader, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 407, Short.MAX_VALUE)) .add(lblMainHeader, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 417, Short.MAX_VALUE)) .add(45, 45, 45) .add(lblIcon, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 18, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .addContainerGap()) .add(org.jdesktop.layout.GroupLayout.TRAILING, jSeparator1, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 500, Short.MAX_VALUE) ); pnlHeaderLayout.setVerticalGroup( pnlHeaderLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(pnlHeaderLayout.createSequentialGroup() 
.add(pnlHeaderLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(org.jdesktop.layout.GroupLayout.TRAILING, pnlHeaderLayout.createSequentialGroup() .addContainerGap() .add(lblMainHeader) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .add(lblSubHeader)) .add(lblIcon, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 45, Short.MAX_VALUE)) .add(9, 9, 9) .add(jSeparator1, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) ); jPanel1.add(pnlHeader, java.awt.BorderLayout.CENTER); org.jdesktop.layout.GroupLayout layout = new org.jdesktop.layout.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(pnlHolder, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 500, Short.MAX_VALUE) .add(jPanel1, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 500, Short.MAX_VALUE) ); layout.setVerticalGroup( layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(layout.createSequentialGroup() .add(jPanel1, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .add(0, 0, 0) .add(pnlHolder, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 344, Short.MAX_VALUE)) ); }// </editor-fold>//GEN-END:initComponents // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JPanel jPanel1; private javax.swing.JSeparator jSeparator1; private javax.swing.JLabel lblIcon; private javax.swing.JLabel lblMainHeader; private javax.swing.JLabel lblSubHeader; private javax.swing.JPanel pnlHeader; private javax.swing.JPanel pnlHolder; // End of variables declaration//GEN-END:variables }
core/ui/src/main/java/org/netbeans/cubeon/ui/query/TaskQueryAttributes.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package org.netbeans.cubeon.ui.query; import java.awt.BorderLayout; import javax.swing.JPanel; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import org.netbeans.cubeon.tasks.spi.query.TaskQuerySupportProvider; import org.netbeans.cubeon.tasks.spi.query.TaskQuerySupportProvider.ConfigurationHandler; import org.netbeans.cubeon.ui.query.NewQueryWizardAction.WizardObject; import org.openide.util.NbBundle; final class TaskQueryAttributes extends JPanel { private ConfigurationHandler handler; private final ChangeListener wizard; /** Creates new form TaskQueryAttributes */ TaskQueryAttributes(final ChangeListener wizard) { initComponents(); this.wizard = wizard; } @Override public String getName() { return "Attributes"; } void setWizardObject(WizardObject object) { TaskQuerySupportProvider tqsp = object.getRepository().getLookup().lookup(TaskQuerySupportProvider.class); handler = tqsp.createConfigurationHandler(object.getQuery()); handler.addChangeListener(wizard); lblMainHeader.setText(NbBundle.getMessage(TaskQueryAttributes.class, "LBL_Create_New", object.getRepository().getName())); lblSubHeader.setText(NbBundle.getMessage(TaskQueryAttributes.class, "LBL_Create_New_Dec")); pnlHolder.removeAll(); pnlHolder.add(handler.getComponent(), BorderLayout.CENTER); pnlHolder.repaint(); pnlHolder.updateUI(); } ConfigurationHandler getHandler() { return handler; } /** This method is called from within the constructor to * initialize the form. * WARNING: Do NOT modify this code. The content of this method is * always regenerated by the Form Editor. 
*/ // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { pnlHolder = new javax.swing.JPanel(); jPanel1 = new javax.swing.JPanel(); pnlHeader = new javax.swing.JPanel(); lblMainHeader = new javax.swing.JLabel(); lblSubHeader = new javax.swing.JLabel(); lblIcon = new javax.swing.JLabel(); jSeparator1 = new javax.swing.JSeparator(); pnlHolder.setLayout(new java.awt.BorderLayout()); jPanel1.setLayout(new java.awt.BorderLayout()); pnlHeader.setBackground(new java.awt.Color(255, 255, 255)); lblMainHeader.setFont(new java.awt.Font("Tahoma", 1, 11)); org.openide.awt.Mnemonics.setLocalizedText(lblMainHeader, "_MAIN_HEADING_"); org.openide.awt.Mnemonics.setLocalizedText(lblSubHeader, "_SUB_DECRIPTION_"); // NOI18N lblIcon.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/netbeans/cubeon/ui/new_task.png"))); // NOI18N org.jdesktop.layout.GroupLayout pnlHeaderLayout = new org.jdesktop.layout.GroupLayout(pnlHeader); pnlHeader.setLayout(pnlHeaderLayout); pnlHeaderLayout.setHorizontalGroup( pnlHeaderLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(pnlHeaderLayout.createSequentialGroup() .addContainerGap() .add(pnlHeaderLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING) .add(pnlHeaderLayout.createSequentialGroup() .add(10, 10, 10) .add(lblSubHeader, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 407, Short.MAX_VALUE)) .add(lblMainHeader, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 417, Short.MAX_VALUE)) .add(45, 45, 45) .add(lblIcon, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 18, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .addContainerGap()) .add(org.jdesktop.layout.GroupLayout.TRAILING, jSeparator1, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 500, Short.MAX_VALUE) ); pnlHeaderLayout.setVerticalGroup( pnlHeaderLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(pnlHeaderLayout.createSequentialGroup() 
.add(pnlHeaderLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(org.jdesktop.layout.GroupLayout.TRAILING, pnlHeaderLayout.createSequentialGroup() .addContainerGap() .add(lblMainHeader) .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .add(lblSubHeader)) .add(lblIcon, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 45, Short.MAX_VALUE)) .add(9, 9, 9) .add(jSeparator1, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)) ); jPanel1.add(pnlHeader, java.awt.BorderLayout.CENTER); org.jdesktop.layout.GroupLayout layout = new org.jdesktop.layout.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(pnlHolder, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 500, Short.MAX_VALUE) .add(jPanel1, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 500, Short.MAX_VALUE) ); layout.setVerticalGroup( layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING) .add(layout.createSequentialGroup() .add(jPanel1, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE) .add(0, 0, 0) .add(pnlHolder, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 344, Short.MAX_VALUE)) ); }// </editor-fold>//GEN-END:initComponents // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JPanel jPanel1; private javax.swing.JSeparator jSeparator1; private javax.swing.JLabel lblIcon; private javax.swing.JLabel lblMainHeader; private javax.swing.JLabel lblSubHeader; private javax.swing.JPanel pnlHeader; private javax.swing.JPanel pnlHolder; // End of variables declaration//GEN-END:variables }
unused import
core/ui/src/main/java/org/netbeans/cubeon/ui/query/TaskQueryAttributes.java
unused import
<ide><path>ore/ui/src/main/java/org/netbeans/cubeon/ui/query/TaskQueryAttributes.java <ide> import java.awt.BorderLayout; <ide> import javax.swing.JPanel; <ide> <del>import javax.swing.event.ChangeEvent; <ide> import javax.swing.event.ChangeListener; <ide> import org.netbeans.cubeon.tasks.spi.query.TaskQuerySupportProvider; <ide> import org.netbeans.cubeon.tasks.spi.query.TaskQuerySupportProvider.ConfigurationHandler;
Java
apache-2.0
58540b383357668da38053e510e0c57d1a1c7d75
0
MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab
package org.myrobotlab.service; import static org.myrobotlab.arduino.Msg.MAX_MSG_SIZE; import static org.myrobotlab.arduino.Msg.MRLCOMM_VERSION; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; import java.util.Date; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import org.myrobotlab.arduino.ArduinoUtils; import org.myrobotlab.arduino.BoardInfo; import org.myrobotlab.arduino.BoardType; import org.myrobotlab.arduino.DeviceSummary; import org.myrobotlab.arduino.Msg; import org.myrobotlab.framework.Message; import org.myrobotlab.framework.ServiceType; import org.myrobotlab.framework.interfaces.Attachable; import org.myrobotlab.framework.interfaces.NameProvider; import org.myrobotlab.i2c.I2CBus; import org.myrobotlab.image.Util; import org.myrobotlab.io.FileIO; import org.myrobotlab.io.Zip; import org.myrobotlab.logging.Level; import org.myrobotlab.logging.LoggerFactory; import org.myrobotlab.logging.Logging; import org.myrobotlab.logging.LoggingFactory; import org.myrobotlab.math.interfaces.Mapper; import org.myrobotlab.math.MapperLinear; import org.myrobotlab.sensor.EncoderData; import org.myrobotlab.service.abstracts.AbstractMicrocontroller; import org.myrobotlab.service.data.DeviceMapping; import org.myrobotlab.service.data.PinData; import org.myrobotlab.service.data.SerialRelayData; import org.myrobotlab.service.interfaces.EncoderControl; import org.myrobotlab.service.interfaces.EncoderController; import org.myrobotlab.service.interfaces.I2CBusControl; import org.myrobotlab.service.interfaces.I2CBusController; import org.myrobotlab.service.interfaces.I2CControl; import org.myrobotlab.service.interfaces.I2CController; import 
org.myrobotlab.service.interfaces.MotorControl; import org.myrobotlab.service.interfaces.MotorController; import org.myrobotlab.service.interfaces.MrlCommPublisher; import org.myrobotlab.service.interfaces.NeoPixelController; import org.myrobotlab.service.interfaces.PinArrayListener; import org.myrobotlab.service.interfaces.PinArrayPublisher; import org.myrobotlab.service.interfaces.PinDefinition; import org.myrobotlab.service.interfaces.PinListener; import org.myrobotlab.service.interfaces.PortConnector; import org.myrobotlab.service.interfaces.PortListener; import org.myrobotlab.service.interfaces.PortPublisher; import org.myrobotlab.service.interfaces.RecordControl; import org.myrobotlab.service.interfaces.SerialDataListener; import org.myrobotlab.service.interfaces.ServoControl; import org.myrobotlab.service.interfaces.ServoController; import org.myrobotlab.service.interfaces.UltrasonicSensorControl; import org.myrobotlab.service.interfaces.UltrasonicSensorController; import org.slf4j.Logger; public class Arduino extends AbstractMicrocontroller implements I2CBusController, I2CController, SerialDataListener, ServoController, MotorController, NeoPixelController, UltrasonicSensorController, PortConnector, RecordControl, /* SerialRelayListener, */PortListener, PortPublisher, EncoderController, PinArrayPublisher, MrlCommPublisher { transient public final static Logger log = LoggerFactory.getLogger(Arduino.class); public static class I2CDeviceMap { public String busAddress; public transient I2CControl control; public String deviceAddress; } public static class Sketch implements Serializable { private static final long serialVersionUID = 1L; public String data; public String name; public Sketch(String name, String data) { this.name = name; this.data = data; } } public static final int ANALOG = 1; public transient static final int BOARD_TYPE_ID_ADK_MEGA = 3; public transient static final int BOARD_TYPE_ID_MEGA = 1; public transient static final int BOARD_TYPE_ID_NANO = 
4; public transient static final int BOARD_TYPE_ID_PRO_MINI = 5; public transient static final int BOARD_TYPE_ID_UNKNOWN = 0; public transient static final int BOARD_TYPE_ID_UNO = 2; public transient static final String BOARD_TYPE_MEGA = "mega.atmega2560"; public transient static final String BOARD_TYPE_MEGA_ADK = "megaADK"; public transient static final String BOARD_TYPE_NANO = "nano"; public transient static final String BOARD_TYPE_PRO_MINI = "pro mini"; public transient static final String BOARD_TYPE_UNO = "uno"; public static final int DIGITAL = 0; public static final int INPUT = 0x0; public static final int MOTOR_BACKWARD = 0; public static final int MOTOR_FORWARD = 1; public static final int MOTOR_TYPE_DUAL_PWM = 2; public static final int MOTOR_TYPE_SIMPLE = 1; public static final int MRL_IO_NOT_DEFINED = 0; public static final int MRL_IO_SERIAL_0 = 1; public static final int MRL_IO_SERIAL_1 = 2; public static final int MRL_IO_SERIAL_2 = 3; public static final int MRL_IO_SERIAL_3 = 4; public static final int OUTPUT = 0x1; private static final long serialVersionUID = 1L; /** * This static method returns all the details of the class without it having * to be constructed. It has description, categories, dependencies, and peer * definitions. 
* * @return ServiceType - returns all the data * */ static public ServiceType getMetaData() { ServiceType meta = new ServiceType(Arduino.class.getCanonicalName()); meta.addDescription("controls an Arduino microcontroller as a slave, which allows control of all the devices the Arduino is attached to, such as servos, motors and sensors"); meta.addCategory("microcontroller"); meta.addPeer("serial", "Serial", "serial device for this Arduino"); return meta; } /** * path of the Arduino IDE must be set by user should not be static - since * gson will not serialize it, and it won't be 'saved()' */ public String arduinoPath; String aref; @Deprecated /* * should be just another attachable - this is a bad * implementation */ transient Map<Integer, Arduino> attachedController = new ConcurrentHashMap<Integer, Arduino>(); /** * board info "from" MrlComm - which can be different from what the user say's * it is - if there is a difference the "user" should be notified - but not * forced to use the mrlBoardInfo. 
*/
  volatile BoardInfo boardInfo = null;
  volatile BoardInfo lastBoardInfo = null;
  boolean boardInfoEnabled = true;
  private long boardInfoRequestTs;

  @Deprecated /*
               * should develop a MrlSerial on Arduinos and
               * Arduino.getSerial("s1")
               */
  public transient int controllerAttachAs = MRL_IO_NOT_DEFINED;

  /**
   * id reference of sensor, key is the MrlComm device id
   */
  transient Map<Integer, DeviceMapping> deviceIndex = new ConcurrentHashMap<Integer, DeviceMapping>();

  /**
   * Devices - string name index of device we need 2 indexes for sensors because
   * they will be referenced by name OR by index
   */
  transient Map<String, DeviceMapping> deviceList = new ConcurrentHashMap<String, DeviceMapping>();

  I2CBus i2cBus = null;

  // buffer filled by i2cReturnData and read back by i2cRead/i2cWriteRead
  volatile byte[] i2cData = new byte[64];

  /**
   * i2c This needs to be volatile because it will be updated in a different
   * threads
   */
  volatile boolean i2cDataReturned = false;
  volatile int i2cDataSize;
  Map<String, I2CDeviceMap> i2cDevices = new ConcurrentHashMap<String, I2CDeviceMap>();
  // MAX_MSG_SIZE is declared outside this chunk - TODO confirm source
  transient int[] ioCmd = new int[MAX_MSG_SIZE];

  @Deprecated /*
               * use attachables like everything else - power mapping should be
               * inside the motorcontrol
               */
  transient Mapper motorPowerMapper = new MapperLinear(-1.0, 1.0, -255.0, 255.0);

  // MrlComm message codec bound to the serial peer in initSerial()
  public transient Msg msg;

  // monotonically increasing id handed out by attachDevice()
  Integer nextDeviceId = 0;

  /**
   * Serial service - the Arduino's serial connection
   */
  transient Serial serial;

  /**
   * MrlComm sketch
   */
  public Sketch sketch;

  public String uploadSketchResult = "";

  transient private VirtualArduino virtual;

  int mrlCommBegin = 0;

  // guards sync() against re-entrant invocation
  private volatile boolean syncInProgress = false;

  /**
   * Constructor - defaults the board to "uno" when no saved config exists,
   * builds the pin list and board-type list, loads the bundled MrlComm sketch,
   * and registers this service itself as a device so it receives pin events.
   */
  public Arduino(String n, String id) {
    super(n, id);
    // config - if saved is loaded - if not default to uno
    if (board == null) {
      board = "uno";
    }
    // board is set - now we can create a pin list
    getPinList();
    // get list of board types
    getBoardTypes();
    // FIXME - load from unzipped resource directory ? - no more jar access like
    // below
    String mrlcomm = FileIO.resourceToString("Arduino/MrlComm/MrlComm.ino");
    setSketch(new Sketch("MrlComm", mrlcomm));
    // add self as an attached device to handle pin events
    attachDevice(this, (Object[]) null);
  }

  /**
   * Write an analog (PWM) value to a pin address - forwarded to MrlComm.
   */
  // > analogWrite/address/value
  public void analogWrite(int address, int value) {
    log.info("analogWrite({},{})", address, value);
    msg.analogWrite(address, value);
  }

  // convenience overload - resolves a pin name (e.g. "D3") to its address
  public void analogWrite(String pin, Integer value) {
    PinDefinition pinDef = getPin(pin);
    analogWrite(pinDef.getAddress(), value);
  }

  /**
   * Converts an array of MrlComm device ids into DeviceSummary objects,
   * resolving each id to its registered device name.
   */
  DeviceSummary[] arrayToDeviceSummary(int[] deviceSummary) {
    log.debug("mds - {}", Arrays.toString(deviceSummary));
    DeviceSummary[] ds = new DeviceSummary[deviceSummary.length];
    for (int i = 0; i < deviceSummary.length; i++) {
      int id = deviceSummary[i];
      DeviceSummary ds0 = new DeviceSummary(getDeviceName(id), id);
      ds[i] = ds0;
    }
    // log.error("ds - {}", Arrays.toString(ds));
    return ds;
  }

  /**
   * Routing Attach - routes ServiceInterface.attach(service) to appropriate
   * methods for this class
   */
  @Override
  public void attach(Attachable service) throws Exception {
    if (ServoControl.class.isAssignableFrom(service.getClass())) {
      attachServoControl((ServoControl) service);
      ((ServoControl) service).attach(this);
      return;
    } else if (MotorControl.class.isAssignableFrom(service.getClass())) {
      attachMotorControl((MotorControl) service);
      return;
    } else if (EncoderControl.class.isAssignableFrom(service.getClass())) {
      // need to determine the encoder type!
attach((EncoderControl) service);
      return;
    }
    error("%s doesn't know how to attach a %s", getClass().getSimpleName(), service.getClass().getSimpleName());
  }

  /**
   * Attach a servo on a specific pin - sets the pin then delegates to
   * attachServoControl.
   */
  @Override
  public void attach(ServoControl servo, int pin) throws Exception {
    servo.setPin(pin);
    attachServoControl(servo);
  }

  /**
   * String interface - this allows you to easily use url api requests like
   * /attach/nameOfListener/3
   */
  public void attach(String listener, int address) {
    attach((PinListener) Runtime.getService(listener), address);
  }

  @Override
  public void attach(UltrasonicSensorControl sensor, Integer triggerPin, Integer echoPin) throws Exception {
    // refer to http://myrobotlab.org/content/control-controller-manifesto
    if (isAttached(sensor)) {
      log.info("{} already attached", sensor.getName());
      return;
    }
    // critical init code
    DeviceMapping dm = attachDevice(sensor, new Object[] { triggerPin, echoPin });
    Integer deviceId = dm.getId();
    msg.ultrasonicSensorAttach(deviceId, triggerPin, echoPin);
    // call the other service's attach
    sensor.attach(this, triggerPin, echoPin);
  }

  /**
   * Registers a device in both local indexes (by name and by numeric id) and
   * assigns it the next MrlComm device id. Synchronized so concurrent attaches
   * cannot hand out the same id twice.
   */
  synchronized private DeviceMapping attachDevice(Attachable device, Object[] attachConfig) {
    DeviceMapping map = new DeviceMapping(device, attachConfig);
    map.setId(nextDeviceId);
    log.info("DEVICE LIST PUT ------ Name: {} Class: {} Map: {}", device.getName(), device.getClass().getSimpleName(), map);
    deviceList.put(device.getName(), map);
    deviceIndex.put(nextDeviceId, map);
    ++nextDeviceId;
    // return map.getId();
    return map;
  }

  /**
   * Attach an encoder to the arduino
   *
   * @param encoder
   *          - the encoder control to attach
   * @throws Exception
   */
  @Override
  public void attach(EncoderControl encoder) throws Exception {
    // NOTE(review): deviceId is never assigned before being sent to
    // msg.encoderAttach below - it stays null. Looks suspicious; the id
    // produced by attachDevice() is probably what was intended. TODO confirm.
    Integer deviceId = null;
    // send data to micro-controller
    // TODO: update this with some enum of various encoder types..
    // for now it's just AMT203 ...
    int type = 0;
    Integer address = null;
    if (encoder instanceof Amt203Encoder) {
      type = 0;
      address = getAddress(((Amt203Encoder) encoder).getPin());
    } else if (encoder instanceof As5048AEncoder) {
      type = 1;
      address = getAddress(((As5048AEncoder) encoder).getPin());
    } else {
      error("unknown encoder type {}", encoder.getClass().getName());
    }
    attachDevice(encoder, new Object[] { address });
    // FIXME - don't know why this is necessary - Attachable is only needed
    msg.encoderAttach(deviceId, type, address);
    encoder.attach(this);
  }

  @Override
  public void attachI2CControl(I2CControl control) {
    // Create the i2c bus device in MrlComm the first time this method is
    // invoked.
    // Add the i2c device to the list of i2cDevices
    // Pattern: deviceAttach(device, Object... config)
    // To add the i2c bus to the deviceList I need an device that represents
    // the i2c bus here and in MrlComm
    // This will only handle the creation of i2cBus.
    if (i2cBus == null) {
      i2cBus = new I2CBus(String.format("I2CBus%s", control.getDeviceBus()));
      i2cBusAttach(i2cBus, Integer.parseInt(control.getDeviceBus()));
    }
    // This part adds the service to the mapping between
    // busAddress||DeviceAddress
    // and the service name to be able to send data back to the invoker
    String key = String.format("%s.%s", control.getDeviceBus(), control.getDeviceAddress());
    I2CDeviceMap devicedata = new I2CDeviceMap();
    if (i2cDevices.containsKey(key)) {
      log.error("Device {} {} {} already exists.", control.getDeviceBus(), control.getDeviceAddress(), control.getName());
    } else {
      devicedata.busAddress = control.getDeviceBus();
      devicedata.deviceAddress = control.getDeviceAddress();
      devicedata.control = control;
      i2cDevices.put(key, devicedata);
      control.attachI2CController(this);
    }
  }

  /**
   * Attach a motor - supports simple (pwr+dir pins) and dual-PWM motor types;
   * throws IOException for unsupported MotorControl implementations.
   */
  // @Override
  public void attachMotorControl(MotorControl motor) throws Exception {
    if (isAttached(motor)) {
      log.info("motor {} already attached", motor.getName());
      return;
    }
    Integer motorType = null;
    int[] pins = null;
    if
(motor.getClass().equals(Motor.class)) {
      motorType = MOTOR_TYPE_SIMPLE;
      Motor m = (Motor) motor;
      pins = new int[] { getAddress(m.getPwrPin()), getAddress(m.getDirPin()) };
    } else if (motor.getClass().equals(MotorDualPwm.class)) {
      motorType = MOTOR_TYPE_DUAL_PWM;
      MotorDualPwm m = (MotorDualPwm) motor;
      pins = new int[] { getAddress(m.getLeftPwmPin()), getAddress(m.getRightPwmPin()) };
      // } else if (motor.getClass().equals(MotorStepper)){ // FIXME implement
    } else {
      throw new IOException(String.format("do not know how to attach Motor type %s", motor.getClass().getSimpleName()));
    }
    // this saves original "attach" configuration - and maintains internal
    // data structures
    // and does DeviceControl.attach(this)
    DeviceMapping dm = attachDevice(motor, new Object[] { motorType, pins });
    Integer deviceId = dm.getId();
    // send data to micro-controller - convert degrees to microseconds
    // int uS = degreeToMicroseconds(targetOutput);
    msg.motorAttach(deviceId, motorType, pins);
    // the callback - motor better have a check
    // isAttached(MotorControl) to prevent infinite loop
    // motor.attach(this, pin, targetOutput, velocity);
    motor.attachMotorController(this);
  }

  /**
   * Attach a servo - registers it locally and, if already connected, sends the
   * servoAttach message (target position converted to microseconds) and
   * attaches the pin when the servo is enabled.
   */
  @Override
  public void attachServoControl(ServoControl servo) {
    if (isAttached(servo)) {
      log.info("servo {} already attached", servo.getName());
      return;
    }
    int pin = getAddress(servo.getPin());
    // targetOutput is ALWAYS ALWAYS degrees
    double targetOutput = servo.getTargetOutput();
    // -1 signals "no speed set" to MrlComm
    double speed = (servo.getSpeed() == null) ? -1 : servo.getSpeed();
    // add a device to our deviceList
    DeviceMapping dm = attachDevice(servo, new Object[] { pin, targetOutput, speed });
    if (isConnected()) {
      int uS = degreeToMicroseconds(servo.getTargetOutput());
      msg.servoAttach(dm.getId(), pin, uS, (int) speed, servo.getName());
      if (servo.isEnabled()) {
        msg.servoAttachPin(dm.getId(), pin);
      }
    }
    servo.attach(this);
  }

  /**
   * Re-sends the attach messages for an already-registered device - used by
   * sync() after a (re)connect. Currently only handles Servo instances.
   *
   * @param dm
   *          the previously registered device mapping to re-attach
   */
  public void reattach(DeviceMapping dm) {
    Attachable attachable = dm.getDevice();
    if (attachable instanceof Servo) {
      Servo servo = (Servo) attachable;
      int uS = degreeToMicroseconds(servo.getTargetOutput());
      double speed = (servo.getSpeed() == null) ? -1 : servo.getSpeed();
      int pin = getAddress(servo.getPin());
      log.info("================ re-attaching {} {} {} ================", servo.getName(), dm.getId(), pin);
      msg.servoAttach(dm.getId(), pin, uS, (int) speed, servo.getName());
      // if (servo.isEnabled()) {
      msg.servoAttachPin(dm.getId(), pin);
    }
  }

  // convenience - connect with the default MrlComm baud rate of 115200
  public void connect(String port) {
    connect(port, Serial.BAUD_115200, 8, 1, 0);
  }

  @Override
  public void connect(String port, int rate) throws Exception {
    connect(port, rate, 8, 1, 0);
  }

  public VirtualArduino getVirtual() {
    return virtual;
  }

  /**
   * default params to connect to Arduino &amp; MrlComm.ino FIXME - remove the
   * parameters except rate as they are not allowed to change with MRLComm
   */
  @Override
  public void connect(String port, int rate, int databits, int stopbits, int parity) {
    // test to see if we've been started.
// the serial might be null
    try {
      initSerial();
      if (isConnected() && port.equals(serial.getPortName())) {
        log.info("already connected to port {}", port);
        return;
      }
      if (isVirtual()) {
        if (virtual == null) {
          virtual = (VirtualArduino) Runtime.start("v" + getName(), "VirtualArduino");
        }
        virtual.connect(port);
      }
      serial.connect(port, rate, databits, stopbits, parity);
      // most likely on a real board this send will never get to
      // mrlcomm - because the board is not ready - but it doesnt hurt
      // and in fact it helps VirtualArduino - since we currently do not
      // have a DTR CDR line in the virtual port as use this as a signal
      // of connection
      // by default ack'ing is now on..
      // but with this first msg there is no msg before it,
      // and there is a high probability that the board is not really
      // ready
      // and this msg along with the ack will be ignored
      // so we turn off ack'ing locally
      // TODO - can we re-enable acks ?
      msg.enableAcks(true);
      long startBoardRequestTs = System.currentTimeMillis();
      // start the heartbeat
      enableBoardInfo(boardInfoEnabled);
      log.info("waiting for boardInfo ..........");
      // long waitTime = System.currentTimeMillis();
      // while ts < startedRequest && < 4.5 sec wait 30 try again
      // if timeout report error
      // poll every 30 ms, up to 4.5 s, for MrlComm's boardInfo reply
      while ((boardInfo == null || boardInfo.getReceiveTs() < startBoardRequestTs) && System.currentTimeMillis() - startBoardRequestTs < 4500) {
        sleep(30);
      }
      log.info("waited {} ms for Arduino {} to say hello", System.currentTimeMillis() - startBoardRequestTs, getName());
      // we might be connected now
      // see what our version is like...
      if (boardInfo != null) {
        Integer version = boardInfo.getVersion();
        if (version == null) {
          error("%s did not get response from arduino....", serial.getPortName());
        } else if (!version.equals(MRLCOMM_VERSION)) {
          error("MrlComm.ino responded with version %s expected version is %s", version, MRLCOMM_VERSION);
        } else {
          info("%s connected on %s responded version %s ... goodtimes...", serial.getName(), serial.getPortName(), version);
        }
      } else {
        log.error("board info is null ! - has MrlComm.ino been loaded ?");
      }
    } catch (Exception e) {
      log.error("serial open threw", e);
      error(e.getMessage());
    }
    broadcastState();
  }

  /**
   * sync our device list with mrlcomm - re-attaches every registered device
   * and re-enables any pins that were enabled. Guarded by syncInProgress so
   * only one sync runs at a time.
   */
  public void sync() {
    if (syncInProgress) {
      log.warn("Alreadying calling sync! Skipping this request");
      return;
    }
    syncInProgress = true;
    log.warn("================================ sync !!! ==============================");
    try {
      for (DeviceMapping device : deviceList.values()) {
        // invoke("reattach", device);
        send(getName(), "reattach", device);
        // Thread.sleep(1000);
        // reattach(device);
      }
      List<PinDefinition> list = getPinList();
      for (PinDefinition pindef : list) {
        if (pindef.isEnabled()) {
          enablePin(pindef.getPinName());
        }
      }
    } catch (Exception e) {
      log.error("sync threw", e);
    }
    syncInProgress = false;
    log.info("Sync completed");
  }

  // pass an arbitrary custom message through to MrlComm
  // > customMsg/[] msg
  public void customMsg(int... params) {
    msg.customMsg(params);
  }

  /**
   * Detach a device - tells MrlComm to drop it and removes it from both local
   * indexes. For servos, lets the other side detach first (it may need this
   * controller to disable()).
   */
  // @Override
  // > deviceDetach/deviceId
  public void detach(Attachable device) {
    log.info("{} detaching {}", getName(), device.getName());
    // if this service doesn't think its attached, we are done
    if (!isAttached(device)) {
      log.info("device {} not attached", device.getName());
      return;
    }
    // Servo requirements
    if (device instanceof ServoControl && device.isAttached(this)) {
      // if the other service thinks its attached - give it a chance to detach
      // this is important for Servo - because servo will want to disable()
      // before detaching - and it needs the controller to do so...
device.detach(this);
    }
    log.info("detaching device {}", device.getName());
    Integer id = getDeviceId(device);
    if (id != null) {
      msg.deviceDetach(id);
      deviceIndex.remove(id);
    }
    deviceList.remove(device.getName());
  }

  @Override
  public void detach(String controllerName) {
    detach(Runtime.getService(controllerName));
  }

  @Override
  public void detachI2CControl(I2CControl control) {
    // This method should delete the i2c device entry from the list of
    // I2CDevices
    // The order of the detach is important because the higher level service may
    // want to execute something that needs this service to still be availabe
    // NOTE(review): i2cDevices is keyed by "bus.address" in attachI2CControl,
    // but looked up by control name here - this lookup likely never matches.
    // TODO confirm.
    if (i2cDevices.containsKey(control.getName())) {
      i2cDevices.remove(control.getName());
      control.detachI2CController(this);
    }
  }

  // detach every registered i2c device from this controller
  public void detachI2CControls() {
    for (Map.Entry<String, I2CDeviceMap> i2cDevice : i2cDevices.entrySet()) {
      I2CControl i2cControl = i2cDevice.getValue().control;
      i2cControl.detach(this);
    }
  }

  /**
   * silly Arduino implementation - but keeping it since its familiar
   * digitalWrite/pin/value
   */
  public void digitalWrite(int address, int value) {
    log.info("digitalWrite {} {}", address, value);
    msg.digitalWrite(address, value);
  }

  // convenience overload - resolves a pin name to its address first
  public void digitalWrite(String pin, int value) {
    PinDefinition pinDef = getPin(pin);
    digitalWrite(pinDef.getAddress(), value);
  }

  /**
   * Resolve a pin name to its numeric address; falls back to parsing the
   * string itself as an integer, or null when neither works.
   */
  @Override
  public Integer getAddress(String pin) {
    PinDefinition pinDef = getPin(pin);
    if (pinDef != null) {
      return pinDef.getAddress();
    }
    try {
      return Integer.parseInt(pin);
    } catch (Exception e) {
      // not a pin name and not a number - fall through to null
    }
    return null;
  }

  /**
   * disablePin/address
   */
  @Override
  public void disablePin(int address) {
    PinDefinition pinDef = getPin(address);
    pinDef.setEnabled(false);
    msg.disablePin(address);
  }

  /**
   * disablePin/address
   */
  @Override
  public void disablePin(String pinName) {
    // PinDefinition pinDef = getPin(address);
    PinDefinition pinDef = getPin(pinName);
    pinDef.setEnabled(false);
    msg.disablePin(pinDef.getAddress());
  }

  /**
   * disable all pins
   */
  public void disablePins() {
    msg.disablePins();
  }

  /**
   * Disconnect from the board - cascades to any attached controllers, then
   * either resets the serial peer (when acting as a relayed controller) or
   * disconnects the serial service.
   */
  public void disconnect() {
    // FIXED - all don in 'onDisconnect()'
    // enableBoardInfo(false);
    // boardInfo is not valid after disconnect
    // because we might be connecting to a different Arduino
    // boardInfo.reset();
    for (Arduino controller : attachedController.values()) {
      controller.disconnect();
    }
    attachedController.clear();
    if (controllerAttachAs != MRL_IO_NOT_DEFINED) {
      controllerAttachAs = MRL_IO_NOT_DEFINED;
      serial = (Serial) createPeer("serial");
    } else {
      if (serial != null) {
        serial.disconnect();
      }
    }
    broadcastState();
  }

  // diagnostic round-trip to MrlComm
  public void echo(float myFloat, int myByte, float secondFloat) {
    msg.echo(myFloat, myByte, secondFloat);
  }

  // > enableAck/bool enabled
  public void enableAck(boolean enabled) {
    msg.enableAcks(enabled);
  }

  transient BoardInfoPoller poller = new BoardInfoPoller();

  /**
   * Background poller that requests board info once a second until stopped or
   * interrupted.
   */
  public class BoardInfoPoller implements Runnable {
    boolean running = false;
    Thread thread = null;

    public void run() {
      try {
        running = true;
        while (running) {
          sendBoardInfoRequest();
          sleep(1000);
        }
      } catch (Exception e) {
        log.info("board info stopping {}", e.getMessage());
      }
      thread = null;
      running = false;
    }

    // idempotent - only starts a thread if none is running
    public void start() {
      if (thread == null) {
        thread = new Thread(this, "boardInfoPoller");
        thread.start();
      }
    }

    public void stop() {
      if (thread != null) {
        thread.interrupt();
      }
    }
  }

  // TODO - remove
  // MrlComm now constantantly sends a stream of BoardInfo
  // > enableBoardInfo/bool enabled - no point to this
  public void enableBoardInfo(Boolean enabled) {
    /*
     * if (enabled) { poller.start(); } else { poller.stop(); }
     */
    boardInfoEnabled = enabled;
  }

  @Override
  public void enablePin(int address) {
    enablePin(address, 0);
  }

  /**
   * Enable polling of a pin at the given rate and publish the changed pin
   * definition.
   */
  // > enablePin/address/type/b16 rate
  public void enablePin(int address, int rate) {
    PinDefinition pinDef = getPin(address);
    msg.enablePin(address, getMrlPinType(pinDef), rate);
    pinDef.setEnabled(true);
    invoke("publishPinDefinition", pinDef); // broadcast pin change
  }

  /**
   * start polling reads of selected pin enablePin/address/type/b16 rate
   */
  public void enablePin(String pin, int rate)
{
    if (!isConnected()) {
      error("must be connected to enable pins");
      return;
    }
    PinDefinition pinDef = getPin(pin);
    enablePin(pinDef.getAddress(), rate);
  }

  public String getArduinoPath() {
    return arduinoPath;
  }

  public String getAref() {
    return aref;
  }

  // names of all attached devices
  @Override
  public Set<String> getAttached() {
    return deviceList.keySet();
  }

  public int getAttachedCount() {
    return deviceList.size();
  }

  /**
   * Heart-beat method on time, driven by the Arduino service to get information
   * from the board its currently connected. This is the "last" boardInfo
   * returned from the task inserted with addTask("getBoardInfo", 1000, 0,
   * "sendBoardInfoRequest");
   *
   * getBoardInfo
   */
  public BoardInfo getBoardInfo() {
    return boardInfo;
  }

  /**
   * Parses the bundled Arduino IDE boards.txt resource into BoardType entries.
   * Boards that declare menu.cpu variants are expanded per processor type;
   * other boards are listed once.
   */
  @Override // override to get Arduino board types
  public List<BoardType> getBoardTypes() {
    List<BoardType> boardTypes = new ArrayList<BoardType>();
    try {
      String b = FileIO.resourceToString("Arduino" + File.separator + "boards.txt");
      Properties boardProps = new Properties();
      boardProps.load(new ByteArrayInputStream(b.getBytes()));
      Enumeration<?> e = boardProps.propertyNames();
      Set<String> distinct = new TreeSet<String>();
      Set<String> hasProcessorTypes = new TreeSet<String>();
      while (e.hasMoreElements()) {
        String keyLine = (String) e.nextElement();
        String[] parts = keyLine.split("\\.");
        String key = parts[0];
        if (key.startsWith("menu")) {
          continue;
        }
        if (keyLine.contains("menu.cpu")) {
          hasProcessorTypes.add(key);
          // split - remove previous
          if (distinct.contains(key)) {
            distinct.remove(key);
          }
          // for diecimila.atmega328
          try {
            key = parts[0] + "." + parts[3];
          } catch (Exception e2) {
            // keyLine had fewer than 4 dot-separated parts
            log.error("board.txt is weird", e2);
          }
          distinct.add(key);
        } else if (!hasProcessorTypes.contains(key)) {
          distinct.add(key);
        }
      }
      for (String longKey : distinct) {
        String[] parts = longKey.split("\\.");
        String key = parts[0];
        String processorType = null;
        if (parts.length > 1) {
          processorType = parts[1];
        }
        BoardType boardType = new BoardType();
        if (processorType != null) {
          boardType.setName(boardProps.getProperty(String.format("%s.name", key)) + " - " + processorType);
        } else {
          boardType.setName(boardProps.getProperty(String.format("%s.name", key)));
        }
        boardType.setBoard(longKey);
        boardType.setId(longKey.hashCode());
        boardTypes.add(boardType);
      }
    } catch (Exception e) {
      log.error("getBoards threw", e);
    }
    return boardTypes;
  }

  @Override
  public org.myrobotlab.math.interfaces.Mapper getDefaultMapper() {
    // best guess :P
    MapperLinear mapper = new MapperLinear();
    mapper.map(-1.0, 1.0, 0.0, 255.0);
    return mapper;
  }

  // look up a device by its MrlComm id; null (with error log) if unknown
  public Attachable getDevice(Integer deviceId) {
    DeviceMapping dm = deviceIndex.get(deviceId);
    if (dm == null) {
      log.error("no device with deviceId {}", deviceId);
      return null;
    }
    return dm.getDevice();
  }

  Integer getDeviceId(NameProvider device) {
    return getDeviceId(device.getName());
  }

  // reverse lookup: device name -> MrlComm id; null (with error log) if unknown
  Integer getDeviceId(String name) {
    if (deviceList.containsKey(name)) {
      Integer id = deviceList.get(name).getId();
      if (id == null) {
        error("cannot get device id for %s - device attempetd to attach - but I suspect something went wrong", name);
      }
      return id;
    }
    log.error("getDeviceId could not find device {}", name);
    return null;
  }

  private String getDeviceName(int deviceId) {
    if (getDevice(deviceId) == null) {
      log.error("getDeviceName({}) is null", deviceId);
      return null;
    }
    return getDevice(deviceId).getName();
  }

  /**
   * int type to describe the pin defintion to Pin.h 0 digital 1 analog
   */
  public Integer getMrlPinType(PinDefinition pin) {
    if (board == null) {
      error("must have pin board type to determin pin definition");
      return null;
    }
    if (pin == null) {
log.error("pin definition null");
      return null;
    }
    if (pin.isAnalog()) {
      return 1;
    }
    return 0;
  }

  /**
   * FIXME - have local This creates the pin definitions based on boardType Not
   * sure how many pin definition sets there are. Currently there are only 2
   * supported - Mega-Like 70 pins &amp; Uno-Like 20 pins (14 digital 6 analog)
   * FIXME - sync with VirtualArduino FIXME - String boardType
   */
  @Override // override for arduino to get pin list
  public List<PinDefinition> getPinList() {
    // 2 board types have been identified (perhaps this is based on processor?)
    // mega-like & uno like
    // if no change - just return the values
    if ((pinIndex != null && board.contains("mega") && pinIndex.size() == 70) || (pinIndex != null && !board.contains("mega") && pinIndex.size() == 20)) {
      return new ArrayList<PinDefinition>(pinIndex.values());
    }
    // create 2 indexes for fast retrieval
    // based on "name" or "address"
    pinMap.clear();
    pinIndex.clear();
    List<PinDefinition> pinList = new ArrayList<PinDefinition>();
    if (board.contains("mega")) {
      // mega-like: 70 pins, A0-A15 mapped to addresses 54-69
      for (int i = 0; i < 70; ++i) {
        PinDefinition pindef = new PinDefinition(getName(), i);
        // begin wacky pin def logic
        String pinName = null;
        if (i == 0) {
          pindef.setRx(true);
        }
        if (i == 1) {
          pindef.setTx(true);
        }
        if (i < 1 || (i > 13 && i < 54)) {
          pinName = String.format("D%d", i);
          pindef.setDigital(true);
        } else if (i > 53) {
          pinName = String.format("A%d", i - 54);
          pindef.setAnalog(true);
          pindef.setDigital(true);
          pindef.canWrite(true);
        } else {
          pinName = String.format("D%d", i);
          pindef.setPwm(true);
        }
        pindef.setPinName(pinName);
        pindef.setAddress(i);
        pinMap.put(pinName, pindef);
        pinIndex.put(pindef.getAddress(), pindef);
        pinList.add(pindef);
      }
    } else {
      // uno-like: 20 pins, D0-D13 plus A0-A5 at addresses 14-19
      for (int i = 0; i < 20; ++i) {
        PinDefinition pindef = new PinDefinition(getName(), i);
        String pinName = null;
        if (i == 0) {
          pindef.setRx(true);
        }
        if (i == 1) {
          pindef.setTx(true);
        }
        if (i < 14) {
          pinName = String.format("D%d", i);
          pindef.setDigital(true);
        } else {
          pindef.setAnalog(true);
          pindef.canWrite(false);
          pindef.setDigital(false);
          pinName = String.format("A%d", i - 14);
        }
        // uno PWM-capable pins
        if (i == 3 || i == 5 || i == 6 || i == 9 || i == 10 || i == 11) {
          pindef.setPwm(true);
          pinName = String.format("D%d", i);
        }
        pindef.setPinName(pinName);
        pindef.setAddress(i);
        pinMap.put(pinName, pindef);
        pinIndex.put(pindef.getAddress(), pindef);
        pinList.add(pindef);
      }
      // FIXME - nano pico other ???
      if (board.contains("nano")) {
        /*
         * int i = 20; pinName = String.format("A%d", i - 14); PinDefinition
         * pindef = new PinDefinition(getName(), i); pindef.setDigital(false);
         * pindef.setPwm(false); pindef.setAnalog(true); pindef.canWrite(false);
         * pinIndex.put(i, pindef); pinMap.put(pinName, pindef);
         */
      }
    }
    return pinList;
  }

  public String getPortName() {
    return serial.getPortName();
  }

  @Override
  public List<String> getPortNames() {
    if (serial != null) {
      return serial.getPortNames();
    }
    return new ArrayList<String>();
  }

  @Override
  public List<String> getPorts() {
    // we use pins not ports
    List<String> ret = new ArrayList<String>();
    return ret;
  }

  /*
   * Use the serial service for serial activities !
No reason to replicate
   * methods
   */
  public Serial getSerial() {
    return serial;
  }

  public Sketch getSketch() {
    return sketch;
  }

  /**
   * Internal Arduino method to create an i2cBus object in MrlComm that is
   * shared between all i2c devices
   *
   * @param control
   *          the bus control - NOTE(review): this parameter is not used by the
   *          body, which attaches the i2cBus field instead; TODO confirm
   * @param busAddress
   *          the i2c bus number to attach
   */
  // > i2cBusAttach/deviceId/i2cBus
  private void i2cBusAttach(I2CBusControl control, int busAddress) {
    DeviceMapping dm = attachDevice(i2cBus, new Object[] { busAddress });
    Integer deviceId = dm.getId();
    msg.i2cBusAttach(deviceId, busAddress);
  }

  /**
   * Request `size` bytes from an i2c device and busy-wait (up to ~1 s, 1 ms
   * polls) for i2cReturnData to flag the reply; copies the received bytes into
   * the caller's buffer. Returns the number of bytes read, or -1 on timeout.
   */
  @Override
  // > i2cRead/deviceId/deviceAddress/size
  public int i2cRead(I2CControl control, int busAddress, int deviceAddress, byte[] buffer, int size) {
    i2cDataReturned = false;
    // Get the device index to the MRL i2c bus
    String i2cBus = String.format("I2CBus%s", busAddress);
    int deviceId = getDeviceId(i2cBus);
    log.info("i2cRead requesting {} bytes", size);
    msg.i2cRead(deviceId, deviceAddress, size);
    int retry = 0;
    int retryMax = 1000; // ( About 1000ms = s)
    try {
      /**
       * We will wait up to retryMax times to get the i2c data back from
       * MrlComm.c and wait 1 ms between each try. A blocking queue is not
       * needed, as this is only a single data element - and blocking is not
       * necessary.
       */
      while ((retry < retryMax) && (!i2cDataReturned)) {
        sleep(1);
        ++retry;
      }
    } catch (Exception e) {
      Logging.logError(e);
    }
    if (i2cDataReturned) {
      log.debug("i2cReturnData returned {} bytes to caller {}.", i2cDataSize, control.getName());
      for (int i = 0; i < i2cDataSize; i++) {
        buffer[i] = i2cData[i];
        log.debug("i2cReturnData returned ix {} value {}", i, buffer[i]);
      }
      return i2cDataSize;
    }
    // Time out, no data returned
    return -1;
  }

  /**
   * This methods is called by the i2cBus object when data is returned from the
   * i2cRead It populates the i2cData area and sets the i2cDataReturned flag to
   * true so that the loop in i2cRead can return the data to the caller
   */
  @Override
  public void i2cReturnData(int[] rawData) {
    i2cDataSize = rawData.length;
    for (int i = 0; i < i2cDataSize; i++) {
      i2cData[i] = (byte) (rawData[i] & 0xff);
    }
    log.debug("i2cReturnData invoked. i2cDataSize = {}", i2cDataSize);
    // set last - readers poll this flag after the data is in place
    i2cDataReturned = true;
  }

  /**
   * Write `size` bytes from the buffer to an i2c device via MrlComm.
   */
  @Override
  // > i2cWrite/deviceId/deviceAddress/[] data
  public void i2cWrite(I2CControl control, int busAddress, int deviceAddress, byte[] buffer, int size) {
    String i2cBus = String.format("I2CBus%s", busAddress);
    int deviceId = getDeviceId(i2cBus);
    int data[] = new int[size];
    for (int i = 0; i < size; ++i) {
      data[i] = buffer[i];// guess you want -128 to 127 ??
// [ ] == unsigned
      // char & 0xff;
    }
    msg.i2cWrite(deviceId, deviceAddress, data);
  }

  /**
   * Combined write-then-read. For single-byte writes this is sent as one
   * i2cWriteRead message and the reply is awaited with the same 1 s / 1 ms
   * polling scheme as i2cRead; otherwise it falls back to separate
   * i2cWrite + i2cRead calls. Returns bytes read, or -1 on timeout.
   */
  @Override
  // > i2cWriteRead/deviceId/deviceAddress/readSize/writeValue
  public int i2cWriteRead(I2CControl control, int busAddress, int deviceAddress, byte[] writeBuffer, int writeSize, byte[] readBuffer, int readSize) {
    if (writeSize != 1) {
      i2cWrite(control, busAddress, deviceAddress, writeBuffer, writeSize);
      return i2cRead(control, busAddress, deviceAddress, readBuffer, readSize);
    } else {
      i2cDataReturned = false;
      // Get the device index to the MRL i2c bus
      String i2cBus = String.format("I2CBus%s", busAddress);
      int deviceId = getDeviceId(i2cBus);
      // NOTE(review): msgBuffer is populated but never used - msg.i2cWriteRead
      // below takes the values directly
      int msgBuffer[] = new int[4];
      msgBuffer[0] = deviceId;
      msgBuffer[1] = deviceAddress;
      msgBuffer[2] = readSize;
      msgBuffer[3] = writeBuffer[0];
      msg.i2cWriteRead(deviceId, deviceAddress, readSize, writeBuffer[0] & 0xFF);
      int retry = 0;
      int retryMax = 1000; // ( About 1000ms = s)
      try {
        /**
         * We will wait up to retryMax times to get the i2c data back from
         * MrlComm.c and wait 1 ms between each try. A blocking queue is not
         * needed, as this is only a single data element - and blocking is not
         * necessary.
         */
        while ((retry < retryMax) && (!i2cDataReturned)) {
          sleep(1);
          ++retry;
        }
      } catch (Exception e) {
        Logging.logError(e);
      }
      if (i2cDataReturned) {
        log.debug("i2cReturnData returned %s bytes to caller {}.", i2cDataSize, control.getName());
        for (int i = 0; i < i2cDataSize; i++) {
          readBuffer[i] = i2cData[i];
          log.debug("i2cReturnData returned ix {} value {}", i, readBuffer[i]);
        }
        return i2cDataSize;
      }
      // Time out, no data returned
      return -1;
    }
  }

  /**
   * Lazily creates the serial peer and the Msg codec, and registers this
   * service as a byte listener. No-op (with warning) when already initialized.
   */
  private void initSerial() {
    if (msg == null) {
      serial = (Serial) startPeer("serial");
      msg = new Msg(this, serial);
      serial.addByteListener(this);
    } else {
      log.warn("Init serial called and we already have a msg class!");
    }
  }

  @Override
  public boolean isAttached(Attachable device) {
    return deviceList.containsKey(device.getName());
  }

  @Override
  public boolean isAttached(String name) {
    return deviceList.containsKey(name);
  }

  /**
   * Connected means the serial port is open AND a valid MrlComm version was
   * received; a relayed (serialX) controller also counts when versions match.
   */
  @Override
  public boolean isConnected() {
    // include that we must have gotten a valid MrlComm version number.
    if (serial != null && serial.isConnected() && boardInfo != null && boardInfo.getVersion() != null) {
      return true;
    }
    // just to force serial arduino conected if it is a serialX com
    // usefull to enable pin on the remote arduino
    // @Deprecated FIXME - this is "bad"
    // NOTE(review): boardInfo may be null here (not re-checked) and
    // getVersion() is compared with == rather than equals - TODO confirm
    if ((controllerAttachAs == MRL_IO_SERIAL_1 || controllerAttachAs == MRL_IO_SERIAL_2 || controllerAttachAs == MRL_IO_SERIAL_3) && boardInfo.getVersion() == MRLCOMM_VERSION) {
      return true;
    }
    return false;
  }

  // FIXME put recording in generated message structure !!!
@Override
  public boolean isRecording() {
    return msg.isRecording();
  }

  // not used currently - should be refactored to use these methods for motor
  // control
  @Override
  public double motorCalcOutput(MotorControl mc) {
    double value = mc.calcControllerOutput();
    return value;
  }

  /**
   * Drive a motor at its current power level: simple motors get a direction
   * pin write plus a PWM magnitude; dual-PWM motors get the magnitude on one
   * side and 0 on the other (both 0 to stop).
   */
  @Override
  public void motorMove(MotorControl mc) {
    Class<?> type = mc.getClass();
    double powerOutput = motorPowerMapper.calcOutput(mc.getPowerLevel());
    // log.info(mc.getPowerLevel()+" "+powerOutput);
    if (Motor.class == type) {
      Motor config = (Motor) mc;
      msg.digitalWrite(getAddress(config.getDirPin()), (powerOutput < 0) ? MOTOR_BACKWARD : MOTOR_FORWARD);
      msg.analogWrite(getAddress(config.getPwrPin()), (int) Math.abs(powerOutput));
    } else if (MotorDualPwm.class == type) {
      MotorDualPwm config = (MotorDualPwm) mc;
      if (powerOutput < 0) {
        msg.analogWrite(getAddress(config.getLeftPwmPin()), 0);
        msg.analogWrite(getAddress(config.getRightPwmPin()), (int) Math.abs(powerOutput));
      } else if (powerOutput > 0) {
        msg.analogWrite(getAddress(config.getRightPwmPin()), 0);
        msg.analogWrite(getAddress(config.getLeftPwmPin()), (int) Math.abs(powerOutput));
      } else {
        msg.analogWrite(getAddress(config.getLeftPwmPin()), 0);
        msg.analogWrite(getAddress(config.getRightPwmPin()), 0);
      }
    } else {
      error("motorMove for motor type %s not supported", type);
    }
  }

  // FIXME - clean or remove ...
  // ========== pulsePin begin =============
  // FIXME - MasterBlaster had a pulse motor which could support MoveTo
  // We need a Motor + encoder (analog or digital) DiyServo does this...
  /**
   * Move a motor toward a target position - currently only computes the
   * byte-split of the target; the actual send is commented out (TODO).
   */
  @Override
  public void motorMoveTo(MotorControl mc) {
    // speed parameter?
    // modulo - if < 1
    // speed = 1 else
    log.info("motorMoveTo targetPos {} powerLevel {}", mc.getTargetPos(), mc.getPowerLevel());
    Class<?> type = mc.getClass();
    // if pulser (with or without fake encoder
    // send a series of pulses !
    // with current direction
    if (Motor.class == type) {
      Motor motor = (Motor) mc;
      // check motor direction
      // send motor direction
      // TODO powerLevel = 100 * powerlevel
      // FIXME !!! - this will have to send a Long for targetPos at some
      // point !!!!
      double target = Math.abs(motor.getTargetPos());
      int b0 = (int) target & 0xff;
      int b1 = ((int) target >> 8) & 0xff;
      int b2 = ((int) target >> 16) & 0xff;
      int b3 = ((int) target >> 24) & 0xff;
      // TODO FIXME
      // sendMsg(PULSE, deviceList.get(motor.getName()).id, b3, b2, b1,
      // b0, (int) motor.getPowerLevel(), feedbackRate);
    }
  }

  @Override
  public void motorReset(MotorControl motor) {
    // perhaps this should be in the motor control
    // motor.reset();
    // opportunity to reset variables on the controller
    // sendMsg(MOTOR_RESET, motor.getind);
  }

  // stop a motor by writing 0 to its PWM pin(s)
  @Override
  public void motorStop(MotorControl mc) {
    Class<?> type = mc.getClass();
    if (Motor.class == type) {
      Motor config = (Motor) mc;
      msg.analogWrite(getAddress(config.getPwrPin()), 0);
    } else if (MotorDualPwm.class == type) {
      MotorDualPwm config = (MotorDualPwm) mc;
      msg.analogWrite(getAddress(config.getLeftPwmPin()), 0);
      msg.analogWrite(getAddress(config.getRightPwmPin()), 0);
    }
  }

  @Override
  // > neoPixelAttach/deviceId/pin/b32 numPixels
  public void neoPixelAttach(NeoPixel neopixel, int pin, int numPixels) {
    DeviceMapping dm = attachDevice(neopixel, new Object[] { pin, numPixels });
    // NOTE(review): deviceId is computed but the attach message uses
    // getDeviceId(neopixel) instead - TODO confirm these are the same value
    Integer deviceId = dm.getId();
    msg.neoPixelAttach(getDeviceId(neopixel)/* byte */, pin/* byte */, numPixels/* b32 */);
  }

  @Override
  // > neoPixelSetAnimation/deviceId/animation/red/green/blue/b16 speed
  public void neoPixelSetAnimation(NeoPixel neopixel, int animation, int red, int green, int blue, int speed) {
    msg.neoPixelSetAnimation(getDeviceId(neopixel), animation, red, green, blue, speed);
  }

  /**
   * neoPixelWriteMatrix/deviceId/[] buffer
   */
  @Override
  public void neoPixelWriteMatrix(NeoPixel neopixel, List<Integer> data) {
    int[] buffer = new int[data.size()];
    for (int i = 0; i < data.size(); ++i) {
      buffer[i] =
data.get(i); } msg.neoPixelWriteMatrix(getDeviceId(neopixel), buffer); } /** * Callback for Serial service - local (not remote) although a * publish/subscribe could be created - this method is called by a thread * waiting on the Serial's RX BlockingQueue * * Other services may use the same technique or subscribe to a Serial's * publishByte method * * it might be worthwhile to look in optimizing reads into arrays vs single * byte processing .. but maybe there would be no gain * */ public synchronized void onBytes(byte[] bytes) { // log.info("On Bytes called in Arduino. {}", bytes); // These bytes arrived from the serial port data, push them down into the msg parser. // if a full message is detected, the publish(Function) method will be directly called on // this arduino instance. msg.onBytes(bytes); } @Override public synchronized void onConnect(String portName) { // Pass this serial port notification down to the msg parser msg.onConnect(portName); log.info("{} onConnect for port {}", getName(), portName); info("%s connected to %s", getName(), portName); // chained... invoke("publishConnect", portName); } public void onCustomMsg(Integer ax, Integer ay, Integer az) { log.info("onCustomMsg"); } @Override public void onDisconnect(String portName) { msg.onDisconnect(portName); info("%s disconnected from %s", getName(), portName); enableBoardInfo(false); // chained... invoke("publishDisconnect", portName); } public void openMrlComm(String path) { try { if (!setArduinoPath(path)) { return; } String mrlCommFiles = null; if (FileIO.isJar()) { mrlCommFiles = Util.getResourceDir() + "/Arduino/MrlComm"; // FIXME - don't do this every time :P Zip.extractFromSelf(Util.getResourceDir() + File.separator + "Arduino" + File.separator + "MrlComm", "resource/Arduino/MrlComm"); } else { // running in IDE ? 
mrlCommFiles = Util.getResourceDir() + File.separator + "Arduino" + File.separator + "MrlComm"; } File mrlCommDir = new File(mrlCommFiles); if (!mrlCommDir.exists() || !mrlCommDir.isDirectory()) { error("mrlcomm script directory %s is not a valid", mrlCommDir); return; } String exePath = arduinoPath + File.separator + ArduinoUtils.getExeName(); String inoPath = mrlCommDir.getAbsolutePath() + File.separator + "/MrlComm.ino"; List<String> cmd = new ArrayList<String>(); cmd.add(exePath); cmd.add(inoPath); ProcessBuilder builder = new ProcessBuilder(cmd); builder.start(); } catch (Exception e) { error(String.format("%s %s", e.getClass().getSimpleName(), e.getMessage())); log.error("openMrlComm threw", e); } } public String getBase64ZippedMrlComm() { return Base64.getEncoder().encodeToString((getZippedMrlComm())); } public byte[] getZippedMrlComm() { try { // get resource location String filename = getDataDir() + File.separator + "MrlComm.zip"; File f = new File(filename); if (f.exists()) { f.delete(); } // zip resource Zip.zip(new String[] { getResourceDir() + File.separator + "MrlComm" }, filename); // return zip file return FileIO.toByteArray(new File(filename)); } catch (Exception e) { error("could not get zipped mrl comm %s", e); } return null; } @Override /** * // > pinMode/pin/mode */ public void pinMode(int address, String modeStr) { pinMode(address, modeStr.equalsIgnoreCase("INPUT") ? Arduino.INPUT : Arduino.OUTPUT); } public void pinMode(int address, int mode) { msg.pinMode(address, mode); } /** * With Arduino we want to be able to do pinMode("D7", "INPUT"), but it should * not be part of the PinArrayControl interface - because when it comes down * to it .. a pin MUST ALWAYS have an address regardless what you label or * name it... 
* */ public void pinMode(String pin, String mode) { PinDefinition pinDef = getPin(pin); pinMode(pinDef.getAddress(), mode); } // < publishAck/function public void publishAck(Integer function/* byte */) { if (msg.debug) { log.info("{} Message Ack received: =={}==", getName(), Msg.methodToString(function)); } } // < publishBoardInfo/version/boardType/b16 microsPerLoop/b16 sram/[] // deviceSummary public BoardInfo publishBoardInfo(Integer version/* byte */, Integer boardTypeId/* byte */, Integer microsPerLoop/* b16 */, Integer sram/* b16 */, Integer activePins, int[] deviceSummary/* [] */) { String boardTypeName = getBoardType(boardTypeId); boardInfo = new BoardInfo(version, boardTypeId, boardTypeName, microsPerLoop, sram, activePins, arrayToDeviceSummary(deviceSummary), boardInfoRequestTs); boardInfoRequestTs = System.currentTimeMillis(); log.debug("Version return by Arduino: {}", boardInfo.getVersion()); log.debug("Board type currently set: {} => {}", boardTypeId, boardTypeName); if (lastBoardInfo == null || !lastBoardInfo.getBoardTypeName().equals(board)) { log.warn("setting board to type {}", board); this.board = boardInfo.getBoardTypeName(); // we don't invoke, because // it might get into a race condition // in some gui getPinList(); // invoke("getPinList"); broadcastState(); } // TODO: consider, can we really just re-sync when we see begin only.. ? feels better/safer. 
// if (boardInfo != null) { // DeviceSummary[] ds = boardInfo.getDeviceSummary(); // if (deviceList.size() - 1 > ds.length) { /* -1 for self */ // log.info("Invoking Sync DeviceList: {} and DeviceSummary: {}", deviceList, ds); // invoke("sync"); // } // } // we send here - because this is a "command" message, and we don't want the // possibility of // block this "status" msgs lastBoardInfo = boardInfo; return boardInfo; } @Override public String publishConnect(String portName) { return portName; } // < publishCustomMsg/[] msg public int[] publishCustomMsg(int[] msg/* [] */) { return msg; } // < publishDebug/str debugMsg public String publishDebug(String debugMsg/* str */) { log.info("publishDebug {}", debugMsg); return debugMsg; } @Override public String publishDisconnect(String portName) { return portName; } /** * publishEcho/b32 sInt/str name1/b8/bu32 bui32/b32 bi32/b9/str name2/[] * * @param myFloat * @param myByte * @param secondFloat */ public void publishEcho(float myFloat, int myByte, float secondFloat) { log.info("myFloat {} {} {} ", myFloat, myByte, secondFloat); } @Override public EncoderData publishEncoderData(EncoderData data) { return data; } // callback for generated method from arduinoMsg.schema public EncoderData publishEncoderData(Integer deviceId, Integer position) { EncoderControl ec = (EncoderControl) getDevice(deviceId); String pin = null; if (ec instanceof Amt203Encoder) { // type = 0; pin = ((Amt203Encoder) ec).getPin(); } else if (ec instanceof As5048AEncoder) { // type = 1; pin = ((As5048AEncoder) ec).getPin(); } else { error("unknown encoder type {}", ec.getClass().getName()); } EncoderData data = new EncoderData(ec.getName(), pin, position); return data; } /* * DeviceControl methods. In this case they represents the I2CBusControl Not * sure if this is good to use the Arduino as an I2CBusControl Exploring * different alternatives. I may have to rethink. Alternate solutions are * welcome. /Mats. 
*/ /** * @param deviceId * - mrl device identifier * @param data * - data to publish from I2c */ // < publishI2cData/deviceId/[] data public void publishI2cData(Integer deviceId, int[] data) { log.info("publishI2cData"); i2cReturnData(data); } /** * error from mrlcom in string form * * @param errorMsg * @return */ // < publishMRLCommError/str errorMsg public String publishMRLCommError(String errorMsg/* str */) { warn("MrlCommError: " + errorMsg); log.error("MRLCommError: {}", errorMsg); return errorMsg; } // < publishPinArray/[] data public PinData[] publishPinArray(int[] data) { log.debug("publishPinArray {}", data); // if subscribers - // look for subscribed pins and publish them int pinDataCnt = data.length / 3; PinData[] pinArray = new PinData[pinDataCnt]; // parse sort reduce ... for (int i = 0; i < pinArray.length; ++i) { int address = data[3 * i]; PinDefinition pinDef = getPin(address); if (pinDef == null) { log.error("not a valid pin address {}", address); continue; } int value = Serial.bytesToInt(data, (3 * i) + 1, 2); PinData pinData = new PinData(pinDef.getPinName(), value); // update def with last value pinDef.setValue(value); pinArray[i] = pinData; // handle individual pins if (pinListeners.containsKey(address)) { Set<PinListener> set = pinListeners.get(address); for (PinListener pinListner : set) { if (pinListner.isLocal()) { pinListner.onPin(pinData); } else { invoke("publishPin", pinData); } } } } // TODO: improve this logic so it doesn't something more effecient. 
HashMap<String, PinData> pinDataMap = new HashMap<String, PinData>(); for (int i = 0; i < pinArray.length; i++) { if (pinArray[i] != null && pinArray[i].pin != null) { pinDataMap.put(pinArray[i].pin, pinArray[i]); } } for (String name : pinArrayListeners.keySet()) { // put the pin data into a map for quick lookup PinArrayListener pal = pinArrayListeners.get(name); if (pal.getActivePins() != null && pal.getActivePins().length > 0) { int numActive = pal.getActivePins().length; PinData[] subArray = new PinData[numActive]; for (int i = 0; i < numActive; i++) { String key = pal.getActivePins()[i]; if (pinDataMap.containsKey(key)) { subArray[i] = pinDataMap.get(key); } else { subArray[i] = null; } } // only the values that the listener is asking for. pal.onPinArray(subArray); } else { // the full array pal.onPinArray(pinArray); } } return pinArray; } public List<String> publishPortNames(List<String> portNames) { return portNames; } /** * FIXME - I bet this doesnt work - test it * * @param deviceId * @param data * @return */ public SerialRelayData publishSerialData(Integer deviceId, int[] data) { SerialRelayData serialData = new SerialRelayData(deviceId, data); return serialData; } @Deprecated /** * Controllers should publish EncoderData - Servos can change that * into ServoData and publish REMOVED BY GROG - use TimeEncoder ! 
*/ public Integer publishServoEvent(Integer deviceId, Integer eventType, Integer currentPos, Integer targetPos) { if (getDevice(deviceId) != null) { // REMOVED BY GROG - use time encoder !((ServoControl) // getDevice(deviceId)).publishServoData(ServoStatus.SERVO_POSITION_UPDATE, // (double) currentPos); } else { error("no servo found at device id %d", deviceId); } return currentPos; } // FIXME should be in Control interface - for callback // < publishUltrasonicSensorData/deviceId/b16 echoTime public Integer publishUltrasonicSensorData(Integer deviceId, Integer echoTime) { // log.info("echoTime {}", echoTime); ((UltrasonicSensor) getDevice(deviceId)).onUltrasonicSensorData(echoTime.doubleValue()); return echoTime; } // FIXME put recording into generated Msg @Override public void record() throws Exception { msg.record(); } @Override public void releaseService() { super.releaseService(); if (virtual != null) { virtual.releaseService(); } sleep(300); disconnect(); } /** * resets both MrlComm-land &amp; Java-land */ public void reset() { log.info("reset - resetting all devices"); // reset MrlComm-land softReset(); for (String name : deviceList.keySet()) { DeviceMapping dmap = deviceList.get(name); Attachable device = dmap.getDevice(); log.info("unsetting device {}", name); try { device.detach(name); } catch (Exception e) { log.error("detaching threw", e); } } // reset Java-land deviceIndex.clear(); deviceList.clear(); } /** * Requesting board information from the board */ public void sendBoardInfoRequest() { boardInfoRequestTs = System.currentTimeMillis(); msg.getBoardInfo(); } public void serialAttach(SerialRelay serialRelay, int controllerAttachAs) { DeviceMapping dm = attachDevice(serialRelay, new Object[] { controllerAttachAs }); Integer deviceId = dm.getId(); msg.serialAttach(deviceId, controllerAttachAs); } // > servoDetachPin/deviceId public void onServoDisable(ServoControl servo) { msg.servoDetachPin(getDeviceId(servo)); } @Override public void 
onServoEnable(ServoControl servo) { Integer deviceId = getDeviceId(servo); if (deviceId == null) { log.warn("servoEnable servo {} does not have a corresponding device currently - did you attach?", servo.getName()); } if (isConnected()) { msg.servoAttachPin(deviceId, getAddress(servo.getPin())); } else { log.info("not currently connected"); } } /** * servo.write(angle) https://www.arduino.cc/en/Reference/ServoWrite The msg * to mrl will always contain microseconds - but this method will (like the * Arduino Servo.write) accept both degrees or microseconds. The code is * ported from Arduino's Servo.cpp */ @Override // > servoWrite/deviceId/target public void onServoMoveTo(ServoControl servo) { Integer deviceId = getDeviceId(servo); if (deviceId == null) { log.warn("servoMoveTo servo {} does not have a corresponding device currently - did you attach?", servo.getName()); return; } // getTargetOutput ALWAYS ALWAYS Degrees ! // so we convert to microseconds int us = degreeToMicroseconds(servo.getTargetOutput()); log.debug("servoMoveToMicroseconds servo {} id {} {}->{} us", servo.getName(), deviceId, servo.getPos(), us); msg.servoMoveToMicroseconds(deviceId, us); } @Override // > servoSetVelocity/deviceId/b16 velocity public void onServoSetSpeed(ServoControl servo) { int speed = -1; if (servo.getSpeed() != null) { speed = servo.getSpeed().intValue(); } log.info("servoSetVelocity {} id {} velocity {}", servo.getName(), getDeviceId(servo), speed); Integer i = getDeviceId(servo); if (i == null) { log.error("{} has null deviceId", servo); return; } msg.servoSetVelocity(i, speed); } /** * On standard servos a parameter value of 1000 is fully counter-clockwise, * 2000 is fully clockwise, and 1500 is in the middle. 
*/ @Override // > servoWriteMicroseconds/deviceId/b16 ms public void onServoWriteMicroseconds(ServoControl servo, int uS) { int deviceId = getDeviceId(servo); log.debug("writeMicroseconds {} {} id {}", servo.getName(), uS, deviceId); msg.servoMoveToMicroseconds(deviceId, uS); } public boolean setArduinoPath(String path) { path = path.replace("\\", "/"); path = path.trim(); if (!path.endsWith("/")) { path += "/"; } File dir = new File(path); if (!dir.exists() || !dir.isDirectory()) { error(String.format("%s is not a valid directory", path)); return false; } arduinoPath = path; ArduinoUtils.arduinoPath = arduinoPath; // THIS IS SILLY AND NOT // NORMALIZED ! save(); return true; } public void setAref(String aref) { aref = aref.toUpperCase(); if (this.getBoard().contains("mega")) { if (aref == "INTERNAL") { error("Aref " + aref + " is not compatible with your board " + this.getBoard()); aref = "DEFAULT"; } } else { if (aref == "INTERNAL1V1" || aref == "INTERNAL2V56") { error("Aref INTERNALxV is not compatible with your board " + this.getBoard()); aref = "DEFAULT"; } } int arefInt = 1; switch (aref) { case "EXTERNAL": arefInt = 0; break; case "DEFAULT": arefInt = 1; break; case "INTERNAL1V1": arefInt = 2; break; case "INTERNAL": arefInt = 3; break; case "INTERNAL2V56": arefInt = 3; break; default: log.error("Aref " + aref + " is unknown"); } log.info("set aref to " + aref); this.aref = aref; msg.setAref(arefInt); } public void setBoardMega() { setBoard(BOARD_TYPE_MEGA); } public void setBoardMegaADK() { setBoard(BOARD_TYPE_MEGA_ADK); } public void setBoardNano() { setBoard(BOARD_TYPE_NANO); } public void setBoardUno() { setBoard(BOARD_TYPE_UNO); } /* * Debounce ensures that only a single signal will be acted upon for a single * opening or closing of a contact. the delay is the min number of pc cycles * must occur before a reading is taken * * Affects all reading of pins setting to 0 sets it off * * TODO - implement on MrlComm side ... 
* */ // > setDebounce/pin/delay public void setDebounce(int pin, int delay) { msg.setDebounce(pin, delay); } // > setDebug/bool enabled public void setDebug(boolean b) { msg.setDebug(b); } /* * dynamically change the serial rate TODO - shouldn't this change Arduino * service serial rate too to match? * */ // > setSerialRate/b32 rate public void setSerialRate(int rate) { msg.setSerialRate(rate); } public void setSketch(Sketch sketch) { this.sketch = sketch; broadcastState(); } /* * set a pin trigger where a value will be sampled and an event will be signal * when the pin turns into a different state. * * TODO - implement on MrlComm side... */ // > setTrigger/pin/triggerValue public void setTrigger(int pin, int value) { msg.setTrigger(pin, value); } @Override public void setZeroPoint(EncoderControl encoder) { // send the set zero point command to the encoder msg.setZeroPoint(getDeviceId(encoder.getName())); } /** * send a reset to MrlComm - all devices removed, all polling is stopped and * all other counters are reset */ // > softReset public void softReset() { msg.softReset(); } @Override public void startService() { super.startService(); try { initSerial(); } catch (Exception e) { log.error("Arduino.startService threw", e); } } @Override public void stopRecording() { msg.stopRecording(); } @Override public void stopService() { super.stopService(); detachI2CControls(); disconnect(); } @Override // > ultrasonicSensorStartRanging/deviceId/b32 timeout public void ultrasonicSensorStartRanging(UltrasonicSensorControl sensor) { msg.ultrasonicSensorStartRanging(getDeviceId(sensor)); } @Override // > ultrasonicSensorStopRanging/deviceId public void ultrasonicSensorStopRanging(UltrasonicSensorControl sensor) { msg.ultrasonicSensorStopRanging(getDeviceId(sensor)); } public void uploadSketch(String arduinoPath) throws IOException { uploadSketch(arduinoPath, serial.getLastPortName()); } public void uploadSketch(String arudinoPath, String comPort) throws IOException { 
uploadSketch(arudinoPath, comPort, getBoard()); } static public String getBoardType(int boardId) { String boardName; switch (boardId) { case BOARD_TYPE_ID_MEGA: boardName = BOARD_TYPE_MEGA; break; case BOARD_TYPE_ID_UNO: boardName = BOARD_TYPE_UNO; break; case BOARD_TYPE_ID_ADK_MEGA: boardName = BOARD_TYPE_MEGA_ADK; break; case BOARD_TYPE_ID_NANO: boardName = BOARD_TYPE_NANO; break; case BOARD_TYPE_ID_PRO_MINI: boardName = BOARD_TYPE_PRO_MINI; break; default: // boardName = "unknown"; boardName = BOARD_TYPE_UNO; break; } return boardName; } static public int getBoardTypeId(String boardName) { Integer boardId = null; switch (boardName) { case BOARD_TYPE_MEGA: boardId = BOARD_TYPE_ID_MEGA; break; case BOARD_TYPE_UNO: boardId = BOARD_TYPE_ID_UNO; break; case BOARD_TYPE_MEGA_ADK: boardId = BOARD_TYPE_ID_ADK_MEGA; break; case BOARD_TYPE_NANO: boardId = BOARD_TYPE_ID_NANO; break; case BOARD_TYPE_PRO_MINI: boardId = BOARD_TYPE_ID_PRO_MINI; break; default: // boardName = "unknown"; boardId = BOARD_TYPE_ID_UNO; break; } return boardId; } public void uploadSketch(String arduinoIdePath, String port, String type) throws IOException { log.info("uploadSketch ({}, {}, {})", arduinoIdePath, port, type); if (!setArduinoPath(arduinoIdePath)) { return; } // hail mary - if we have no idea // guess uno if (type == null || type.equals("")) { type = BOARD_TYPE_UNO; } log.info("arduino IDE Path={}", arduinoIdePath); log.info("Port={}", port); log.info("type={}", type); /* * not needed if (arduinoIdePath != null && * !arduinoIdePath.equals(ArduinoUtils.arduinoPath)) { this.arduinoPath = * arduinoIdePath; ArduinoUtils.arduinoPath = arduinoIdePath; save(); } */ uploadSketchResult = String.format("Uploaded %s ", new Date()); boolean connectedState = isConnected(); try { if (connectedState) { log.info("disconnecting..."); disconnect(); } ArduinoUtils.uploadSketch(port, type.toLowerCase()); } catch (Exception e) { log.info("ArduinoUtils threw trying to upload", e); } if (connectedState) { 
log.info("reconnecting..."); serial.connect(); } // perhaps you can reduce the inter-process information // to succeed | fail .. perhaps you can't // I would prefer transparency - send all output to the ui uploadSketchResult += ArduinoUtils.getOutput(); log.info(uploadSketchResult); broadcastState(); } /** * this is what Arduino firmware 'should' have done - a simplified * write(address, value) which follows the convention of 'all' device * operations at the lowest level * http://codewiki.wikidot.com/c:system-calls:write PinArrayControl method */ @Override public void write(int address, int value) { info("write (%d,%d) to %s", address, value, serial.getName()); PinDefinition pinDef = getPin(address); pinMode(address, "OUTPUT"); if (pinDef.isPwm() && value > 1) { // CHEESEY HACK !! analogWrite(address, value); } else { digitalWrite(address, value); } // cache value pinDef.setValue(value); } public Map<String, DeviceMapping> getDeviceList() { return deviceList; } public void ackTimeout() { log.warn("Ack Timeout seen. TODO: consider resetting the com port, reconnecting and re syncing all devices."); } public void publishMrlCommBegin(Integer version) { // If we were already connected up and clear to send.. this is a problem.. it means the board was reset on it. if (mrlCommBegin > 0) { error("arduino %s has reset - does it have a separate power supply?", getName()); // At this point we need to reset! mrlCommBegin = 0; } ++mrlCommBegin; // log.info("Skipping Sync! TODO: uncomment me."); // This needs to be non-blocking // If we have devices, we need to sync them. // The device list always has "Arduino" in it for some reason.. if (deviceList.size() > 1) { log.info("Need to sync devices to mrlcomm. Num Devices: {} Devices: {}", deviceList.size(), deviceList); invoke("sync"); } else { log.info("no devices to sync, clear to resume."); } } /** * DO NOT FORGET INSTALL AND VMARGS !!! 
* * -Djava.library.path=libraries/native -Djna.library.path=libraries/native * -Dfile.encoding=UTF-8 * * @param args */ public static void main(String[] args) { try { // Platform.setVirtual(true); Runtime.main(new String[] { "--interactive", "--id", "id"}); LoggingFactory.init(Level.INFO); // Platform.setVirtual(true); /* WebGui webgui = (WebGui) Runtime.create("webgui", "WebGui"); webgui.autoStartBrowser(false); webgui.setPort(8887); webgui.startService(); */ // Runtime.start("gui", "SwingGui"); Serial.listPorts(); Arduino hub = (Arduino) Runtime.start("hub", "Arduino"); hub.connect("/dev/ttyACM0"); // hub.enableAck(false); ServoControl sc = (ServoControl) Runtime.start("s1", "Servo"); sc.setPin(3); hub.attach(sc); sc = (ServoControl) Runtime.start("s2", "Servo"); sc.setPin(9); hub.attach(sc); // hub.enableAck(true); /* * sc = (ServoControl) Runtime.start("s3", "Servo"); sc.setPin(12); * hub.attach(sc); */ log.info("here"); // hub.connect("COM6"); // uno // hub.startTcpServer(); boolean isDone = true; if (isDone) { return; } VirtualArduino vmega = null; vmega = (VirtualArduino) Runtime.start("vmega", "VirtualArduino"); vmega.connect("COM7"); Serial sd = (Serial) vmega.getSerial(); sd.startTcpServer(); // Runtime.start("webgui", "WebGui"); Arduino mega = (Arduino) Runtime.start("mega", "Arduino"); if (mega.isVirtual()) { vmega = mega.getVirtual(); vmega.setBoardMega(); } // mega.getBoardTypes(); // mega.setBoardMega(); // mega.setBoardUno(); mega.connect("COM7"); /* * Arduino uno = (Arduino) Runtime.start("uno", "Arduino"); * uno.connect("COM6"); */ // log.info("port names {}", mega.getPortNames()); Servo servo = (Servo) Runtime.start("servo", "Servo"); // servo.load(); log.info("rest is {}", servo.getRest()); servo.save(); // servo.setPin(8); servo.attach(mega, 13); servo.moveTo(90.0); /* * servo.moveTo(3); sleep(300); servo.moveTo(130); sleep(300); * servo.moveTo(90); sleep(300); * * * // minmax checking * * servo.invoke("moveTo", 120); */ /* * 
mega.attach(servo); * * servo.moveTo(3); * * servo.moveTo(30); * * mega.enablePin("A4"); * * // arduino.setBoardMega(); * * Adafruit16CServoDriver adafruit = (Adafruit16CServoDriver) * Runtime.start("adafruit", "Adafruit16CServoDriver"); * adafruit.attach(mega); mega.attach(adafruit); */ // servo.attach(arduino, 8, 90); // Runtime.start("webgui", "WebGui"); // Service.sleep(3000); // remote.startListening(); // Runtime.start("webgui", "WebGui"); } catch (Exception e) { log.error("main threw", e); } } /** * stops the servo sweeping or moving with speed control */ @Override public void onServoStop(ServoControl servo) { msg.servoStop(getDeviceId(servo)); } }
src/main/java/org/myrobotlab/service/Arduino.java
package org.myrobotlab.service; import static org.myrobotlab.arduino.Msg.MAX_MSG_SIZE; import static org.myrobotlab.arduino.Msg.MRLCOMM_VERSION; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; import java.util.Date; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import org.myrobotlab.arduino.ArduinoUtils; import org.myrobotlab.arduino.BoardInfo; import org.myrobotlab.arduino.BoardType; import org.myrobotlab.arduino.DeviceSummary; import org.myrobotlab.arduino.Msg; import org.myrobotlab.framework.ServiceType; import org.myrobotlab.framework.interfaces.Attachable; import org.myrobotlab.framework.interfaces.NameProvider; import org.myrobotlab.i2c.I2CBus; import org.myrobotlab.image.Util; import org.myrobotlab.io.FileIO; import org.myrobotlab.io.Zip; import org.myrobotlab.logging.Level; import org.myrobotlab.logging.LoggerFactory; import org.myrobotlab.logging.Logging; import org.myrobotlab.logging.LoggingFactory; import org.myrobotlab.math.interfaces.Mapper; import org.myrobotlab.math.MapperLinear; import org.myrobotlab.sensor.EncoderData; import org.myrobotlab.service.abstracts.AbstractMicrocontroller; import org.myrobotlab.service.data.DeviceMapping; import org.myrobotlab.service.data.PinData; import org.myrobotlab.service.data.SerialRelayData; import org.myrobotlab.service.interfaces.EncoderControl; import org.myrobotlab.service.interfaces.EncoderController; import org.myrobotlab.service.interfaces.I2CBusControl; import org.myrobotlab.service.interfaces.I2CBusController; import org.myrobotlab.service.interfaces.I2CControl; import org.myrobotlab.service.interfaces.I2CController; import org.myrobotlab.service.interfaces.MotorControl; import 
org.myrobotlab.service.interfaces.MotorController; import org.myrobotlab.service.interfaces.MrlCommPublisher; import org.myrobotlab.service.interfaces.NeoPixelController; import org.myrobotlab.service.interfaces.PinArrayListener; import org.myrobotlab.service.interfaces.PinArrayPublisher; import org.myrobotlab.service.interfaces.PinDefinition; import org.myrobotlab.service.interfaces.PinListener; import org.myrobotlab.service.interfaces.PortConnector; import org.myrobotlab.service.interfaces.PortListener; import org.myrobotlab.service.interfaces.PortPublisher; import org.myrobotlab.service.interfaces.RecordControl; import org.myrobotlab.service.interfaces.SerialDataListener; import org.myrobotlab.service.interfaces.ServoControl; import org.myrobotlab.service.interfaces.ServoController; import org.myrobotlab.service.interfaces.UltrasonicSensorControl; import org.myrobotlab.service.interfaces.UltrasonicSensorController; import org.slf4j.Logger; public class Arduino extends AbstractMicrocontroller implements I2CBusController, I2CController, SerialDataListener, ServoController, MotorController, NeoPixelController, UltrasonicSensorController, PortConnector, RecordControl, /* SerialRelayListener, */PortListener, PortPublisher, EncoderController, PinArrayPublisher, MrlCommPublisher { transient public final static Logger log = LoggerFactory.getLogger(Arduino.class); public static class I2CDeviceMap { public String busAddress; public transient I2CControl control; public String deviceAddress; } public static class Sketch implements Serializable { private static final long serialVersionUID = 1L; public String data; public String name; public Sketch(String name, String data) { this.name = name; this.data = data; } } public static final int ANALOG = 1; public transient static final int BOARD_TYPE_ID_ADK_MEGA = 3; public transient static final int BOARD_TYPE_ID_MEGA = 1; public transient static final int BOARD_TYPE_ID_NANO = 4; public transient static final int 
BOARD_TYPE_ID_PRO_MINI = 5; public transient static final int BOARD_TYPE_ID_UNKNOWN = 0; public transient static final int BOARD_TYPE_ID_UNO = 2; public transient static final String BOARD_TYPE_MEGA = "mega.atmega2560"; public transient static final String BOARD_TYPE_MEGA_ADK = "megaADK"; public transient static final String BOARD_TYPE_NANO = "nano"; public transient static final String BOARD_TYPE_PRO_MINI = "pro mini"; public transient static final String BOARD_TYPE_UNO = "uno"; public static final int DIGITAL = 0; public static final int INPUT = 0x0; public static final int MOTOR_BACKWARD = 0; public static final int MOTOR_FORWARD = 1; public static final int MOTOR_TYPE_DUAL_PWM = 2; public static final int MOTOR_TYPE_SIMPLE = 1; public static final int MRL_IO_NOT_DEFINED = 0; public static final int MRL_IO_SERIAL_0 = 1; public static final int MRL_IO_SERIAL_1 = 2; public static final int MRL_IO_SERIAL_2 = 3; public static final int MRL_IO_SERIAL_3 = 4; public static final int OUTPUT = 0x1; private static final long serialVersionUID = 1L; /** * This static method returns all the details of the class without it having * to be constructed. It has description, categories, dependencies, and peer * definitions. 
* * @return ServiceType - returns all the data * */ static public ServiceType getMetaData() { ServiceType meta = new ServiceType(Arduino.class.getCanonicalName()); meta.addDescription("controls an Arduino microcontroller as a slave, which allows control of all the devices the Arduino is attached to, such as servos, motors and sensors"); meta.addCategory("microcontroller"); meta.addPeer("serial", "Serial", "serial device for this Arduino"); return meta; } /** * path of the Arduino IDE must be set by user should not be static - since * gson will not serialize it, and it won't be 'saved()' */ public String arduinoPath; String aref; @Deprecated /* * should be just another attachable - this is a bad * implementation */ transient Map<Integer, Arduino> attachedController = new ConcurrentHashMap<Integer, Arduino>(); /** * board info "from" MrlComm - which can be different from what the user say's * it is - if there is a difference the "user" should be notified - but not * forced to use the mrlBoardInfo. 
*/ volatile BoardInfo boardInfo = null; volatile BoardInfo lastBoardInfo = null; boolean boardInfoEnabled = true; private long boardInfoRequestTs; @Deprecated /* * should develop a MrlSerial on Arduinos and * Arduino.getSerial("s1") */ public transient int controllerAttachAs = MRL_IO_NOT_DEFINED; /** * id reference of sensor, key is the MrlComm device id */ transient Map<Integer, DeviceMapping> deviceIndex = new ConcurrentHashMap<Integer, DeviceMapping>(); /** * Devices - string name index of device we need 2 indexes for sensors because * they will be referenced by name OR by index */ transient Map<String, DeviceMapping> deviceList = new ConcurrentHashMap<String, DeviceMapping>(); I2CBus i2cBus = null; volatile byte[] i2cData = new byte[64]; /** * i2c This needs to be volatile because it will be updated in a different * threads */ volatile boolean i2cDataReturned = false; volatile int i2cDataSize; Map<String, I2CDeviceMap> i2cDevices = new ConcurrentHashMap<String, I2CDeviceMap>(); transient int[] ioCmd = new int[MAX_MSG_SIZE]; @Deprecated /* * use attachables like everything else - power mapping should be * inside the motorcontrol */ transient Mapper motorPowerMapper = new MapperLinear(-1.0, 1.0, -255.0, 255.0); public transient Msg msg; Integer nextDeviceId = 0; /** * Serial service - the Arduino's serial connection */ transient Serial serial; /** * MrlComm sketch */ public Sketch sketch; public String uploadSketchResult = ""; transient private VirtualArduino virtual; int mrlCommBegin = 0; long syncStartTypeTs = System.currentTimeMillis(); public Arduino(String n, String id) { super(n, id); // config - if saved is loaded - if not default to uno if (board == null) { board = "uno"; } // board is set // now we can create a pin list getPinList(); // get list of board types getBoardTypes(); // FIXME - load from unzipped resource directory ? 
- no more jar access like // below String mrlcomm = FileIO.resourceToString("Arduino/MrlComm/MrlComm.ino"); setSketch(new Sketch("MrlComm", mrlcomm)); // add self as an attached device // to handle pin events attachDevice(this, (Object[]) null); } // > analogWrite/address/value public void analogWrite(int address, int value) { log.info("analogWrite({},{})", address, value); msg.analogWrite(address, value); } public void analogWrite(String pin, Integer value) { PinDefinition pinDef = getPin(pin); analogWrite(pinDef.getAddress(), value); } DeviceSummary[] arrayToDeviceSummary(int[] deviceSummary) { log.debug("mds - {}", Arrays.toString(deviceSummary)); DeviceSummary[] ds = new DeviceSummary[deviceSummary.length]; for (int i = 0; i < deviceSummary.length; i++) { int id = deviceSummary[i]; DeviceSummary ds0 = new DeviceSummary(getDeviceName(id), id); ds[i] = ds0; } // log.error("ds - {}", Arrays.toString(ds)); return ds; } /** * Routing Attach - routes ServiceInterface.attach(service) to appropriate * methods for this class */ @Override public void attach(Attachable service) throws Exception { if (ServoControl.class.isAssignableFrom(service.getClass())) { attachServoControl((ServoControl) service); ((ServoControl) service).attach(this); return; } else if (MotorControl.class.isAssignableFrom(service.getClass())) { attachMotorControl((MotorControl) service); return; } else if (EncoderControl.class.isAssignableFrom(service.getClass())) { // need to determine the encoder type! 
attach((EncoderControl) service); return; } error("%s doesn't know how to attach a %s", getClass().getSimpleName(), service.getClass().getSimpleName()); } @Override public void attach(ServoControl servo, int pin) throws Exception { servo.setPin(pin); attachServoControl(servo); } /** * String interface - this allows you to easily use url api requests like * /attach/nameOfListener/3 */ public void attach(String listener, int address) { attach((PinListener) Runtime.getService(listener), address); } @Override public void attach(UltrasonicSensorControl sensor, Integer triggerPin, Integer echoPin) throws Exception { // refer to // http://myrobotlab.org/content/control-controller-manifesto if (isAttached(sensor)) { log.info("{} already attached", sensor.getName()); return; } // critical init code DeviceMapping dm = attachDevice(sensor, new Object[] { triggerPin, echoPin }); Integer deviceId = dm.getId(); msg.ultrasonicSensorAttach(deviceId, triggerPin, echoPin); // call the other service's attach sensor.attach(this, triggerPin, echoPin); } synchronized private DeviceMapping attachDevice(Attachable device, Object[] attachConfig) { DeviceMapping map = new DeviceMapping(device, attachConfig); map.setId(nextDeviceId); log.info("DEVICE LIST PUT ------ Name: {} Class: {} Map: {}", device.getName(), device.getClass().getSimpleName(), map); deviceList.put(device.getName(), map); deviceIndex.put(nextDeviceId, map); ++nextDeviceId; // return map.getId(); return map; } /** * Attach an encoder to the arduino * * @param encoder * - the encoder control to attach * @throws Exception */ @Override public void attach(EncoderControl encoder) throws Exception { Integer deviceId = null; // send data to micro-controller // TODO: update this with some enum of various encoder types.. // for now it's just AMT203 ... 
int type = 0; Integer address = null; if (encoder instanceof Amt203Encoder) { type = 0; address = getAddress(((Amt203Encoder) encoder).getPin()); } else if (encoder instanceof As5048AEncoder) { type = 1; address = getAddress(((As5048AEncoder) encoder).getPin()); } else { error("unknown encoder type {}", encoder.getClass().getName()); } attachDevice(encoder, new Object[] { address }); // FIXME - don't know why // this is necessary - // Attachable is only // needed msg.encoderAttach(deviceId, type, address); encoder.attach(this); } @Override public void attachI2CControl(I2CControl control) { // Create the i2c bus device in MrlComm the first time this method is // invoked. // Add the i2c device to the list of i2cDevices // Pattern: deviceAttach(device, Object... config) // To add the i2c bus to the deviceList I need an device that represents // the i2c bus here and in MrlComm // This will only handle the creation of i2cBus. if (i2cBus == null) { i2cBus = new I2CBus(String.format("I2CBus%s", control.getDeviceBus())); i2cBusAttach(i2cBus, Integer.parseInt(control.getDeviceBus())); } // This part adds the service to the mapping between // busAddress||DeviceAddress // and the service name to be able to send data back to the invoker String key = String.format("%s.%s", control.getDeviceBus(), control.getDeviceAddress()); I2CDeviceMap devicedata = new I2CDeviceMap(); if (i2cDevices.containsKey(key)) { log.error("Device {} {} {} already exists.", control.getDeviceBus(), control.getDeviceAddress(), control.getName()); } else { devicedata.busAddress = control.getDeviceBus(); devicedata.deviceAddress = control.getDeviceAddress(); devicedata.control = control; i2cDevices.put(key, devicedata); control.attachI2CController(this); } } // @Override public void attachMotorControl(MotorControl motor) throws Exception { if (isAttached(motor)) { log.info("motor {} already attached", motor.getName()); return; } Integer motorType = null; int[] pins = null; if 
(motor.getClass().equals(Motor.class)) { motorType = MOTOR_TYPE_SIMPLE; Motor m = (Motor) motor; pins = new int[] { getAddress(m.getPwrPin()), getAddress(m.getDirPin()) }; } else if (motor.getClass().equals(MotorDualPwm.class)) { motorType = MOTOR_TYPE_DUAL_PWM; MotorDualPwm m = (MotorDualPwm) motor; pins = new int[] { getAddress(m.getLeftPwmPin()), getAddress(m.getRightPwmPin()) }; // } else if (motor.getClass().equals(MotorStepper)){ // FIXME implement } else { throw new IOException(String.format("do not know how to attach Motor type %s", motor.getClass().getSimpleName())); } // this saves original "attach" configuration - and maintains internal // data // structures // and does DeviceControl.attach(this) DeviceMapping dm = attachDevice(motor, new Object[] { motorType, pins }); Integer deviceId = dm.getId(); // send data to micro-controller - convert degrees to microseconds // int uS = degreeToMicroseconds(targetOutput); msg.motorAttach(deviceId, motorType, pins); // the callback - motor better have a check // isAttached(MotorControl) to prevent infinite loop // motor.attach(this, pin, targetOutput, velocity); motor.attachMotorController(this); } @Override public void attachServoControl(ServoControl servo) { if (isAttached(servo)) { log.info("servo {} already attached", servo.getName()); return; } int pin = getAddress(servo.getPin()); // targetOutput is ALWAYS ALWAYS degrees double targetOutput = servo.getTargetOutput(); double speed = (servo.getSpeed() == null) ? 
-1 : servo.getSpeed(); // add a device to our deviceList DeviceMapping dm = attachDevice(servo, new Object[] { pin, targetOutput, speed }); if (isConnected()) { int uS = degreeToMicroseconds(servo.getTargetOutput()); msg.servoAttach(dm.getId(), pin, uS, (int) speed, servo.getName()); if (servo.isEnabled()) { msg.servoAttachPin(dm.getId(), pin); } } servo.attach(this); } /** * * @param dm */ public void reattach(DeviceMapping dm) { Attachable attachable = dm.getDevice(); if (attachable instanceof Servo) { Servo servo = (Servo) attachable; int uS = degreeToMicroseconds(servo.getTargetOutput()); double speed = (servo.getSpeed() == null) ? -1 : servo.getSpeed(); int pin = getAddress(servo.getPin()); log.info("================ re-attaching {} {} {} ================", servo.getName(), dm.getId(), pin); msg.servoAttach(dm.getId(), pin, uS, (int) speed, servo.getName()); // if (servo.isEnabled()) { msg.servoAttachPin(dm.getId(), pin); } } } public void connect(String port) { connect(port, Serial.BAUD_115200, 8, 1, 0); } @Override public void connect(String port, int rate) throws Exception { connect(port, rate, 8, 1, 0); } public VirtualArduino getVirtual() { return virtual; } /** * default params to connect to Arduino &amp; MrlComm.ino FIXME - remove the * parameters except rate as they are not allowed to change with MRLComm */ @Override public void connect(String port, int rate, int databits, int stopbits, int parity) { // test to see if we've been started. 
the serial might be null try { initSerial(); if (isConnected() && port.equals(serial.getPortName())) { log.info("already connected to port {}", port); return; } if (isVirtual()) { if (virtual == null) { virtual = (VirtualArduino) Runtime.start("v" + getName(), "VirtualArduino"); } virtual.connect(port); } serial.connect(port, rate, databits, stopbits, parity); // most likely on a real board this send will never get to // mrlcomm - because the board is not ready - but it doesnt hurt // and in fact it helps VirtualArduino - since we currently do not // have a DTR CDR line in the virtual port as use this as a signal // of // connection // by default ack'ing is now on.. // but with this first msg there is no msg before it, // and there is a high probability that the board is not really // ready // and this msg along with the ack will be ignored // so we turn off ack'ing locally // TODO - can we re-enable acks ? msg.enableAcks(true); long startBoardRequestTs = System.currentTimeMillis(); // start the heartbeat enableBoardInfo(boardInfoEnabled); log.info("waiting for boardInfo .........."); // long waitTime = System.currentTimeMillis(); // while ts < startedRequest && < 4.5 sec wait 30 try again // if timeout report error while ((boardInfo == null || boardInfo.getReceiveTs() < startBoardRequestTs) && System.currentTimeMillis() - startBoardRequestTs < 4500) { sleep(30); } log.info("waited {} ms for Arduino {} to say hello", System.currentTimeMillis() - startBoardRequestTs, getName()); // we might be connected now // see what our version is like... if (boardInfo != null) { Integer version = boardInfo.getVersion(); if (version == null) { error("%s did not get response from arduino....", serial.getPortName()); } else if (!version.equals(MRLCOMM_VERSION)) { error("MrlComm.ino responded with version %s expected version is %s", version, MRLCOMM_VERSION); } else { info("%s connected on %s responded version %s ... 
goodtimes...", serial.getName(), serial.getPortName(), version); } } else { log.error("board info is null ! - has MrlComm.ino been loaded ?"); } } catch (Exception e) { log.error("serial open threw", e); error(e.getMessage()); } broadcastState(); } /** * sync our device list with mrlcomm */ public void sync() { long now = System.currentTimeMillis(); if (now - syncStartTypeTs < 5000) { log.error("===== we are in the middle of synching ... ==== talk to us in {} ms", 5000 - (now - syncStartTypeTs)); return; } syncStartTypeTs = System.currentTimeMillis(); log.warn("================================ sync !!! =============================="); try { for (DeviceMapping device : deviceList.values()) { reattach(device); } List<PinDefinition> list = getPinList(); for (PinDefinition pindef : list) { if (pindef.isEnabled()) { enablePin(pindef.getPinName()); } } } catch (Exception e) { log.error("sync threw", e); } } // > customMsg/[] msg public void customMsg(int... params) { msg.customMsg(params); } // @Override // > deviceDetach/deviceId public void detach(Attachable device) { log.info("{} detaching {}", getName(), device.getName()); // if this service doesn't think its attached, we are done if (!isAttached(device)) { log.info("device {} not attached", device.getName()); return; } // Servo requirements if (device instanceof ServoControl && device.isAttached(this)) { // if the other service thinks its attached - give it a chance to detach // this is important for Servo - because servo will want to disable() // before // detaching - and it needs the controller to do so... 
device.detach(this);
    }

    log.info("detaching device {}", device.getName());
    Integer id = getDeviceId(device);
    if (id != null) {
      msg.deviceDetach(id);
      deviceIndex.remove(id);
    }
    deviceList.remove(device.getName());
  }

  @Override
  public void detach(String controllerName) {
    detach(Runtime.getService(controllerName));
  }

  @Override
  public void detachI2CControl(I2CControl control) {
    // This method should delete the i2c device entry from the list of
    // I2CDevices
    // The order of the detach is important because the higher level service may
    // want to execute something that
    // needs this service to still be availabe
    // FIX: attachI2CControl() indexes i2cDevices by "bus.address", not by the
    // service name, so the original containsKey(control.getName()) lookup
    // never matched and the device entry was never removed.
    String key = String.format("%s.%s", control.getDeviceBus(), control.getDeviceAddress());
    if (i2cDevices.containsKey(key)) {
      i2cDevices.remove(key);
      control.detachI2CController(this);
    }
  }

  public void detachI2CControls() {
    for (Map.Entry<String, I2CDeviceMap> i2cDevice : i2cDevices.entrySet()) {
      I2CControl i2cControl = i2cDevice.getValue().control;
      i2cControl.detach(this);
    }
  }

  /**
   * silly Arduino implementation - but keeping it since its familiar
   * digitalWrite/pin/value
   */
  public void digitalWrite(int address, int value) {
    log.info("digitalWrite {} {}", address, value);
    msg.digitalWrite(address, value);
  }

  public void digitalWrite(String pin, int value) {
    PinDefinition pinDef = getPin(pin);
    digitalWrite(pinDef.getAddress(), value);
  }

  /**
   * Resolves a pin name (e.g. "D3", "A0") to its numeric address; falls back
   * to parsing the string itself as a number, returning null if neither works.
   */
  @Override
  public Integer getAddress(String pin) {
    PinDefinition pinDef = getPin(pin);
    if (pinDef != null) {
      return pinDef.getAddress();
    }
    try {
      return Integer.parseInt(pin);
    } catch (Exception e) {
      // not a known pin name and not numeric - fall through to null
    }
    return null;
  }

  /**
   * disablePin/address
   */
  @Override
  public void disablePin(int address) {
    PinDefinition pinDef = getPin(address);
    pinDef.setEnabled(false);
    msg.disablePin(address);
  }

  /**
   * disablePin/address
   */
  @Override
  public void disablePin(String pinName) {
    PinDefinition pinDef = getPin(pinName);
    pinDef.setEnabled(false);
    msg.disablePin(pinDef.getAddress());
  }

  /**
   * disable all pins
   */
  public void disablePins() {
    msg.disablePins();
  }

  public void
disconnect() { // FIXED - all don in 'onDisconnect()' // enableBoardInfo(false); // boardInfo is not valid after disconnect // because we might be connecting to a different Arduino // boardInfo.reset(); for (Arduino controller : attachedController.values()) { controller.disconnect(); } attachedController.clear(); if (controllerAttachAs != MRL_IO_NOT_DEFINED) { controllerAttachAs = MRL_IO_NOT_DEFINED; serial = (Serial) createPeer("serial"); } else { if (serial != null) { serial.disconnect(); } } broadcastState(); } public void echo(float myFloat, int myByte, float secondFloat) { msg.echo(myFloat, myByte, secondFloat); } // > enableAck/bool enabled public void enableAck(boolean enabled) { msg.enableAcks(enabled); } transient BoardInfoPoller poller = new BoardInfoPoller(); public class BoardInfoPoller implements Runnable { boolean running = false; Thread thread = null; public void run() { try { running = true; while (running) { sendBoardInfoRequest(); sleep(1000); } } catch (Exception e) { log.info("board info stopping {}", e.getMessage()); } thread = null; running = false; } public void start() { if (thread == null) { thread = new Thread(this, "boardInfoPoller"); thread.start(); } } public void stop() { if (thread != null) { thread.interrupt(); } } } // TODO - remove // MrlComm now constantantly sends a stream of BoardInfo // > enableBoardInfo/bool enabled - no point to this public void enableBoardInfo(Boolean enabled) { /* * if (enabled) { poller.start(); } else { poller.stop(); } */ boardInfoEnabled = enabled; } @Override public void enablePin(int address) { enablePin(address, 0); } // > enablePin/address/type/b16 rate public void enablePin(int address, int rate) { PinDefinition pinDef = getPin(address); msg.enablePin(address, getMrlPinType(pinDef), rate); pinDef.setEnabled(true); invoke("publishPinDefinition", pinDef); // broadcast pin change } /** * start polling reads of selected pin enablePin/address/type/b16 rate */ public void enablePin(String pin, int rate) 
{ if (!isConnected()) { error("must be connected to enable pins"); return; } PinDefinition pinDef = getPin(pin); enablePin(pinDef.getAddress(), rate); } public String getArduinoPath() { return arduinoPath; } public String getAref() { return aref; } @Override public Set<String> getAttached() { return deviceList.keySet(); } public int getAttachedCount() { return deviceList.size(); } /** * Heart-beat method on time, driven by the Arduino service to get information * from the board its currently connected. This is the "last" boardInfo * returned from the task inserted with addTask("getBoardInfo", 1000, 0, * "sendBoardInfoRequest"); * * getBoardInfo */ public BoardInfo getBoardInfo() { return boardInfo; } @Override // override to get Arduino board types public List<BoardType> getBoardTypes() { List<BoardType> boardTypes = new ArrayList<BoardType>(); try { String b = FileIO.resourceToString("Arduino" + File.separator + "boards.txt"); Properties boardProps = new Properties(); boardProps.load(new ByteArrayInputStream(b.getBytes())); Enumeration<?> e = boardProps.propertyNames(); Set<String> distinct = new TreeSet<String>(); Set<String> hasProcessorTypes = new TreeSet<String>(); while (e.hasMoreElements()) { String keyLine = (String) e.nextElement(); String[] parts = keyLine.split("\\."); String key = parts[0]; if (key.startsWith("menu")) { continue; } if (keyLine.contains("menu.cpu")) { hasProcessorTypes.add(key); // split - remove previous if (distinct.contains(key)) { distinct.remove(key); } // for diecimila.atmega328 try { key = parts[0] + "." 
+ parts[3]; } catch (Exception e2) { log.error("board.txt is weird", e2); } distinct.add(key); } else if (!hasProcessorTypes.contains(key)) { distinct.add(key); } } for (String longKey : distinct) { String[] parts = longKey.split("\\."); String key = parts[0]; String processorType = null; if (parts.length > 1) { processorType = parts[1]; } BoardType boardType = new BoardType(); if (processorType != null) { boardType.setName(boardProps.getProperty(String.format("%s.name", key)) + " - " + processorType); } else { boardType.setName(boardProps.getProperty(String.format("%s.name", key))); } boardType.setBoard(longKey); boardType.setId(longKey.hashCode()); boardTypes.add(boardType); } } catch (Exception e) { log.error("getBoards threw", e); } return boardTypes; } @Override public org.myrobotlab.math.interfaces.Mapper getDefaultMapper() { // best guess :P MapperLinear mapper = new MapperLinear(); mapper.map(-1.0, 1.0, 0.0, 255.0); return mapper; } public Attachable getDevice(Integer deviceId) { DeviceMapping dm = deviceIndex.get(deviceId); if (dm == null) { log.error("no device with deviceId {}", deviceId); return null; } return dm.getDevice(); } Integer getDeviceId(NameProvider device) { return getDeviceId(device.getName()); } Integer getDeviceId(String name) { if (deviceList.containsKey(name)) { Integer id = deviceList.get(name).getId(); if (id == null) { error("cannot get device id for %s - device attempetd to attach - but I suspect something went wrong", name); } return id; } log.error("getDeviceId could not find device {}", name); return null; } private String getDeviceName(int deviceId) { if (getDevice(deviceId) == null) { log.error("getDeviceName({}) is null", deviceId); return null; } return getDevice(deviceId).getName(); } /** * int type to describe the pin defintion to Pin.h 0 digital 1 analog * */ public Integer getMrlPinType(PinDefinition pin) { if (board == null) { error("must have pin board type to determin pin definition"); return null; } if (pin == null) { 
log.error("pin definition null"); return null; } if (pin.isAnalog()) { return 1; } return 0; } /** * FIXME - have local This creates the pin definitions based on boardType Not * sure how many pin definition sets there are. Currently there are only 2 * supported - Mega-Like 70 pins &amp; Uno-Like 20 pins (14 digital 6 analog) * FIXME - sync with VirtualArduino FIXME - String boardType */ @Override // override for arduino to get pin list public List<PinDefinition> getPinList() { // 2 board types have been identified (perhaps this is based on // processor?) // mega-like & uno like // if no change - just return the values if ((pinIndex != null && board.contains("mega") && pinIndex.size() == 70) || (pinIndex != null && !board.contains("mega") && pinIndex.size() == 20)) { return new ArrayList<PinDefinition>(pinIndex.values()); } // create 2 indexes for fast retrieval // based on "name" or "address" pinMap.clear(); pinIndex.clear(); List<PinDefinition> pinList = new ArrayList<PinDefinition>(); if (board.contains("mega")) { for (int i = 0; i < 70; ++i) { PinDefinition pindef = new PinDefinition(getName(), i); // begin wacky pin def logic String pinName = null; if (i == 0) { pindef.setRx(true); } if (i == 1) { pindef.setTx(true); } if (i < 1 || (i > 13 && i < 54)) { pinName = String.format("D%d", i); pindef.setDigital(true); } else if (i > 53) { pinName = String.format("A%d", i - 54); pindef.setAnalog(true); pindef.setDigital(true); pindef.canWrite(true); } else { pinName = String.format("D%d", i); pindef.setPwm(true); } pindef.setPinName(pinName); pindef.setAddress(i); pinMap.put(pinName, pindef); pinIndex.put(pindef.getAddress(), pindef); pinList.add(pindef); } } else { for (int i = 0; i < 20; ++i) { PinDefinition pindef = new PinDefinition(getName(), i); String pinName = null; if (i == 0) { pindef.setRx(true); } if (i == 1) { pindef.setTx(true); } if (i < 14) { pinName = String.format("D%d", i); pindef.setDigital(true); } else { pindef.setAnalog(true); 
pindef.canWrite(false); pindef.setDigital(false); pinName = String.format("A%d", i - 14); } if (i == 3 || i == 5 || i == 6 || i == 9 || i == 10 || i == 11) { pindef.setPwm(true); pinName = String.format("D%d", i); } pindef.setPinName(pinName); pindef.setAddress(i); pinMap.put(pinName, pindef); pinIndex.put(pindef.getAddress(), pindef); pinList.add(pindef); } // FIXME - nano pico other ??? if (board.contains("nano")) { /* * int i = 20; pinName = String.format("A%d", i - 14); PinDefinition * pindef = new PinDefinition(getName(), i); pindef.setDigital(false); * pindef.setPwm(false); pindef.setAnalog(true); pindef.canWrite(false); * pinIndex.put(i, pindef); pinMap.put(pinName, pindef); */ } } return pinList; } public String getPortName() { return serial.getPortName(); } @Override public List<String> getPortNames() { if (serial != null) { return serial.getPortNames(); } return new ArrayList<String>(); } @Override public List<String> getPorts() { // we use pins not ports List<String> ret = new ArrayList<String>(); return ret; } /* * Use the serial service for serial activities ! 
No reason to replicate * methods */ public Serial getSerial() { return serial; } public Sketch getSketch() { return sketch; } /** * Internal Arduino method to create an i2cBus object in MrlComm that is * shared between all i2c devices * * @param control * @param busAddress */ // > i2cBusAttach/deviceId/i2cBus private void i2cBusAttach(I2CBusControl control, int busAddress) { DeviceMapping dm = attachDevice(i2cBus, new Object[] { busAddress }); Integer deviceId = dm.getId(); msg.i2cBusAttach(deviceId, busAddress); } @Override // > i2cRead/deviceId/deviceAddress/size public int i2cRead(I2CControl control, int busAddress, int deviceAddress, byte[] buffer, int size) { i2cDataReturned = false; // Get the device index to the MRL i2c bus String i2cBus = String.format("I2CBus%s", busAddress); int deviceId = getDeviceId(i2cBus); log.info("i2cRead requesting {} bytes", size); msg.i2cRead(deviceId, deviceAddress, size); int retry = 0; int retryMax = 1000; // ( About 1000ms = s) try { /** * We will wait up to retryMax times to get the i2c data back from * MrlComm.c and wait 1 ms between each try. A blocking queue is not * needed, as this is only a single data element - and blocking is not * necessary. 
*/ while ((retry < retryMax) && (!i2cDataReturned)) { sleep(1); ++retry; } } catch (Exception e) { Logging.logError(e); } if (i2cDataReturned) { log.debug("i2cReturnData returned {} bytes to caller {}.", i2cDataSize, control.getName()); for (int i = 0; i < i2cDataSize; i++) { buffer[i] = i2cData[i]; log.debug("i2cReturnData returned ix {} value {}", i, buffer[i]); } return i2cDataSize; } // Time out, no data returned return -1; } /** * This methods is called by the i2cBus object when data is returned from the * i2cRead It populates the i2cData area and sets the i2cDataReturned flag to * true so that the loop in i2cRead can return the data to the caller * */ @Override public void i2cReturnData(int[] rawData) { i2cDataSize = rawData.length; for (int i = 0; i < i2cDataSize; i++) { i2cData[i] = (byte) (rawData[i] & 0xff); } log.debug("i2cReturnData invoked. i2cDataSize = {}", i2cDataSize); i2cDataReturned = true; } @Override // > i2cWrite/deviceId/deviceAddress/[] data public void i2cWrite(I2CControl control, int busAddress, int deviceAddress, byte[] buffer, int size) { String i2cBus = String.format("I2CBus%s", busAddress); int deviceId = getDeviceId(i2cBus); int data[] = new int[size]; for (int i = 0; i < size; ++i) { data[i] = buffer[i];// guess you want -128 to 127 ?? 
[ ] == unsigned // char & 0xff; } msg.i2cWrite(deviceId, deviceAddress, data); } @Override // > i2cWriteRead/deviceId/deviceAddress/readSize/writeValue public int i2cWriteRead(I2CControl control, int busAddress, int deviceAddress, byte[] writeBuffer, int writeSize, byte[] readBuffer, int readSize) { if (writeSize != 1) { i2cWrite(control, busAddress, deviceAddress, writeBuffer, writeSize); return i2cRead(control, busAddress, deviceAddress, readBuffer, readSize); } else { i2cDataReturned = false; // Get the device index to the MRL i2c bus String i2cBus = String.format("I2CBus%s", busAddress); int deviceId = getDeviceId(i2cBus); int msgBuffer[] = new int[4]; msgBuffer[0] = deviceId; msgBuffer[1] = deviceAddress; msgBuffer[2] = readSize; msgBuffer[3] = writeBuffer[0]; msg.i2cWriteRead(deviceId, deviceAddress, readSize, writeBuffer[0] & 0xFF); int retry = 0; int retryMax = 1000; // ( About 1000ms = s) try { /** * We will wait up to retryMax times to get the i2c data back from * MrlComm.c and wait 1 ms between each try. A blocking queue is not * needed, as this is only a single data element - and blocking is not * necessary. 
*/ while ((retry < retryMax) && (!i2cDataReturned)) { sleep(1); ++retry; } } catch (Exception e) { Logging.logError(e); } if (i2cDataReturned) { log.debug("i2cReturnData returned %s bytes to caller {}.", i2cDataSize, control.getName()); for (int i = 0; i < i2cDataSize; i++) { readBuffer[i] = i2cData[i]; log.debug("i2cReturnData returned ix {} value {}", i, readBuffer[i]); } return i2cDataSize; } // Time out, no data returned return -1; } } private void initSerial() { if (msg == null) { serial = (Serial) startPeer("serial"); msg = new Msg(this, serial); serial.addByteListener(this); } else { log.warn("Init serial called and we already have a msg class!"); } } @Override public boolean isAttached(Attachable device) { return deviceList.containsKey(device.getName()); } @Override public boolean isAttached(String name) { return deviceList.containsKey(name); } @Override public boolean isConnected() { // include that we must have gotten a valid MrlComm version number. if (serial != null && serial.isConnected() && boardInfo != null && boardInfo.getVersion() != null) { return true; } // just to force serial arduino conected if it is a serialX com // usefull to enable pin on the remote arduino // @Deprecated FIXME - this is "bad" if ((controllerAttachAs == MRL_IO_SERIAL_1 || controllerAttachAs == MRL_IO_SERIAL_2 || controllerAttachAs == MRL_IO_SERIAL_3) && boardInfo.getVersion() == MRLCOMM_VERSION) { return true; } return false; } // FIXME put recording in generated message structure !!! 
  /** True when the underlying Msg codec is currently recording traffic. */
  @Override
  public boolean isRecording() {
    return msg.isRecording();
  }

  // not used currently - should be refactored to use these methods for motor
  // control
  @Override
  public double motorCalcOutput(MotorControl mc) {
    double value = mc.calcControllerOutput();
    return value;
  }

  /**
   * Drives a motor at its current power level. Power is mapped through
   * motorPowerMapper (-1..1 to -255..255); sign selects direction, magnitude
   * selects PWM duty. Supports Motor (dir pin + pwr pin) and MotorDualPwm
   * (one PWM pin per direction).
   */
  @Override
  public void motorMove(MotorControl mc) {
    Class<?> type = mc.getClass();
    double powerOutput = motorPowerMapper.calcOutput(mc.getPowerLevel());
    // log.info(mc.getPowerLevel()+" "+powerOutput);
    if (Motor.class == type) {
      Motor config = (Motor) mc;
      msg.digitalWrite(getAddress(config.getDirPin()), (powerOutput < 0) ? MOTOR_BACKWARD : MOTOR_FORWARD);
      msg.analogWrite(getAddress(config.getPwrPin()), (int) Math.abs(powerOutput));
    } else if (MotorDualPwm.class == type) {
      MotorDualPwm config = (MotorDualPwm) mc;
      if (powerOutput < 0) {
        msg.analogWrite(getAddress(config.getLeftPwmPin()), 0);
        msg.analogWrite(getAddress(config.getRightPwmPin()), (int) Math.abs(powerOutput));
      } else if (powerOutput > 0) {
        msg.analogWrite(getAddress(config.getRightPwmPin()), 0);
        msg.analogWrite(getAddress(config.getLeftPwmPin()), (int) Math.abs(powerOutput));
      } else {
        // zero power - stop both PWM channels
        msg.analogWrite(getAddress(config.getLeftPwmPin()), 0);
        msg.analogWrite(getAddress(config.getRightPwmPin()), 0);
      }
    } else {
      error("motorMove for motor type %s not supported", type);
    }
  }

  // FIXME - clean or remove ...
  // ========== pulsePin begin =============
  // FIXME - MasterBlaster had a pulse motor which could support MoveTo
  // We need a Motor + encoder (analog or digital) DiyServo does this...
  @Override
  public void motorMoveTo(MotorControl mc) {
    // speed parameter?
    // modulo - if < 1
    // speed = 1 else
    log.info("motorMoveTo targetPos {} powerLevel {}", mc.getTargetPos(), mc.getPowerLevel());
    Class<?> type = mc.getClass();
    // if pulser (with or without fake encoder
    // send a series of pulses !
    // with current direction
    if (Motor.class == type) {
      Motor motor = (Motor) mc;
      // check motor direction
      // send motor direction
      // TODO powerLevel = 100 * powerlevel
      // FIXME !!! - this will have to send a Long for targetPos at some
      // point !!!!
      double target = Math.abs(motor.getTargetPos());
      // NOTE(review): b0..b3 are dead code left over from the commented-out
      // sendMsg(PULSE, ...) call below - currently this method has no effect.
      int b0 = (int) target & 0xff;
      int b1 = ((int) target >> 8) & 0xff;
      int b2 = ((int) target >> 16) & 0xff;
      int b3 = ((int) target >> 24) & 0xff;
      // TODO FIXME
      // sendMsg(PULSE, deviceList.get(motor.getName()).id, b3, b2, b1,
      // b0, (int) motor.getPowerLevel(), feedbackRate);
    }
  }

  @Override
  public void motorReset(MotorControl motor) {
    // perhaps this should be in the motor control
    // motor.reset();
    // opportunity to reset variables on the controller
    // sendMsg(MOTOR_RESET, motor.getind);
  }

  /** Stops a motor by zeroing its PWM output(s); direction pins are untouched. */
  @Override
  public void motorStop(MotorControl mc) {
    Class<?> type = mc.getClass();
    if (Motor.class == type) {
      Motor config = (Motor) mc;
      msg.analogWrite(getAddress(config.getPwrPin()), 0);
    } else if (MotorDualPwm.class == type) {
      MotorDualPwm config = (MotorDualPwm) mc;
      msg.analogWrite(getAddress(config.getLeftPwmPin()), 0);
      msg.analogWrite(getAddress(config.getRightPwmPin()), 0);
    }
  }

  @Override
  // > neoPixelAttach/deviceId/pin/b32 numPixels
  public void neoPixelAttach(NeoPixel neopixel, int pin, int numPixels) {
    DeviceMapping dm = attachDevice(neopixel, new Object[] { pin, numPixels });
    // NOTE(review): deviceId is computed but unused - the call below resolves
    // the id again via getDeviceId(neopixel), which yields the same value.
    Integer deviceId = dm.getId();
    msg.neoPixelAttach(getDeviceId(neopixel)/* byte */, pin/* byte */, numPixels/* b32 */);
  }

  @Override
  // > neoPixelSetAnimation/deviceId/animation/red/green/blue/b16 speed
  public void neoPixelSetAnimation(NeoPixel neopixel, int animation, int red, int green, int blue, int speed) {
    msg.neoPixelSetAnimation(getDeviceId(neopixel), animation, red, green, blue, speed);
  }

  /**
   * neoPixelWriteMatrix/deviceId/[] buffer - copies the boxed pixel data into
   * a primitive array before handing it to the Msg codec.
   */
  @Override
  public void neoPixelWriteMatrix(NeoPixel neopixel, List<Integer> data) {
    int[] buffer = new int[data.size()];
    for (int i = 0; i < data.size(); ++i) {
      buffer[i] =
data.get(i); } msg.neoPixelWriteMatrix(getDeviceId(neopixel), buffer); } /** * Callback for Serial service - local (not remote) although a * publish/subscribe could be created - this method is called by a thread * waiting on the Serial's RX BlockingQueue * * Other services may use the same technique or subscribe to a Serial's * publishByte method * * it might be worthwhile to look in optimizing reads into arrays vs single * byte processing .. but maybe there would be no gain * */ public synchronized void onBytes(byte[] bytes) { // log.info("On Bytes called in Arduino. {}", bytes); // These bytes arrived from the serial port data, push them down into the msg parser. // if a full message is detected, the publish(Function) method will be directly called on // this arduino instance. msg.onBytes(bytes); } @Override public synchronized void onConnect(String portName) { // Pass this serial port notification down to the msg parser msg.onConnect(portName); log.info("{} onConnect for port {}", getName(), portName); info("%s connected to %s", getName(), portName); // chained... invoke("publishConnect", portName); } public void onCustomMsg(Integer ax, Integer ay, Integer az) { log.info("onCustomMsg"); } @Override public void onDisconnect(String portName) { msg.onDisconnect(portName); info("%s disconnected from %s", getName(), portName); enableBoardInfo(false); // chained... invoke("publishDisconnect", portName); } public void openMrlComm(String path) { try { if (!setArduinoPath(path)) { return; } String mrlCommFiles = null; if (FileIO.isJar()) { mrlCommFiles = Util.getResourceDir() + "/Arduino/MrlComm"; // FIXME - don't do this every time :P Zip.extractFromSelf(Util.getResourceDir() + File.separator + "Arduino" + File.separator + "MrlComm", "resource/Arduino/MrlComm"); } else { // running in IDE ? 
mrlCommFiles = Util.getResourceDir() + File.separator + "Arduino" + File.separator + "MrlComm"; } File mrlCommDir = new File(mrlCommFiles); if (!mrlCommDir.exists() || !mrlCommDir.isDirectory()) { error("mrlcomm script directory %s is not a valid", mrlCommDir); return; } String exePath = arduinoPath + File.separator + ArduinoUtils.getExeName(); String inoPath = mrlCommDir.getAbsolutePath() + File.separator + "/MrlComm.ino"; List<String> cmd = new ArrayList<String>(); cmd.add(exePath); cmd.add(inoPath); ProcessBuilder builder = new ProcessBuilder(cmd); builder.start(); } catch (Exception e) { error(String.format("%s %s", e.getClass().getSimpleName(), e.getMessage())); log.error("openMrlComm threw", e); } } public String getBase64ZippedMrlComm() { return Base64.getEncoder().encodeToString((getZippedMrlComm())); } public byte[] getZippedMrlComm() { try { // get resource location String filename = getDataDir() + File.separator + "MrlComm.zip"; File f = new File(filename); if (f.exists()) { f.delete(); } // zip resource Zip.zip(new String[] { getResourceDir() + File.separator + "MrlComm" }, filename); // return zip file return FileIO.toByteArray(new File(filename)); } catch (Exception e) { error("could not get zipped mrl comm %s", e); } return null; } @Override /** * // > pinMode/pin/mode */ public void pinMode(int address, String modeStr) { pinMode(address, modeStr.equalsIgnoreCase("INPUT") ? Arduino.INPUT : Arduino.OUTPUT); } public void pinMode(int address, int mode) { msg.pinMode(address, mode); } /** * With Arduino we want to be able to do pinMode("D7", "INPUT"), but it should * not be part of the PinArrayControl interface - because when it comes down * to it .. a pin MUST ALWAYS have an address regardless what you label or * name it... 
* */ public void pinMode(String pin, String mode) { PinDefinition pinDef = getPin(pin); pinMode(pinDef.getAddress(), mode); } // < publishAck/function public void publishAck(Integer function/* byte */) { if (msg.debug) { log.info("{} Message Ack received: =={}==", getName(), Msg.methodToString(function)); } } // < publishBoardInfo/version/boardType/b16 microsPerLoop/b16 sram/[] // deviceSummary public BoardInfo publishBoardInfo(Integer version/* byte */, Integer boardTypeId/* byte */, Integer microsPerLoop/* b16 */, Integer sram/* b16 */, Integer activePins, int[] deviceSummary/* [] */) { String boardTypeName = getBoardType(boardTypeId); boardInfo = new BoardInfo(version, boardTypeId, boardTypeName, microsPerLoop, sram, activePins, arrayToDeviceSummary(deviceSummary), boardInfoRequestTs); boardInfoRequestTs = System.currentTimeMillis(); log.debug("Version return by Arduino: {}", boardInfo.getVersion()); log.debug("Board type currently set: {} => {}", boardTypeId, boardTypeName); if (lastBoardInfo == null || !lastBoardInfo.getBoardTypeName().equals(board)) { log.warn("setting board to type {}", board); this.board = boardInfo.getBoardTypeName(); // we don't invoke, because // it might get into a race condition // in some gui getPinList(); // invoke("getPinList"); broadcastState(); } // TODO: consider, can we really just re-sync when we see begin only.. ? feels better/safer. 
// if (boardInfo != null) { // DeviceSummary[] ds = boardInfo.getDeviceSummary(); // if (deviceList.size() - 1 > ds.length) { /* -1 for self */ // log.info("Invoking Sync DeviceList: {} and DeviceSummary: {}", deviceList, ds); // invoke("sync"); // } // } // we send here - because this is a "command" message, and we don't want the // possibility of // block this "status" msgs lastBoardInfo = boardInfo; return boardInfo; } @Override public String publishConnect(String portName) { return portName; } // < publishCustomMsg/[] msg public int[] publishCustomMsg(int[] msg/* [] */) { return msg; } // < publishDebug/str debugMsg public String publishDebug(String debugMsg/* str */) { log.info("publishDebug {}", debugMsg); return debugMsg; } @Override public String publishDisconnect(String portName) { return portName; } /** * publishEcho/b32 sInt/str name1/b8/bu32 bui32/b32 bi32/b9/str name2/[] * * @param myFloat * @param myByte * @param secondFloat */ public void publishEcho(float myFloat, int myByte, float secondFloat) { log.info("myFloat {} {} {} ", myFloat, myByte, secondFloat); } @Override public EncoderData publishEncoderData(EncoderData data) { return data; } // callback for generated method from arduinoMsg.schema public EncoderData publishEncoderData(Integer deviceId, Integer position) { EncoderControl ec = (EncoderControl) getDevice(deviceId); String pin = null; if (ec instanceof Amt203Encoder) { // type = 0; pin = ((Amt203Encoder) ec).getPin(); } else if (ec instanceof As5048AEncoder) { // type = 1; pin = ((As5048AEncoder) ec).getPin(); } else { error("unknown encoder type {}", ec.getClass().getName()); } EncoderData data = new EncoderData(ec.getName(), pin, position); return data; } /* * DeviceControl methods. In this case they represents the I2CBusControl Not * sure if this is good to use the Arduino as an I2CBusControl Exploring * different alternatives. I may have to rethink. Alternate solutions are * welcome. /Mats. 
*/ /** * @param deviceId * - mrl device identifier * @param data * - data to publish from I2c */ // < publishI2cData/deviceId/[] data public void publishI2cData(Integer deviceId, int[] data) { log.info("publishI2cData"); i2cReturnData(data); } /** * error from mrlcom in string form * * @param errorMsg * @return */ // < publishMRLCommError/str errorMsg public String publishMRLCommError(String errorMsg/* str */) { warn("MrlCommError: " + errorMsg); log.error("MRLCommError: {}", errorMsg); return errorMsg; } // < publishPinArray/[] data public PinData[] publishPinArray(int[] data) { log.debug("publishPinArray {}", data); // if subscribers - // look for subscribed pins and publish them int pinDataCnt = data.length / 3; PinData[] pinArray = new PinData[pinDataCnt]; // parse sort reduce ... for (int i = 0; i < pinArray.length; ++i) { int address = data[3 * i]; PinDefinition pinDef = getPin(address); if (pinDef == null) { log.error("not a valid pin address {}", address); continue; } int value = Serial.bytesToInt(data, (3 * i) + 1, 2); PinData pinData = new PinData(pinDef.getPinName(), value); // update def with last value pinDef.setValue(value); pinArray[i] = pinData; // handle individual pins if (pinListeners.containsKey(address)) { Set<PinListener> set = pinListeners.get(address); for (PinListener pinListner : set) { if (pinListner.isLocal()) { pinListner.onPin(pinData); } else { invoke("publishPin", pinData); } } } } // TODO: improve this logic so it doesn't something more effecient. 
HashMap<String, PinData> pinDataMap = new HashMap<String, PinData>(); for (int i = 0; i < pinArray.length; i++) { if (pinArray[i] != null && pinArray[i].pin != null) { pinDataMap.put(pinArray[i].pin, pinArray[i]); } } for (String name : pinArrayListeners.keySet()) { // put the pin data into a map for quick lookup PinArrayListener pal = pinArrayListeners.get(name); if (pal.getActivePins() != null && pal.getActivePins().length > 0) { int numActive = pal.getActivePins().length; PinData[] subArray = new PinData[numActive]; for (int i = 0; i < numActive; i++) { String key = pal.getActivePins()[i]; if (pinDataMap.containsKey(key)) { subArray[i] = pinDataMap.get(key); } else { subArray[i] = null; } } // only the values that the listener is asking for. pal.onPinArray(subArray); } else { // the full array pal.onPinArray(pinArray); } } return pinArray; } public List<String> publishPortNames(List<String> portNames) { return portNames; } /** * FIXME - I bet this doesnt work - test it * * @param deviceId * @param data * @return */ public SerialRelayData publishSerialData(Integer deviceId, int[] data) { SerialRelayData serialData = new SerialRelayData(deviceId, data); return serialData; } @Deprecated /** * Controllers should publish EncoderData - Servos can change that * into ServoData and publish REMOVED BY GROG - use TimeEncoder ! 
*/ public Integer publishServoEvent(Integer deviceId, Integer eventType, Integer currentPos, Integer targetPos) { if (getDevice(deviceId) != null) { // REMOVED BY GROG - use time encoder !((ServoControl) // getDevice(deviceId)).publishServoData(ServoStatus.SERVO_POSITION_UPDATE, // (double) currentPos); } else { error("no servo found at device id %d", deviceId); } return currentPos; } // FIXME should be in Control interface - for callback // < publishUltrasonicSensorData/deviceId/b16 echoTime public Integer publishUltrasonicSensorData(Integer deviceId, Integer echoTime) { // log.info("echoTime {}", echoTime); ((UltrasonicSensor) getDevice(deviceId)).onUltrasonicSensorData(echoTime.doubleValue()); return echoTime; } // FIXME put recording into generated Msg @Override public void record() throws Exception { msg.record(); } @Override public void releaseService() { super.releaseService(); if (virtual != null) { virtual.releaseService(); } sleep(300); disconnect(); } /** * resets both MrlComm-land &amp; Java-land */ public void reset() { log.info("reset - resetting all devices"); // reset MrlComm-land softReset(); for (String name : deviceList.keySet()) { DeviceMapping dmap = deviceList.get(name); Attachable device = dmap.getDevice(); log.info("unsetting device {}", name); try { device.detach(name); } catch (Exception e) { log.error("detaching threw", e); } } // reset Java-land deviceIndex.clear(); deviceList.clear(); } /** * Requesting board information from the board */ public void sendBoardInfoRequest() { boardInfoRequestTs = System.currentTimeMillis(); msg.getBoardInfo(); } public void serialAttach(SerialRelay serialRelay, int controllerAttachAs) { DeviceMapping dm = attachDevice(serialRelay, new Object[] { controllerAttachAs }); Integer deviceId = dm.getId(); msg.serialAttach(deviceId, controllerAttachAs); } // > servoDetachPin/deviceId public void onServoDisable(ServoControl servo) { msg.servoDetachPin(getDeviceId(servo)); } @Override public void 
onServoEnable(ServoControl servo) { Integer deviceId = getDeviceId(servo); if (deviceId == null) { log.warn("servoEnable servo {} does not have a corresponding device currently - did you attach?", servo.getName()); } if (isConnected()) { msg.servoAttachPin(deviceId, getAddress(servo.getPin())); } else { log.info("not currently connected"); } } /** * servo.write(angle) https://www.arduino.cc/en/Reference/ServoWrite The msg * to mrl will always contain microseconds - but this method will (like the * Arduino Servo.write) accept both degrees or microseconds. The code is * ported from Arduino's Servo.cpp */ @Override // > servoWrite/deviceId/target public void onServoMoveTo(ServoControl servo) { Integer deviceId = getDeviceId(servo); if (deviceId == null) { log.warn("servoMoveTo servo {} does not have a corresponding device currently - did you attach?", servo.getName()); return; } // getTargetOutput ALWAYS ALWAYS Degrees ! // so we convert to microseconds int us = degreeToMicroseconds(servo.getTargetOutput()); log.debug("servoMoveToMicroseconds servo {} id {} {}->{} us", servo.getName(), deviceId, servo.getPos(), us); msg.servoMoveToMicroseconds(deviceId, us); } @Override // > servoSetVelocity/deviceId/b16 velocity public void onServoSetSpeed(ServoControl servo) { int speed = -1; if (servo.getSpeed() != null) { speed = servo.getSpeed().intValue(); } log.info("servoSetVelocity {} id {} velocity {}", servo.getName(), getDeviceId(servo), speed); Integer i = getDeviceId(servo); if (i == null) { log.error("{} has null deviceId", servo); return; } msg.servoSetVelocity(i, speed); } /** * On standard servos a parameter value of 1000 is fully counter-clockwise, * 2000 is fully clockwise, and 1500 is in the middle. 
*/ @Override // > servoWriteMicroseconds/deviceId/b16 ms public void onServoWriteMicroseconds(ServoControl servo, int uS) { int deviceId = getDeviceId(servo); log.debug("writeMicroseconds {} {} id {}", servo.getName(), uS, deviceId); msg.servoMoveToMicroseconds(deviceId, uS); } public boolean setArduinoPath(String path) { path = path.replace("\\", "/"); path = path.trim(); if (!path.endsWith("/")) { path += "/"; } File dir = new File(path); if (!dir.exists() || !dir.isDirectory()) { error(String.format("%s is not a valid directory", path)); return false; } arduinoPath = path; ArduinoUtils.arduinoPath = arduinoPath; // THIS IS SILLY AND NOT // NORMALIZED ! save(); return true; } public void setAref(String aref) { aref = aref.toUpperCase(); if (this.getBoard().contains("mega")) { if (aref == "INTERNAL") { error("Aref " + aref + " is not compatible with your board " + this.getBoard()); aref = "DEFAULT"; } } else { if (aref == "INTERNAL1V1" || aref == "INTERNAL2V56") { error("Aref INTERNALxV is not compatible with your board " + this.getBoard()); aref = "DEFAULT"; } } int arefInt = 1; switch (aref) { case "EXTERNAL": arefInt = 0; break; case "DEFAULT": arefInt = 1; break; case "INTERNAL1V1": arefInt = 2; break; case "INTERNAL": arefInt = 3; break; case "INTERNAL2V56": arefInt = 3; break; default: log.error("Aref " + aref + " is unknown"); } log.info("set aref to " + aref); this.aref = aref; msg.setAref(arefInt); } public void setBoardMega() { setBoard(BOARD_TYPE_MEGA); } public void setBoardMegaADK() { setBoard(BOARD_TYPE_MEGA_ADK); } public void setBoardNano() { setBoard(BOARD_TYPE_NANO); } public void setBoardUno() { setBoard(BOARD_TYPE_UNO); } /* * Debounce ensures that only a single signal will be acted upon for a single * opening or closing of a contact. the delay is the min number of pc cycles * must occur before a reading is taken * * Affects all reading of pins setting to 0 sets it off * * TODO - implement on MrlComm side ... 
* */ // > setDebounce/pin/delay public void setDebounce(int pin, int delay) { msg.setDebounce(pin, delay); } // > setDebug/bool enabled public void setDebug(boolean b) { msg.setDebug(b); } /* * dynamically change the serial rate TODO - shouldn't this change Arduino * service serial rate too to match? * */ // > setSerialRate/b32 rate public void setSerialRate(int rate) { msg.setSerialRate(rate); } public void setSketch(Sketch sketch) { this.sketch = sketch; broadcastState(); } /* * set a pin trigger where a value will be sampled and an event will be signal * when the pin turns into a different state. * * TODO - implement on MrlComm side... */ // > setTrigger/pin/triggerValue public void setTrigger(int pin, int value) { msg.setTrigger(pin, value); } @Override public void setZeroPoint(EncoderControl encoder) { // send the set zero point command to the encoder msg.setZeroPoint(getDeviceId(encoder.getName())); } /** * send a reset to MrlComm - all devices removed, all polling is stopped and * all other counters are reset */ // > softReset public void softReset() { msg.softReset(); } @Override public void startService() { super.startService(); try { initSerial(); } catch (Exception e) { log.error("Arduino.startService threw", e); } } @Override public void stopRecording() { msg.stopRecording(); } @Override public void stopService() { super.stopService(); detachI2CControls(); disconnect(); } @Override // > ultrasonicSensorStartRanging/deviceId/b32 timeout public void ultrasonicSensorStartRanging(UltrasonicSensorControl sensor) { msg.ultrasonicSensorStartRanging(getDeviceId(sensor)); } @Override // > ultrasonicSensorStopRanging/deviceId public void ultrasonicSensorStopRanging(UltrasonicSensorControl sensor) { msg.ultrasonicSensorStopRanging(getDeviceId(sensor)); } public void uploadSketch(String arduinoPath) throws IOException { uploadSketch(arduinoPath, serial.getLastPortName()); } public void uploadSketch(String arudinoPath, String comPort) throws IOException { 
uploadSketch(arudinoPath, comPort, getBoard()); } static public String getBoardType(int boardId) { String boardName; switch (boardId) { case BOARD_TYPE_ID_MEGA: boardName = BOARD_TYPE_MEGA; break; case BOARD_TYPE_ID_UNO: boardName = BOARD_TYPE_UNO; break; case BOARD_TYPE_ID_ADK_MEGA: boardName = BOARD_TYPE_MEGA_ADK; break; case BOARD_TYPE_ID_NANO: boardName = BOARD_TYPE_NANO; break; case BOARD_TYPE_ID_PRO_MINI: boardName = BOARD_TYPE_PRO_MINI; break; default: // boardName = "unknown"; boardName = BOARD_TYPE_UNO; break; } return boardName; } static public int getBoardTypeId(String boardName) { Integer boardId = null; switch (boardName) { case BOARD_TYPE_MEGA: boardId = BOARD_TYPE_ID_MEGA; break; case BOARD_TYPE_UNO: boardId = BOARD_TYPE_ID_UNO; break; case BOARD_TYPE_MEGA_ADK: boardId = BOARD_TYPE_ID_ADK_MEGA; break; case BOARD_TYPE_NANO: boardId = BOARD_TYPE_ID_NANO; break; case BOARD_TYPE_PRO_MINI: boardId = BOARD_TYPE_ID_PRO_MINI; break; default: // boardName = "unknown"; boardId = BOARD_TYPE_ID_UNO; break; } return boardId; } public void uploadSketch(String arduinoIdePath, String port, String type) throws IOException { log.info("uploadSketch ({}, {}, {})", arduinoIdePath, port, type); if (!setArduinoPath(arduinoIdePath)) { return; } // hail mary - if we have no idea // guess uno if (type == null || type.equals("")) { type = BOARD_TYPE_UNO; } log.info("arduino IDE Path={}", arduinoIdePath); log.info("Port={}", port); log.info("type={}", type); /* * not needed if (arduinoIdePath != null && * !arduinoIdePath.equals(ArduinoUtils.arduinoPath)) { this.arduinoPath = * arduinoIdePath; ArduinoUtils.arduinoPath = arduinoIdePath; save(); } */ uploadSketchResult = String.format("Uploaded %s ", new Date()); boolean connectedState = isConnected(); try { if (connectedState) { log.info("disconnecting..."); disconnect(); } ArduinoUtils.uploadSketch(port, type.toLowerCase()); } catch (Exception e) { log.info("ArduinoUtils threw trying to upload", e); } if (connectedState) { 
log.info("reconnecting..."); serial.connect(); } // perhaps you can reduce the inter-process information // to succeed | fail .. perhaps you can't // I would prefer transparency - send all output to the ui uploadSketchResult += ArduinoUtils.getOutput(); log.info(uploadSketchResult); broadcastState(); } /** * this is what Arduino firmware 'should' have done - a simplified * write(address, value) which follows the convention of 'all' device * operations at the lowest level * http://codewiki.wikidot.com/c:system-calls:write PinArrayControl method */ @Override public void write(int address, int value) { info("write (%d,%d) to %s", address, value, serial.getName()); PinDefinition pinDef = getPin(address); pinMode(address, "OUTPUT"); if (pinDef.isPwm() && value > 1) { // CHEESEY HACK !! analogWrite(address, value); } else { digitalWrite(address, value); } // cache value pinDef.setValue(value); } public Map<String, DeviceMapping> getDeviceList() { return deviceList; } public void ackTimeout() { log.warn("Ack Timeout seen. TODO: consider resetting the com port, reconnecting and re syncing all devices."); } public void publishMrlCommBegin(Integer version) { // If we were already connected up and clear to send.. this is a problem.. it means the board was reset on it. if (mrlCommBegin > 0) { error("arduino %s has reset - does it have a separate power supply?", getName()); // At this point we need to reset! mrlCommBegin = 0; } ++mrlCommBegin; // log.info("Skipping Sync! TODO: uncomment me."); // This needs to be non-blocking // If we have devices, we need to sync them. // The device list always has "Arduino" in it for some reason.. if (deviceList.size() > 1) { log.info("Need to sync devices to mrlcomm. Num Devices: {} Devices: {}", deviceList.size(), deviceList); invoke("sync"); } else { log.info("no devices to sync, clear to resume."); } } /** * DO NOT FORGET INSTALL AND VMARGS !!! 
* * -Djava.library.path=libraries/native -Djna.library.path=libraries/native * -Dfile.encoding=UTF-8 * * @param args */ public static void main(String[] args) { try { // Platform.setVirtual(true); Runtime.main(new String[] { "--interactive", "--id", "id"}); LoggingFactory.init(Level.INFO); // Platform.setVirtual(true); /* WebGui webgui = (WebGui) Runtime.create("webgui", "WebGui"); webgui.autoStartBrowser(false); webgui.setPort(8887); webgui.startService(); */ // Runtime.start("gui", "SwingGui"); Serial.listPorts(); Arduino hub = (Arduino) Runtime.start("hub", "Arduino"); hub.connect("/dev/ttyACM0"); // hub.enableAck(false); ServoControl sc = (ServoControl) Runtime.start("s1", "Servo"); sc.setPin(3); hub.attach(sc); sc = (ServoControl) Runtime.start("s2", "Servo"); sc.setPin(9); hub.attach(sc); // hub.enableAck(true); /* * sc = (ServoControl) Runtime.start("s3", "Servo"); sc.setPin(12); * hub.attach(sc); */ log.info("here"); // hub.connect("COM6"); // uno // hub.startTcpServer(); boolean isDone = true; if (isDone) { return; } VirtualArduino vmega = null; vmega = (VirtualArduino) Runtime.start("vmega", "VirtualArduino"); vmega.connect("COM7"); Serial sd = (Serial) vmega.getSerial(); sd.startTcpServer(); // Runtime.start("webgui", "WebGui"); Arduino mega = (Arduino) Runtime.start("mega", "Arduino"); if (mega.isVirtual()) { vmega = mega.getVirtual(); vmega.setBoardMega(); } // mega.getBoardTypes(); // mega.setBoardMega(); // mega.setBoardUno(); mega.connect("COM7"); /* * Arduino uno = (Arduino) Runtime.start("uno", "Arduino"); * uno.connect("COM6"); */ // log.info("port names {}", mega.getPortNames()); Servo servo = (Servo) Runtime.start("servo", "Servo"); // servo.load(); log.info("rest is {}", servo.getRest()); servo.save(); // servo.setPin(8); servo.attach(mega, 13); servo.moveTo(90.0); /* * servo.moveTo(3); sleep(300); servo.moveTo(130); sleep(300); * servo.moveTo(90); sleep(300); * * * // minmax checking * * servo.invoke("moveTo", 120); */ /* * 
mega.attach(servo); * * servo.moveTo(3); * * servo.moveTo(30); * * mega.enablePin("A4"); * * // arduino.setBoardMega(); * * Adafruit16CServoDriver adafruit = (Adafruit16CServoDriver) * Runtime.start("adafruit", "Adafruit16CServoDriver"); * adafruit.attach(mega); mega.attach(adafruit); */ // servo.attach(arduino, 8, 90); // Runtime.start("webgui", "WebGui"); // Service.sleep(3000); // remote.startListening(); // Runtime.start("webgui", "WebGui"); } catch (Exception e) { log.error("main threw", e); } } /** * stops the servo sweeping or moving with speed control */ @Override public void onServoStop(ServoControl servo) { msg.servoStop(getDeviceId(servo)); } }
making resync be non-blocking to avoid msg onByte processing deadlock.
src/main/java/org/myrobotlab/service/Arduino.java
making resync be non-blocking to avoid msg onByte processing deadlock.
<ide><path>rc/main/java/org/myrobotlab/service/Arduino.java <ide> import org.myrobotlab.arduino.BoardType; <ide> import org.myrobotlab.arduino.DeviceSummary; <ide> import org.myrobotlab.arduino.Msg; <add>import org.myrobotlab.framework.Message; <ide> import org.myrobotlab.framework.ServiceType; <ide> import org.myrobotlab.framework.interfaces.Attachable; <ide> import org.myrobotlab.framework.interfaces.NameProvider; <ide> <ide> int mrlCommBegin = 0; <ide> <del> long syncStartTypeTs = System.currentTimeMillis(); <add> private volatile boolean syncInProgress = false; <ide> <ide> public Arduino(String n, String id) { <ide> super(n, id); <ide> * sync our device list with mrlcomm <ide> */ <ide> public void sync() { <del> long now = System.currentTimeMillis(); <del> if (now - syncStartTypeTs < 5000) { <del> log.error("===== we are in the middle of synching ... ==== talk to us in {} ms", 5000 - (now - syncStartTypeTs)); <add> if (syncInProgress) { <add> log.warn("Alreadying calling sync! Skipping this request"); <ide> return; <ide> } <del> syncStartTypeTs = System.currentTimeMillis(); <add> syncInProgress = true; <ide> log.warn("================================ sync !!! =============================="); <ide> try { <del> <ide> for (DeviceMapping device : deviceList.values()) { <del> reattach(device); <add> // invoke("reattach", device); <add> send(getName(), "reattach", device); <add> // Thread.sleep(1000); <add> <add> // reattach(device); <ide> } <ide> <ide> List<PinDefinition> list = getPinList(); <ide> } catch (Exception e) { <ide> log.error("sync threw", e); <ide> } <add> syncInProgress = false; <add> log.info("Sync completed"); <add> <ide> } <ide> <ide> // > customMsg/[] msg
Java
apache-2.0
6f20458adfb5ebb41b92b2720ca250087f8d9f6d
0
firebase/FirebaseUI-Android,SUPERCILEX/FirebaseUI-Android,firebase/FirebaseUI-Android,SUPERCILEX/FirebaseUI-Android,samtstern/FirebaseUI-Android,JosefHruska/FirebaseUI-Android,JosefHruska/FirebaseUI-Android,samtstern/FirebaseUI-Android,SUPERCILEX/FirebaseUI-Android,samtstern/FirebaseUI-Android,firebase/FirebaseUI-Android,firebase/FirebaseUI-Android,SUPERCILEX/FirebaseUI-Android
/* * Copyright 2017 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package com.firebase.ui.auth.ui.phone; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import android.support.annotation.NonNull; import android.support.annotation.RestrictTo; import android.support.annotation.VisibleForTesting; import android.support.v4.app.FragmentTransaction; import android.support.v7.app.AlertDialog; import android.text.TextUtils; import android.util.Log; import com.firebase.ui.auth.IdpResponse; import com.firebase.ui.auth.R; import com.firebase.ui.auth.ResultCodes; import com.firebase.ui.auth.ui.AppCompatBase; import com.firebase.ui.auth.ui.BaseHelper; import com.firebase.ui.auth.ui.ExtraConstants; import com.firebase.ui.auth.ui.FlowParameters; import com.firebase.ui.auth.util.signincontainer.SaveSmartLock; import com.google.android.gms.tasks.OnFailureListener; import com.google.android.gms.tasks.OnSuccessListener; import com.google.firebase.FirebaseException; import com.google.firebase.auth.AuthResult; import com.google.firebase.auth.FirebaseAuthException; import com.google.firebase.auth.FirebaseAuthInvalidCredentialsException; import com.google.firebase.auth.FirebaseUser; import com.google.firebase.auth.PhoneAuthCredential; import com.google.firebase.auth.PhoneAuthProvider; import java.util.concurrent.TimeUnit; /** * Activity to control the entire phone 
verification flow. Plays host to * {@link VerifyPhoneNumberFragment} and {@link SubmitConfirmationCodeFragment} */ @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP) public class PhoneVerificationActivity extends AppCompatBase { private static final String PHONE_VERIFICATION_LOG_TAG = "PhoneVerification"; static final long SHORT_DELAY_MILLIS = 750; static final long AUTO_RETRIEVAL_TIMEOUT_MILLIS = 120000; static final String ERROR_INVALID_PHONE = "ERROR_INVALID_PHONE_NUMBER"; static final String ERROR_INVALID_VERIFICATION = "ERROR_INVALID_VERIFICATION_CODE"; static final String ERROR_TOO_MANY_REQUESTS = "ERROR_TOO_MANY_REQUESTS"; static final String ERROR_QUOTA_EXCEEDED = "ERROR_QUOTA_EXCEEDED"; static final String ERROR_SESSION_EXPIRED = "ERROR_SESSION_EXPIRED"; static final String KEY_VERIFICATION_PHONE = "KEY_VERIFICATION_PHONE"; static final String KEY_STATE = "KEY_STATE"; enum VerificationState { VERIFICATION_NOT_STARTED, VERIFICATION_STARTED, VERIFIED; } private AlertDialog mAlertDialog; private SaveSmartLock mSaveSmartLock; private CompletableProgressDialog mProgressDialog; private Handler mHandler; private String mPhoneNumber; private String mVerificationId; private Boolean mIsDestroyed = false; private PhoneAuthProvider.ForceResendingToken mForceResendingToken; private VerificationState mVerificationState; public static Intent createIntent(Context context, FlowParameters flowParams, String phone) { return BaseHelper.createBaseIntent(context, PhoneVerificationActivity.class, flowParams) .putExtra(ExtraConstants.EXTRA_PHONE, phone); } @Override protected void onCreate(final Bundle savedInstance) { super.onCreate(savedInstance); setContentView(R.layout.activity_register_phone); mSaveSmartLock = mActivityHelper.getSaveSmartLockInstance(); mHandler = new Handler(); mVerificationState = VerificationState.VERIFICATION_NOT_STARTED; if (savedInstance != null && !savedInstance.isEmpty()) { mPhoneNumber = savedInstance.getString(KEY_VERIFICATION_PHONE); if 
(savedInstance.getSerializable(KEY_STATE) != null) { mVerificationState = (VerificationState) savedInstance.getSerializable(KEY_STATE); } return; } String phone = getIntent().getExtras().getString(ExtraConstants.EXTRA_PHONE); VerifyPhoneNumberFragment fragment = VerifyPhoneNumberFragment.getInstance (mActivityHelper.getFlowParams(), phone); getSupportFragmentManager().beginTransaction().replace(R.id.fragment_verify_phone, fragment, VerifyPhoneNumberFragment.TAG).disallowAddToBackStack().commit(); } @Override protected void onStart() { super.onStart(); //Actvitiy can be restarted in any of the following states // 1) VERIFICATION_STARTED // 2) SMS_RETRIEVED // 3) INSTANT_VERIFIED // 4) VERIFIED // For the first three cases, we can simply resubscribe to the // OnVerificationStateChangedCallbacks // For 4, we simply finish the activity if (mVerificationState.equals(VerificationState.VERIFICATION_STARTED)) { sendCode(mPhoneNumber, false); } else if (mVerificationState == VerificationState.VERIFIED) { // activity was recreated when verified dialog was displayed finish(mActivityHelper.getFirebaseAuth().getCurrentUser()); } } @Override public void onBackPressed() { if (getSupportFragmentManager().getBackStackEntryCount() > 0) { mVerificationState = VerificationState.VERIFICATION_NOT_STARTED; getSupportFragmentManager().popBackStack(); } else { super.onBackPressed(); } } @Override protected void onSaveInstanceState(Bundle outState) { outState.putSerializable(KEY_STATE, mVerificationState); outState.putString(KEY_VERIFICATION_PHONE, mPhoneNumber); super.onSaveInstanceState(outState); } @Override protected void onDestroy() { mIsDestroyed = true; mHandler.removeCallbacksAndMessages(null); dismissLoadingDialog(); super.onDestroy(); } void verifyPhoneNumber(String phoneNumber, boolean forceResend) { sendCode(phoneNumber, forceResend); if (forceResend) { showLoadingDialog(getString(R.string.resending)); } else { showLoadingDialog(getString(R.string.verifying)); } } public void 
submitConfirmationCode(String confirmationCode) { showLoadingDialog(getString(R.string.verifying)); signingWithCreds(PhoneAuthProvider.getCredential(mVerificationId, confirmationCode)); } void onVerificationSuccess(@NonNull final PhoneAuthCredential phoneAuthCredential) { if (TextUtils.isEmpty(phoneAuthCredential.getSmsCode())) { signingWithCreds(phoneAuthCredential); } else { //Show Fragment if it is not already visible showSubmitCodeFragment(); SubmitConfirmationCodeFragment submitConfirmationCodeFragment = getSubmitConfirmationCodeFragment(); showLoadingDialog(getString(R.string.retrieving_sms)); if (submitConfirmationCodeFragment != null) { submitConfirmationCodeFragment.setConfirmationCode(String.valueOf (phoneAuthCredential.getSmsCode())); } signingWithCreds(phoneAuthCredential); } } void onCodeSent() { completeLoadingDialog(getString(R.string.code_sent)); mHandler.postDelayed(new Runnable() { @Override public void run() { dismissLoadingDialog(); showSubmitCodeFragment(); } }, SHORT_DELAY_MILLIS); } void onVerificationFailed(@NonNull FirebaseException ex) { VerifyPhoneNumberFragment verifyPhoneNumberFragment = (VerifyPhoneNumberFragment) getSupportFragmentManager().findFragmentByTag(VerifyPhoneNumberFragment.TAG); if (verifyPhoneNumberFragment == null) { return; } if (ex instanceof FirebaseAuthException) { FirebaseAuthException firebaseAuthException = (FirebaseAuthException) ex; switch (firebaseAuthException.getErrorCode()) { case ERROR_INVALID_PHONE: verifyPhoneNumberFragment.showError(getString(R.string.invalid_phone_number)); dismissLoadingDialog(); break; case ERROR_TOO_MANY_REQUESTS: showAlertDialog(getString(R.string.error_too_many_attempts), null); dismissLoadingDialog(); break; case ERROR_QUOTA_EXCEEDED: showAlertDialog(getString(R.string.error_quota_exceeded), null); dismissLoadingDialog(); break; default: Log.w(PHONE_VERIFICATION_LOG_TAG, ex.getLocalizedMessage()); dismissLoadingDialog(); showAlertDialog(ex.getLocalizedMessage(), null); } } else { 
Log.w(PHONE_VERIFICATION_LOG_TAG, ex.getLocalizedMessage()); dismissLoadingDialog(); showAlertDialog(ex.getLocalizedMessage(), null); } } private void sendCode(String phoneNumber, boolean forceResend) { mPhoneNumber = phoneNumber; mVerificationState = VerificationState.VERIFICATION_STARTED; mActivityHelper.getPhoneAuthProviderInstance().verifyPhoneNumber(phoneNumber, AUTO_RETRIEVAL_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS, this, new PhoneAuthProvider .OnVerificationStateChangedCallbacks() { @Override public void onVerificationCompleted(@NonNull PhoneAuthCredential phoneAuthCredential) { if (!mIsDestroyed) { PhoneVerificationActivity.this.onVerificationSuccess(phoneAuthCredential); } } @Override public void onVerificationFailed(FirebaseException ex) { if (!mIsDestroyed) { PhoneVerificationActivity.this.onVerificationFailed(ex); } } @Override public void onCodeSent(@NonNull String verificationId, @NonNull PhoneAuthProvider .ForceResendingToken forceResendingToken) { mVerificationId = verificationId; mForceResendingToken = forceResendingToken; if (!mIsDestroyed) { PhoneVerificationActivity.this.onCodeSent(); } } }, forceResend ? mForceResendingToken : null); } @VisibleForTesting(otherwise = VisibleForTesting.NONE) protected AlertDialog getAlertDialog() { // It is hard to test AlertDialogs currently with robo electric. 
See: // https://github.com/robolectric/robolectric/issues/1944 // We just test that the error was not displayed inline return mAlertDialog; } private void showSubmitCodeFragment() { // idempotent function if (getSubmitConfirmationCodeFragment() == null) { SubmitConfirmationCodeFragment f = SubmitConfirmationCodeFragment.getInstance (mActivityHelper.getFlowParams(), mPhoneNumber); FragmentTransaction t = getSupportFragmentManager().beginTransaction().replace(R.id .fragment_verify_phone, f, SubmitConfirmationCodeFragment.TAG).addToBackStack (null); if (!isFinishing() && !mIsDestroyed) { t.commitAllowingStateLoss(); } } } private void finish(FirebaseUser user) { IdpResponse response = new IdpResponse.Builder(PhoneAuthProvider.PROVIDER_ID, null) .setPhoneNumber(user.getPhoneNumber()) .build(); setResult(ResultCodes.OK, response.toIntent()); finish(); } private void showAlertDialog(@NonNull String s, DialogInterface.OnClickListener onClickListener) { mAlertDialog = new AlertDialog.Builder(this) .setMessage(s) .setPositiveButton(R.string.incorrect_code_dialog_positive_button_text, onClickListener) .show(); } private void signingWithCreds(@NonNull PhoneAuthCredential phoneAuthCredential) { mActivityHelper.getFirebaseAuth().signInWithCredential(phoneAuthCredential) .addOnSuccessListener(this, new OnSuccessListener<AuthResult>() { @Override public void onSuccess(final AuthResult authResult) { mVerificationState = VerificationState.VERIFIED; completeLoadingDialog(getString(R.string.verified)); // Activity can be recreated before this message is handled mHandler.postDelayed(new Runnable() { @Override public void run() { if (!mIsDestroyed) { dismissLoadingDialog(); finish(authResult.getUser()); } } }, SHORT_DELAY_MILLIS); } }).addOnFailureListener(this, new OnFailureListener() { @Override public void onFailure(@NonNull Exception e) { dismissLoadingDialog(); //incorrect confirmation code if (e instanceof FirebaseAuthInvalidCredentialsException) { 
FirebaseAuthInvalidCredentialsException firebaseAuthInvalidCredentialsException = (FirebaseAuthInvalidCredentialsException) e; switch (firebaseAuthInvalidCredentialsException.getErrorCode()) { case ERROR_INVALID_VERIFICATION: showAlertDialog(getString(R.string.incorrect_code_dialog_body), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { SubmitConfirmationCodeFragment f = getSubmitConfirmationCodeFragment(); f.setConfirmationCode(""); } }); break; case ERROR_SESSION_EXPIRED: showAlertDialog(getString(R.string.error_session_expired), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { SubmitConfirmationCodeFragment f = getSubmitConfirmationCodeFragment(); f.setConfirmationCode(""); } }); break; default: showAlertDialog(e.getLocalizedMessage(), null); } } else { showAlertDialog(e.getLocalizedMessage(), null); } } }); } private void completeLoadingDialog(String content) { if (mProgressDialog != null) { mProgressDialog.complete(content); } } private void showLoadingDialog(String message) { dismissLoadingDialog(); if (mProgressDialog == null) { mProgressDialog = new CompletableProgressDialog(this); mProgressDialog.setIndeterminate(true); mProgressDialog.setTitle(""); } mProgressDialog.setMessage(message); mProgressDialog.show(); } private void dismissLoadingDialog() { if (mProgressDialog != null) { mProgressDialog.dismiss(); mProgressDialog = null; } } private SubmitConfirmationCodeFragment getSubmitConfirmationCodeFragment() { return (SubmitConfirmationCodeFragment) getSupportFragmentManager().findFragmentByTag (SubmitConfirmationCodeFragment.TAG); } }
auth/src/main/java/com/firebase/ui/auth/ui/phone/PhoneVerificationActivity.java
/* * Copyright 2017 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package com.firebase.ui.auth.ui.phone; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import android.support.annotation.NonNull; import android.support.annotation.RestrictTo; import android.support.annotation.VisibleForTesting; import android.support.v4.app.FragmentTransaction; import android.support.v7.app.AlertDialog; import android.text.TextUtils; import android.util.Log; import com.firebase.ui.auth.IdpResponse; import com.firebase.ui.auth.R; import com.firebase.ui.auth.ResultCodes; import com.firebase.ui.auth.ui.AppCompatBase; import com.firebase.ui.auth.ui.BaseHelper; import com.firebase.ui.auth.ui.ExtraConstants; import com.firebase.ui.auth.ui.FlowParameters; import com.firebase.ui.auth.util.signincontainer.SaveSmartLock; import com.google.android.gms.tasks.OnFailureListener; import com.google.android.gms.tasks.OnSuccessListener; import com.google.firebase.FirebaseException; import com.google.firebase.auth.AuthResult; import com.google.firebase.auth.FirebaseAuthException; import com.google.firebase.auth.FirebaseAuthInvalidCredentialsException; import com.google.firebase.auth.FirebaseUser; import com.google.firebase.auth.PhoneAuthCredential; import com.google.firebase.auth.PhoneAuthProvider; import java.util.concurrent.TimeUnit; /** * Activity to control the entire phone 
verification flow. Plays host to * {@link VerifyPhoneNumberFragment} and {@link SubmitConfirmationCodeFragment} */ @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP) public class PhoneVerificationActivity extends AppCompatBase { private static final String PHONE_VERIFICATION_LOG_TAG = "PhoneVerification"; static final long SHORT_DELAY_MILLIS = 750; static final long AUTO_RETRIEVAL_TIMEOUT_MILLIS = 90000; static final String ERROR_INVALID_PHONE = "ERROR_INVALID_PHONE_NUMBER"; static final String ERROR_INVALID_VERIFICATION = "ERROR_INVALID_VERIFICATION_CODE"; static final String ERROR_TOO_MANY_REQUESTS = "ERROR_TOO_MANY_REQUESTS"; static final String ERROR_QUOTA_EXCEEDED = "ERROR_QUOTA_EXCEEDED"; static final String ERROR_SESSION_EXPIRED = "ERROR_SESSION_EXPIRED"; static final String KEY_VERIFICATION_PHONE = "KEY_VERIFICATION_PHONE"; static final String KEY_STATE = "KEY_STATE"; enum VerificationState { VERIFICATION_NOT_STARTED, VERIFICATION_STARTED, VERIFIED; } private AlertDialog mAlertDialog; private SaveSmartLock mSaveSmartLock; private CompletableProgressDialog mProgressDialog; private Handler mHandler; private String mPhoneNumber; private String mVerificationId; private Boolean mIsDestroyed = false; private PhoneAuthProvider.ForceResendingToken mForceResendingToken; private VerificationState mVerificationState; public static Intent createIntent(Context context, FlowParameters flowParams, String phone) { return BaseHelper.createBaseIntent(context, PhoneVerificationActivity.class, flowParams) .putExtra(ExtraConstants.EXTRA_PHONE, phone); } @Override protected void onCreate(final Bundle savedInstance) { super.onCreate(savedInstance); setContentView(R.layout.activity_register_phone); mSaveSmartLock = mActivityHelper.getSaveSmartLockInstance(); mHandler = new Handler(); mVerificationState = VerificationState.VERIFICATION_NOT_STARTED; if (savedInstance != null && !savedInstance.isEmpty()) { mPhoneNumber = savedInstance.getString(KEY_VERIFICATION_PHONE); if 
(savedInstance.getSerializable(KEY_STATE) != null) { mVerificationState = (VerificationState) savedInstance.getSerializable(KEY_STATE); } return; } String phone = getIntent().getExtras().getString(ExtraConstants.EXTRA_PHONE); VerifyPhoneNumberFragment fragment = VerifyPhoneNumberFragment.getInstance (mActivityHelper.getFlowParams(), phone); getSupportFragmentManager().beginTransaction().replace(R.id.fragment_verify_phone, fragment, VerifyPhoneNumberFragment.TAG).disallowAddToBackStack().commit(); } @Override protected void onStart() { super.onStart(); //Actvitiy can be restarted in any of the following states // 1) VERIFICATION_STARTED // 2) SMS_RETRIEVED // 3) INSTANT_VERIFIED // 4) VERIFIED // For the first three cases, we can simply resubscribe to the // OnVerificationStateChangedCallbacks // For 4, we simply finish the activity if (mVerificationState.equals(VerificationState.VERIFICATION_STARTED)) { sendCode(mPhoneNumber, false); } else if (mVerificationState == VerificationState.VERIFIED) { // activity was recreated when verified dialog was displayed finish(mActivityHelper.getFirebaseAuth().getCurrentUser()); } } @Override public void onBackPressed() { if (getSupportFragmentManager().getBackStackEntryCount() > 0) { mVerificationState = VerificationState.VERIFICATION_NOT_STARTED; getSupportFragmentManager().popBackStack(); } else { super.onBackPressed(); } } @Override protected void onSaveInstanceState(Bundle outState) { outState.putSerializable(KEY_STATE, mVerificationState); outState.putString(KEY_VERIFICATION_PHONE, mPhoneNumber); super.onSaveInstanceState(outState); } @Override protected void onDestroy() { mIsDestroyed = true; mHandler.removeCallbacksAndMessages(null); dismissLoadingDialog(); super.onDestroy(); } void verifyPhoneNumber(String phoneNumber, boolean forceResend) { sendCode(phoneNumber, forceResend); if (forceResend) { showLoadingDialog(getString(R.string.resending)); } else { showLoadingDialog(getString(R.string.verifying)); } } public void 
submitConfirmationCode(String confirmationCode) { showLoadingDialog(getString(R.string.verifying)); signingWithCreds(PhoneAuthProvider.getCredential(mVerificationId, confirmationCode)); } void onVerificationSuccess(@NonNull final PhoneAuthCredential phoneAuthCredential) { if (TextUtils.isEmpty(phoneAuthCredential.getSmsCode())) { signingWithCreds(phoneAuthCredential); } else { //Show Fragment if it is not already visible showSubmitCodeFragment(); SubmitConfirmationCodeFragment submitConfirmationCodeFragment = getSubmitConfirmationCodeFragment(); showLoadingDialog(getString(R.string.retrieving_sms)); if (submitConfirmationCodeFragment != null) { submitConfirmationCodeFragment.setConfirmationCode(String.valueOf (phoneAuthCredential.getSmsCode())); } signingWithCreds(phoneAuthCredential); } } void onCodeSent() { completeLoadingDialog(getString(R.string.code_sent)); mHandler.postDelayed(new Runnable() { @Override public void run() { dismissLoadingDialog(); showSubmitCodeFragment(); } }, SHORT_DELAY_MILLIS); } void onVerificationFailed(@NonNull FirebaseException ex) { VerifyPhoneNumberFragment verifyPhoneNumberFragment = (VerifyPhoneNumberFragment) getSupportFragmentManager().findFragmentByTag(VerifyPhoneNumberFragment.TAG); if (verifyPhoneNumberFragment == null) { return; } if (ex instanceof FirebaseAuthException) { FirebaseAuthException firebaseAuthException = (FirebaseAuthException) ex; switch (firebaseAuthException.getErrorCode()) { case ERROR_INVALID_PHONE: verifyPhoneNumberFragment.showError(getString(R.string.invalid_phone_number)); dismissLoadingDialog(); break; case ERROR_TOO_MANY_REQUESTS: showAlertDialog(getString(R.string.error_too_many_attempts), null); dismissLoadingDialog(); break; case ERROR_QUOTA_EXCEEDED: showAlertDialog(getString(R.string.error_quota_exceeded), null); dismissLoadingDialog(); break; default: Log.w(PHONE_VERIFICATION_LOG_TAG, ex.getLocalizedMessage()); dismissLoadingDialog(); showAlertDialog(ex.getLocalizedMessage(), null); } } else { 
Log.w(PHONE_VERIFICATION_LOG_TAG, ex.getLocalizedMessage()); dismissLoadingDialog(); showAlertDialog(ex.getLocalizedMessage(), null); } } private void sendCode(String phoneNumber, boolean forceResend) { mPhoneNumber = phoneNumber; mVerificationState = VerificationState.VERIFICATION_STARTED; mActivityHelper.getPhoneAuthProviderInstance().verifyPhoneNumber(phoneNumber, AUTO_RETRIEVAL_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS, this, new PhoneAuthProvider .OnVerificationStateChangedCallbacks() { @Override public void onVerificationCompleted(@NonNull PhoneAuthCredential phoneAuthCredential) { if (!mIsDestroyed) { PhoneVerificationActivity.this.onVerificationSuccess(phoneAuthCredential); } } @Override public void onVerificationFailed(FirebaseException ex) { if (!mIsDestroyed) { PhoneVerificationActivity.this.onVerificationFailed(ex); } } @Override public void onCodeSent(@NonNull String verificationId, @NonNull PhoneAuthProvider .ForceResendingToken forceResendingToken) { mVerificationId = verificationId; mForceResendingToken = forceResendingToken; if (!mIsDestroyed) { PhoneVerificationActivity.this.onCodeSent(); } } }, forceResend ? mForceResendingToken : null); } @VisibleForTesting(otherwise = VisibleForTesting.NONE) protected AlertDialog getAlertDialog() { // It is hard to test AlertDialogs currently with robo electric. 
See: // https://github.com/robolectric/robolectric/issues/1944 // We just test that the error was not displayed inline return mAlertDialog; } private void showSubmitCodeFragment() { // idempotent function if (getSubmitConfirmationCodeFragment() == null) { SubmitConfirmationCodeFragment f = SubmitConfirmationCodeFragment.getInstance (mActivityHelper.getFlowParams(), mPhoneNumber); FragmentTransaction t = getSupportFragmentManager().beginTransaction().replace(R.id .fragment_verify_phone, f, SubmitConfirmationCodeFragment.TAG).addToBackStack (null); if (!isFinishing() && !mIsDestroyed) { t.commitAllowingStateLoss(); } } } private void finish(FirebaseUser user) { IdpResponse response = new IdpResponse.Builder(PhoneAuthProvider.PROVIDER_ID, null) .setPhoneNumber(user.getPhoneNumber()) .build(); setResult(ResultCodes.OK, response.toIntent()); finish(); } private void showAlertDialog(@NonNull String s, DialogInterface.OnClickListener onClickListener) { mAlertDialog = new AlertDialog.Builder(this) .setMessage(s) .setPositiveButton(R.string.incorrect_code_dialog_positive_button_text, onClickListener) .show(); } private void signingWithCreds(@NonNull PhoneAuthCredential phoneAuthCredential) { mActivityHelper.getFirebaseAuth().signInWithCredential(phoneAuthCredential) .addOnSuccessListener(this, new OnSuccessListener<AuthResult>() { @Override public void onSuccess(final AuthResult authResult) { mVerificationState = VerificationState.VERIFIED; completeLoadingDialog(getString(R.string.verified)); // Activity can be recreated before this message is handled mHandler.postDelayed(new Runnable() { @Override public void run() { if (!mIsDestroyed) { dismissLoadingDialog(); finish(authResult.getUser()); } } }, SHORT_DELAY_MILLIS); } }).addOnFailureListener(this, new OnFailureListener() { @Override public void onFailure(@NonNull Exception e) { dismissLoadingDialog(); //incorrect confirmation code if (e instanceof FirebaseAuthInvalidCredentialsException) { 
FirebaseAuthInvalidCredentialsException firebaseAuthInvalidCredentialsException = (FirebaseAuthInvalidCredentialsException) e; switch (firebaseAuthInvalidCredentialsException.getErrorCode()) { case ERROR_INVALID_VERIFICATION: showAlertDialog(getString(R.string.incorrect_code_dialog_body), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { SubmitConfirmationCodeFragment f = getSubmitConfirmationCodeFragment(); f.setConfirmationCode(""); } }); break; case ERROR_SESSION_EXPIRED: showAlertDialog(getString(R.string.error_session_expired), new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { SubmitConfirmationCodeFragment f = getSubmitConfirmationCodeFragment(); f.setConfirmationCode(""); } }); break; default: showAlertDialog(e.getLocalizedMessage(), null); } } else { showAlertDialog(e.getLocalizedMessage(), null); } } }); } private void completeLoadingDialog(String content) { if (mProgressDialog != null) { mProgressDialog.complete(content); } } private void showLoadingDialog(String message) { dismissLoadingDialog(); if (mProgressDialog == null) { mProgressDialog = new CompletableProgressDialog(this); mProgressDialog.setIndeterminate(true); mProgressDialog.setTitle(""); } mProgressDialog.setMessage(message); mProgressDialog.show(); } private void dismissLoadingDialog() { if (mProgressDialog != null) { mProgressDialog.dismiss(); mProgressDialog = null; } } private SubmitConfirmationCodeFragment getSubmitConfirmationCodeFragment() { return (SubmitConfirmationCodeFragment) getSupportFragmentManager().findFragmentByTag (SubmitConfirmationCodeFragment.TAG); } }
Increasing auto retrieval timeout to 2 mins Change-Id: I1b07ed83741832730b4277dd65934cdf11d658db
auth/src/main/java/com/firebase/ui/auth/ui/phone/PhoneVerificationActivity.java
Increasing auto retrieval timeout to 2 mins
<ide><path>uth/src/main/java/com/firebase/ui/auth/ui/phone/PhoneVerificationActivity.java <ide> public class PhoneVerificationActivity extends AppCompatBase { <ide> private static final String PHONE_VERIFICATION_LOG_TAG = "PhoneVerification"; <ide> static final long SHORT_DELAY_MILLIS = 750; <del> static final long AUTO_RETRIEVAL_TIMEOUT_MILLIS = 90000; <add> static final long AUTO_RETRIEVAL_TIMEOUT_MILLIS = 120000; <ide> static final String ERROR_INVALID_PHONE = "ERROR_INVALID_PHONE_NUMBER"; <ide> static final String ERROR_INVALID_VERIFICATION = "ERROR_INVALID_VERIFICATION_CODE"; <ide> static final String ERROR_TOO_MANY_REQUESTS = "ERROR_TOO_MANY_REQUESTS";
Java
apache-2.0
8745ffd42f84a1b16fdd5c3de02462d98787e853
0
i2p/i2p.itoopie,i2p/i2p.itoopie,i2p/i2p.itoopie,i2p/i2p.itoopie
package net.i2p.router.web; import net.i2p.router.Router; import net.i2p.router.transport.FIFOBandwidthRefiller; import net.i2p.router.transport.TransportManager; import net.i2p.router.transport.udp.UDPTransport; import net.i2p.router.web.ConfigServiceHandler; /** * Handler to deal with form submissions from the main config form and act * upon the values. * */ public class ConfigNetHandler extends FormHandler { private String _hostname; private boolean _reseedRequested; private boolean _saveRequested; private boolean _recheckReachabilityRequested; private boolean _timeSyncEnabled; private boolean _requireIntroductions; private boolean _hiddenMode; private boolean _dynamicKeys; private String _ntcpHostname; private String _ntcpPort; private String _tcpPort; private String _udpHost1; private String _udpHost2; private String _udpPort; private String _udpAutoIP; private String _ntcpAutoIP; private boolean _ntcpAutoPort; private boolean _upnp; private boolean _laptop; private String _inboundRate; private String _inboundBurstRate; private String _inboundBurst; private String _outboundRate; private String _outboundBurstRate; private String _outboundBurst; private String _reseedFrom; private boolean _enableLoadTesting; private String _sharePct; private static final boolean _ratesOnly = false; // always false - delete me private static final String PROP_HIDDEN = Router.PROP_HIDDEN_HIDDEN; // see Router for other choice @Override protected void processForm() { if (_saveRequested || ( (_action != null) && (_("Save changes").equals(_action)) )) { saveChanges(); } else if (_recheckReachabilityRequested) { recheckReachability(); } else { // noop } } public void setSave(String moo) { _saveRequested = true; } public void setEnabletimesync(String moo) { _timeSyncEnabled = true; } public void setRecheckReachability(String moo) { _recheckReachabilityRequested = true; } public void setRequireIntroductions(String moo) { _requireIntroductions = true; } public void setDynamicKeys(String 
moo) { _dynamicKeys = true; } public void setEnableloadtesting(String moo) { _enableLoadTesting = true; } public void setUdpAutoIP(String mode) { _udpAutoIP = mode; _hiddenMode = "hidden".equals(mode); } public void setNtcpAutoIP(String mode) { _ntcpAutoIP = mode; } public void setNtcpAutoPort(String mode) { _ntcpAutoPort = mode.equals("2"); } public void setUpnp(String moo) { _upnp = true; } public void setLaptop(String moo) { _laptop = true; } public void setHostname(String hostname) { _hostname = (hostname != null ? hostname.trim() : null); } public void setTcpPort(String port) { _tcpPort = (port != null ? port.trim() : null); } public void setNtcphost(String host) { _ntcpHostname = (host != null ? host.trim() : null); } public void setNtcpport(String port) { _ntcpPort = (port != null ? port.trim() : null); } public void setUdpHost1(String host) { _udpHost1 = (host != null ? host.trim() : null); } public void setUdpHost2(String host) { _udpHost2 = (host != null ? host.trim() : null); } public void setUdpPort(String port) { _udpPort = (port != null ? port.trim() : null); } public void setInboundrate(String rate) { _inboundRate = (rate != null ? rate.trim() : null); } public void setInboundburstrate(String rate) { _inboundBurstRate = (rate != null ? rate.trim() : null); } public void setInboundburstfactor(String factor) { _inboundBurst = (factor != null ? factor.trim() : null); } public void setOutboundrate(String rate) { _outboundRate = (rate != null ? rate.trim() : null); } public void setOutboundburstrate(String rate) { _outboundBurstRate = (rate != null ? rate.trim() : null); } public void setOutboundburstfactor(String factor) { _outboundBurst = (factor != null ? factor.trim() : null); } public void setSharePercentage(String pct) { _sharePct = (pct != null ? 
pct.trim() : null); } private void recheckReachability() { _context.commSystem().recheckReachability(); addFormNotice(_("Rechecking router reachability...")); } /** * The user made changes to the network config and wants to save them, so * lets go ahead and do so. * */ private void saveChanges() { boolean restartRequired = false; if (!_ratesOnly) { // IP Settings String oldUdp = _context.getProperty(UDPTransport.PROP_SOURCES, UDPTransport.DEFAULT_SOURCES); String oldUHost = _context.getProperty(UDPTransport.PROP_EXTERNAL_HOST, ""); if (_udpAutoIP != null) { String uhost = ""; if (_udpAutoIP.equals("fixed")) { if (_udpHost1 != null && _udpHost1.length() > 0) uhost = _udpHost1; else if (_udpHost2 != null && _udpHost2.length() > 0) uhost = _udpHost2; else _udpAutoIP = UDPTransport.DEFAULT_SOURCES; } _context.router().setConfigSetting(UDPTransport.PROP_SOURCES, _udpAutoIP); // Todo: Catch local IPs right here rather than complaining later _context.router().setConfigSetting(UDPTransport.PROP_EXTERNAL_HOST, uhost); if ((!oldUdp.equals(_udpAutoIP)) || (!oldUHost.equals(uhost))) { addFormNotice(_("Updating IP address")); restartRequired = true; } } // NTCP Settings // Normalize some things to make the following code a little easier... String oldNHost = _context.getProperty(ConfigNetHelper.PROP_I2NP_NTCP_HOSTNAME, ""); String oldNPort = _context.getProperty(ConfigNetHelper.PROP_I2NP_NTCP_PORT, ""); String oldAutoHost = _context.getProperty(ConfigNetHelper.PROP_I2NP_NTCP_AUTO_IP, "true"); String sAutoPort = _context.getProperty(ConfigNetHelper.PROP_I2NP_NTCP_AUTO_PORT, "true"); boolean oldAutoPort = "true".equalsIgnoreCase(sAutoPort); if (_ntcpHostname == null) _ntcpHostname = ""; if (_ntcpPort == null) _ntcpPort = ""; if (_ntcpAutoIP == null) _ntcpAutoIP = "true"; if ((!oldAutoHost.equals(_ntcpAutoIP)) || ! 
oldNHost.equalsIgnoreCase(_ntcpHostname)) { if ("disabled".equals(_ntcpAutoIP)) { addFormNotice(_("Disabling TCP completely")); } else if ("false".equals(_ntcpAutoIP) && _ntcpHostname.length() > 0) { // Todo: Catch local IPs right here rather than complaining later _context.router().setConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_HOSTNAME, _ntcpHostname); addFormNotice(_("Updating inbound TCP address to") + " " + _ntcpHostname); } else { _context.router().removeConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_HOSTNAME); if ("false".equals(_ntcpAutoIP)) addFormNotice(_("Disabling inbound TCP")); else addFormNotice(_("Updating inbound TCP address to auto")); // true or always } _context.router().setConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_AUTO_IP, _ntcpAutoIP); _context.router().setConfigSetting(TransportManager.PROP_ENABLE_NTCP, "" + !"disabled".equals(_ntcpAutoIP)); restartRequired = true; } if (oldAutoPort != _ntcpAutoPort || ! oldNPort.equals(_ntcpPort)) { if (_ntcpPort.length() > 0 && !_ntcpAutoPort) { _context.router().setConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_PORT, _ntcpPort); addFormNotice(_("Updating inbound TCP port to") + " " + _ntcpPort); } else { _context.router().removeConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_PORT); addFormNotice(_("Updating inbound TCP port to auto")); } _context.router().setConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_AUTO_PORT, "" + _ntcpAutoPort); restartRequired = true; } // UDP Settings if ( (_udpPort != null) && (_udpPort.length() > 0) ) { String oldPort = "" + _context.getProperty(UDPTransport.PROP_INTERNAL_PORT, UDPTransport.DEFAULT_INTERNAL_PORT); if (!oldPort.equals(_udpPort)) { _context.router().setConfigSetting(UDPTransport.PROP_INTERNAL_PORT, _udpPort); _context.router().setConfigSetting(UDPTransport.PROP_EXTERNAL_PORT, _udpPort); addFormNotice(_("Updating UDP port from") + " " + oldPort + " " + _("to") + " " + _udpPort); restartRequired = true; } } } updateRates(); boolean switchRequired = false; if 
(!_ratesOnly) { // If hidden mode value changes, restart is required switchRequired = _hiddenMode != _context.router().isHidden(); if (switchRequired) { _context.router().setConfigSetting(PROP_HIDDEN, "" + _hiddenMode); if (_hiddenMode) addFormError(_("Gracefully restarting into Hidden Router Mode")); else addFormError(_("Gracefully restarting to exit Hidden Router Mode")); } _context.router().setConfigSetting(Router.PROP_DYNAMIC_KEYS, "" + _dynamicKeys); if (Boolean.valueOf(_context.getProperty(TransportManager.PROP_ENABLE_UPNP)).booleanValue() != _upnp) { // This is minor, don't set restartRequired if (_upnp) addFormNotice(_("Enabling UPnP, restart required to take effect")); else addFormNotice(_("Disabling UPnP, restart required to take effect")); } _context.router().setConfigSetting(TransportManager.PROP_ENABLE_UPNP, "" + _upnp); if (Boolean.valueOf(_context.getProperty(UDPTransport.PROP_LAPTOP_MODE)).booleanValue() != _laptop) { // This is minor, don't set restartRequired if (_laptop) addFormNotice(_("Enabling laptop mode")); else addFormNotice(_("Disabling laptop mode")); } _context.router().setConfigSetting(UDPTransport.PROP_LAPTOP_MODE, "" + _laptop); if (_requireIntroductions) { _context.router().setConfigSetting(UDPTransport.PROP_FORCE_INTRODUCERS, "true"); addFormNotice(_("Requiring SSU introducers")); } else { _context.router().removeConfigSetting(UDPTransport.PROP_FORCE_INTRODUCERS); } // Time sync enable, means NOT disabled // Hmm router sets this at startup, not required here //_context.router().setConfigSetting(Timestamper.PROP_DISABLED, "false"); // Hidden in the GUI //LoadTestManager.setEnableLoadTesting(_context, _enableLoadTesting); } boolean saved = _context.router().saveConfig(); if ( (_action != null) && (_("Save changes").equals(_action)) ) { if (saved) addFormNotice(_("Configuration saved successfully")); else addFormNotice(_("Error saving the configuration (applied but not saved) - please see the error logs")); } if (switchRequired) { 
hiddenSwitch(); } else if (restartRequired) { if (System.getProperty("wrapper.version") == null) { // Wow this dumps all conns immediately and really isn't nice addFormNotice("Performing a soft restart"); _context.router().restart(); addFormNotice("Soft restart complete"); // Most of the time we aren't changing addresses, just enabling or disabling // things, so let's try just a new routerInfo and see how that works. // Maybe we should restart if we change addresses though? // No, this doesn't work well, really need to call SSU Transport externalAddressReceived(), // but that's hard to get to, and doesn't handle port changes, etc. // So don't do this... //_context.router().rebuildRouterInfo(); //addFormNotice("Router Info rebuilt"); } else { // There's a few changes that don't really require restart (e.g. enabling inbound TCP) // But it would be hard to get right, so just do a restart. addFormError(_("Gracefully restarting I2P to change published router address")); _context.addShutdownTask(new ConfigServiceHandler.UpdateWrapperManagerTask(Router.EXIT_GRACEFUL_RESTART)); _context.router().shutdownGracefully(Router.EXIT_GRACEFUL_RESTART); } } } private void hiddenSwitch() { // Full restart required to generate new keys _context.addShutdownTask(new ConfigServiceHandler.UpdateWrapperManagerAndRekeyTask(Router.EXIT_GRACEFUL_RESTART)); _context.router().shutdownGracefully(Router.EXIT_GRACEFUL_RESTART); } private static final int DEF_BURST_PCT = 10; private static final int DEF_BURST_TIME = 20; private void updateRates() { boolean updated = false; if (_sharePct != null) { String old = _context.router().getConfigSetting(Router.PROP_BANDWIDTH_SHARE_PERCENTAGE); if ( (old == null) || (!old.equalsIgnoreCase(_sharePct)) ) { _context.router().setConfigSetting(Router.PROP_BANDWIDTH_SHARE_PERCENTAGE, _sharePct); addFormNotice(_("Updating bandwidth share percentage")); updated = true; } } // Since burst is now hidden in the gui, set burst to +10% for 20 seconds if ( (_inboundRate 
!= null) && (_inboundRate.length() > 0) && !_inboundRate.equals(_context.getProperty(FIFOBandwidthRefiller.PROP_INBOUND_BANDWIDTH, "" + FIFOBandwidthRefiller.DEFAULT_INBOUND_BANDWIDTH))) { _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BANDWIDTH, _inboundRate); try { int rate = Integer.parseInt(_inboundRate) * (100 + DEF_BURST_PCT) / 100; int kb = DEF_BURST_TIME * rate; _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BURST_BANDWIDTH, "" + rate); _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BANDWIDTH_PEAK, "" + kb); } catch (NumberFormatException nfe) {} updated = true; } if ( (_outboundRate != null) && (_outboundRate.length() > 0) && !_outboundRate.equals(_context.getProperty(FIFOBandwidthRefiller.PROP_OUTBOUND_BANDWIDTH, "" + FIFOBandwidthRefiller.DEFAULT_OUTBOUND_BANDWIDTH))) { _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BANDWIDTH, _outboundRate); try { int rate = Integer.parseInt(_outboundRate) * (100 + DEF_BURST_PCT) / 100; int kb = DEF_BURST_TIME * rate; _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BURST_BANDWIDTH, "" + rate); _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BANDWIDTH_PEAK, "" + kb); } catch (NumberFormatException nfe) {} updated = true; } /******* These aren't in the GUI for now if ( (_inboundBurstRate != null) && (_inboundBurstRate.length() > 0) && !_inboundBurstRate.equals(_context.getProperty(FIFOBandwidthRefiller.PROP_INBOUND_BURST_BANDWIDTH, "" + FIFOBandwidthRefiller.DEFAULT_INBOUND_BURST_BANDWIDTH))) { _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BURST_BANDWIDTH, _inboundBurstRate); updated = true; } if ( (_outboundBurstRate != null) && (_outboundBurstRate.length() > 0) && !_outboundBurstRate.equals(_context.getProperty(FIFOBandwidthRefiller.PROP_OUTBOUND_BURST_BANDWIDTH, "" + FIFOBandwidthRefiller.DEFAULT_OUTBOUND_BURST_BANDWIDTH))) { 
_context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BURST_BANDWIDTH, _outboundBurstRate); updated = true; } String inBurstRate = _context.router().getConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BURST_BANDWIDTH); if (_inboundBurst != null) { int rateKBps = 0; int burstSeconds = 0; try { rateKBps = Integer.parseInt(inBurstRate); burstSeconds = Integer.parseInt(_inboundBurst); } catch (NumberFormatException nfe) { // ignore } if ( (rateKBps > 0) && (burstSeconds > 0) ) { int kb = rateKBps * burstSeconds; _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BANDWIDTH_PEAK, "" + kb); updated = true; } } String outBurstRate = _context.router().getConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BURST_BANDWIDTH); if (_outboundBurst != null) { int rateKBps = 0; int burstSeconds = 0; try { rateKBps = Integer.parseInt(outBurstRate); burstSeconds = Integer.parseInt(_outboundBurst); } catch (NumberFormatException nfe) { // ignore } if ( (rateKBps > 0) && (burstSeconds > 0) ) { int kb = rateKBps * burstSeconds; _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BANDWIDTH_PEAK, "" + kb); updated = true; } } ***********/ if (updated && !_ratesOnly) { _context.bandwidthLimiter().reinitialize(); addFormNotice(_("Updated bandwidth limits")); } } }
apps/routerconsole/java/src/net/i2p/router/web/ConfigNetHandler.java
package net.i2p.router.web; import net.i2p.router.Router; import net.i2p.router.transport.FIFOBandwidthRefiller; import net.i2p.router.transport.TransportManager; import net.i2p.router.transport.udp.UDPTransport; import net.i2p.router.web.ConfigServiceHandler; /** * Handler to deal with form submissions from the main config form and act * upon the values. * */ public class ConfigNetHandler extends FormHandler { private String _hostname; private boolean _reseedRequested; private boolean _saveRequested; private boolean _recheckReachabilityRequested; private boolean _timeSyncEnabled; private boolean _requireIntroductions; private boolean _hiddenMode; private boolean _dynamicKeys; private String _ntcpHostname; private String _ntcpPort; private String _tcpPort; private String _udpHost1; private String _udpHost2; private String _udpPort; private String _udpAutoIP; private String _ntcpAutoIP; private boolean _ntcpAutoPort; private boolean _upnp; private boolean _laptop; private String _inboundRate; private String _inboundBurstRate; private String _inboundBurst; private String _outboundRate; private String _outboundBurstRate; private String _outboundBurst; private String _reseedFrom; private boolean _enableLoadTesting; private String _sharePct; private static final boolean _ratesOnly = false; // always false - delete me private static final String PROP_HIDDEN = Router.PROP_HIDDEN_HIDDEN; // see Router for other choice @Override protected void processForm() { if (_saveRequested || ( (_action != null) && (_("Save changes").equals(_action)) )) { saveChanges(); } else if (_recheckReachabilityRequested) { recheckReachability(); } else { // noop } } public void setSave(String moo) { _saveRequested = true; } public void setEnabletimesync(String moo) { _timeSyncEnabled = true; } public void setRecheckReachability(String moo) { _recheckReachabilityRequested = true; } public void setRequireIntroductions(String moo) { _requireIntroductions = true; } public void setDynamicKeys(String 
moo) { _dynamicKeys = true; } public void setEnableloadtesting(String moo) { _enableLoadTesting = true; } public void setUdpAutoIP(String mode) { _udpAutoIP = mode; _hiddenMode = "hidden".equals(mode); } public void setNtcpAutoIP(String mode) { _ntcpAutoIP = mode; } public void setNtcpAutoPort(String mode) { _ntcpAutoPort = mode.equals("2"); } public void setUpnp(String moo) { _upnp = true; } public void setLaptop(String moo) { _laptop = true; } public void setHostname(String hostname) { _hostname = (hostname != null ? hostname.trim() : null); } public void setTcpPort(String port) { _tcpPort = (port != null ? port.trim() : null); } public void setNtcphost(String host) { _ntcpHostname = (host != null ? host.trim() : null); } public void setNtcpport(String port) { _ntcpPort = (port != null ? port.trim() : null); } public void setUdpHost1(String host) { _udpHost1 = (host != null ? host.trim() : null); } public void setUdpHost2(String host) { _udpHost2 = (host != null ? host.trim() : null); } public void setUdpPort(String port) { _udpPort = (port != null ? port.trim() : null); } public void setInboundrate(String rate) { _inboundRate = (rate != null ? rate.trim() : null); } public void setInboundburstrate(String rate) { _inboundBurstRate = (rate != null ? rate.trim() : null); } public void setInboundburstfactor(String factor) { _inboundBurst = (factor != null ? factor.trim() : null); } public void setOutboundrate(String rate) { _outboundRate = (rate != null ? rate.trim() : null); } public void setOutboundburstrate(String rate) { _outboundBurstRate = (rate != null ? rate.trim() : null); } public void setOutboundburstfactor(String factor) { _outboundBurst = (factor != null ? factor.trim() : null); } public void setSharePercentage(String pct) { _sharePct = (pct != null ? 
pct.trim() : null); } private void recheckReachability() { _context.commSystem().recheckReachability(); addFormNotice(_("Rechecking router reachability...")); } /** * The user made changes to the network config and wants to save them, so * lets go ahead and do so. * */ private void saveChanges() { boolean restartRequired = false; if (!_ratesOnly) { // IP Settings String oldUdp = _context.getProperty(UDPTransport.PROP_SOURCES, UDPTransport.DEFAULT_SOURCES); String oldUHost = _context.getProperty(UDPTransport.PROP_EXTERNAL_HOST, ""); if (_udpAutoIP != null) { String uhost = ""; if (_udpAutoIP.equals("fixed")) { if (_udpHost1 != null && _udpHost1.length() > 0) uhost = _udpHost1; else if (_udpHost2 != null && _udpHost2.length() > 0) uhost = _udpHost2; else _udpAutoIP = UDPTransport.DEFAULT_SOURCES; } _context.router().setConfigSetting(UDPTransport.PROP_SOURCES, _udpAutoIP); // Todo: Catch local IPs right here rather than complaining later _context.router().setConfigSetting(UDPTransport.PROP_EXTERNAL_HOST, uhost); if ((!oldUdp.equals(_udpAutoIP)) || (!oldUHost.equals(uhost))) { addFormNotice(_("Updating IP address")); restartRequired = true; } } // NTCP Settings // Normalize some things to make the following code a little easier... String oldNHost = _context.getProperty(ConfigNetHelper.PROP_I2NP_NTCP_HOSTNAME, ""); String oldNPort = _context.getProperty(ConfigNetHelper.PROP_I2NP_NTCP_PORT, ""); String oldAutoHost = _context.getProperty(ConfigNetHelper.PROP_I2NP_NTCP_AUTO_IP, "true"); String sAutoPort = _context.getProperty(ConfigNetHelper.PROP_I2NP_NTCP_AUTO_PORT, "true"); boolean oldAutoPort = "true".equalsIgnoreCase(sAutoPort); if (_ntcpHostname == null) _ntcpHostname = ""; if (_ntcpPort == null) _ntcpPort = ""; if (_ntcpAutoIP == null) _ntcpAutoIP = "true"; if ((!oldAutoHost.equals(_ntcpAutoIP)) || ! 
oldNHost.equalsIgnoreCase(_ntcpHostname)) { if ("disabled".equals(_ntcpAutoIP)) { addFormNotice(_("Disabling TCP completely")); } else if ("false".equals(_ntcpAutoIP) && _ntcpHostname.length() > 0) { // Todo: Catch local IPs right here rather than complaining later _context.router().setConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_HOSTNAME, _ntcpHostname); addFormNotice(_("Updating inbound TCP address to") + " " + _ntcpHostname); } else { _context.router().removeConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_HOSTNAME); if ("false".equals(_ntcpAutoIP)) addFormNotice(_("Disabling inbound TCP")); else addFormNotice(_("Updating inbound TCP address to auto")); // true or always } _context.router().setConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_AUTO_IP, _ntcpAutoIP); _context.router().setConfigSetting(TransportManager.PROP_ENABLE_NTCP, "" + !"disabled".equals(_ntcpAutoIP)); restartRequired = true; } if (oldAutoPort != _ntcpAutoPort || ! oldNPort.equals(_ntcpPort)) { if (_ntcpPort.length() > 0 && !_ntcpAutoPort) { _context.router().setConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_PORT, _ntcpPort); addFormNotice(_("Updating inbound TCP port to") + " " + _ntcpPort); } else { _context.router().removeConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_PORT); addFormNotice(_("Updating inbound TCP port to auto")); } _context.router().setConfigSetting(ConfigNetHelper.PROP_I2NP_NTCP_AUTO_PORT, "" + _ntcpAutoPort); restartRequired = true; } // UDP Settings if ( (_udpPort != null) && (_udpPort.length() > 0) ) { String oldPort = "" + _context.getProperty(UDPTransport.PROP_INTERNAL_PORT, UDPTransport.DEFAULT_INTERNAL_PORT); if (!oldPort.equals(_udpPort)) { _context.router().setConfigSetting(UDPTransport.PROP_INTERNAL_PORT, _udpPort); _context.router().setConfigSetting(UDPTransport.PROP_EXTERNAL_PORT, _udpPort); addFormNotice(_("Updating UDP port from") + " " + oldPort + " " + _("to") + " " + _udpPort); restartRequired = true; } } } updateRates(); boolean switchRequired = false; if 
(!_ratesOnly) { // If hidden mode value changes, restart is required switchRequired = _hiddenMode != _context.router().isHidden(); if (switchRequired) { _context.router().setConfigSetting(PROP_HIDDEN, "" + _hiddenMode); if (_hiddenMode) addFormError(_("Gracefully restarting into Hidden Router Mode")); else addFormError(_("Gracefully restarting to exit Hidden Router Mode")); } _context.router().setConfigSetting(Router.PROP_DYNAMIC_KEYS, "" + _dynamicKeys); if (Boolean.valueOf(_context.getProperty(TransportManager.PROP_ENABLE_UPNP)).booleanValue() != _upnp) { // This is minor, don't set restartRequired if (_upnp) addFormNotice(_("Enabling UPnP, restart required to take effect")); else addFormNotice(_("Disabling UPnP, restart required to take effect")); } _context.router().setConfigSetting(TransportManager.PROP_ENABLE_UPNP, "" + _upnp); if (Boolean.valueOf(_context.getProperty(UDPTransport.PROP_LAPTOP_MODE)).booleanValue() != _laptop) { // This is minor, don't set restartRequired if (_laptop) addFormNotice(_("Enabling laptop mode")); else addFormNotice(_("Disabling laptop mode")); } _context.router().setConfigSetting(UDPTransport.PROP_LAPTOP_MODE, "" + _laptop); if (_requireIntroductions) { _context.router().setConfigSetting(UDPTransport.PROP_FORCE_INTRODUCERS, "true"); addFormNotice(_("Requiring SSU introducers")); } else { _context.router().removeConfigSetting(UDPTransport.PROP_FORCE_INTRODUCERS); } // Time sync enable, means NOT disabled // Hmm router sets this at startup, not required here //_context.router().setConfigSetting(Timestamper.PROP_DISABLED, "false"); // Hidden in the GUI //LoadTestManager.setEnableLoadTesting(_context, _enableLoadTesting); } boolean saved = _context.router().saveConfig(); if ( (_action != null) && (_("Save changes").equals(_action)) ) { if (saved) addFormNotice(_("Configuration saved successfully")); else addFormNotice(_("Error saving the configuration (applied but not saved) - please see the error logs")); } if (switchRequired) { 
hiddenSwitch(); } else if (restartRequired) { if (System.getProperty("wrapper.version") == null) { // Wow this dumps all conns immediately and really isn't nice addFormNotice("Performing a soft restart"); _context.router().restart(); addFormNotice("Soft restart complete"); // Most of the time we aren't changing addresses, just enabling or disabling // things, so let's try just a new routerInfo and see how that works. // Maybe we should restart if we change addresses though? // No, this doesn't work well, really need to call SSU Transport externalAddressReceived(), // but that's hard to get to, and doesn't handle port changes, etc. // So don't do this... //_context.router().rebuildRouterInfo(); //addFormNotice("Router Info rebuilt"); } else { // There's a few changes that don't really require restart (e.g. enabling inbound TCP) // But it would be hard to get right, so just do a restart. addFormError(_("Gracefully restarting I2P to change published router address")); _context.addShutdownTask(new ConfigServiceHandler.UpdateWrapperManagerTask(Router.EXIT_GRACEFUL_RESTART)); _context.router().shutdownGracefully(Router.EXIT_GRACEFUL_RESTART); } } } private void hiddenSwitch() { // Full restart required to generate new keys _context.addShutdownTask(new ConfigServiceHandler.UpdateWrapperManagerAndRekeyTask(Router.EXIT_GRACEFUL_RESTART)); _context.router().shutdownGracefully(Router.EXIT_GRACEFUL_RESTART); } private void updateRates() { boolean updated = false; if (_sharePct != null) { String old = _context.router().getConfigSetting(Router.PROP_BANDWIDTH_SHARE_PERCENTAGE); if ( (old == null) || (!old.equalsIgnoreCase(_sharePct)) ) { _context.router().setConfigSetting(Router.PROP_BANDWIDTH_SHARE_PERCENTAGE, _sharePct); addFormNotice(_("Updating bandwidth share percentage")); updated = true; } } if ( (_inboundRate != null) && (_inboundRate.length() > 0) && !_inboundRate.equals(_context.getProperty(FIFOBandwidthRefiller.PROP_INBOUND_BANDWIDTH, "" + 
FIFOBandwidthRefiller.DEFAULT_INBOUND_BANDWIDTH))) { _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BANDWIDTH, _inboundRate); updated = true; } if ( (_outboundRate != null) && (_outboundRate.length() > 0) && !_outboundRate.equals(_context.getProperty(FIFOBandwidthRefiller.PROP_OUTBOUND_BANDWIDTH, "" + FIFOBandwidthRefiller.DEFAULT_OUTBOUND_BANDWIDTH))) { _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BANDWIDTH, _outboundRate); updated = true; } /******* These aren't in the GUI for now if ( (_inboundBurstRate != null) && (_inboundBurstRate.length() > 0) && !_inboundBurstRate.equals(_context.getProperty(FIFOBandwidthRefiller.PROP_INBOUND_BURST_BANDWIDTH, "" + FIFOBandwidthRefiller.DEFAULT_INBOUND_BURST_BANDWIDTH))) { _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BURST_BANDWIDTH, _inboundBurstRate); updated = true; } if ( (_outboundBurstRate != null) && (_outboundBurstRate.length() > 0) && !_outboundBurstRate.equals(_context.getProperty(FIFOBandwidthRefiller.PROP_OUTBOUND_BURST_BANDWIDTH, "" + FIFOBandwidthRefiller.DEFAULT_OUTBOUND_BURST_BANDWIDTH))) { _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BURST_BANDWIDTH, _outboundBurstRate); updated = true; } String inBurstRate = _context.router().getConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BURST_BANDWIDTH); if (_inboundBurst != null) { int rateKBps = 0; int burstSeconds = 0; try { rateKBps = Integer.parseInt(inBurstRate); burstSeconds = Integer.parseInt(_inboundBurst); } catch (NumberFormatException nfe) { // ignore } if ( (rateKBps > 0) && (burstSeconds > 0) ) { int kb = rateKBps * burstSeconds; _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BANDWIDTH_PEAK, "" + kb); updated = true; } } String outBurstRate = _context.router().getConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BURST_BANDWIDTH); if (_outboundBurst != null) { int rateKBps = 0; int burstSeconds = 0; try { rateKBps = 
Integer.parseInt(outBurstRate); burstSeconds = Integer.parseInt(_outboundBurst); } catch (NumberFormatException nfe) { // ignore } if ( (rateKBps > 0) && (burstSeconds > 0) ) { int kb = rateKBps * burstSeconds; _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BANDWIDTH_PEAK, "" + kb); updated = true; } } ***********/ if (updated && !_ratesOnly) { _context.bandwidthLimiter().reinitialize(); addFormNotice(_("Updated bandwidth limits")); } } }
* config.jsp: Set burst to +10% for 20s by default, to fix bug where the burst stays high when limits are reduced.
apps/routerconsole/java/src/net/i2p/router/web/ConfigNetHandler.java
* config.jsp: Set burst to +10% for 20s by default, to fix bug where the burst stays high when limits are reduced.
<ide><path>pps/routerconsole/java/src/net/i2p/router/web/ConfigNetHandler.java <ide> _context.router().shutdownGracefully(Router.EXIT_GRACEFUL_RESTART); <ide> } <ide> <add> private static final int DEF_BURST_PCT = 10; <add> private static final int DEF_BURST_TIME = 20; <add> <ide> private void updateRates() { <ide> boolean updated = false; <ide> <ide> } <ide> } <ide> <add> // Since burst is now hidden in the gui, set burst to +10% for 20 seconds <ide> if ( (_inboundRate != null) && (_inboundRate.length() > 0) && <ide> !_inboundRate.equals(_context.getProperty(FIFOBandwidthRefiller.PROP_INBOUND_BANDWIDTH, "" + FIFOBandwidthRefiller.DEFAULT_INBOUND_BANDWIDTH))) { <ide> _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BANDWIDTH, _inboundRate); <add> try { <add> int rate = Integer.parseInt(_inboundRate) * (100 + DEF_BURST_PCT) / 100; <add> int kb = DEF_BURST_TIME * rate; <add> _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BURST_BANDWIDTH, "" + rate); <add> _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_INBOUND_BANDWIDTH_PEAK, "" + kb); <add> } catch (NumberFormatException nfe) {} <ide> updated = true; <ide> } <ide> if ( (_outboundRate != null) && (_outboundRate.length() > 0) && <ide> !_outboundRate.equals(_context.getProperty(FIFOBandwidthRefiller.PROP_OUTBOUND_BANDWIDTH, "" + FIFOBandwidthRefiller.DEFAULT_OUTBOUND_BANDWIDTH))) { <ide> _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BANDWIDTH, _outboundRate); <add> try { <add> int rate = Integer.parseInt(_outboundRate) * (100 + DEF_BURST_PCT) / 100; <add> int kb = DEF_BURST_TIME * rate; <add> _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BURST_BANDWIDTH, "" + rate); <add> _context.router().setConfigSetting(FIFOBandwidthRefiller.PROP_OUTBOUND_BANDWIDTH_PEAK, "" + kb); <add> } catch (NumberFormatException nfe) {} <ide> updated = true; <ide> } <ide>
Java
apache-2.0
bb763cdd4e62d06ec030d3e5b7b0257cd61e7976
0
apache/commons-compress,apache/commons-compress,apache/commons-compress
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.commons.compress.compressors.gzip; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.EOFException; import java.io.InputStream; import java.io.DataInputStream; import java.io.BufferedInputStream; import java.util.zip.DataFormatException; import java.util.zip.Deflater; import java.util.zip.Inflater; import java.util.zip.CRC32; import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.utils.CharsetNames; /** * Input stream that decompresses .gz files. * This supports decompressing concatenated .gz files which is important * when decompressing standalone .gz files. * <p> * {@link java.util.zip.GZIPInputStream} doesn't decompress concatenated .gz * files: it stops after the first member and silently ignores the rest. * It doesn't leave the read position to point to the beginning of the next * member, which makes it difficult workaround the lack of concatenation * support. * <p> * Instead of using <code>GZIPInputStream</code>, this class has its own .gz * container format decoder. The actual decompression is done with * {@link java.util.zip.Inflater}. 
*/ public class GzipCompressorInputStream extends CompressorInputStream { // Header flags // private static final int FTEXT = 0x01; // Uninteresting for us private static final int FHCRC = 0x02; private static final int FEXTRA = 0x04; private static final int FNAME = 0x08; private static final int FCOMMENT = 0x10; private static final int FRESERVED = 0xE0; // Compressed input stream, possibly wrapped in a BufferedInputStream private final InputStream in; // True if decompressing multi member streams. private final boolean decompressConcatenated; // Buffer to hold the input data private final byte[] buf = new byte[8192]; // Amount of data in buf. private int bufUsed; // Decompressor private Inflater inf = new Inflater(true); // CRC32 from uncompressed data private final CRC32 crc = new CRC32(); // True once everything has been decompressed private boolean endReached = false; // used in no-arg read method private final byte[] oneByte = new byte[1]; private final GzipParameters parameters = new GzipParameters(); /** * Constructs a new input stream that decompresses gzip-compressed data * from the specified input stream. * <p> * This is equivalent to * <code>GzipCompressorInputStream(inputStream, false)</code> and thus * will not decompress concatenated .gz files. * * @param inputStream the InputStream from which this object should * be created of * * @throws IOException if the stream could not be created */ public GzipCompressorInputStream(final InputStream inputStream) throws IOException { this(inputStream, false); } /** * Constructs a new input stream that decompresses gzip-compressed data * from the specified input stream. * <p> * If <code>decompressConcatenated</code> is {@code false}: * This decompressor might read more input than it will actually use. * If <code>inputStream</code> supports <code>mark</code> and * <code>reset</code>, then the input position will be adjusted * so that it is right after the last byte of the compressed stream. 
* If <code>mark</code> isn't supported, the input position will be * undefined. * * @param inputStream the InputStream from which this object should * be created of * @param decompressConcatenated * if true, decompress until the end of the input; * if false, stop after the first .gz member * * @throws IOException if the stream could not be created */ public GzipCompressorInputStream(final InputStream inputStream, final boolean decompressConcatenated) throws IOException { // Mark support is strictly needed for concatenated files only, // but it's simpler if it is always available. if (inputStream.markSupported()) { in = inputStream; } else { in = new BufferedInputStream(inputStream); } this.decompressConcatenated = decompressConcatenated; init(true); } /** * Provides the stream's meta data - may change with each stream * when decompressing concatenated streams. * @return the stream's meta data * @since 1.8 */ public GzipParameters getMetaData() { return parameters; } private boolean init(final boolean isFirstMember) throws IOException { assert isFirstMember || decompressConcatenated; // Check the magic bytes without a possibility of EOFException. final int magic0 = in.read(); final int magic1 = in.read(); // If end of input was reached after decompressing at least // one .gz member, we have reached the end of the file successfully. if (magic0 == -1 && !isFirstMember) { return false; } if (magic0 != 31 || magic1 != 139) { throw new IOException(isFirstMember ? "Input is not in the .gz format" : "Garbage after a valid .gz stream"); } // Parsing the rest of the header may throw EOFException. 
final DataInputStream inData = new DataInputStream(in); final int method = inData.readUnsignedByte(); if (method != Deflater.DEFLATED) { throw new IOException("Unsupported compression method " + method + " in the .gz header"); } final int flg = inData.readUnsignedByte(); if ((flg & FRESERVED) != 0) { throw new IOException( "Reserved flags are set in the .gz header"); } parameters.setModificationTime(readLittleEndianInt(inData) * 1000); switch (inData.readUnsignedByte()) { // extra flags case 2: parameters.setCompressionLevel(Deflater.BEST_COMPRESSION); break; case 4: parameters.setCompressionLevel(Deflater.BEST_SPEED); break; default: // ignored for now break; } parameters.setOperatingSystem(inData.readUnsignedByte()); // Extra field, ignored if ((flg & FEXTRA) != 0) { int xlen = inData.readUnsignedByte(); xlen |= inData.readUnsignedByte() << 8; // This isn't as efficient as calling in.skip would be, // but it's lazier to handle unexpected end of input this way. // Most files don't have an extra field anyway. while (xlen-- > 0) { inData.readUnsignedByte(); } } // Original file name if ((flg & FNAME) != 0) { parameters.setFilename(new String(readToNull(inData), CharsetNames.ISO_8859_1)); } // Comment if ((flg & FCOMMENT) != 0) { parameters.setComment(new String(readToNull(inData), CharsetNames.ISO_8859_1)); } // Header "CRC16" which is actually a truncated CRC32 (which isn't // as good as real CRC16). I don't know if any encoder implementation // sets this, so it's not worth trying to verify it. GNU gzip 1.4 // doesn't support this field, but zlib seems to be able to at least // skip over it. 
if ((flg & FHCRC) != 0) { inData.readShort(); } // Reset inf.reset(); crc.reset(); return true; } private static byte[] readToNull(final DataInputStream inData) throws IOException { final ByteArrayOutputStream bos = new ByteArrayOutputStream(); int b = 0; while ((b = inData.readUnsignedByte()) != 0x00) { // NOPMD bos.write(b); } return bos.toByteArray(); } private static long readLittleEndianInt(final DataInputStream inData) throws IOException { return inData.readUnsignedByte() | (inData.readUnsignedByte() << 8) | (inData.readUnsignedByte() << 16) | (((long) inData.readUnsignedByte()) << 24); } @Override public int read() throws IOException { return read(oneByte, 0, 1) == -1 ? -1 : oneByte[0] & 0xFF; } /** * {@inheritDoc} * * @since 1.1 */ @Override public int read(final byte[] b, int off, int len) throws IOException { if (endReached) { return -1; } int size = 0; while (len > 0) { if (inf.needsInput()) { // Remember the current position because we may need to // rewind after reading too much input. in.mark(buf.length); bufUsed = in.read(buf); if (bufUsed == -1) { throw new EOFException(); } inf.setInput(buf, 0, bufUsed); } int ret; try { ret = inf.inflate(b, off, len); } catch (final DataFormatException e) { throw new IOException("Gzip-compressed data is corrupt"); } crc.update(b, off, ret); off += ret; len -= ret; size += ret; count(ret); if (inf.finished()) { // We may have read too many bytes. Rewind the read // position to match the actual amount used. // // NOTE: The "if" is there just in case. Since we used // in.mark earlier, it should always skip enough. 
in.reset(); final int skipAmount = bufUsed - inf.getRemaining(); if (in.skip(skipAmount) != skipAmount) { throw new IOException(); } bufUsed = 0; final DataInputStream inData = new DataInputStream(in); // CRC32 final long crcStored = readLittleEndianInt(inData); if (crcStored != crc.getValue()) { throw new IOException("Gzip-compressed data is corrupt " + "(CRC32 error)"); } // Uncompressed size modulo 2^32 (ISIZE in the spec) final long isize = readLittleEndianInt(inData); if (isize != (inf.getBytesWritten() & 0xffffffffl)) { throw new IOException("Gzip-compressed data is corrupt" + "(uncompressed size mismatch)"); } // See if this is the end of the file. if (!decompressConcatenated || !init(false)) { inf.end(); inf = null; endReached = true; return size == 0 ? -1 : size; } } } return size; } /** * Checks if the signature matches what is expected for a .gz file. * * @param signature the bytes to check * @param length the number of bytes to check * @return true if this is a .gz stream, false otherwise * * @since 1.1 */ public static boolean matches(final byte[] signature, final int length) { if (length < 2) { return false; } if (signature[0] != 31) { return false; } if (signature[1] != -117) { return false; } return true; } /** * Closes the input stream (unless it is System.in). * * @since 1.2 */ @Override public void close() throws IOException { if (inf != null) { inf.end(); inf = null; } if (this.in != System.in) { this.in.close(); } } }
src/main/java/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.commons.compress.compressors.gzip; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.EOFException; import java.io.InputStream; import java.io.DataInputStream; import java.io.BufferedInputStream; import java.util.zip.DataFormatException; import java.util.zip.Deflater; import java.util.zip.Inflater; import java.util.zip.CRC32; import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.utils.CharsetNames; /** * Input stream that decompresses .gz files. * This supports decompressing concatenated .gz files which is important * when decompressing standalone .gz files. * <p> * {@link java.util.zip.GZIPInputStream} doesn't decompress concatenated .gz * files: it stops after the first member and silently ignores the rest. * It doesn't leave the read position to point to the beginning of the next * member, which makes it difficult workaround the lack of concatenation * support. * <p> * Instead of using <code>GZIPInputStream</code>, this class has its own .gz * container format decoder. The actual decompression is done with * {@link java.util.zip.Inflater}. 
*/ public class GzipCompressorInputStream extends CompressorInputStream { // Header flags // private static final int FTEXT = 0x01; // Uninteresting for us private static final int FHCRC = 0x02; private static final int FEXTRA = 0x04; private static final int FNAME = 0x08; private static final int FCOMMENT = 0x10; private static final int FRESERVED = 0xE0; // Compressed input stream, possibly wrapped in a BufferedInputStream private final InputStream in; // True if decompressing multimember streams. private final boolean decompressConcatenated; // Buffer to hold the input data private final byte[] buf = new byte[8192]; // Amount of data in buf. private int bufUsed = 0; // Decompressor private Inflater inf = new Inflater(true); // CRC32 from uncompressed data private final CRC32 crc = new CRC32(); // True once everything has been decompressed private boolean endReached = false; // used in no-arg read method private final byte[] oneByte = new byte[1]; private final GzipParameters parameters = new GzipParameters(); /** * Constructs a new input stream that decompresses gzip-compressed data * from the specified input stream. * <p> * This is equivalent to * <code>GzipCompressorInputStream(inputStream, false)</code> and thus * will not decompress concatenated .gz files. * * @param inputStream the InputStream from which this object should * be created of * * @throws IOException if the stream could not be created */ public GzipCompressorInputStream(final InputStream inputStream) throws IOException { this(inputStream, false); } /** * Constructs a new input stream that decompresses gzip-compressed data * from the specified input stream. * <p> * If <code>decompressConcatenated</code> is {@code false}: * This decompressor might read more input than it will actually use. * If <code>inputStream</code> supports <code>mark</code> and * <code>reset</code>, then the input position will be adjusted * so that it is right after the last byte of the compressed stream. 
* If <code>mark</code> isn't supported, the input position will be * undefined. * * @param inputStream the InputStream from which this object should * be created of * @param decompressConcatenated * if true, decompress until the end of the input; * if false, stop after the first .gz member * * @throws IOException if the stream could not be created */ public GzipCompressorInputStream(final InputStream inputStream, final boolean decompressConcatenated) throws IOException { // Mark support is strictly needed for concatenated files only, // but it's simpler if it is always available. if (inputStream.markSupported()) { in = inputStream; } else { in = new BufferedInputStream(inputStream); } this.decompressConcatenated = decompressConcatenated; init(true); } /** * Provides the stream's meta data - may change with each stream * when decompressing concatenated streams. * @return the stream's meta data * @since 1.8 */ public GzipParameters getMetaData() { return parameters; } private boolean init(final boolean isFirstMember) throws IOException { assert isFirstMember || decompressConcatenated; // Check the magic bytes without a possibility of EOFException. final int magic0 = in.read(); final int magic1 = in.read(); // If end of input was reached after decompressing at least // one .gz member, we have reached the end of the file successfully. if (magic0 == -1 && !isFirstMember) { return false; } if (magic0 != 31 || magic1 != 139) { throw new IOException(isFirstMember ? "Input is not in the .gz format" : "Garbage after a valid .gz stream"); } // Parsing the rest of the header may throw EOFException. 
final DataInputStream inData = new DataInputStream(in); final int method = inData.readUnsignedByte(); if (method != Deflater.DEFLATED) { throw new IOException("Unsupported compression method " + method + " in the .gz header"); } final int flg = inData.readUnsignedByte(); if ((flg & FRESERVED) != 0) { throw new IOException( "Reserved flags are set in the .gz header"); } parameters.setModificationTime(readLittleEndianInt(inData) * 1000); switch (inData.readUnsignedByte()) { // extra flags case 2: parameters.setCompressionLevel(Deflater.BEST_COMPRESSION); break; case 4: parameters.setCompressionLevel(Deflater.BEST_SPEED); break; default: // ignored for now break; } parameters.setOperatingSystem(inData.readUnsignedByte()); // Extra field, ignored if ((flg & FEXTRA) != 0) { int xlen = inData.readUnsignedByte(); xlen |= inData.readUnsignedByte() << 8; // This isn't as efficient as calling in.skip would be, // but it's lazier to handle unexpected end of input this way. // Most files don't have an extra field anyway. while (xlen-- > 0) { inData.readUnsignedByte(); } } // Original file name if ((flg & FNAME) != 0) { parameters.setFilename(new String(readToNull(inData), CharsetNames.ISO_8859_1)); } // Comment if ((flg & FCOMMENT) != 0) { parameters.setComment(new String(readToNull(inData), CharsetNames.ISO_8859_1)); } // Header "CRC16" which is actually a truncated CRC32 (which isn't // as good as real CRC16). I don't know if any encoder implementation // sets this, so it's not worth trying to verify it. GNU gzip 1.4 // doesn't support this field, but zlib seems to be able to at least // skip over it. 
if ((flg & FHCRC) != 0) { inData.readShort(); } // Reset inf.reset(); crc.reset(); return true; } private byte[] readToNull(final DataInputStream inData) throws IOException { final ByteArrayOutputStream bos = new ByteArrayOutputStream(); int b = 0; while ((b = inData.readUnsignedByte()) != 0x00) { // NOPMD bos.write(b); } return bos.toByteArray(); } private long readLittleEndianInt(final DataInputStream inData) throws IOException { return inData.readUnsignedByte() | (inData.readUnsignedByte() << 8) | (inData.readUnsignedByte() << 16) | (((long) inData.readUnsignedByte()) << 24); } @Override public int read() throws IOException { return read(oneByte, 0, 1) == -1 ? -1 : oneByte[0] & 0xFF; } /** * {@inheritDoc} * * @since 1.1 */ @Override public int read(final byte[] b, int off, int len) throws IOException { if (endReached) { return -1; } int size = 0; while (len > 0) { if (inf.needsInput()) { // Remember the current position because we may need to // rewind after reading too much input. in.mark(buf.length); bufUsed = in.read(buf); if (bufUsed == -1) { throw new EOFException(); } inf.setInput(buf, 0, bufUsed); } int ret; try { ret = inf.inflate(b, off, len); } catch (final DataFormatException e) { throw new IOException("Gzip-compressed data is corrupt"); } crc.update(b, off, ret); off += ret; len -= ret; size += ret; count(ret); if (inf.finished()) { // We may have read too many bytes. Rewind the read // position to match the actual amount used. // // NOTE: The "if" is there just in case. Since we used // in.mark earler, it should always skip enough. 
in.reset(); final int skipAmount = bufUsed - inf.getRemaining(); if (in.skip(skipAmount) != skipAmount) { throw new IOException(); } bufUsed = 0; final DataInputStream inData = new DataInputStream(in); // CRC32 final long crcStored = readLittleEndianInt(inData); if (crcStored != crc.getValue()) { throw new IOException("Gzip-compressed data is corrupt " + "(CRC32 error)"); } // Uncompressed size modulo 2^32 (ISIZE in the spec) final long isize = readLittleEndianInt(inData); if (isize != (inf.getBytesWritten() & 0xffffffffl)) { throw new IOException("Gzip-compressed data is corrupt" + "(uncompressed size mismatch)"); } // See if this is the end of the file. if (!decompressConcatenated || !init(false)) { inf.end(); inf = null; endReached = true; return size == 0 ? -1 : size; } } } return size; } /** * Checks if the signature matches what is expected for a .gz file. * * @param signature the bytes to check * @param length the number of bytes to check * @return true if this is a .gz stream, false otherwise * * @since 1.1 */ public static boolean matches(final byte[] signature, final int length) { if (length < 2) { return false; } if (signature[0] != 31) { return false; } if (signature[1] != -117) { return false; } return true; } /** * Closes the input stream (unless it is System.in). * * @since 1.2 */ @Override public void close() throws IOException { if (inf != null) { inf.end(); inf = null; } if (this.in != System.in) { this.in.close(); } } }
Some fixes for GZipCompressorInputStream
src/main/java/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.java
Some fixes for GZipCompressorInputStream
<ide><path>rc/main/java/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.java <ide> // Compressed input stream, possibly wrapped in a BufferedInputStream <ide> private final InputStream in; <ide> <del> // True if decompressing multimember streams. <add> // True if decompressing multi member streams. <ide> private final boolean decompressConcatenated; <ide> <ide> // Buffer to hold the input data <ide> private final byte[] buf = new byte[8192]; <ide> <ide> // Amount of data in buf. <del> private int bufUsed = 0; <add> private int bufUsed; <ide> <ide> // Decompressor <ide> private Inflater inf = new Inflater(true); <ide> return true; <ide> } <ide> <del> private byte[] readToNull(final DataInputStream inData) throws IOException { <add> private static byte[] readToNull(final DataInputStream inData) throws IOException { <ide> final ByteArrayOutputStream bos = new ByteArrayOutputStream(); <ide> int b = 0; <ide> while ((b = inData.readUnsignedByte()) != 0x00) { // NOPMD <ide> return bos.toByteArray(); <ide> } <ide> <del> private long readLittleEndianInt(final DataInputStream inData) throws IOException { <add> private static long readLittleEndianInt(final DataInputStream inData) throws IOException { <ide> return inData.readUnsignedByte() <ide> | (inData.readUnsignedByte() << 8) <ide> | (inData.readUnsignedByte() << 16) <ide> // position to match the actual amount used. <ide> // <ide> // NOTE: The "if" is there just in case. Since we used <del> // in.mark earler, it should always skip enough. <add> // in.mark earlier, it should always skip enough. <ide> in.reset(); <ide> <ide> final int skipAmount = bufUsed - inf.getRemaining();
Java
bsd-2-clause
9095d7dfdac35f13638f064f095ce966e5723485
0
bogovicj/bigdataviewer-core,bigdataviewer/bigdataviewer-core,bigdataviewer/bigdataviewer-core,bogovicj/bigdataviewer-core
package bdv.export; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.CountDownLatch; import mpicbg.spim.data.XmlHelpers; import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription; import mpicbg.spim.data.generic.sequence.BasicImgLoader; import mpicbg.spim.data.generic.sequence.BasicViewSetup; import mpicbg.spim.data.sequence.TimePoint; import mpicbg.spim.data.sequence.TimePoints; import mpicbg.spim.data.sequence.ViewId; import net.imglib2.Cursor; import net.imglib2.Dimensions; import net.imglib2.FinalInterval; import net.imglib2.RandomAccess; import net.imglib2.RandomAccessibleInterval; import net.imglib2.img.array.ArrayImg; import net.imglib2.img.array.ArrayImgs; import net.imglib2.img.basictypeaccess.array.ShortArray; import net.imglib2.img.cell.CellImg; import net.imglib2.iterator.LocalizingIntervalIterator; import net.imglib2.type.numeric.RealType; import net.imglib2.type.numeric.integer.UnsignedShortType; import net.imglib2.view.Views; import bdv.img.hdf5.Hdf5ImageLoader; import bdv.img.hdf5.Partition; import bdv.img.hdf5.Util; import bdv.spimdata.SequenceDescriptionMinimal; import ch.systemsx.cisd.hdf5.HDF5Factory; import ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures; import ch.systemsx.cisd.hdf5.IHDF5Reader; import ch.systemsx.cisd.hdf5.IHDF5Writer; /** * Create a hdf5 files containing image data from all views and all timepoints * in a chunked, mipmaped representation. * * <p> * Every image is stored in multiple resolutions. The resolutions are described * as int[] arrays defining multiple of original pixel size in every dimension. * For example {1,1,1} is the original resolution, {4,4,2} is downsampled by * factor 4 in X and Y and factor 2 in Z. 
Each resolution of the image is stored * as a chunked three-dimensional array (each chunk corresponds to one cell of a * {@link CellImg} when the data is loaded). The chunk sizes are defined by the * subdivisions parameter which is an array of int[], one per resolution. Each * int[] array describes the X,Y,Z chunk size for one resolution. For instance * {32,32,8} says that the (downsampled) image is divided into 32x32x8 pixel * blocks. * * <p> * For every mipmap level we have a (3D) int[] resolution array, so the full * mipmap pyramid is specified by a nested int[][] array. Likewise, we have a * (3D) int[] subdivions array for every mipmap level, so the full chunking of * the full pyramid is specfied by a nested int[][] array. * * <p> * A data-set can be stored in a single hdf5 file or split across several hdf5 * "partitions" with one master hdf5 linking into the partitions. * * @author Tobias Pietzsch &lt;[email protected]&gt; */ public class WriteSequenceToHdf5 { /** * Create a hdf5 file containing image data from all views and all * timepoints in a chunked, mipmaped representation. * * @param seq * description of the sequence to be stored as hdf5. (The * {@link AbstractSequenceDescription} contains the number of * setups and timepoints as well as an {@link BasicImgLoader} * that provides the image data, Registration information is not * needed here, that will go into the accompanying xml). * @param perSetupMipmapInfo * this maps from setup {@link BasicViewSetup#getId() id} to * {@link ExportMipmapInfo} for that setup. The * {@link ExportMipmapInfo} contains for each mipmap level, the * subsampling factors and subdivision block sizes. * @param deflate * whether to compress the data with the HDF5 DEFLATE filter. * @param hdf5File * hdf5 file to which the image data is written. 
* @param loopbackHeuristic * heuristic to decide whether to create each resolution level by * reading pixels from the original image or by reading back a * finer resolution level already written to the hdf5. may be * null (in this case always use the original image). * @param afterEachPlane * this is called after each "plane of chunks" is written, giving * the opportunity to clear caches, etc. * @param progressWriter * completion ratio and status output will be directed here. */ public static void writeHdf5File( final AbstractSequenceDescription< ?, ?, ? > seq, final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo, final boolean deflate, final File hdf5File, final LoopbackHeuristic loopbackHeuristic, final AfterEachPlane afterEachPlane, final ProgressWriter progressWriter ) { final HashMap< Integer, Integer > timepointIdSequenceToPartition = new HashMap< Integer, Integer >(); for ( final TimePoint timepoint : seq.getTimePoints().getTimePointsOrdered() ) timepointIdSequenceToPartition.put( timepoint.getId(), timepoint.getId() ); final HashMap< Integer, Integer > setupIdSequenceToPartition = new HashMap< Integer, Integer >(); for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() ) setupIdSequenceToPartition.put( setup.getId(), setup.getId() ); final Partition partition = new Partition( hdf5File.getPath(), timepointIdSequenceToPartition, setupIdSequenceToPartition ); writeHdf5PartitionFile( seq, perSetupMipmapInfo, deflate, partition, loopbackHeuristic, afterEachPlane, progressWriter ); } /** * Create a hdf5 file containing image data from all views and all * timepoints in a chunked, mipmaped representation. This is the same as * {@link WriteSequenceToHdf5#writeHdf5File(AbstractSequenceDescription, Map, boolean, File, LoopbackHeuristic, AfterEachPlane, ProgressWriter)} * except that only one set of supsampling factors and and subdivision * blocksizes is given, which is used for all {@link BasicViewSetup views}. 
* * @param seq * description of the sequence to be stored as hdf5. (The * {@link AbstractSequenceDescription} contains the number of * setups and timepoints as well as an {@link BasicImgLoader} * that provides the image data, Registration information is not * needed here, that will go into the accompanying xml). * @param resolutions * this nested arrays contains per mipmap level, the subsampling * factors. * @param subdivisions * this nested arrays contains per mipmap level, the subdivision * block sizes. * @param deflate * whether to compress the data with the HDF5 DEFLATE filter. * @param hdf5File * hdf5 file to which the image data is written. * @param loopbackHeuristic * heuristic to decide whether to create each resolution level by * reading pixels from the original image or by reading back a * finer resolution level already written to the hdf5. may be * null (in this case always use the original image). * @param afterEachPlane * this is called after each "plane of chunks" is written, giving * the opportunity to clear caches, etc. * @param progressWriter * completion ratio and status output will be directed here. */ public static void writeHdf5File( final AbstractSequenceDescription< ?, ?, ? > seq, final int[][] resolutions, final int[][] subdivisions, final boolean deflate, final File hdf5File, final LoopbackHeuristic loopbackHeuristic, final AfterEachPlane afterEachPlane, final ProgressWriter progressWriter ) { final HashMap< Integer, ExportMipmapInfo > perSetupMipmapInfo = new HashMap< Integer, ExportMipmapInfo >(); final ExportMipmapInfo mipmapInfo = new ExportMipmapInfo( resolutions, subdivisions ); for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() ) perSetupMipmapInfo.put( setup.getId(), mipmapInfo ); writeHdf5File( seq, perSetupMipmapInfo, deflate, hdf5File, loopbackHeuristic, afterEachPlane, progressWriter ); } /** * Create a hdf5 master file linking to image data from all views and all * timepoints. 
This is the same as * {@link #writeHdf5PartitionLinkFile(AbstractSequenceDescription, Map, ArrayList, File)}, * except that the information about the partition files as well as the * path of the master file to be written is obtained from the * {@link BasicImgLoader} of the sequence, which must be a * {@link Hdf5ImageLoader}. * * @param seq * description of the sequence to be stored as hdf5. (The * {@link AbstractSequenceDescription} contains the number of * setups and timepoints as well as an {@link BasicImgLoader} * that provides the image data, Registration information is not * needed here, that will go into the accompanying xml). * @param perSetupMipmapInfo * this maps from setup {@link BasicViewSetup#getId() id} to * {@link ExportMipmapInfo} for that setup. The * {@link ExportMipmapInfo} contains for each mipmap level, the * subsampling factors and subdivision block sizes. */ public static void writeHdf5PartitionLinkFile( final AbstractSequenceDescription< ?, ?, ? > seq, final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo ) { if ( !( seq.getImgLoader() instanceof Hdf5ImageLoader ) ) throw new IllegalArgumentException( "sequence has " + seq.getImgLoader().getClass() + " imgloader. Hdf5ImageLoader required." ); final Hdf5ImageLoader loader = ( Hdf5ImageLoader ) seq.getImgLoader(); writeHdf5PartitionLinkFile( seq, perSetupMipmapInfo, loader.getPartitions(), loader.getHdf5File() ); } /** * Create a hdf5 master file linking to image data from all views and all * timepoints. Which hdf5 files contain which part of the image data is * specified in the {@code portitions} parameter. * * Note that this method only writes the master file containing links. The * individual partitions need to be written with * {@link #writeHdf5PartitionFile(AbstractSequenceDescription, Map, boolean, Partition, LoopbackHeuristic, AfterEachPlane, ProgressWriter)}. * * @param seq * description of the sequence to be stored as hdf5. 
(The * {@link AbstractSequenceDescription} contains the number of * setups and timepoints as well as an {@link BasicImgLoader} * that provides the image data, Registration information is not * needed here, that will go into the accompanying xml). * @param perSetupMipmapInfo * this maps from setup {@link BasicViewSetup#getId() id} to * {@link ExportMipmapInfo} for that setup. The * {@link ExportMipmapInfo} contains for each mipmap level, the * subsampling factors and subdivision block sizes. * @param partitions * which parts of the dataset are stored in which files. * @param hdf5File * hdf5 master file to which the image data from the partition * files is linked. */ public static void writeHdf5PartitionLinkFile( final AbstractSequenceDescription< ?, ?, ? > seq, final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo, final ArrayList< Partition > partitions, final File hdf5File ) { // open HDF5 output file if ( hdf5File.exists() ) hdf5File.delete(); final IHDF5Writer hdf5Writer = HDF5Factory.open( hdf5File ); // write Mipmap descriptions for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() ) { final int setupId = setup.getId(); final ExportMipmapInfo mipmapInfo = perSetupMipmapInfo.get( setupId ); hdf5Writer.writeDoubleMatrix( Util.getResolutionsPath( setupId ), mipmapInfo.getResolutions() ); hdf5Writer.writeIntMatrix( Util.getSubdivisionsPath( setupId ), mipmapInfo.getSubdivisions() ); } // link Cells for all views in the partition final File basePath = hdf5File.getParentFile(); for ( final Partition partition : partitions ) { final Map< Integer, Integer > timepointIdSequenceToPartition = partition.getTimepointIdSequenceToPartition(); final Map< Integer, Integer > setupIdSequenceToPartition = partition.getSetupIdSequenceToPartition(); for ( final Entry< Integer, Integer > tEntry : timepointIdSequenceToPartition.entrySet() ) { final int tSequence = tEntry.getKey(); final int tPartition = tEntry.getValue(); for ( final Entry< Integer, Integer > sEntry : 
setupIdSequenceToPartition.entrySet() ) { final int sSequence = sEntry.getKey(); final int sPartition = sEntry.getValue(); final ViewId idSequence = new ViewId( tSequence, sSequence ); final ViewId idPartition = new ViewId( tPartition, sPartition ); final int numLevels = perSetupMipmapInfo.get( sSequence ).getNumLevels(); for ( int level = 0; level < numLevels; ++level ) { final String relativePath = XmlHelpers.getRelativePath( new File( partition.getPath() ), basePath ).getPath(); hdf5Writer.object().createOrUpdateExternalLink( relativePath, Util.getCellsPath( idPartition, level ), Util.getCellsPath( idSequence, level ) ); } } } } hdf5Writer.close(); } /** * Create a hdf5 partition file containing image data for a subset of views * and timepoints in a chunked, mipmaped representation. * * Please note that the description of the <em>full</em> dataset must be * given in the <code>seq</code>, <code>perSetupResolutions</code>, and * <code>perSetupSubdivisions</code> parameters. Then only the part * described by <code>partition</code> will be written. * * @param seq * description of the sequence to be stored as hdf5. (The * {@link AbstractSequenceDescription} contains the number of * setups and timepoints as well as an {@link BasicImgLoader} * that provides the image data, Registration information is not * needed here, that will go into the accompanying xml). * @param perSetupMipmapInfo * this maps from setup {@link BasicViewSetup#getId() id} to * {@link ExportMipmapInfo} for that setup. The * {@link ExportMipmapInfo} contains for each mipmap level, the * subsampling factors and subdivision block sizes. * @param deflate * whether to compress the data with the HDF5 DEFLATE filter. * @param partition * which part of the dataset to write, and to which file. * @param loopbackHeuristic * heuristic to decide whether to create each resolution level by * reading pixels from the original image or by reading back a * finer resolution level already written to the hdf5. 
may be * null (in this case always use the original image). * @param afterEachPlane * this is called after each "plane of chunks" is written, giving * the opportunity to clear caches, etc. * @param progressWriter * completion ratio and status output will be directed here. */ public static void writeHdf5PartitionFile( final AbstractSequenceDescription< ?, ?, ? > seq, final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo, final boolean deflate, final Partition partition, final LoopbackHeuristic loopbackHeuristic, final AfterEachPlane afterEachPlane, ProgressWriter progressWriter ) { final int blockWriterQueueLength = 100; final int numThreads = Math.max( 1, Runtime.getRuntime().availableProcessors() - 2 ); if ( progressWriter == null ) progressWriter = new ProgressWriterConsole(); progressWriter.setProgress( 0 ); // get sequence timepointIds for the timepoints contained in this partition final ArrayList< Integer > timepointIdsSequence = new ArrayList< Integer >( partition.getTimepointIdSequenceToPartition().keySet() ); Collections.sort( timepointIdsSequence ); final int numTimepoints = timepointIdsSequence.size(); final ArrayList< Integer > setupIdsSequence = new ArrayList< Integer >( partition.getSetupIdSequenceToPartition().keySet() ); Collections.sort( setupIdsSequence ); // get the BasicImgLoader that supplies the images if ( !( seq.getImgLoader().getImageType() instanceof UnsignedShortType ) ) throw new IllegalArgumentException( "Expected BasicImgLoader<UnsignedShortTyp> but your dataset has BasicImgLoader<" + seq.getImgLoader().getImageType().getClass().getSimpleName() + ">.\nCurrently writing to HDF5 is only supported for UnsignedShortType." 
); @SuppressWarnings( "unchecked" ) final BasicImgLoader< UnsignedShortType > imgLoader = ( BasicImgLoader< UnsignedShortType > ) seq.getImgLoader(); // open HDF5 partition output file final File hdf5File = new File( partition.getPath() ); if ( hdf5File.exists() ) hdf5File.delete(); final Hdf5BlockWriterThread writerQueue = new Hdf5BlockWriterThread( hdf5File, blockWriterQueueLength ); writerQueue.start(); // start CellCreatorThreads final CellCreatorThread[] cellCreatorThreads = createAndStartCellCreatorThreads( numThreads ); // calculate number of tasks for progressWriter int numTasks = 1; // first task is for writing mipmap descriptions etc... for ( final int timepointIdSequence : timepointIdsSequence ) for ( final int setupIdSequence : setupIdsSequence ) if ( seq.getViewDescriptions().get( new ViewId( timepointIdSequence, setupIdSequence ) ).isPresent() ) numTasks++; int numCompletedTasks = 0; // write Mipmap descriptions for ( final Entry< Integer, Integer > entry : partition.getSetupIdSequenceToPartition().entrySet() ) { final int setupIdSequence = entry.getKey(); final int setupIdPartition = entry.getValue(); final ExportMipmapInfo mipmapInfo = perSetupMipmapInfo.get( setupIdSequence ); writerQueue.writeMipmapDescription( setupIdPartition, mipmapInfo ); } progressWriter.setProgress( ( double ) ++numCompletedTasks / numTasks ); // write image data for all views to the HDF5 file int timepointIndex = 0; for ( final int timepointIdSequence : timepointIdsSequence ) { final int timepointIdPartition = partition.getTimepointIdSequenceToPartition().get( timepointIdSequence ); progressWriter.out().printf( "proccessing timepoint %d / %d\n", ++timepointIndex, numTimepoints ); // assemble the viewsetups that are present in this timepoint final ArrayList< Integer > setupsTimePoint = new ArrayList< Integer >(); for ( final int setupIdSequence : setupIdsSequence ) if ( seq.getViewDescriptions().get( new ViewId( timepointIdSequence, setupIdSequence ) ).isPresent() ) 
setupsTimePoint.add( setupIdSequence ); final int numSetups = setupsTimePoint.size(); int setupIndex = 0; for ( final int setupIdSequence : setupsTimePoint ) { final int setupIdPartition = partition.getSetupIdSequenceToPartition().get( setupIdSequence ); progressWriter.out().printf( "proccessing setup %d / %d\n", ++setupIndex, numSetups ); final ViewId viewIdSequence = new ViewId( timepointIdSequence, setupIdSequence ); final RandomAccessibleInterval< UnsignedShortType > img = imgLoader.getImage( viewIdSequence ); final ExportMipmapInfo mipmapInfo = perSetupMipmapInfo.get( setupIdSequence ); final double startCompletionRatio = ( double ) numCompletedTasks++ / numTasks; final double endCompletionRatio = ( double ) numCompletedTasks / numTasks; final ProgressWriter subProgressWriter = new SubTaskProgressWriter( progressWriter, startCompletionRatio, endCompletionRatio ); writeViewToHdf5PartitionFile( img, timepointIdPartition, setupIdPartition, mipmapInfo, false, deflate, writerQueue, cellCreatorThreads, loopbackHeuristic, afterEachPlane, subProgressWriter ); } } // shutdown and close file stopCellCreatorThreads( cellCreatorThreads ); writerQueue.close(); progressWriter.setProgress( 1.0 ); } /** * Write a single view to a hdf5 partition file, in a chunked, mipmaped * representation. Note that the specified view must not already exist in * the partition file! * * @param img * the view to be written. * @param partition * describes which part of the full sequence is contained in this * partition, and to which file this partition is written. * @param timepointIdPartition * the timepoint id wrt the partition of the view to be written. * The information in {@code partition} relates this to timepoint * id in the full sequence. * @param setupIdPartition * the setup id wrt the partition of the view to be written. The * information in {@code partition} relates this to setup id in * the full sequence. 
* @param mipmapInfo * contains for each mipmap level of the setup, the subsampling * factors and subdivision block sizes. * @param writeMipmapInfo * whether to write mipmap description for the setup. must be * done (at least) once for each setup in the partition. * @param deflate * whether to compress the data with the HDF5 DEFLATE filter. * @param loopbackHeuristic * heuristic to decide whether to create each resolution level by * reading pixels from the original image or by reading back a * finer resolution level already written to the hdf5. may be * null (in this case always use the original image). * @param afterEachPlane * this is called after each "plane of chunks" is written, giving * the opportunity to clear caches, etc. * @param progressWriter * completion ratio and status output will be directed here. may * be null. */ public static void writeViewToHdf5PartitionFile( final RandomAccessibleInterval< UnsignedShortType > img, final Partition partition, final int timepointIdPartition, final int setupIdPartition, final ExportMipmapInfo mipmapInfo, final boolean writeMipmapInfo, final boolean deflate, final LoopbackHeuristic loopbackHeuristic, final AfterEachPlane afterEachPlane, final ProgressWriter progressWriter ) { final int blockWriterQueueLength = 100; final int numThreads = Math.max( 1, Runtime.getRuntime().availableProcessors() - 2 ); // create and start Hdf5BlockWriterThread final Hdf5BlockWriterThread writerQueue = new Hdf5BlockWriterThread( partition.getPath(), blockWriterQueueLength ); writerQueue.start(); final CellCreatorThread[] cellCreatorThreads = createAndStartCellCreatorThreads( numThreads ); // write the image writeViewToHdf5PartitionFile( img, timepointIdPartition, setupIdPartition, mipmapInfo, writeMipmapInfo, deflate, writerQueue, cellCreatorThreads, loopbackHeuristic, afterEachPlane, progressWriter ); stopCellCreatorThreads( cellCreatorThreads ); writerQueue.close(); } static class LoopBackImageLoader extends Hdf5ImageLoader { private 
        LoopBackImageLoader( final IHDF5Reader existingHdf5Reader, final AbstractSequenceDescription< ?, ?, ? > sequenceDescription )
        {
            super( null, existingHdf5Reader, null, sequenceDescription, false );
        }

        static LoopBackImageLoader create( final IHDF5Reader existingHdf5Reader, final int timepointIdPartition, final int setupIdPartition, final Dimensions imageDimensions )
        {
            // minimal single-timepoint, single-setup sequence description
            // wrapping the dataset that is currently being written
            final HashMap< Integer, TimePoint > timepoints = new HashMap< Integer, TimePoint >();
            timepoints.put( timepointIdPartition, new TimePoint( timepointIdPartition ) );
            final HashMap< Integer, BasicViewSetup > setups = new HashMap< Integer, BasicViewSetup >();
            setups.put( setupIdPartition, new BasicViewSetup( setupIdPartition, null, imageDimensions, null ) );
            final SequenceDescriptionMinimal seq = new SequenceDescriptionMinimal( new TimePoints( timepoints ), setups, null, null );
            return new LoopBackImageLoader( existingHdf5Reader, seq );
        }
    }

    /**
     * Write a single view to a hdf5 partition file, in a chunked, mipmaped
     * representation. Note that the specified view must not already exist in
     * the partition file!
     *
     * @param img
     *            the view to be written.
     * @param timepointIdPartition
     *            the timepoint id wrt the partition of the view to be written.
     *            The information in {@code partition} relates this to timepoint
     *            id in the full sequence.
     * @param setupIdPartition
     *            the setup id wrt the partition of the view to be written. The
     *            information in {@code partition} relates this to setup id in
     *            the full sequence.
     * @param mipmapInfo
     *            contains for each mipmap level of the setup, the subsampling
     *            factors and subdivision block sizes.
     * @param writeMipmapInfo
     *            whether to write mipmap description for the setup. must be
     *            done (at least) once for each setup in the partition.
     * @param deflate
     *            whether to compress the data with the HDF5 DEFLATE filter.
     * @param writerQueue
     *            block writing tasks are enqueued here.
     * @param cellCreatorThreads
     *            threads used for creating (possibly down-sampled) blocks of
     *            the view to be written.
     * @param loopbackHeuristic
     *            heuristic to decide whether to create each resolution level by
     *            reading pixels from the original image or by reading back a
     *            finer resolution level already written to the hdf5. may be
     *            null (in this case always use the original image).
     * @param afterEachPlane
     *            this is called after each "plane of chunks" is written, giving
     *            the opportunity to clear caches, etc.
     * @param progressWriter
     *            completion ratio and status output will be directed here. may
     *            be null.
     */
    public static void writeViewToHdf5PartitionFile( final RandomAccessibleInterval< UnsignedShortType > img, final int timepointIdPartition, final int setupIdPartition, final ExportMipmapInfo mipmapInfo, final boolean writeMipmapInfo, final boolean deflate, final Hdf5BlockWriterThread writerQueue, final CellCreatorThread[] cellCreatorThreads, final LoopbackHeuristic loopbackHeuristic, final AfterEachPlane afterEachPlane, ProgressWriter progressWriter )
    {
        // 16-bit integer storage, optionally with the DEFLATE filter
        final HDF5IntStorageFeatures storage = deflate ? HDF5IntStorageFeatures.INT_AUTO_SCALING_DEFLATE : HDF5IntStorageFeatures.INT_AUTO_SCALING;

        if ( progressWriter == null )
            progressWriter = new ProgressWriterConsole();

        // for progressWriter: one task per mipmap level
        final int numTasks = mipmapInfo.getNumLevels();
        int numCompletedTasks = 0;
        progressWriter.setProgress( ( double ) numCompletedTasks++ / numTasks );

        // write Mipmap descriptions
        if ( writeMipmapInfo )
            writerQueue.writeMipmapDescription( setupIdPartition, mipmapInfo );

        // create loopback image-loader to read already written chunks from the
        // h5 for generating low-resolution versions.
        final LoopBackImageLoader loopback = ( loopbackHeuristic == null ) ? null : LoopBackImageLoader.create( writerQueue.getIHDF5Writer(), timepointIdPartition, setupIdPartition, img );

        // write image data for all views to the HDF5 file
        final int n = 3;
        final long[] dimensions = new long[ n ];

        final int[][] resolutions = mipmapInfo.getExportResolutions();
        final int[][] subdivisions = mipmapInfo.getSubdivisions();
        final int numLevels = mipmapInfo.getNumLevels();
        for ( int level = 0; level < numLevels; ++level )
        {
            progressWriter.out().println( "writing level " + level );

            // decide where the source pixels for this level come from:
            // the original image, or a finer level read back from the hdf5
            final RandomAccessibleInterval< UnsignedShortType > sourceImg;
            final int[] factor;
            final boolean useLoopBack;
            if ( loopbackHeuristic == null )
            {
                sourceImg = img;
                factor = resolutions[ level ];
                useLoopBack = false;
            }
            else
            {
                // Are downsampling factors a multiple of a level that we have
                // already written?
                int[] factorsToPreviousLevel = null;
                int previousLevel = -1;
                A: for ( int l = level - 1; l >= 0; --l )
                {
                    final int[] f = new int[ n ];
                    for ( int d = 0; d < n; ++d )
                    {
                        f[ d ] = resolutions[ level ][ d ] / resolutions[ l ][ d ];
                        if ( f[ d ] * resolutions[ l ][ d ] != resolutions[ level ][ d ] )
                            continue A;
                    }
                    factorsToPreviousLevel = f;
                    previousLevel = l;
                    break;
                }
                // Now, if previousLevel >= 0 we can use loopback ImgLoader on
                // previousLevel and downsample with factorsToPreviousLevel.
                //
                // whether it makes sense to actually do so is determined by a
                // heuristic based on the following considerations:
                // * if downsampling a lot over original image, the cost of
                //   reading images back from hdf5 outweighs the cost of
                //   accessing and averaging original pixels.
                // * original image may already be cached (for example when
                //   exporting an ImageJ virtual stack. To compute blocks
                //   that downsample a lot in Z, many planes of the virtual
                //   stack need to be accessed leading to cache thrashing if
                //   individual planes are very large.
                useLoopBack = loopbackHeuristic.decide( img, resolutions[ level ], previousLevel, factorsToPreviousLevel, subdivisions[ level ] );
                if ( useLoopBack )
                {
                    sourceImg = loopback.getImage( new ViewId( timepointIdPartition, setupIdPartition ), previousLevel );
                    factor = factorsToPreviousLevel;
                }
                else
                {
                    sourceImg = img;
                    factor = resolutions[ level ];
                }
            }

            sourceImg.dimensions( dimensions );
            final boolean fullResolution = ( factor[ 0 ] == 1 && factor[ 1 ] == 1 && factor[ 2 ] == 1 );
            long size = 1;
            if ( !fullResolution )
            {
                // downsampled dimensions (at least 1 in every dimension);
                // size = number of source pixels averaged into one target pixel
                for ( int d = 0; d < n; ++d )
                {
                    dimensions[ d ] = Math.max( dimensions[ d ] / factor[ d ], 1 );
                    size *= factor[ d ];
                }
            }
            final double scale = 1.0 / size;

            // extend the source by border pixels so that partial blocks at the
            // max border can be averaged from a full factor-sized neighborhood
            final long[] minRequiredInput = new long[ n ];
            final long[] maxRequiredInput = new long[ n ];
            sourceImg.min( minRequiredInput );
            for ( int d = 0; d < n; ++d )
                maxRequiredInput[ d ] = minRequiredInput[ d ] + dimensions[ d ] * factor[ d ] - 1;
            final RandomAccessibleInterval< UnsignedShortType > extendedImg = Views.interval( Views.extendBorder( sourceImg ), new FinalInterval( minRequiredInput, maxRequiredInput ) );

            final int[] cellDimensions = subdivisions[ level ];
            final ViewId viewIdPartition = new ViewId( timepointIdPartition, setupIdPartition );
            final String path = Util.getCellsPath( viewIdPartition, level );
            writerQueue.createAndOpenDataset( path, dimensions.clone(), cellDimensions.clone(), storage );

            // chunk grid: number of cells per dimension and the (possibly
            // smaller) size of the border cell in each dimension
            final long[] numCells = new long[ n ];
            final int[] borderSize = new int[ n ];
            final long[] minCell = new long[ n ];
            final long[] maxCell = new long[ n ];
            for ( int d = 0; d < n; ++d )
            {
                numCells[ d ] = ( dimensions[ d ] - 1 ) / cellDimensions[ d ] + 1;
                maxCell[ d ] = numCells[ d ] - 1;
                borderSize[ d ] = ( int ) ( dimensions[ d ] - ( numCells[ d ] - 1 ) * cellDimensions[ d ] );
            }

            // generate one "plane" of cells after the other to avoid cache thrashing when exporting from virtual stacks
            for ( int lastDimCell = 0; lastDimCell < numCells[ n - 1 ]; ++lastDimCell )
            {
                minCell[ n - 1 ] = lastDimCell;
                maxCell[ n - 1 ] = lastDimCell;
                // iterator over the cells of this plane, shared by all worker
                // threads (access is synchronized below)
                final LocalizingIntervalIterator i = new LocalizingIntervalIterator( minCell, maxCell );

                final int numThreads = cellCreatorThreads.length;
                final CountDownLatch doneSignal = new CountDownLatch( numThreads );
                for ( int threadNum = 0; threadNum < numThreads; ++threadNum )
                {
                    cellCreatorThreads[ threadNum ].run( new Runnable()
                    {
                        @Override
                        public void run()
                        {
                            // scratch buffer for accumulating sums while downsampling
                            final double[] accumulator = fullResolution ? null : new double[ cellDimensions[ 0 ] * cellDimensions[ 1 ] * cellDimensions[ 2 ] ];
                            final long[] currentCellMin = new long[ n ];
                            final long[] currentCellMax = new long[ n ];
                            final long[] currentCellDim = new long[ n ];
                            final long[] currentCellPos = new long[ n ];
                            final long[] blockMin = new long[ n ];
                            final RandomAccess< UnsignedShortType > in = extendedImg.randomAccess();
                            while ( true )
                            {
                                // grab the next cell position from the shared iterator
                                synchronized ( i )
                                {
                                    if ( !i.hasNext() )
                                        break;
                                    i.fwd();
                                    i.localize( currentCellPos );
                                }
                                for ( int d = 0; d < n; ++d )
                                {
                                    currentCellMin[ d ] = currentCellPos[ d ] * cellDimensions[ d ];
                                    blockMin[ d ] = currentCellMin[ d ] * factor[ d ];
                                    final boolean isBorderCellInThisDim = ( currentCellPos[ d ] + 1 == numCells[ d ] );
                                    currentCellDim[ d ] = isBorderCellInThisDim ? borderSize[ d ] : cellDimensions[ d ];
                                    currentCellMax[ d ] = currentCellMin[ d ] + currentCellDim[ d ] - 1;
                                }
                                final ArrayImg< UnsignedShortType, ?
        > originalImg, final int[] factorsToOriginalImg, final int previousLevel, final int[] factorsToPreviousLevel, final int[] chunkSize )
        {
            // no coarser level written yet, loopback impossible
            if ( previousLevel < 0 )
                return false;

            // use loopback if it saves a factor of >= 8 in pixel accesses
            // compared to averaging from the original image
            if ( numElements( factorsToOriginalImg ) / numElements( factorsToPreviousLevel ) >= 8 )
                return true;

            return false;
        }
    }

    /**
     * Product of all elements of {@code size}, i.e. the number of pixels of a
     * block with these edge lengths. Assumes {@code size} has at least one
     * element.
     */
    public static int numElements( final int[] size )
    {
        int numElements = size[ 0 ];
        for ( int d = 1; d < size.length; ++d )
            numElements *= size[ d ];
        return numElements;
    }

    /**
     * Create {@code numThreads} named {@link CellCreatorThread}s and start
     * them. Stop them with {@link #stopCellCreatorThreads(CellCreatorThread[])}.
     */
    public static CellCreatorThread[] createAndStartCellCreatorThreads( final int numThreads )
    {
        final CellCreatorThread[] cellCreatorThreads = new CellCreatorThread[ numThreads ];
        for ( int threadNum = 0; threadNum < numThreads; ++threadNum )
        {
            cellCreatorThreads[ threadNum ] = new CellCreatorThread();
            cellCreatorThreads[ threadNum ].setName( "CellCreatorThread " + threadNum );
            cellCreatorThreads[ threadNum ].start();
        }
        return cellCreatorThreads;
    }

    /**
     * Interrupt the given threads, which makes them terminate their run loop.
     */
    public static void stopCellCreatorThreads( final CellCreatorThread[] cellCreatorThreads )
    {
        for ( final CellCreatorThread thread : cellCreatorThreads )
            thread.interrupt();
    }

    /**
     * Worker thread that executes one {@link Runnable} task at a time.
     * A task is handed over with {@link #run(Runnable)} and executed on this
     * thread; the thread waits while idle and terminates when interrupted.
     */
    public static class CellCreatorThread extends Thread
    {
        // task to execute next; null while idle. guarded by this.
        private Runnable currentTask = null;

        /**
         * Submit a task to be executed on this thread. Note: this overloads
         * (not overrides) {@link Thread#run()}; the caller blocks only until
         * the worker releases the monitor.
         */
        public synchronized void run( final Runnable task )
        {
            currentTask = task;
            notify();
        }

        @Override
        public void run()
        {
            while ( !isInterrupted() )
            {
                synchronized ( this )
                {
                    try
                    {
                        if ( currentTask == null )
                            wait();
                        else
                        {
                            currentTask.run();
                            currentTask = null;
                        }
                    }
                    catch ( final InterruptedException e )
                    {
                        // interrupt is the normal shutdown signal
                        break;
                    }
                }
            }
        }
    }

    /**
     * Copy a {@code outDim}-sized block starting at {@code blockMin} from
     * {@code in} to {@code out} (which is positioned at its own origin),
     * iterating in X-fastest order over the three dimensions.
     */
    private static < T extends RealType< T > > void copyBlock( final RandomAccess< T > out, final long[] outDim, final RandomAccess< T > in, final long[] blockMin )
    {
        in.setPosition( blockMin );
        for ( out.setPosition( 0, 2 ); out.getLongPosition( 2 ) < outDim[ 2 ]; out.fwd( 2 ) )
        {
            for ( out.setPosition( 0, 1 ); out.getLongPosition( 1 ) < outDim[ 1 ]; out.fwd( 1 ) )
            {
                for ( out.setPosition( 0, 0 ); out.getLongPosition( 0 ) < outDim[ 0 ]; out.fwd( 0 ), in.fwd( 0 ) )
                {
                    out.get().set( in.get() );
                }
                // rewind X and advance Y on the input
                in.setPosition( blockMin[ 0 ], 0 );
                in.fwd( 1 );
            }
            // rewind Y and advance Z on the input
            in.setPosition( blockMin[ 1 ], 1 );
            in.fwd( 2 );
        }
    }

    /**
     * Average a block of {@code randomAccess} (starting at {@code blockMin},
     * extent {@code outDim * blockSize}) into a {@code outDim}-sized target.
     * Sums of each {@code blockSize} neighborhood are collected in
     * {@code accumulator} (sized for at least the number of output pixels),
     * then written to {@code out} scaled by {@code scale} (the reciprocal of
     * the neighborhood size).
     */
    private static < T extends RealType< T > > void downsampleBlock( final Cursor< T > out, final double[] accumulator, final long[] outDim, final RandomAccess< UnsignedShortType > randomAccess, final long[] blockMin, final int[] blockSize, final double scale )
    {
        final int numBlockPixels = ( int ) ( outDim[ 0 ] * outDim[ 1 ] * outDim[ 2 ] );
        Arrays.fill( accumulator, 0, numBlockPixels, 0 );

        randomAccess.setPosition( blockMin );

        // output extent and corresponding source extent per dimension
        final int ox = ( int ) outDim[ 0 ];
        final int oy = ( int ) outDim[ 1 ];
        final int oz = ( int ) outDim[ 2 ];

        final int sx = ox * blockSize[ 0 ];
        final int sy = oy * blockSize[ 1 ];
        final int sz = oz * blockSize[ 2 ];

        // walk the source block once; i indexes the accumulator slot of the
        // output pixel the current source pixel contributes to.
        int i = 0;
        for ( int z = 0, bz = 0; z < sz; ++z )
        {
            for ( int y = 0, by = 0; y < sy; ++y )
            {
                for ( int x = 0, bx = 0; x < sx; ++x )
                {
                    accumulator[ i ] += randomAccess.get().getRealDouble();
                    randomAccess.fwd( 0 );
                    if ( ++bx == blockSize[ 0 ] )
                    {
                        bx = 0;
                        ++i;
                    }
                }
                randomAccess.move( -sx, 0 );
                randomAccess.fwd( 1 );
                if ( ++by == blockSize[ 1 ] )
                    by = 0;
                else
                    i -= ox; // stay on the same output row until by wraps
            }
            randomAccess.move( -sy, 1 );
            randomAccess.fwd( 2 );
            if ( ++bz == blockSize[ 2 ] )
                bz = 0;
            else
                i -= ox * oy; // stay on the same output plane until bz wraps
        }

        // write scaled averages to the output
        for ( int j = 0; j < numBlockPixels; ++j )
            out.next().setReal( accumulator[ j ] * scale );
    }
}
src/main/java/bdv/export/WriteSequenceToHdf5.java
package bdv.export; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.CountDownLatch; import mpicbg.spim.data.XmlHelpers; import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription; import mpicbg.spim.data.generic.sequence.BasicImgLoader; import mpicbg.spim.data.generic.sequence.BasicViewSetup; import mpicbg.spim.data.sequence.TimePoint; import mpicbg.spim.data.sequence.TimePoints; import mpicbg.spim.data.sequence.ViewId; import net.imglib2.Cursor; import net.imglib2.Dimensions; import net.imglib2.FinalInterval; import net.imglib2.RandomAccess; import net.imglib2.RandomAccessibleInterval; import net.imglib2.img.array.ArrayImg; import net.imglib2.img.array.ArrayImgs; import net.imglib2.img.basictypeaccess.array.ShortArray; import net.imglib2.img.cell.CellImg; import net.imglib2.iterator.LocalizingIntervalIterator; import net.imglib2.type.numeric.RealType; import net.imglib2.type.numeric.integer.UnsignedShortType; import net.imglib2.view.Views; import bdv.img.hdf5.Hdf5ImageLoader; import bdv.img.hdf5.Partition; import bdv.img.hdf5.Util; import bdv.spimdata.SequenceDescriptionMinimal; import ch.systemsx.cisd.hdf5.HDF5Factory; import ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures; import ch.systemsx.cisd.hdf5.IHDF5Reader; import ch.systemsx.cisd.hdf5.IHDF5Writer; /** * Create a hdf5 files containing image data from all views and all timepoints * in a chunked, mipmaped representation. * * <p> * Every image is stored in multiple resolutions. The resolutions are described * as int[] arrays defining multiple of original pixel size in every dimension. * For example {1,1,1} is the original resolution, {4,4,2} is downsampled by * factor 4 in X and Y and factor 2 in Z. 
Each resolution of the image is stored * as a chunked three-dimensional array (each chunk corresponds to one cell of a * {@link CellImg} when the data is loaded). The chunk sizes are defined by the * subdivisions parameter which is an array of int[], one per resolution. Each * int[] array describes the X,Y,Z chunk size for one resolution. For instance * {32,32,8} says that the (downsampled) image is divided into 32x32x8 pixel * blocks. * * <p> * For every mipmap level we have a (3D) int[] resolution array, so the full * mipmap pyramid is specified by a nested int[][] array. Likewise, we have a * (3D) int[] subdivions array for every mipmap level, so the full chunking of * the full pyramid is specfied by a nested int[][] array. * * <p> * A data-set can be stored in a single hdf5 file or split across several hdf5 * "partitions" with one master hdf5 linking into the partitions. * * @author Tobias Pietzsch &lt;[email protected]&gt; */ public class WriteSequenceToHdf5 { /** * Create a hdf5 file containing image data from all views and all * timepoints in a chunked, mipmaped representation. * * @param seq * description of the sequence to be stored as hdf5. (The * {@link AbstractSequenceDescription} contains the number of * setups and timepoints as well as an {@link BasicImgLoader} * that provides the image data, Registration information is not * needed here, that will go into the accompanying xml). * @param perSetupMipmapInfo * this maps from setup {@link BasicViewSetup#getId() id} to * {@link ExportMipmapInfo} for that setup. The * {@link ExportMipmapInfo} contains for each mipmap level, the * subsampling factors and subdivision block sizes. * @param deflate * whether to compress the data with the HDF5 DEFLATE filter. * @param hdf5File * hdf5 file to which the image data is written. 
     * @param loopbackHeuristic
     *            heuristic to decide whether to create each resolution level by
     *            reading pixels from the original image or by reading back a
     *            finer resolution level already written to the hdf5. may be
     *            null (in this case always use the original image).
     * @param afterEachPlane
     *            this is called after each "plane of chunks" is written, giving
     *            the opportunity to clear caches, etc.
     * @param progressWriter
     *            completion ratio and status output will be directed here.
     */
    public static void writeHdf5File( final AbstractSequenceDescription< ?, ?, ? > seq, final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo, final boolean deflate, final File hdf5File, final LoopbackHeuristic loopbackHeuristic, final AfterEachPlane afterEachPlane, final ProgressWriter progressWriter )
    {
        // the whole sequence goes into a single partition: every sequence
        // timepoint and setup id maps to itself.
        final HashMap< Integer, Integer > timepointIdSequenceToPartition = new HashMap< Integer, Integer >();
        for ( final TimePoint timepoint : seq.getTimePoints().getTimePointsOrdered() )
            timepointIdSequenceToPartition.put( timepoint.getId(), timepoint.getId() );

        final HashMap< Integer, Integer > setupIdSequenceToPartition = new HashMap< Integer, Integer >();
        for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() )
            setupIdSequenceToPartition.put( setup.getId(), setup.getId() );

        final Partition partition = new Partition( hdf5File.getPath(), timepointIdSequenceToPartition, setupIdSequenceToPartition );
        writeHdf5PartitionFile( seq, perSetupMipmapInfo, deflate, partition, loopbackHeuristic, afterEachPlane, progressWriter );
    }

    /**
     * Create a hdf5 file containing image data from all views and all
     * timepoints in a chunked, mipmaped representation. This is the same as
     * {@link WriteSequenceToHdf5#writeHdf5File(AbstractSequenceDescription, Map, boolean, File, LoopbackHeuristic, AfterEachPlane, ProgressWriter)}
     * except that only one set of subsampling factors and subdivision
     * blocksizes is given, which is used for all {@link BasicViewSetup views}.
     *
     * @param seq
     *            description of the sequence to be stored as hdf5. (The
     *            {@link AbstractSequenceDescription} contains the number of
     *            setups and timepoints as well as an {@link BasicImgLoader}
     *            that provides the image data, Registration information is not
     *            needed here, that will go into the accompanying xml).
     * @param resolutions
     *            this nested arrays contains per mipmap level, the subsampling
     *            factors.
     * @param subdivisions
     *            this nested arrays contains per mipmap level, the subdivision
     *            block sizes.
     * @param deflate
     *            whether to compress the data with the HDF5 DEFLATE filter.
     * @param hdf5File
     *            hdf5 file to which the image data is written.
     * @param loopbackHeuristic
     *            heuristic to decide whether to create each resolution level by
     *            reading pixels from the original image or by reading back a
     *            finer resolution level already written to the hdf5. may be
     *            null (in this case always use the original image).
     * @param afterEachPlane
     *            this is called after each "plane of chunks" is written, giving
     *            the opportunity to clear caches, etc.
     * @param progressWriter
     *            completion ratio and status output will be directed here.
     */
    public static void writeHdf5File( final AbstractSequenceDescription< ?, ?, ? > seq, final int[][] resolutions, final int[][] subdivisions, final boolean deflate, final File hdf5File, final LoopbackHeuristic loopbackHeuristic, final AfterEachPlane afterEachPlane, final ProgressWriter progressWriter )
    {
        // share a single ExportMipmapInfo across all setups
        final HashMap< Integer, ExportMipmapInfo > perSetupMipmapInfo = new HashMap< Integer, ExportMipmapInfo >();
        final ExportMipmapInfo mipmapInfo = new ExportMipmapInfo( resolutions, subdivisions );
        for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() )
            perSetupMipmapInfo.put( setup.getId(), mipmapInfo );
        writeHdf5File( seq, perSetupMipmapInfo, deflate, hdf5File, loopbackHeuristic, afterEachPlane, progressWriter );
    }

    /**
     * Create a hdf5 master file linking to image data from all views and all
     * timepoints. This is the same as
     * {@link #writeHdf5PartitionLinkFile(AbstractSequenceDescription, Map, ArrayList, File)},
     * except that the information about the partition files as well as the
     * path of the master file to be written is obtained from the
     * {@link BasicImgLoader} of the sequence, which must be a
     * {@link Hdf5ImageLoader}.
     *
     * @param seq
     *            description of the sequence to be stored as hdf5. (The
     *            {@link AbstractSequenceDescription} contains the number of
     *            setups and timepoints as well as an {@link BasicImgLoader}
     *            that provides the image data, Registration information is not
     *            needed here, that will go into the accompanying xml).
     * @param perSetupMipmapInfo
     *            this maps from setup {@link BasicViewSetup#getId() id} to
     *            {@link ExportMipmapInfo} for that setup. The
     *            {@link ExportMipmapInfo} contains for each mipmap level, the
     *            subsampling factors and subdivision block sizes.
     */
    public static void writeHdf5PartitionLinkFile( final AbstractSequenceDescription< ?, ?, ? > seq, final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo )
    {
        if ( !( seq.getImgLoader() instanceof Hdf5ImageLoader ) )
            throw new IllegalArgumentException( "sequence has " + seq.getImgLoader().getClass() + " imgloader. Hdf5ImageLoader required." );
        final Hdf5ImageLoader loader = ( Hdf5ImageLoader ) seq.getImgLoader();
        writeHdf5PartitionLinkFile( seq, perSetupMipmapInfo, loader.getPartitions(), loader.getHdf5File() );
    }

    /**
     * Create a hdf5 master file linking to image data from all views and all
     * timepoints. Which hdf5 files contain which part of the image data is
     * specified in the {@code partitions} parameter.
     *
     * Note that this method only writes the master file containing links. The
     * individual partitions need to be written with
     * {@link #writeHdf5PartitionFile(AbstractSequenceDescription, Map, boolean, Partition, LoopbackHeuristic, AfterEachPlane, ProgressWriter)}.
     *
     * @param seq
     *            description of the sequence to be stored as hdf5. (The
     *            {@link AbstractSequenceDescription} contains the number of
     *            setups and timepoints as well as an {@link BasicImgLoader}
     *            that provides the image data, Registration information is not
     *            needed here, that will go into the accompanying xml).
     * @param perSetupMipmapInfo
     *            this maps from setup {@link BasicViewSetup#getId() id} to
     *            {@link ExportMipmapInfo} for that setup. The
     *            {@link ExportMipmapInfo} contains for each mipmap level, the
     *            subsampling factors and subdivision block sizes.
     * @param partitions
     *            which parts of the dataset are stored in which files.
     * @param hdf5File
     *            hdf5 master file to which the image data from the partition
     *            files is linked.
     */
    public static void writeHdf5PartitionLinkFile( final AbstractSequenceDescription< ?, ?, ? > seq, final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo, final ArrayList< Partition > partitions, final File hdf5File )
    {
        // open HDF5 output file
        if ( hdf5File.exists() )
            hdf5File.delete();
        final IHDF5Writer hdf5Writer = HDF5Factory.open( hdf5File );

        // write Mipmap descriptions
        for ( final BasicViewSetup setup : seq.getViewSetupsOrdered() )
        {
            final int setupId = setup.getId();
            final ExportMipmapInfo mipmapInfo = perSetupMipmapInfo.get( setupId );
            hdf5Writer.writeDoubleMatrix( Util.getResolutionsPath( setupId ), mipmapInfo.getResolutions() );
            hdf5Writer.writeIntMatrix( Util.getSubdivisionsPath( setupId ), mipmapInfo.getSubdivisions() );
        }

        // link Cells for all views in the partition
        final File basePath = hdf5File.getParentFile();
        for ( final Partition partition : partitions )
        {
            final Map< Integer, Integer > timepointIdSequenceToPartition = partition.getTimepointIdSequenceToPartition();
            final Map< Integer, Integer > setupIdSequenceToPartition = partition.getSetupIdSequenceToPartition();

            for ( final Entry< Integer, Integer > tEntry : timepointIdSequenceToPartition.entrySet() )
            {
                final int tSequence = tEntry.getKey();
                final int tPartition = tEntry.getValue();
                for ( final Entry< Integer, Integer > sEntry :
                setupIdSequenceToPartition.entrySet() )
                {
                    final int sSequence = sEntry.getKey();
                    final int sPartition = sEntry.getValue();

                    // create an external link per mipmap level, pointing from
                    // the sequence-id path in the master file to the
                    // partition-id path in the partition file
                    final ViewId idSequence = new ViewId( tSequence, sSequence );
                    final ViewId idPartition = new ViewId( tPartition, sPartition );

                    final int numLevels = perSetupMipmapInfo.get( sSequence ).getNumLevels();
                    for ( int level = 0; level < numLevels; ++level )
                    {
                        final String relativePath = XmlHelpers.getRelativePath( new File( partition.getPath() ), basePath ).getPath();
                        hdf5Writer.object().createOrUpdateExternalLink( relativePath, Util.getCellsPath( idPartition, level ), Util.getCellsPath( idSequence, level ) );
                    }
                }
            }
        }
        hdf5Writer.close();
    }

    /**
     * Create a hdf5 partition file containing image data for a subset of views
     * and timepoints in a chunked, mipmaped representation.
     *
     * Please note that the description of the <em>full</em> dataset must be
     * given in the <code>seq</code>, <code>perSetupResolutions</code>, and
     * <code>perSetupSubdivisions</code> parameters. Then only the part
     * described by <code>partition</code> will be written.
     *
     * @param seq
     *            description of the sequence to be stored as hdf5. (The
     *            {@link AbstractSequenceDescription} contains the number of
     *            setups and timepoints as well as an {@link BasicImgLoader}
     *            that provides the image data, Registration information is not
     *            needed here, that will go into the accompanying xml).
     * @param perSetupMipmapInfo
     *            this maps from setup {@link BasicViewSetup#getId() id} to
     *            {@link ExportMipmapInfo} for that setup. The
     *            {@link ExportMipmapInfo} contains for each mipmap level, the
     *            subsampling factors and subdivision block sizes.
     * @param deflate
     *            whether to compress the data with the HDF5 DEFLATE filter.
     * @param partition
     *            which part of the dataset to write, and to which file.
     * @param loopbackHeuristic
     *            heuristic to decide whether to create each resolution level by
     *            reading pixels from the original image or by reading back a
     *            finer resolution level already written to the hdf5. may be
     *            null (in this case always use the original image).
     * @param afterEachPlane
     *            this is called after each "plane of chunks" is written, giving
     *            the opportunity to clear caches, etc.
     * @param progressWriter
     *            completion ratio and status output will be directed here.
     */
    public static void writeHdf5PartitionFile( final AbstractSequenceDescription< ?, ?, ? > seq, final Map< Integer, ExportMipmapInfo > perSetupMipmapInfo, final boolean deflate, final Partition partition, final LoopbackHeuristic loopbackHeuristic, final AfterEachPlane afterEachPlane, ProgressWriter progressWriter )
    {
        // bounded queue length between block producers and the HDF5 writer thread
        final int blockWriterQueueLength = 100;
        // leave (up to) two cores free for the writer thread and the caller
        final int numThreads = Math.max( 1, Runtime.getRuntime().availableProcessors() - 2 );

        if ( progressWriter == null )
            progressWriter = new ProgressWriterConsole();
        progressWriter.setProgress( 0 );

        // get sequence timepointIds for the timepoints contained in this partition
        final ArrayList< Integer > timepointIdsSequence = new ArrayList< Integer >( partition.getTimepointIdSequenceToPartition().keySet() );
        Collections.sort( timepointIdsSequence );
        final int numTimepoints = timepointIdsSequence.size();
        final ArrayList< Integer > setupIdsSequence = new ArrayList< Integer >( partition.getSetupIdSequenceToPartition().keySet() );
        Collections.sort( setupIdsSequence );

        // get the BasicImgLoader that supplies the images
        // NOTE(review): "UnsignedShortTyp" typo in this exception message —
        // left as-is here (runtime string); fix separately.
        if ( !( seq.getImgLoader().getImageType() instanceof UnsignedShortType ) )
            throw new IllegalArgumentException( "Expected BasicImgLoader<UnsignedShortTyp> but your dataset has BasicImgLoader<" + seq.getImgLoader().getImageType().getClass().getSimpleName() + ">.\nCurrently writing to HDF5 is only supported for UnsignedShortType." );
        @SuppressWarnings( "unchecked" )
        final BasicImgLoader< UnsignedShortType > imgLoader = ( BasicImgLoader< UnsignedShortType > ) seq.getImgLoader();

        // open HDF5 partition output file
        final File hdf5File = new File( partition.getPath() );
        if ( hdf5File.exists() )
            hdf5File.delete();
        final Hdf5BlockWriterThread writerQueue = new Hdf5BlockWriterThread( hdf5File, blockWriterQueueLength );
        writerQueue.start();
        // start CellCreatorThreads
        final CellCreatorThread[] cellCreatorThreads = createAndStartCellCreatorThreads( numThreads );

        // calculate number of tasks for progressWriter
        int numTasks = 1; // first task is for writing mipmap descriptions etc...
        for ( final int timepointIdSequence : timepointIdsSequence )
            for ( final int setupIdSequence : setupIdsSequence )
                if ( seq.getViewDescriptions().get( new ViewId( timepointIdSequence, setupIdSequence ) ).isPresent() )
                    numTasks++;
        int numCompletedTasks = 0;

        // write Mipmap descriptions
        for ( final Entry< Integer, Integer > entry : partition.getSetupIdSequenceToPartition().entrySet() )
        {
            final int setupIdSequence = entry.getKey();
            final int setupIdPartition = entry.getValue();
            final ExportMipmapInfo mipmapInfo = perSetupMipmapInfo.get( setupIdSequence );
            writerQueue.writeMipmapDescription( setupIdPartition, mipmapInfo );
        }
        progressWriter.setProgress( ( double ) ++numCompletedTasks / numTasks );

        // write image data for all views to the HDF5 file
        int timepointIndex = 0;
        for ( final int timepointIdSequence : timepointIdsSequence )
        {
            final int timepointIdPartition = partition.getTimepointIdSequenceToPartition().get( timepointIdSequence );
            // NOTE(review): "proccessing" typo in this log message — left as-is.
            progressWriter.out().printf( "proccessing timepoint %d / %d\n", ++timepointIndex, numTimepoints );

            // assemble the viewsetups that are present in this timepoint
            final ArrayList< Integer > setupsTimePoint = new ArrayList< Integer >();
            for ( final int setupIdSequence : setupIdsSequence )
                if ( seq.getViewDescriptions().get( new ViewId( timepointIdSequence, setupIdSequence ) ).isPresent() )
                    setupsTimePoint.add( setupIdSequence );

            final int numSetups = setupsTimePoint.size();
            int setupIndex = 0;
            for ( final int setupIdSequence : setupsTimePoint )
            {
                // map the sequence-wide setup id to the id used inside this partition
                final int setupIdPartition = partition.getSetupIdSequenceToPartition().get( setupIdSequence );
                // NOTE(review): "proccessing" typo in this log message — left as-is here; fix separately.
                progressWriter.out().printf( "proccessing setup %d / %d\n", ++setupIndex, numSetups );

                final ViewId viewIdSequence = new ViewId( timepointIdSequence, setupIdSequence );
                final RandomAccessibleInterval< UnsignedShortType > img = imgLoader.getImage( viewIdSequence );
                final ExportMipmapInfo mipmapInfo = perSetupMipmapInfo.get( setupIdSequence );

                // every view counts as one task for overall progress reporting;
                // the view writer gets a sub-range of the progress bar.
                final double startCompletionRatio = ( double ) numCompletedTasks++ / numTasks;
                final double endCompletionRatio = ( double ) numCompletedTasks / numTasks;
                final ProgressWriter subProgressWriter = new SubTaskProgressWriter( progressWriter, startCompletionRatio, endCompletionRatio );
                writeViewToHdf5PartitionFile( img, timepointIdPartition, setupIdPartition, mipmapInfo, false, deflate, writerQueue, cellCreatorThreads, loopbackHeuristic, afterEachPlane, subProgressWriter );
            }
        }

        // shutdown and close file
        stopCellCreatorThreads( cellCreatorThreads );
        writerQueue.close();
        progressWriter.setProgress( 1.0 );
    }

    /**
     * Write a single view to a hdf5 partition file, in a chunked, mipmaped
     * representation. Note that the specified view must not already exist in
     * the partition file!
     *
     * @param img
     *            the view to be written.
     * @param partition
     *            describes which part of the full sequence is contained in this
     *            partition, and to which file this partition is written.
     * @param timepointIdPartition
     *            the timepoint id wrt the partition of the view to be written.
     *            The information in {@code partition} relates this to timepoint
     *            id in the full sequence.
     * @param setupIdPartition
     *            the setup id wrt the partition of the view to be written. The
     *            information in {@code partition} relates this to setup id in
     *            the full sequence.
     * @param mipmapInfo
     *            contains for each mipmap level of the setup, the subsampling
     *            factors and subdivision block sizes.
     * @param writeMipmapInfo
     *            whether to write mipmap description for the setup. must be
     *            done (at least) once for each setup in the partition.
     * @param deflate
     *            whether to compress the data with the HDF5 DEFLATE filter.
     * @param loopbackHeuristic
     *            heuristic to decide whether to create each resolution level by
     *            reading pixels from the original image or by reading back a
     *            finer resolution level already written to the hdf5. may be
     *            null (in this case always use the original image).
     * @param afterEachPlane
     *            this is called after each "plane of chunks" is written, giving
     *            the opportunity to clear caches, etc.
     * @param progressWriter
     *            completion ratio and status output will be directed here. may
     *            be null.
     */
    public static void writeViewToHdf5PartitionFile( final RandomAccessibleInterval< UnsignedShortType > img, final Partition partition, final int timepointIdPartition, final int setupIdPartition, final ExportMipmapInfo mipmapInfo, final boolean writeMipmapInfo, final boolean deflate, final LoopbackHeuristic loopbackHeuristic, final AfterEachPlane afterEachPlane, final ProgressWriter progressWriter )
    {
        // bounded queue length between block producers and the single HDF5 writer thread
        final int blockWriterQueueLength = 100;
        // leave (up to) two cores free for the writer thread and the caller
        final int numThreads = Math.max( 1, Runtime.getRuntime().availableProcessors() - 2 );

        // create and start Hdf5BlockWriterThread
        final Hdf5BlockWriterThread writerQueue = new Hdf5BlockWriterThread( partition.getPath(), blockWriterQueueLength );
        writerQueue.start();
        final CellCreatorThread[] cellCreatorThreads = createAndStartCellCreatorThreads( numThreads );

        // write the image
        writeViewToHdf5PartitionFile( img, timepointIdPartition, setupIdPartition, mipmapInfo, writeMipmapInfo, deflate, writerQueue, cellCreatorThreads, loopbackHeuristic, afterEachPlane, progressWriter );

        stopCellCreatorThreads( cellCreatorThreads );
        writerQueue.close();
    }

    /**
     * A {@link Hdf5ImageLoader} that reads back from the (partially written)
     * output file, used to generate coarse mipmap levels from finer levels
     * that were already written.
     */
    static class LoopBackImageLoader extends Hdf5ImageLoader
    {
        private
LoopBackImageLoader( final IHDF5Reader existingHdf5Reader, final AbstractSequenceDescription< ?, ?, ? > sequenceDescription ) { super( null, existingHdf5Reader, null, sequenceDescription, true ); } static LoopBackImageLoader create( final IHDF5Reader existingHdf5Reader, final int timepointIdPartition, final int setupIdPartition, final Dimensions imageDimensions ) { final HashMap< Integer, TimePoint > timepoints = new HashMap< Integer, TimePoint >(); timepoints.put( timepointIdPartition, new TimePoint( timepointIdPartition ) ); final HashMap< Integer, BasicViewSetup > setups = new HashMap< Integer, BasicViewSetup >(); setups.put( setupIdPartition, new BasicViewSetup( setupIdPartition, null, imageDimensions, null ) ); final SequenceDescriptionMinimal seq = new SequenceDescriptionMinimal( new TimePoints( timepoints ), setups, null, null ); return new LoopBackImageLoader( existingHdf5Reader, seq ); } } /** * Write a single view to a hdf5 partition file, in a chunked, mipmaped * representation. Note that the specified view must not already exist in * the partition file! * * @param img * the view to be written. * @param timepointIdPartition * the timepoint id wrt the partition of the view to be written. * The information in {@code partition} relates this to timepoint * id in the full sequence. * @param setupIdPartition * the setup id wrt the partition of the view to be written. The * information in {@code partition} relates this to setup id in * the full sequence. * @param mipmapInfo * contains for each mipmap level of the setup, the subsampling * factors and subdivision block sizes. * @param writeMipmapInfo * whether to write mipmap description for the setup. must be * done (at least) once for each setup in the partition. * @param deflate * whether to compress the data with the HDF5 DEFLATE filter. * @param writerQueue * block writing tasks are enqueued here. 
* @param cellCreatorThreads * threads used for creating (possibly down-sampled) blocks of * the view to be written. * @param loopbackHeuristic * heuristic to decide whether to create each resolution level by * reading pixels from the original image or by reading back a * finer resolution level already written to the hdf5. may be * null (in this case always use the original image). * @param afterEachPlane * this is called after each "plane of chunks" is written, giving * the opportunity to clear caches, etc. * @param progressWriter * completion ratio and status output will be directed here. may * be null. */ public static void writeViewToHdf5PartitionFile( final RandomAccessibleInterval< UnsignedShortType > img, final int timepointIdPartition, final int setupIdPartition, final ExportMipmapInfo mipmapInfo, final boolean writeMipmapInfo, final boolean deflate, final Hdf5BlockWriterThread writerQueue, final CellCreatorThread[] cellCreatorThreads, final LoopbackHeuristic loopbackHeuristic, final AfterEachPlane afterEachPlane, ProgressWriter progressWriter ) { final HDF5IntStorageFeatures storage = deflate ? HDF5IntStorageFeatures.INT_AUTO_SCALING_DEFLATE : HDF5IntStorageFeatures.INT_AUTO_SCALING; if ( progressWriter == null ) progressWriter = new ProgressWriterConsole(); // for progressWriter final int numTasks = mipmapInfo.getNumLevels(); int numCompletedTasks = 0; progressWriter.setProgress( ( double ) numCompletedTasks++ / numTasks ); // write Mipmap descriptions if ( writeMipmapInfo ) writerQueue.writeMipmapDescription( setupIdPartition, mipmapInfo ); // create loopback image-loader to read already written chunks from the // h5 for generating low-resolution versions. final LoopBackImageLoader loopback = ( loopbackHeuristic == null ) ? 
null : LoopBackImageLoader.create( writerQueue.getIHDF5Writer(), timepointIdPartition, setupIdPartition, img ); // write image data for all views to the HDF5 file final int n = 3; final long[] dimensions = new long[ n ]; final int[][] resolutions = mipmapInfo.getExportResolutions(); final int[][] subdivisions = mipmapInfo.getSubdivisions(); final int numLevels = mipmapInfo.getNumLevels(); for ( int level = 0; level < numLevels; ++level ) { progressWriter.out().println( "writing level " + level ); final long t0 = System.currentTimeMillis(); final RandomAccessibleInterval< UnsignedShortType > sourceImg; final int[] factor; final boolean useLoopBack; if ( loopbackHeuristic == null ) { sourceImg = img; factor = resolutions[ level ]; useLoopBack = false; } else { // Are downsampling factors a multiple of a level that we have // already written? int[] factorsToPreviousLevel = null; int previousLevel = -1; A: for ( int l = level - 1; l >= 0; --l ) { final int[] f = new int[ n ]; for ( int d = 0; d < n; ++d ) { f[ d ] = resolutions[ level ][ d ] / resolutions[ l ][ d ]; if ( f[ d ] * resolutions[ l ][ d ] != resolutions[ level ][ d ] ) continue A; } factorsToPreviousLevel = f; previousLevel = l; break; } // Now, if previousLevel >= 0 we can use loopback ImgLoader on // previousLevel and downsample with factorsToPreviousLevel. // // whether it makes sense to actually do so is determined by a // heuristic based on the following considerations: // * if downsampling a lot over original image, the cost of // reading images back from hdf5 outweighs the cost of // accessing and averaging original pixels. // * original image may already be cached (for example when // exporting an ImageJ virtual stack. To compute blocks // that downsample a lot in Z, many planes of the virtual // stack need to be accessed leading to cache thrashing if // individual planes are very large. 
useLoopBack = loopbackHeuristic.decide( img, resolutions[ level ], previousLevel, factorsToPreviousLevel, subdivisions[ level ] ); if ( useLoopBack ) { sourceImg = loopback.getImage( new ViewId( timepointIdPartition, setupIdPartition ), previousLevel ); factor = factorsToPreviousLevel; } else { sourceImg = img; factor = resolutions[ level ]; } } sourceImg.dimensions( dimensions ); final boolean fullResolution = ( factor[ 0 ] == 1 && factor[ 1 ] == 1 && factor[ 2 ] == 1 ); long size = 1; if ( !fullResolution ) { for ( int d = 0; d < n; ++d ) { dimensions[ d ] = Math.max( dimensions[ d ] / factor[ d ], 1 ); size *= factor[ d ]; } } final double scale = 1.0 / size; final long[] minRequiredInput = new long[ n ]; final long[] maxRequiredInput = new long[ n ]; sourceImg.min( minRequiredInput ); for ( int d = 0; d < n; ++d ) maxRequiredInput[ d ] = minRequiredInput[ d ] + dimensions[ d ] * factor[ d ] - 1; final RandomAccessibleInterval< UnsignedShortType > extendedImg = Views.interval( Views.extendBorder( sourceImg ), new FinalInterval( minRequiredInput, maxRequiredInput ) ); final int[] cellDimensions = subdivisions[ level ]; final ViewId viewIdPartition = new ViewId( timepointIdPartition, setupIdPartition ); final String path = Util.getCellsPath( viewIdPartition, level ); writerQueue.createAndOpenDataset( path, dimensions.clone(), cellDimensions.clone(), storage ); final long[] numCells = new long[ n ]; final int[] borderSize = new int[ n ]; final long[] minCell = new long[ n ]; final long[] maxCell = new long[ n ]; for ( int d = 0; d < n; ++d ) { numCells[ d ] = ( dimensions[ d ] - 1 ) / cellDimensions[ d ] + 1; maxCell[ d ] = numCells[ d ] - 1; borderSize[ d ] = ( int ) ( dimensions[ d ] - ( numCells[ d ] - 1 ) * cellDimensions[ d ] ); } // generate one "plane" of cells after the other to avoid cache thrashing when exporting from virtual stacks for ( int lastDimCell = 0; lastDimCell < numCells[ n - 1 ]; ++lastDimCell ) { minCell[ n - 1 ] = lastDimCell; maxCell[ n - 1 
] = lastDimCell; final LocalizingIntervalIterator i = new LocalizingIntervalIterator( minCell, maxCell ); final int numThreads = cellCreatorThreads.length; final CountDownLatch doneSignal = new CountDownLatch( numThreads ); for ( int threadNum = 0; threadNum < numThreads; ++threadNum ) { cellCreatorThreads[ threadNum ].run( new Runnable() { @Override public void run() { final double[] accumulator = fullResolution ? null : new double[ cellDimensions[ 0 ] * cellDimensions[ 1 ] * cellDimensions[ 2 ] ]; final long[] currentCellMin = new long[ n ]; final long[] currentCellMax = new long[ n ]; final long[] currentCellDim = new long[ n ]; final long[] currentCellPos = new long[ n ]; final long[] blockMin = new long[ n ]; final RandomAccess< UnsignedShortType > in = extendedImg.randomAccess(); while ( true ) { synchronized ( i ) { if ( !i.hasNext() ) break; i.fwd(); i.localize( currentCellPos ); } for ( int d = 0; d < n; ++d ) { currentCellMin[ d ] = currentCellPos[ d ] * cellDimensions[ d ]; blockMin[ d ] = currentCellMin[ d ] * factor[ d ]; final boolean isBorderCellInThisDim = ( currentCellPos[ d ] + 1 == numCells[ d ] ); currentCellDim[ d ] = isBorderCellInThisDim ? borderSize[ d ] : cellDimensions[ d ]; currentCellMax[ d ] = currentCellMin[ d ] + currentCellDim[ d ] - 1; } final ArrayImg< UnsignedShortType, ? 
> cell = ArrayImgs.unsignedShorts( currentCellDim ); final RandomAccess< UnsignedShortType > out = cell.randomAccess(); if ( fullResolution ) copyBlock( out, currentCellDim, in, blockMin ); else downsampleBlock( cell.cursor(), accumulator, currentCellDim, in, blockMin, factor, scale ); writerQueue.writeBlockWithOffset( ( ( ShortArray ) cell.update( null ) ).getCurrentStorageArray(), currentCellDim.clone(), currentCellMin.clone() ); } doneSignal.countDown(); } } ); } try { doneSignal.await(); } catch ( final InterruptedException e ) { e.printStackTrace(); } if ( afterEachPlane != null ) afterEachPlane.afterEachPlane( useLoopBack ); } writerQueue.closeDataset(); progressWriter.setProgress( ( double ) numCompletedTasks++ / numTasks ); } if ( loopback != null ) loopback.close(); } /** * A heuristic to decide for a given resolution level whether the source * pixels should be taken from the original image or read from a previously * written resolution level in the hdf5 file. */ public interface LoopbackHeuristic { public boolean decide( final RandomAccessibleInterval< ? > originalImg, final int[] factorsToOriginalImg, final int previousLevel, final int[] factorsToPreviousLevel, final int[] chunkSize ); } public interface AfterEachPlane { public void afterEachPlane( final boolean usedLoopBack ); } /** * Simple heuristic: use loopback image loader if saving 8 times or more on * number of pixel access with respect to the original image. * * @author Tobias Pietzsch &lt;[email protected]&gt; */ public static class DefaultLoopbackHeuristic implements LoopbackHeuristic { @Override public boolean decide( final RandomAccessibleInterval< ? 
> originalImg, final int[] factorsToOriginalImg, final int previousLevel, final int[] factorsToPreviousLevel, final int[] chunkSize ) { if ( previousLevel < 0 ) return false; if ( numElements( factorsToOriginalImg ) / numElements( factorsToPreviousLevel ) >= 8 ) return true; return false; } } public static int numElements( final int[] size ) { int numElements = size[ 0 ]; for ( int d = 1; d < size.length; ++d ) numElements *= size[ d ]; return numElements; } public static CellCreatorThread[] createAndStartCellCreatorThreads( final int numThreads ) { final CellCreatorThread[] cellCreatorThreads = new CellCreatorThread[ numThreads ]; for ( int threadNum = 0; threadNum < numThreads; ++threadNum ) { cellCreatorThreads[ threadNum ] = new CellCreatorThread(); cellCreatorThreads[ threadNum ].setName( "CellCreatorThread " + threadNum ); cellCreatorThreads[ threadNum ].start(); } return cellCreatorThreads; } public static void stopCellCreatorThreads( final CellCreatorThread[] cellCreatorThreads ) { for ( final CellCreatorThread thread : cellCreatorThreads ) thread.interrupt(); } public static class CellCreatorThread extends Thread { private Runnable currentTask = null; public synchronized void run( final Runnable task ) { currentTask = task; notify(); } @Override public void run() { while ( !isInterrupted() ) { synchronized ( this ) { try { if ( currentTask == null ) wait(); else { currentTask.run(); currentTask = null; } } catch ( final InterruptedException e ) { break; } } } } } private static < T extends RealType< T > > void copyBlock( final RandomAccess< T > out, final long[] outDim, final RandomAccess< T > in, final long[] blockMin ) { in.setPosition( blockMin ); for ( out.setPosition( 0, 2 ); out.getLongPosition( 2 ) < outDim[ 2 ]; out.fwd( 2 ) ) { for ( out.setPosition( 0, 1 ); out.getLongPosition( 1 ) < outDim[ 1 ]; out.fwd( 1 ) ) { for ( out.setPosition( 0, 0 ); out.getLongPosition( 0 ) < outDim[ 0 ]; out.fwd( 0 ), in.fwd( 0 ) ) { out.get().set( in.get() ); } 
in.setPosition( blockMin[ 0 ], 0 ); in.fwd( 1 ); } in.setPosition( blockMin[ 1 ], 1 ); in.fwd( 2 ); } } private static < T extends RealType< T > > void downsampleBlock( final Cursor< T > out, final double[] accumulator, final long[] outDim, final RandomAccess< UnsignedShortType > randomAccess, final long[] blockMin, final int[] blockSize, final double scale ) { final int numBlockPixels = ( int ) ( outDim[ 0 ] * outDim[ 1 ] * outDim[ 2 ] ); Arrays.fill( accumulator, 0, numBlockPixels, 0 ); randomAccess.setPosition( blockMin ); final int ox = ( int ) outDim[ 0 ]; final int oy = ( int ) outDim[ 1 ]; final int oz = ( int ) outDim[ 2 ]; final int sx = ox * blockSize[ 0 ]; final int sy = oy * blockSize[ 1 ]; final int sz = oz * blockSize[ 2 ]; int i = 0; for ( int z = 0, bz = 0; z < sz; ++z ) { for ( int y = 0, by = 0; y < sy; ++y ) { for ( int x = 0, bx = 0; x < sx; ++x ) { accumulator[ i ] += randomAccess.get().getRealDouble(); randomAccess.fwd( 0 ); if ( ++bx == blockSize[ 0 ] ) { bx = 0; ++i; } } randomAccess.move( -sx, 0 ); randomAccess.fwd( 1 ); if ( ++by == blockSize[ 1 ] ) by = 0; else i -= ox; } randomAccess.move( -sy, 1 ); randomAccess.fwd( 2 ); if ( ++bz == blockSize[ 2 ] ) bz = 0; else i -= ox * oy; } for ( int j = 0; j < numBlockPixels; ++j ) out.next().setReal( accumulator[ j ] * scale ); } }
Only open LoopBackImageLoader when it is required for the first time. This should fix a bug where the MipmapInfo is not yet written to the HDF5 but the ImgLoader initialization already wants to read it.
src/main/java/bdv/export/WriteSequenceToHdf5.java
Only open LoopBackImageLoader when it is required for the first time. This should fix a bug where the MipmapInfo is not yet written to the HDF5 but the ImgLoader initialization already wants to read it.
<ide><path>rc/main/java/bdv/export/WriteSequenceToHdf5.java <ide> { <ide> private LoopBackImageLoader( final IHDF5Reader existingHdf5Reader, final AbstractSequenceDescription< ?, ?, ? > sequenceDescription ) <ide> { <del> super( null, existingHdf5Reader, null, sequenceDescription, true ); <add> super( null, existingHdf5Reader, null, sequenceDescription, false ); <ide> } <ide> <ide> static LoopBackImageLoader create( final IHDF5Reader existingHdf5Reader, final int timepointIdPartition, final int setupIdPartition, final Dimensions imageDimensions )
Java
unlicense
error: pathspec 'sample.java' did not match any file(s) known to git
527249df1a4c7fd9a54c8eeef6716b6509029fc0
1
pressure679/Network-sniffer,pressure679/Network-sniffer
import jpcap.NetworkInterfaceAddress; import jpcap.NetworkInterface; import jpcap.JpcapCaptor; import jpcap.packet.Packet; import jpcap.packet.ARPPacket; import jpcap.PacketReceiver; import java.io.IOException; import jpcap.JpcapSender; import jpcap.packet.ICMPPacket; import java.net.InetAddress; public class sample { public static void sample(String args[]) { boolean noerr = true; // getdevs(); while (noerr) { try { int index = 2; NetworkInterface[] devices = JpcapCaptor.getDeviceList(); JpcapCaptor captor = JpcapCaptor.openDevice(devices[index], 4096, false, 5000); capture(captor); tracert(captor); // doesnt work } catch (IOException e) { System.out.println("IOException"); System.err.println(e); noerr = false; } } } // Unfinished, create inetaddress and use in captor.setfilter public static void tracert(JpcapCaptor captor) { InetAddress iaddr = captor.setFilter("icmp and dst" + ); JpcapSender sender = captor.getJpcapSenderInstance(); ICMPPacket icmp = new ICMPPacket(); boolean doing = true; while (doing) { ICMPPacket icmppack = captor.getPacket(); System.out.println("received " + icmppack); if (icmppack == null) { System.out.println("Timeout"); } else if (icmppack.type == ICMPPacket.ICMP_TIMXCEED){ icmppack.src_ip.getHostName(); System.out.println(icmp.hop_limit + ": " + icmppack.src_ip); icmp.hop_limit++; } else if (icmppack.type == ICMPPacket.ICMP_UNREACH){ icmppack.src_ip.getHostName(); System.out.println(icmp.hop_limit+": "+ icmppack.src_ip); System.exit(0); } else if (icmppack.type == ICMPPacket.ICMP_ECHOREPLY){ icmppack.src_ip.getHostName(); System.out.println(icmp.hop_limit + ": " + icmppack.src_ip); System.exit(0); } sender.sendPacket(icmp); System.out.println(); doing = false; } } public static void capture(JpcapCaptor captor) throws IOException { /* captor.setFilter("arp", true); System.out.println("Sender IP Address:\t" + arppack.getSenderProtocolAddress()); System.out.println("Sender HW Address:\t" + arppack.getSenderHardwareAddress()); 
System.out.println("Target IP Address:\t" + arppack.getTargetProtocolAddress()); System.out.println("Target HW Address:\t" + arppack.getTargetHardwareAddress()); */ boolean doing = true; while (doing) { int index = 2; NetworkInterface[] devices = JpcapCaptor.getDeviceList(); Packet pack = captor.getPacket(); if (pack != null) { System.out.println(pack); System.out.println(); doing = false; } } } //Get Interfaces public static void getdevs() throws IOException { //for each network interface int index = 2; NetworkInterface[] devices = JpcapCaptor.getDeviceList(); JpcapCaptor captor = JpcapCaptor.openDevice(devices[index], 4096, false, 5000); for (int i = 0; i < devices.length; i++) { //print out its name and description System.out.println(i + ": " + devices[i].name + "(" + devices[i].description + ")"); //print out its datalink name and description System.out.println(" datalink: " + devices[i].datalink_name + "(" + devices[i].datalink_description + ")"); //print out its MAC address System.out.print(" MAC address:"); for (byte b : devices[i].mac_address) { System.out.print(Integer.toHexString(b&0xff) + ":"); } System.out.println(); //print out its IP address, subnet mask and broadcast address for (NetworkInterfaceAddress a : devices[i].addresses) { System.out.println(" address: " + a.address + " " + a.subnet + " " + a.broadcast); } System.out.println(); } } }
sample.java
first commit
sample.java
first commit
<ide><path>ample.java <add>import jpcap.NetworkInterfaceAddress; <add>import jpcap.NetworkInterface; <add>import jpcap.JpcapCaptor; <add>import jpcap.packet.Packet; <add>import jpcap.packet.ARPPacket; <add>import jpcap.PacketReceiver; <add>import java.io.IOException; <add>import jpcap.JpcapSender; <add>import jpcap.packet.ICMPPacket; <add>import java.net.InetAddress; <add> <add>public class sample { <add> <add> public static void sample(String args[]) { <add> <add> boolean noerr = true; <add> // getdevs(); <add> while (noerr) { <add> try { <add> int index = 2; <add> NetworkInterface[] devices = JpcapCaptor.getDeviceList(); <add> JpcapCaptor captor = JpcapCaptor.openDevice(devices[index], 4096, false, 5000); <add> capture(captor); <add> tracert(captor); // doesnt work <add> <add> } catch (IOException e) { <add> System.out.println("IOException"); <add> System.err.println(e); <add> noerr = false; <add> } <add> } <add> } <add> <add> // Unfinished, create inetaddress and use in captor.setfilter <add> public static void tracert(JpcapCaptor captor) { <add> InetAddress iaddr = <add> captor.setFilter("icmp and dst" + ); <add> JpcapSender sender = captor.getJpcapSenderInstance(); <add> ICMPPacket icmp = new ICMPPacket(); <add> boolean doing = true; <add> <add> while (doing) { <add> ICMPPacket icmppack = captor.getPacket(); <add> System.out.println("received " + icmppack); <add> if (icmppack == null) { <add> System.out.println("Timeout"); <add> } else if (icmppack.type == ICMPPacket.ICMP_TIMXCEED){ <add> icmppack.src_ip.getHostName(); <add> System.out.println(icmp.hop_limit + ": " + icmppack.src_ip); <add> icmp.hop_limit++; <add> } else if (icmppack.type == ICMPPacket.ICMP_UNREACH){ <add> icmppack.src_ip.getHostName(); <add> System.out.println(icmp.hop_limit+": "+ icmppack.src_ip); <add> System.exit(0); <add> } else if (icmppack.type == ICMPPacket.ICMP_ECHOREPLY){ <add> icmppack.src_ip.getHostName(); <add> System.out.println(icmp.hop_limit + ": " + icmppack.src_ip); <add> 
System.exit(0); <add> } <add> sender.sendPacket(icmp); <add> System.out.println(); <add> doing = false; <add> } <add> } <add> <add> public static void capture(JpcapCaptor captor) throws IOException { <add> <add> /* <add> captor.setFilter("arp", true); <add> System.out.println("Sender IP Address:\t" + arppack.getSenderProtocolAddress()); <add> System.out.println("Sender HW Address:\t" + arppack.getSenderHardwareAddress()); <add> System.out.println("Target IP Address:\t" + arppack.getTargetProtocolAddress()); <add> System.out.println("Target HW Address:\t" + arppack.getTargetHardwareAddress()); <add> */ <add> <add> boolean doing = true; <add> while (doing) { <add> int index = 2; <add> NetworkInterface[] devices = JpcapCaptor.getDeviceList(); <add> Packet pack = captor.getPacket(); <add> <add> if (pack != null) { <add> <add> System.out.println(pack); <add> System.out.println(); <add> doing = false; <add> } <add> } <add> } <add> <add> //Get Interfaces <add> public static void getdevs() throws IOException { <add> //for each network interface <add> int index = 2; <add> NetworkInterface[] devices = JpcapCaptor.getDeviceList(); <add> JpcapCaptor captor = JpcapCaptor.openDevice(devices[index], 4096, false, 5000); <add> for (int i = 0; i < devices.length; i++) { <add> //print out its name and description <add> System.out.println(i + ": " + devices[i].name + "(" + devices[i].description + ")"); <add> <add> //print out its datalink name and description <add> System.out.println(" datalink: " + devices[i].datalink_name + "(" + devices[i].datalink_description + ")"); <add> <add> //print out its MAC address <add> System.out.print(" MAC address:"); <add> for (byte b : devices[i].mac_address) { <add> System.out.print(Integer.toHexString(b&0xff) + ":"); <add> } <add> System.out.println(); <add> <add> //print out its IP address, subnet mask and broadcast address <add> for (NetworkInterfaceAddress a : devices[i].addresses) { <add> System.out.println(" address: " + a.address + " " + 
a.subnet + " " + a.broadcast); <add> } <add> System.out.println(); <add> } <add> } <add>}
Java
apache-2.0
85dab42e8b9a584dcabf8027c4da651e3217f678
0
opennetworkinglab/onos,oplinkoms/onos,kuujo/onos,osinstom/onos,gkatsikas/onos,osinstom/onos,kuujo/onos,kuujo/onos,oplinkoms/onos,oplinkoms/onos,oplinkoms/onos,kuujo/onos,kuujo/onos,gkatsikas/onos,opennetworkinglab/onos,osinstom/onos,gkatsikas/onos,kuujo/onos,opennetworkinglab/onos,opennetworkinglab/onos,oplinkoms/onos,osinstom/onos,osinstom/onos,opennetworkinglab/onos,gkatsikas/onos,gkatsikas/onos,oplinkoms/onos,gkatsikas/onos,oplinkoms/onos,opennetworkinglab/onos,kuujo/onos
/* * Copyright 2016-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.castor; import javax.xml.bind.annotation.XmlRootElement; import com.google.common.base.MoreObjects; import com.google.common.base.Objects; /** * POJO class for the Peer and the Route Servers. */ @XmlRootElement public class Peer { private String name; private String dpid; private String ipAddress; private String port; private boolean l2; public Peer() {} public Peer(String name, String dpid, String ipAddress, String port) { this.name = name; this.dpid = dpid; this.ipAddress = ipAddress; this.port = port; this.l2 = false; } public void setDpid(String dpid) { this.dpid = dpid; } public void setIpAddress(String ipAddress) { this.ipAddress = ipAddress; } public void setPort(String port) { this.port = port; } /** * The name of the Peer or Customer to be added. * * @param name A String name. */ public void setName(String name) { this.name = name; } /** * Specifies if the layer two flows for this peer are configured or not. * * @param value True if layer two configured. */ public void setL2(boolean value) { this.l2 = value; } /** * Returns the name of the Peer or the Customer. * * @return The String name. */ public String getName() { return name; } /** * Returns the IP Address of the Peer. * * @return IP Address. */ public String getIpAddress() { return ipAddress; } /** * Returns the port number where the Peer is attached. 
* * @return String Connect Point */ public String getPort() { return port; } /** * Returns the layer two status of the Peer. * * @return True if layer two set. */ public boolean getl2Status() { return l2; } public String getDpid() { return dpid; } @Override public boolean equals(Object ob) { if (ob == null) { return false; } if (getClass() != ob.getClass()) { return false; } Peer other = (Peer) ob; return Objects.equal(this.ipAddress, other.ipAddress); } @Override public int hashCode() { return Objects.hashCode(this.ipAddress); } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("ipAddress", ipAddress) .add("name", name) .add("dpid", dpid) .add("port", port) .toString(); } }
apps/castor/src/main/java/org/onosproject/castor/Peer.java
/* * Copyright 2016-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.castor; import javax.xml.bind.annotation.XmlRootElement; /** * POJO class for the Peer and the Route Servers. */ @XmlRootElement public class Peer { private String name; private String dpid; private String ipAddress; private String port; private boolean l2; public Peer() {} public Peer(String name, String dpid, String ipAddress, String port) { this.name = name; this.dpid = dpid; this.ipAddress = ipAddress; this.port = port; this.l2 = false; } public void setDpid(String dpid) { this.dpid = dpid; } public void setIpAddress(String ipAddress) { this.ipAddress = ipAddress; } public void setPort(String port) { this.port = port; } /** * The name of the Peer or Customer to be added. * * @param name A String name. */ public void setName(String name) { this.name = name; } /** * Specifies if the layer two flows for this peer are configured or not. * * @param value True if layer two configured. */ public void setL2(boolean value) { this.l2 = value; } /** * Returns the name of the Peer or the Customer. * * @return The String name. */ public String getName() { return name; } /** * Returns the IP Address of the Peer. * * @return IP Address. */ public String getIpAddress() { return ipAddress; } /** * Returns the port number where the Peer is attached. 
* * @return String Connect Point */ public String getPort() { return port; } /** * Returns the layer two status of the Peer. * * @return True if layer two set. */ public boolean getl2Status() { return l2; } public String getDpid() { return dpid; } @Override public boolean equals(Object ob) { if (ob == null) { return false; } Peer other = (Peer) ob; if (this.ipAddress.equals(other.ipAddress)) { return true; } return false; } @Override public int hashCode() { int hash = 3; hash = 53 * hash + (this.ipAddress != null ? this.ipAddress.hashCode() : 0); return hash; } }
Sonar fix - bad equals method - Sonar deteced that the equals() method was not detecting type mismatch - Implemented hashCode(), equals(), and toString using guava Change-Id: I8ffea83e70a9c214d943767c8c6e74e940255c43
apps/castor/src/main/java/org/onosproject/castor/Peer.java
Sonar fix - bad equals method
<ide><path>pps/castor/src/main/java/org/onosproject/castor/Peer.java <ide> package org.onosproject.castor; <ide> <ide> import javax.xml.bind.annotation.XmlRootElement; <add>import com.google.common.base.MoreObjects; <add>import com.google.common.base.Objects; <ide> <ide> /** <ide> * POJO class for the Peer and the Route Servers. <ide> if (ob == null) { <ide> return false; <ide> } <add> if (getClass() != ob.getClass()) { <add> return false; <add> } <ide> Peer other = (Peer) ob; <del> if (this.ipAddress.equals(other.ipAddress)) { <del> return true; <del> } <del> return false; <add> return Objects.equal(this.ipAddress, other.ipAddress); <ide> } <ide> <ide> @Override <ide> public int hashCode() { <del> int hash = 3; <del> hash = 53 * hash + (this.ipAddress != null ? this.ipAddress.hashCode() : 0); <del> return hash; <add> return Objects.hashCode(this.ipAddress); <add> } <add> <add> @Override <add> public String toString() { <add> <add> return MoreObjects.toStringHelper(this) <add> .add("ipAddress", ipAddress) <add> .add("name", name) <add> .add("dpid", dpid) <add> .add("port", port) <add> .toString(); <add> <ide> } <ide> }
JavaScript
mit
fd2c8ff9e3792026a9bdc3c226228656ed60df5c
0
synzen/Discord.RSS,synzen/Discord.RSS
const config = require('../config.js') const FeedSchedule = require('./FeedSchedule.js') const debugFeeds = require('../util/debugFeeds.js').list const ArticleMessageQueue = require('./ArticleMessageQueue.js') const log = require('../util/logger.js') const dbOpsSchedules = require('../util/db/schedules.js') const dbOpsGuilds = require('../util/db/guilds.js') const dbOpsVips = require('../util/db/vips.js') const AssignedScheduleModel = require('../models/AssignedSchedule.js') const ArticleModel = require('../models/Article.js') class ScheduleManager { constructor (bot) { this.bot = bot this.articleMessageQueue = new ArticleMessageQueue() this.scheduleList = [] } async _queueArticle (article) { if (debugFeeds.includes(article._delivery.rssName)) log.debug.info(`${article._delivery.rssName} ScheduleManager queueing article ${article.link} to send`) try { await this.articleMessageQueue.send(article) } catch (err) { if (config.log.linkErrs === true) { const channel = this.bot.channels.get(article._delivery.channelId) log.general.warning(`Failed to send article ${article.link}`, channel.guild, channel, err) if (err.code === 50035) channel.send(`Failed to send formatted article for article <${article.link}> due to misformation.\`\`\`${err.message}\`\`\``).catch(err => log.general.warning(`Unable to send failed-to-send message for article`, err)) } } } _finishSchedule () { this.articleMessageQueue.sendDelayed() } async addSchedule (schedule, assignAllSchedules, doNotStart) { if (!schedule) { throw new TypeError('Undefined schedule') } if (schedule.name !== 'default' && (!schedule.refreshRateMinutes || (!schedule.keywords && !schedule.feedIDs))) { throw new TypeError('refreshRateMinutes, keywords or feedIDs is missing in schedule to addSchedule') } if (this.scheduleList.length === 0 && (!this.bot.shard || this.bot.shard.count === 0)) { await dbOpsSchedules.schedules.clear() // Only clear if it is unsharded, otherwise it's clearing multiple times on multiple shards } const 
feedSchedule = new FeedSchedule(this.bot, schedule, this) await dbOpsSchedules.schedules.add(schedule.name, schedule.refreshRateMinutes) this.scheduleList.push(feedSchedule) feedSchedule.on('article', this._queueArticle.bind(this)) feedSchedule.on('finish', this._finishSchedule.bind(this)) if (this.bot.shard && this.bot.shard.count > 0) { process.send({ _drss: true, type: 'addCustomSchedule', schedule: schedule }) } if (assignAllSchedules) { await this.assignAllSchedules() } if (!doNotStart) { feedSchedule.start() } } run (refreshRate) { // Run schedules with respect to their refresh times for (var feedSchedule of this.scheduleList) { if (feedSchedule.refreshRate === refreshRate) { return feedSchedule.run().catch(err => log.cycle.error(`${this.bot.shard && this.bot.shard.count > 0 ? `SH ${this.bot.shard.id} ` : ''}Schedule ${this.name} failed to run cycle`, err)) } } // If there is no schedule with that refresh time if (this.bot.shard && this.bot.shard.count > 0) process.send({ _drss: true, type: 'scheduleComplete', refreshRate }) } stopSchedules () { this.scheduleList.forEach(schedule => schedule.stop()) } startSchedules () { this.scheduleList.forEach(schedule => schedule.start()) } getSchedule (name) { for (const schedule of this.scheduleList) { if (schedule.name === name) return schedule } } getScheduleOfFeedID (feedID) { for (const schedule of this.scheduleList) { if (schedule.feedIDs.has(feedID)) return schedule } } async assignAllSchedules () { // Remove the old schedules if (!this.bot.shard || this.bot.shard.count === 0) { // Only clear if it is unsharded, otherwise it's clearing multiple times on multiple shards await dbOpsSchedules.assignedSchedules.clear() } const schedulesByName = {} for (const schedule of this.scheduleList) { schedule.feedIDs.clear() schedulesByName[schedule.name] = schedule } const guildRssList = await dbOpsGuilds.getAll() const vipServers = [] if (config._vip === true) { const vipUsers = await dbOpsVips.getAll() for (const vipUser of 
vipUsers) { if (vipUser.invalid) continue for (const serverId of vipUser.servers) vipServers.push(serverId) } } const scheduleDeterminationPromises = [] const feedRecords = [] guildRssList.forEach(guildRss => { if (!this.bot.guilds.has(guildRss.id)) return const rssList = guildRss.sources for (const rssName in rssList) { scheduleDeterminationPromises.push(this.determineSchedule(rssName, guildRss, vipServers)) feedRecords.push({ feedID: rssName, guildID: guildRss.id, link: rssList[rssName].link }) } }) const scheduleNames = await Promise.all(scheduleDeterminationPromises) const documentsToInsert = [] const AssignedSchedule = AssignedScheduleModel.model() const shard = this.bot.shard && this.bot.shard.count > 0 ? this.bot.shard.id : -1 for (let i = 0; i < scheduleNames.length; ++i) { const scheduleName = scheduleNames[i] const { feedID, link, guildID } = feedRecords[i] schedulesByName[scheduleName].feedIDs.add(feedID) const toInsert = { feedID, schedule: scheduleName, link, guildID, shard } documentsToInsert.push(new AssignedSchedule(toInsert)) } await dbOpsSchedules.assignedSchedules.setMany(documentsToInsert) } async determineSchedule (rssName, guildRss, vipServers) { if (config._vip === true && !vipServers) { vipServers = [] const vipUsers = await dbOpsVips.getAll() for (const vipUser of vipUsers) { if (vipUser.invalid) continue for (const serverId of vipUser.servers) vipServers.push(serverId) } } const shardID = this.bot.shard ? 
this.bot.shard.id : undefined const source = guildRss.sources[rssName] let assignedSchedule = await dbOpsSchedules.assignedSchedules.get(rssName, shardID) // Take care of our VIPs if (config._vip === true && !source.link.includes('feed43')) { const validVip = vipServers.includes(guildRss.id) if (validVip) { if (assignedSchedule !== 'vip') { return 'vip' } } } if (!assignedSchedule) { for (const schedule of this.scheduleList) { if (schedule.name === 'default' || (config._vip === true && schedule.name === 'vip')) continue // Check if non-default schedules first // rssnames first const feedIDs = schedule.feedIDs // Potential array if (feedIDs && feedIDs.has(rssName)) { return schedule.name } // keywords second const sKeywords = schedule.keywords if (!sKeywords) continue for (const word of sKeywords) { if (!source.link.includes(word)) continue return schedule.name } } if (!assignedSchedule) return 'default' } } async assignSchedule (feedID, guildRss, vipServers) { const scheduleName = await this.determineSchedule(feedID, guildRss, vipServers) const schedule = this.getSchedule(scheduleName) schedule.feedIDs.add(feedID) await dbOpsSchedules.assignedSchedules.set(feedID, scheduleName, guildRss.sources[feedID].link, guildRss.id) return scheduleName } async removeScheduleOfFeed (feedID, link) { const schedule = await this.getScheduleOfFeedID(feedID) if (!schedule) return schedule.feedIDs.delete(feedID) const shardID = this.bot.shard ? this.bot.shard.id : 0 await dbOpsSchedules.assignedSchedules.remove(feedID) const assignedSchedules = await dbOpsSchedules.assignedSchedules.getMany(shardID, schedule.name, link) if (assignedSchedules.length === 0 && config.database.uri.startsWith('mongo')) { ArticleModel.model(link, shardID, schedule.name).collection.drop().catch(err => err.code === 26 ? 
null : log.general.error('Failed to drop unused collection after feed removal', err)) } } cyclesInProgress (name) { for (var feedSchedule of this.scheduleList.length) { if (name && feedSchedule.name === name && feedSchedule.inProgress) return true else if (feedSchedule.inProgress) return true } return false } } module.exports = ScheduleManager
src/structs/ScheduleManager.js
const config = require('../config.js') const FeedSchedule = require('./FeedSchedule.js') const debugFeeds = require('../util/debugFeeds.js').list const ArticleMessageQueue = require('./ArticleMessageQueue.js') const log = require('../util/logger.js') const dbOpsSchedules = require('../util/db/schedules.js') const dbOpsGuilds = require('../util/db/guilds.js') const dbOpsVips = require('../util/db/vips.js') const AssignedScheduleModel = require('../models/AssignedSchedule.js') const ArticleModel = require('../models/Article.js') class ScheduleManager { constructor (bot) { this.bot = bot this.articleMessageQueue = new ArticleMessageQueue() this.scheduleList = [] } async _queueArticle (article) { if (debugFeeds.includes(article._delivery.rssName)) log.debug.info(`${article._delivery.rssName} ScheduleManager queueing article ${article.link} to send`) try { await this.articleMessageQueue.send(article) } catch (err) { if (config.log.linkErrs === true) { const channel = this.bot.channels.get(article._delivery.channelId) log.general.warning(`Failed to send article ${article.link}`, channel.guild, channel, err) if (err.code === 50035) channel.send(`Failed to send formatted article for article <${article.link}> due to misformation.\`\`\`${err.message}\`\`\``).catch(err => log.general.warning(`Unable to send failed-to-send message for article`, err)) } } } _finishSchedule () { this.articleMessageQueue.sendDelayed() } async addSchedule (schedule, assignAllSchedules, doNotStart) { if (!schedule) { throw new TypeError('Undefined schedule') } if (schedule.name !== 'default' && (!schedule.refreshRateMinutes || (!schedule.keywords && !schedule.feedIDs))) { throw new TypeError('refreshRateMinutes, keywords or feedIDs is missing in schedule to addSchedule') } if (this.scheduleList.length === 0 && (!this.bot.shard || this.bot.shard.count === 0)) { await dbOpsSchedules.schedules.clear() // Only clear if it is unsharded, otherwise it's clearing multiple times on multiple shards } const 
feedSchedule = new FeedSchedule(this.bot, schedule, this) await dbOpsSchedules.schedules.add(schedule.name, schedule.refreshRateMinutes) this.scheduleList.push(feedSchedule) feedSchedule.on('article', this._queueArticle.bind(this)) feedSchedule.on('finish', this._finishSchedule.bind(this)) if (this.bot.shard && this.bot.shard.count > 0) { process.send({ _drss: true, type: 'addCustomSchedule', schedule: schedule }) } if (assignAllSchedules) { await this.assignAllSchedules() } if (!doNotStart) { feedSchedule.start() } } run (refreshRate) { // Run schedules with respect to their refresh times for (var feedSchedule of this.scheduleList) { if (feedSchedule.refreshRate === refreshRate) { return feedSchedule.run().catch(err => log.cycle.error(`${this.bot.shard && this.bot.shard.count > 0 ? `SH ${this.bot.shard.id} ` : ''}Schedule ${this.name} failed to run cycle`, err)) } } // If there is no schedule with that refresh time if (this.bot.shard && this.bot.shard.count > 0) process.send({ _drss: true, type: 'scheduleComplete', refreshRate }) } stopSchedules () { this.scheduleList.forEach(schedule => schedule.stop()) } startSchedules () { this.scheduleList.forEach(schedule => schedule.start()) } getSchedule (name) { for (const schedule of this.scheduleList) { if (schedule.name === name) return schedule } } getScheduleOfFeedID (feedID) { for (const schedule of this.scheduleList) { if (schedule.feedIDs.has(feedID)) return schedule } } async assignAllSchedules () { // Remove the old schedules if (!this.bot.shard || this.bot.shard.count === 0) { // Only clear if it is unsharded, otherwise it's clearing multiple times on multiple shards await dbOpsSchedules.assignedSchedules.clear() } const schedulesByName = {} for (const schedule of this.scheduleList) { schedule.feedIDs.clear() schedulesByName[schedule.name] = schedule } const guildRssList = await dbOpsGuilds.getAll() const vipServers = [] if (config._vip === true) { const vipUsers = await dbOpsVips.getAll() for (const vipUser of 
vipUsers) { if (vipUser.invalid) continue for (const serverId of vipUser.servers) vipServers.push(serverId) } } const scheduleDeterminationPromises = [] const feedRecords = [] guildRssList.forEach(guildRss => { if (!this.bot.guilds.has(guildRss.id)) return const rssList = guildRss.sources for (const rssName in rssList) { scheduleDeterminationPromises.push(this.determineSchedule(rssName, guildRss, vipServers)) feedRecords.push({ feedID: rssName, guildID: guildRss.id, link: rssList[rssName].link }) } }) const scheduleNames = await Promise.all(scheduleDeterminationPromises) const documentsToInsert = [] const AssignedSchedule = AssignedScheduleModel.model() const shard = this.bot.shard && this.bot.shard.count > 0 ? this.bot.shard.id : -1 for (let i = 0; i < scheduleNames.length; ++i) { const scheduleName = scheduleNames[i] const { feedID, link, guildID } = feedRecords[i] schedulesByName[scheduleName].feedIDs.add(feedID) const toInsert = { feedID, schedule: scheduleName, link, guildID, shard } documentsToInsert.push(new AssignedSchedule(toInsert)) } await dbOpsSchedules.assignedSchedules.setMany(documentsToInsert) } async determineSchedule (rssName, guildRss, vipServers) { if (config._vip === true && !vipServers) { vipServers = [] const vipUsers = await dbOpsVips.getAll() for (const vipUser of vipUsers) { if (vipUser.invalid) continue for (const serverId of vipUser.servers) vipServers.push(serverId) } } const shardID = this.bot.shard ? 
this.bot.shard.id : undefined const source = guildRss.sources[rssName] let assignedSchedule = await dbOpsSchedules.assignedSchedules.get(rssName, shardID) // Take care of our VIPs if (config._vip === true && !source.link.includes('feed43')) { const validVip = vipServers.includes(guildRss.id) if (validVip) { if (assignedSchedule !== 'vip') { return 'vip' } } } if (!assignedSchedule) { for (const schedule of this.scheduleList) { if (schedule.name === 'default' || (config._vip === true && schedule.name === 'vip')) continue // Check if non-default schedules first // rssnames first const feedIDs = schedule.feedIDs // Potential array if (feedIDs && feedIDs.has(rssName)) { return schedule.name } // keywords second const sKeywords = schedule.keywords if (!sKeywords) continue for (const word of sKeywords) { if (!source.link.includes(word)) continue return schedule.name } } if (!assignedSchedule) return 'default' } } async assignSchedule (feedID, guildRss, vipServers) { const scheduleName = await this.determineSchedule(feedID, guildRss, vipServers) const schedule = this.getSchedule(scheduleName) schedule.feedIDs.add(feedID) await dbOpsSchedules.assignedSchedules.set(feedID, scheduleName, guildRss.sources[feedID].link, guildRss.id) return scheduleName } async removeScheduleOfFeed (feedID, link) { const schedule = await this.getScheduleOfFeedID(feedID) if (!schedule) return schedule.feedIDs.delete(feedID) const shardID = this.bot.shard ? 
this.bot.shard.id : 0 await dbOpsSchedules.assignedSchedules.remove(feedID) const assignedSchedules = await dbOpsSchedules.assignedSchedules.getMany(shardID, schedule.name, link) if (assignedSchedules.length === 0 && config.database.uri.startsWith('mongo')) { await ArticleModel.model(link, shardID, schedule.name).collection.drop() } } cyclesInProgress (name) { for (var feedSchedule of this.scheduleList.length) { if (name && feedSchedule.name === name && feedSchedule.inProgress) return true else if (feedSchedule.inProgress) return true } return false } } module.exports = ScheduleManager
Fix occasional internal error when using rssremove
src/structs/ScheduleManager.js
Fix occasional internal error when using rssremove
<ide><path>rc/structs/ScheduleManager.js <ide> await dbOpsSchedules.assignedSchedules.remove(feedID) <ide> const assignedSchedules = await dbOpsSchedules.assignedSchedules.getMany(shardID, schedule.name, link) <ide> if (assignedSchedules.length === 0 && config.database.uri.startsWith('mongo')) { <del> await ArticleModel.model(link, shardID, schedule.name).collection.drop() <add> ArticleModel.model(link, shardID, schedule.name).collection.drop().catch(err => err.code === 26 ? null : log.general.error('Failed to drop unused collection after feed removal', err)) <ide> } <ide> } <ide>
Java
apache-2.0
54055eb650cbfb3388e69497e0166e2ec3c1bab6
0
FHannes/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,fitermay/intellij-community,retomerz/intellij-community,allotria/intellij-community,ibinti/intellij-community,allotria/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,semonte/intellij-community,signed/intellij-community,allotria/intellij-community,ibinti/intellij-community,hurricup/intellij-community,retomerz/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,xfournet/intellij-community,signed/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,fitermay/intellij-community,asedunov/intellij-community,retomerz/intellij-community,da1z/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,signed/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,allotria/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,fitermay/intellij-community,hurricup/intellij-community,apixandru/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,xfournet/intellij-community,asedunov/intellij-community,fitermay/intellij-community,allotria/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,fitermay/
intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,xfournet/intellij-community,semonte/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,youdonghai/intellij-community,apixandru/intellij-community,fitermay/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,allotria/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,FHannes/intellij-community,apixandru/intellij-community,asedunov/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,signed/intellij-community,da1z/intellij-community,apixandru/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,FHannes/intellij-community,xfournet/intellij-community,signed/intellij-community,allotria/intellij-community,youdonghai/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,signed/intellij-community,retomerz/intellij-community,hurricup/intellij-community,allotria/intellij-community,xfournet/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,signed/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,asedunov/intellij-community,semonte/intellij-commun
ity,FHannes/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,semonte/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,retomerz/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,ibinti/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,xfournet/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,ibinti/intellij-community,allotria/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,semonte/intellij-community,semonte/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,apixandru/intellij-community,ibinti/intellij-community,da1z/intellij-community,asedunov/intellij-community,semonte/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,vvv1559/in
tellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,da1z/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,ibinti/intellij-community,FHannes/intellij-community,asedunov/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,retomerz/intellij-community,xfournet/intellij-community,hurricup/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,signed/intellij-community,fitermay/intellij-community,da1z/intellij-community,suncycheng/intellij-community,signed/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,asedunov/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.idea.maven.dom; import com.intellij.lang.properties.IProperty; import com.intellij.lang.properties.psi.PropertiesFile; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileManager; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.xml.XmlElement; import com.intellij.psi.xml.XmlFile; import com.intellij.psi.xml.XmlTag; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.xml.*; import com.intellij.util.xml.reflect.DomCollectionChildDescription; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.idea.maven.dom.model.*; import org.jetbrains.idea.maven.model.MavenConstants; import 
org.jetbrains.idea.maven.model.MavenId; import org.jetbrains.idea.maven.model.MavenResource; import org.jetbrains.idea.maven.project.MavenProject; import org.jetbrains.idea.maven.project.MavenProjectsManager; import org.jetbrains.idea.maven.utils.MavenLog; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; public class MavenDomUtil { private static final Key<Pair<Long, Set<VirtualFile>>> FILTERED_RESOURCES_ROOTS_KEY = Key.create("MavenDomUtil.FILTERED_RESOURCES_ROOTS"); // see http://maven.apache.org/settings.html private static final Set<String> SUBTAGS_IN_SETTINGS_FILE = ContainerUtil.newHashSet("localRepository", "interactiveMode", "usePluginRegistry", "offline", "pluginGroups", "servers", "mirrors", "proxies", "profiles", "activeProfiles"); public static boolean isMavenFile(PsiFile file) { return isProjectFile(file) || isProfilesFile(file) || isSettingsFile(file); } public static boolean isProjectFile(PsiFile file) { if (!(file instanceof XmlFile)) return false; String name = file.getName(); return name.equals(MavenConstants.POM_XML) || name.endsWith(".pom") || name.equals(MavenConstants.SUPER_POM_XML); } public static boolean isProfilesFile(PsiFile file) { if (!(file instanceof XmlFile)) return false; return MavenConstants.PROFILES_XML.equals(file.getName()); } public static boolean isSettingsFile(PsiFile file) { if (!(file instanceof XmlFile)) return false; //String name = file.getName(); //if (!name.equals(MavenConstants.SETTINGS_XML)) return false; XmlTag rootTag = ((XmlFile)file).getRootTag(); if (rootTag == null || !"settings".equals(rootTag.getName())) return false; String xmlns = rootTag.getAttributeValue("xmlns"); if (xmlns != null) { return xmlns.contains("maven"); } boolean hasTag = false; for (PsiElement e = rootTag.getFirstChild(); e != null; e = e.getNextSibling()) { if (e instanceof XmlTag) { if 
(SUBTAGS_IN_SETTINGS_FILE.contains(((XmlTag)e).getName())) return true; hasTag = true; } } return !hasTag; } public static boolean isMavenFile(PsiElement element) { return isMavenFile(element.getContainingFile()); } @Nullable public static Module findContainingMavenizedModule(@NotNull PsiFile psiFile) { VirtualFile file = psiFile.getVirtualFile(); if (file == null) return null; Project project = psiFile.getProject(); MavenProjectsManager manager = MavenProjectsManager.getInstance(project); if (!manager.isMavenizedProject()) return null; ProjectFileIndex index = ProjectRootManager.getInstance(project).getFileIndex(); Module module = index.getModuleForFile(file); if (module == null || !manager.isMavenizedModule(module)) return null; return module; } public static boolean isMavenProperty(PsiElement target) { XmlTag tag = PsiTreeUtil.getParentOfType(target, XmlTag.class, false); if (tag == null) return false; return DomUtil.findDomElement(tag, MavenDomProperties.class) != null; } public static String calcRelativePath(VirtualFile parent, VirtualFile child) { String result = FileUtil.getRelativePath(parent.getPath(), child.getPath(), '/'); if (result == null) { MavenLog.LOG.warn("cannot calculate relative path for\nparent: " + parent + "\nchild: " + child); result = child.getPath(); } return FileUtil.toSystemIndependentName(result); } public static MavenDomParent updateMavenParent(MavenDomProjectModel mavenModel, MavenProject parentProject) { MavenDomParent result = mavenModel.getMavenParent(); VirtualFile pomFile = DomUtil.getFile(mavenModel).getVirtualFile(); Project project = mavenModel.getXmlElement().getProject(); MavenId parentId = parentProject.getMavenId(); result.getGroupId().setStringValue(parentId.getGroupId()); result.getArtifactId().setStringValue(parentId.getArtifactId()); result.getVersion().setStringValue(parentId.getVersion()); if (!Comparing.equal(pomFile.getParent().getParent(), parentProject.getDirectoryFile())) { 
result.getRelativePath().setValue(PsiManager.getInstance(project).findFile(parentProject.getFile())); } return result; } public static <T> T getImmediateParent(ConvertContext context, Class<T> clazz) { DomElement parentElement = context.getInvocationElement().getParent(); return clazz.isInstance(parentElement) ? (T)parentElement : null; } @Nullable public static VirtualFile getVirtualFile(@NotNull DomElement element) { PsiFile psiFile = DomUtil.getFile(element); return getVirtualFile(psiFile); } @Nullable public static VirtualFile getVirtualFile(@NotNull PsiElement element) { PsiFile psiFile = element.getContainingFile(); return getVirtualFile(psiFile); } @Nullable private static VirtualFile getVirtualFile(PsiFile psiFile) { if (psiFile == null) return null; psiFile = psiFile.getOriginalFile(); return psiFile.getVirtualFile(); } @Nullable public static MavenProject findProject(@NotNull MavenDomProjectModel projectDom) { XmlElement element = projectDom.getXmlElement(); if (element == null) return null; VirtualFile file = getVirtualFile(element); if (file == null) return null; MavenProjectsManager manager = MavenProjectsManager.getInstance(element.getProject()); return manager.findProject(file); } @Nullable public static MavenProject findContainingProject(@NotNull DomElement element) { PsiElement psi = element.getXmlElement(); return psi == null ? 
null : findContainingProject(psi); } @Nullable public static MavenProject findContainingProject(@NotNull PsiElement element) { VirtualFile file = getVirtualFile(element); if (file == null) return null; MavenProjectsManager manager = MavenProjectsManager.getInstance(element.getProject()); return manager.findContainingProject(file); } @Nullable public static MavenDomProjectModel getMavenDomProjectModel(@NotNull Project project, @NotNull VirtualFile file) { return getMavenDomModel(project, file, MavenDomProjectModel.class); } @Nullable public static MavenDomProfiles getMavenDomProfilesModel(@NotNull Project project, @NotNull VirtualFile file) { MavenDomProfilesModel model = getMavenDomModel(project, file, MavenDomProfilesModel.class); if (model != null) return model.getProfiles(); return getMavenDomModel(project, file, MavenDomProfiles.class); // try old-style model } @Nullable public static <T extends MavenDomElement> T getMavenDomModel(@NotNull Project project, @NotNull VirtualFile file, @NotNull Class<T> clazz) { if (!file.isValid()) return null; PsiFile psiFile = PsiManager.getInstance(project).findFile(file); if (psiFile == null) return null; return getMavenDomModel(psiFile, clazz); } @Nullable public static <T extends MavenDomElement> T getMavenDomModel(@NotNull PsiFile file, @NotNull Class<T> clazz) { DomFileElement<T> fileElement = getMavenDomFile(file, clazz); return fileElement == null ? 
null : fileElement.getRootElement(); } @Nullable private static <T extends MavenDomElement> DomFileElement<T> getMavenDomFile(@NotNull PsiFile file, @NotNull Class<T> clazz) { if (!(file instanceof XmlFile)) return null; return DomManager.getDomManager(file.getProject()).getFileElement((XmlFile)file, clazz); } @Nullable public static XmlTag findTag(@NotNull DomElement domElement, @NotNull String path) { List<String> elements = StringUtil.split(path, "."); if (elements.isEmpty()) return null; Pair<String, Integer> nameAndIndex = translateTagName(elements.get(0)); String name = nameAndIndex.first; Integer index = nameAndIndex.second; XmlTag result = domElement.getXmlTag(); if (result == null || !name.equals(result.getName())) return null; result = getIndexedTag(result, index); for (String each : elements.subList(1, elements.size())) { nameAndIndex = translateTagName(each); name = nameAndIndex.first; index = nameAndIndex.second; result = result.findFirstSubTag(name); if (result == null) return null; result = getIndexedTag(result, index); } return result; } private static final Pattern XML_TAG_NAME_PATTERN = Pattern.compile("(\\S*)\\[(\\d*)\\]\\z"); private static Pair<String, Integer> translateTagName(String text) { String tagName = text.trim(); Integer index = null; Matcher matcher = XML_TAG_NAME_PATTERN.matcher(tagName); if (matcher.find()) { tagName = matcher.group(1); try { index = Integer.parseInt(matcher.group(2)); } catch (NumberFormatException e) { return null; } } return Pair.create(tagName, index); } private static XmlTag getIndexedTag(XmlTag parent, Integer index) { if (index == null) return parent; XmlTag[] children = parent.getSubTags(); if (index < 0 || index >= children.length) return null; return children[index]; } @Nullable public static PropertiesFile getPropertiesFile(@NotNull Project project, @NotNull VirtualFile file) { PsiFile psiFile = PsiManager.getInstance(project).findFile(file); if (!(psiFile instanceof PropertiesFile)) return null; return 
(PropertiesFile)psiFile; } @Nullable public static IProperty findProperty(@NotNull Project project, @NotNull VirtualFile file, @NotNull String propName) { PropertiesFile propertiesFile = getPropertiesFile(project, file); return propertiesFile == null ? null : propertiesFile.findPropertyByKey(propName); } @Nullable public static PsiElement findPropertyValue(@NotNull Project project, @NotNull VirtualFile file, @NotNull String propName) { IProperty prop = findProperty(project, file, propName); return prop == null ? null : prop.getPsiElement().getFirstChild().getNextSibling().getNextSibling(); } private static Set<VirtualFile> getFilteredResourcesRoots(@NotNull MavenProject mavenProject) { Pair<Long, Set<VirtualFile>> cachedValue = mavenProject.getCachedValue(FILTERED_RESOURCES_ROOTS_KEY); if (cachedValue == null || cachedValue.first != VirtualFileManager.getInstance().getModificationCount()) { Set<VirtualFile> set = null; for (MavenResource resource : ContainerUtil.concat(mavenProject.getResources(), mavenProject.getTestResources())) { if (!resource.isFiltered()) continue; VirtualFile resourceDir = LocalFileSystem.getInstance().findFileByPath(resource.getDirectory()); if (resourceDir == null) continue; if (set == null) { set = new HashSet<VirtualFile>(); } set.add(resourceDir); } if (set == null) { set = Collections.emptySet(); } cachedValue = Pair.create(VirtualFileManager.getInstance().getModificationCount(), set); mavenProject.putCachedValue(FILTERED_RESOURCES_ROOTS_KEY, cachedValue); } return cachedValue.second; } public static boolean isFilteredResourceFile(PsiElement element) { PsiFile psiFile = element.getContainingFile(); VirtualFile file = getVirtualFile(psiFile); if (file == null) return false; MavenProjectsManager manager = MavenProjectsManager.getInstance(psiFile.getProject()); MavenProject mavenProject = manager.findContainingProject(file); if (mavenProject == null) return false; Set<VirtualFile> filteredRoots = getFilteredResourcesRoots(mavenProject); if 
(!filteredRoots.isEmpty()) { for (VirtualFile f = file.getParent(); f != null; f = f.getParent()) { if (filteredRoots.contains(f)) { return true; } } } return false; } public static List<DomFileElement<MavenDomProjectModel>> collectProjectModels(Project p) { return DomService.getInstance().getFileElements(MavenDomProjectModel.class, p, GlobalSearchScope.projectScope(p)); } public static MavenId describe(PsiFile psiFile) { MavenDomProjectModel model = getMavenDomModel(psiFile, MavenDomProjectModel.class); String groupId = model.getGroupId().getStringValue(); String artifactId = model.getArtifactId().getStringValue(); String version = model.getVersion().getStringValue(); if (groupId == null) { groupId = model.getMavenParent().getGroupId().getStringValue(); } if (version == null) { version = model.getMavenParent().getVersion().getStringValue(); } return new MavenId(groupId, artifactId, version); } @NotNull public static MavenDomDependency createDomDependency(MavenDomProjectModel model, @Nullable Editor editor, @NotNull final MavenId id) { return createDomDependency(model.getDependencies(), editor, id); } @NotNull public static MavenDomDependency createDomDependency(MavenDomDependencies dependencies, @Nullable Editor editor, @NotNull final MavenId id) { MavenDomDependency dep = createDomDependency(dependencies, editor); dep.getGroupId().setStringValue(id.getGroupId()); dep.getArtifactId().setStringValue(id.getArtifactId()); dep.getVersion().setStringValue(id.getVersion()); return dep; } @NotNull public static MavenDomDependency createDomDependency(@NotNull MavenDomProjectModel model, @Nullable Editor editor) { return createDomDependency(model.getDependencies(), editor); } @NotNull public static MavenDomDependency createDomDependency(@NotNull MavenDomDependencies dependencies, @Nullable Editor editor) { int index = getCollectionIndex(dependencies, editor); if (index >= 0) { DomCollectionChildDescription childDescription = 
dependencies.getGenericInfo().getCollectionChildDescription("dependency"); if (childDescription != null) { DomElement element = childDescription.addValue(dependencies, index); if (element instanceof MavenDomDependency) { return (MavenDomDependency)element; } } } return dependencies.addDependency(); } public static int getCollectionIndex(@NotNull final MavenDomDependencies dependencies, @Nullable final Editor editor) { if (editor != null) { int offset = editor.getCaretModel().getOffset(); List<MavenDomDependency> dependencyList = dependencies.getDependencies(); for (int i = 0; i < dependencyList.size(); i++) { MavenDomDependency dependency = dependencyList.get(i); XmlElement xmlElement = dependency.getXmlElement(); if (xmlElement != null && xmlElement.getTextRange().getStartOffset() >= offset) { return i; } } } return -1; } }
plugins/maven/src/main/java/org/jetbrains/idea/maven/dom/MavenDomUtil.java
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.idea.maven.dom; import com.intellij.lang.properties.IProperty; import com.intellij.lang.properties.psi.PropertiesFile; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileManager; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.xml.XmlElement; import com.intellij.psi.xml.XmlFile; import com.intellij.psi.xml.XmlTag; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.xml.*; import com.intellij.util.xml.reflect.DomCollectionChildDescription; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.idea.maven.dom.model.*; import org.jetbrains.idea.maven.model.MavenConstants; import 
org.jetbrains.idea.maven.model.MavenId; import org.jetbrains.idea.maven.model.MavenResource; import org.jetbrains.idea.maven.project.MavenProject; import org.jetbrains.idea.maven.project.MavenProjectsManager; import org.jetbrains.idea.maven.utils.MavenLog; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; public class MavenDomUtil { private static final Key<Pair<Long, Set<VirtualFile>>> FILTERED_RESOURCES_ROOTS_KEY = Key.create("MavenDomUtil.FILTERED_RESOURCES_ROOTS"); // see http://maven.apache.org/settings.html private static final Set<String> SUBTAGS_IN_SETTINGS_FILE = ContainerUtil.newHashSet("localRepository", "interactiveMode", "usePluginRegistry", "offline", "pluginGroups", "servers", "mirrors", "proxies", "profiles", "activeProfiles"); public static boolean isMavenFile(PsiFile file) { return isProjectFile(file) || isProfilesFile(file) || isSettingsFile(file); } public static boolean isProjectFile(PsiFile file) { if (!(file instanceof XmlFile)) return false; String name = file.getName(); return name.equals(MavenConstants.POM_XML) || name.endsWith(".pom") || name.equals(MavenConstants.SUPER_POM_XML); } public static boolean isProfilesFile(PsiFile file) { if (!(file instanceof XmlFile)) return false; return MavenConstants.PROFILES_XML.equals(file.getName()); } public static boolean isSettingsFile(PsiFile file) { if (!(file instanceof XmlFile)) return false; String name = file.getName(); if (!name.equals(MavenConstants.SETTINGS_XML)) return false; XmlTag rootTag = ((XmlFile)file).getRootTag(); if (rootTag == null || !"settings".equals(rootTag.getName())) return false; String xmlns = rootTag.getAttributeValue("xmlns"); if (xmlns != null) { return xmlns.contains("maven"); } boolean hasTag = false; for (PsiElement e = rootTag.getFirstChild(); e != null; e = e.getNextSibling()) { if (e instanceof XmlTag) { if 
(SUBTAGS_IN_SETTINGS_FILE.contains(((XmlTag)e).getName())) return true; hasTag = true; } } return !hasTag; } public static boolean isMavenFile(PsiElement element) { return isMavenFile(element.getContainingFile()); } @Nullable public static Module findContainingMavenizedModule(@NotNull PsiFile psiFile) { VirtualFile file = psiFile.getVirtualFile(); if (file == null) return null; Project project = psiFile.getProject(); MavenProjectsManager manager = MavenProjectsManager.getInstance(project); if (!manager.isMavenizedProject()) return null; ProjectFileIndex index = ProjectRootManager.getInstance(project).getFileIndex(); Module module = index.getModuleForFile(file); if (module == null || !manager.isMavenizedModule(module)) return null; return module; } public static boolean isMavenProperty(PsiElement target) { XmlTag tag = PsiTreeUtil.getParentOfType(target, XmlTag.class, false); if (tag == null) return false; return DomUtil.findDomElement(tag, MavenDomProperties.class) != null; } public static String calcRelativePath(VirtualFile parent, VirtualFile child) { String result = FileUtil.getRelativePath(parent.getPath(), child.getPath(), '/'); if (result == null) { MavenLog.LOG.warn("cannot calculate relative path for\nparent: " + parent + "\nchild: " + child); result = child.getPath(); } return FileUtil.toSystemIndependentName(result); } public static MavenDomParent updateMavenParent(MavenDomProjectModel mavenModel, MavenProject parentProject) { MavenDomParent result = mavenModel.getMavenParent(); VirtualFile pomFile = DomUtil.getFile(mavenModel).getVirtualFile(); Project project = mavenModel.getXmlElement().getProject(); MavenId parentId = parentProject.getMavenId(); result.getGroupId().setStringValue(parentId.getGroupId()); result.getArtifactId().setStringValue(parentId.getArtifactId()); result.getVersion().setStringValue(parentId.getVersion()); if (!Comparing.equal(pomFile.getParent().getParent(), parentProject.getDirectoryFile())) { 
result.getRelativePath().setValue(PsiManager.getInstance(project).findFile(parentProject.getFile())); } return result; } public static <T> T getImmediateParent(ConvertContext context, Class<T> clazz) { DomElement parentElement = context.getInvocationElement().getParent(); return clazz.isInstance(parentElement) ? (T)parentElement : null; } @Nullable public static VirtualFile getVirtualFile(@NotNull DomElement element) { PsiFile psiFile = DomUtil.getFile(element); return getVirtualFile(psiFile); } @Nullable public static VirtualFile getVirtualFile(@NotNull PsiElement element) { PsiFile psiFile = element.getContainingFile(); return getVirtualFile(psiFile); } @Nullable private static VirtualFile getVirtualFile(PsiFile psiFile) { if (psiFile == null) return null; psiFile = psiFile.getOriginalFile(); return psiFile.getVirtualFile(); } @Nullable public static MavenProject findProject(@NotNull MavenDomProjectModel projectDom) { XmlElement element = projectDom.getXmlElement(); if (element == null) return null; VirtualFile file = getVirtualFile(element); if (file == null) return null; MavenProjectsManager manager = MavenProjectsManager.getInstance(element.getProject()); return manager.findProject(file); } @Nullable public static MavenProject findContainingProject(@NotNull DomElement element) { PsiElement psi = element.getXmlElement(); return psi == null ? 
null : findContainingProject(psi); } @Nullable public static MavenProject findContainingProject(@NotNull PsiElement element) { VirtualFile file = getVirtualFile(element); if (file == null) return null; MavenProjectsManager manager = MavenProjectsManager.getInstance(element.getProject()); return manager.findContainingProject(file); } @Nullable public static MavenDomProjectModel getMavenDomProjectModel(@NotNull Project project, @NotNull VirtualFile file) { return getMavenDomModel(project, file, MavenDomProjectModel.class); } @Nullable public static MavenDomProfiles getMavenDomProfilesModel(@NotNull Project project, @NotNull VirtualFile file) { MavenDomProfilesModel model = getMavenDomModel(project, file, MavenDomProfilesModel.class); if (model != null) return model.getProfiles(); return getMavenDomModel(project, file, MavenDomProfiles.class); // try old-style model } @Nullable public static <T extends MavenDomElement> T getMavenDomModel(@NotNull Project project, @NotNull VirtualFile file, @NotNull Class<T> clazz) { if (!file.isValid()) return null; PsiFile psiFile = PsiManager.getInstance(project).findFile(file); if (psiFile == null) return null; return getMavenDomModel(psiFile, clazz); } @Nullable public static <T extends MavenDomElement> T getMavenDomModel(@NotNull PsiFile file, @NotNull Class<T> clazz) { DomFileElement<T> fileElement = getMavenDomFile(file, clazz); return fileElement == null ? 
null : fileElement.getRootElement(); } @Nullable private static <T extends MavenDomElement> DomFileElement<T> getMavenDomFile(@NotNull PsiFile file, @NotNull Class<T> clazz) { if (!(file instanceof XmlFile)) return null; return DomManager.getDomManager(file.getProject()).getFileElement((XmlFile)file, clazz); } @Nullable public static XmlTag findTag(@NotNull DomElement domElement, @NotNull String path) { List<String> elements = StringUtil.split(path, "."); if (elements.isEmpty()) return null; Pair<String, Integer> nameAndIndex = translateTagName(elements.get(0)); String name = nameAndIndex.first; Integer index = nameAndIndex.second; XmlTag result = domElement.getXmlTag(); if (result == null || !name.equals(result.getName())) return null; result = getIndexedTag(result, index); for (String each : elements.subList(1, elements.size())) { nameAndIndex = translateTagName(each); name = nameAndIndex.first; index = nameAndIndex.second; result = result.findFirstSubTag(name); if (result == null) return null; result = getIndexedTag(result, index); } return result; } private static final Pattern XML_TAG_NAME_PATTERN = Pattern.compile("(\\S*)\\[(\\d*)\\]\\z"); private static Pair<String, Integer> translateTagName(String text) { String tagName = text.trim(); Integer index = null; Matcher matcher = XML_TAG_NAME_PATTERN.matcher(tagName); if (matcher.find()) { tagName = matcher.group(1); try { index = Integer.parseInt(matcher.group(2)); } catch (NumberFormatException e) { return null; } } return Pair.create(tagName, index); } private static XmlTag getIndexedTag(XmlTag parent, Integer index) { if (index == null) return parent; XmlTag[] children = parent.getSubTags(); if (index < 0 || index >= children.length) return null; return children[index]; } @Nullable public static PropertiesFile getPropertiesFile(@NotNull Project project, @NotNull VirtualFile file) { PsiFile psiFile = PsiManager.getInstance(project).findFile(file); if (!(psiFile instanceof PropertiesFile)) return null; return 
(PropertiesFile)psiFile; } @Nullable public static IProperty findProperty(@NotNull Project project, @NotNull VirtualFile file, @NotNull String propName) { PropertiesFile propertiesFile = getPropertiesFile(project, file); return propertiesFile == null ? null : propertiesFile.findPropertyByKey(propName); } @Nullable public static PsiElement findPropertyValue(@NotNull Project project, @NotNull VirtualFile file, @NotNull String propName) { IProperty prop = findProperty(project, file, propName); return prop == null ? null : prop.getPsiElement().getFirstChild().getNextSibling().getNextSibling(); } private static Set<VirtualFile> getFilteredResourcesRoots(@NotNull MavenProject mavenProject) { Pair<Long, Set<VirtualFile>> cachedValue = mavenProject.getCachedValue(FILTERED_RESOURCES_ROOTS_KEY); if (cachedValue == null || cachedValue.first != VirtualFileManager.getInstance().getModificationCount()) { Set<VirtualFile> set = null; for (MavenResource resource : ContainerUtil.concat(mavenProject.getResources(), mavenProject.getTestResources())) { if (!resource.isFiltered()) continue; VirtualFile resourceDir = LocalFileSystem.getInstance().findFileByPath(resource.getDirectory()); if (resourceDir == null) continue; if (set == null) { set = new HashSet<VirtualFile>(); } set.add(resourceDir); } if (set == null) { set = Collections.emptySet(); } cachedValue = Pair.create(VirtualFileManager.getInstance().getModificationCount(), set); mavenProject.putCachedValue(FILTERED_RESOURCES_ROOTS_KEY, cachedValue); } return cachedValue.second; } public static boolean isFilteredResourceFile(PsiElement element) { PsiFile psiFile = element.getContainingFile(); VirtualFile file = getVirtualFile(psiFile); if (file == null) return false; MavenProjectsManager manager = MavenProjectsManager.getInstance(psiFile.getProject()); MavenProject mavenProject = manager.findContainingProject(file); if (mavenProject == null) return false; Set<VirtualFile> filteredRoots = getFilteredResourcesRoots(mavenProject); if 
(!filteredRoots.isEmpty()) { for (VirtualFile f = file.getParent(); f != null; f = f.getParent()) { if (filteredRoots.contains(f)) { return true; } } } return false; } public static List<DomFileElement<MavenDomProjectModel>> collectProjectModels(Project p) { return DomService.getInstance().getFileElements(MavenDomProjectModel.class, p, GlobalSearchScope.projectScope(p)); } public static MavenId describe(PsiFile psiFile) { MavenDomProjectModel model = getMavenDomModel(psiFile, MavenDomProjectModel.class); String groupId = model.getGroupId().getStringValue(); String artifactId = model.getArtifactId().getStringValue(); String version = model.getVersion().getStringValue(); if (groupId == null) { groupId = model.getMavenParent().getGroupId().getStringValue(); } if (version == null) { version = model.getMavenParent().getVersion().getStringValue(); } return new MavenId(groupId, artifactId, version); } @NotNull public static MavenDomDependency createDomDependency(MavenDomProjectModel model, @Nullable Editor editor, @NotNull final MavenId id) { return createDomDependency(model.getDependencies(), editor, id); } @NotNull public static MavenDomDependency createDomDependency(MavenDomDependencies dependencies, @Nullable Editor editor, @NotNull final MavenId id) { MavenDomDependency dep = createDomDependency(dependencies, editor); dep.getGroupId().setStringValue(id.getGroupId()); dep.getArtifactId().setStringValue(id.getArtifactId()); dep.getVersion().setStringValue(id.getVersion()); return dep; } @NotNull public static MavenDomDependency createDomDependency(@NotNull MavenDomProjectModel model, @Nullable Editor editor) { return createDomDependency(model.getDependencies(), editor); } @NotNull public static MavenDomDependency createDomDependency(@NotNull MavenDomDependencies dependencies, @Nullable Editor editor) { int index = getCollectionIndex(dependencies, editor); if (index >= 0) { DomCollectionChildDescription childDescription = 
dependencies.getGenericInfo().getCollectionChildDescription("dependency"); if (childDescription != null) { DomElement element = childDescription.addValue(dependencies, index); if (element instanceof MavenDomDependency) { return (MavenDomDependency)element; } } } return dependencies.addDependency(); } public static int getCollectionIndex(@NotNull final MavenDomDependencies dependencies, @Nullable final Editor editor) { if (editor != null) { int offset = editor.getCaretModel().getOffset(); List<MavenDomDependency> dependencyList = dependencies.getDependencies(); for (int i = 0; i < dependencyList.size(); i++) { MavenDomDependency dependency = dependencyList.get(i); XmlElement xmlElement = dependency.getXmlElement(); if (xmlElement != null && xmlElement.getTextRange().getStartOffset() >= offset) { return i; } } } return -1; } }
Comment out checking maven settings by file name. This check prevents navigation to maven profile defined in file with name differs from "settings.xml". Mentioned navigation usually very helpful from Maven Projects tool window by pressing F4 key on corresponding profile.
plugins/maven/src/main/java/org/jetbrains/idea/maven/dom/MavenDomUtil.java
Comment out checking maven settings by file name. This check prevents navigation to maven profile defined in file with name differs from "settings.xml". Mentioned navigation usually very helpful from Maven Projects tool window by pressing F4 key on corresponding profile.
<ide><path>lugins/maven/src/main/java/org/jetbrains/idea/maven/dom/MavenDomUtil.java <ide> public static boolean isSettingsFile(PsiFile file) { <ide> if (!(file instanceof XmlFile)) return false; <ide> <del> String name = file.getName(); <del> if (!name.equals(MavenConstants.SETTINGS_XML)) return false; <add> //String name = file.getName(); <add> //if (!name.equals(MavenConstants.SETTINGS_XML)) return false; <ide> <ide> XmlTag rootTag = ((XmlFile)file).getRootTag(); <ide> if (rootTag == null || !"settings".equals(rootTag.getName())) return false;
JavaScript
bsd-3-clause
70c9b76c7f26e9208a429f6c6fe2adcf0facdd48
0
jtg-gg/blink,nwjs/blink,Bysmyyr/blink-crosswalk,Bysmyyr/blink-crosswalk,Bysmyyr/blink-crosswalk,Bysmyyr/blink-crosswalk,smishenk/blink-crosswalk,PeterWangIntel/blink-crosswalk,kurli/blink-crosswalk,Pluto-tv/blink-crosswalk,nwjs/blink,smishenk/blink-crosswalk,nwjs/blink,smishenk/blink-crosswalk,Pluto-tv/blink-crosswalk,Bysmyyr/blink-crosswalk,kurli/blink-crosswalk,kurli/blink-crosswalk,kurli/blink-crosswalk,jtg-gg/blink,PeterWangIntel/blink-crosswalk,modulexcite/blink,PeterWangIntel/blink-crosswalk,Pluto-tv/blink-crosswalk,kurli/blink-crosswalk,PeterWangIntel/blink-crosswalk,modulexcite/blink,smishenk/blink-crosswalk,nwjs/blink,nwjs/blink,kurli/blink-crosswalk,XiaosongWei/blink-crosswalk,modulexcite/blink,Bysmyyr/blink-crosswalk,jtg-gg/blink,XiaosongWei/blink-crosswalk,modulexcite/blink,modulexcite/blink,jtg-gg/blink,smishenk/blink-crosswalk,jtg-gg/blink,smishenk/blink-crosswalk,Bysmyyr/blink-crosswalk,XiaosongWei/blink-crosswalk,modulexcite/blink,modulexcite/blink,Bysmyyr/blink-crosswalk,modulexcite/blink,Pluto-tv/blink-crosswalk,kurli/blink-crosswalk,kurli/blink-crosswalk,PeterWangIntel/blink-crosswalk,Pluto-tv/blink-crosswalk,PeterWangIntel/blink-crosswalk,Pluto-tv/blink-crosswalk,smishenk/blink-crosswalk,jtg-gg/blink,jtg-gg/blink,XiaosongWei/blink-crosswalk,Bysmyyr/blink-crosswalk,jtg-gg/blink,Pluto-tv/blink-crosswalk,PeterWangIntel/blink-crosswalk,PeterWangIntel/blink-crosswalk,modulexcite/blink,nwjs/blink,XiaosongWei/blink-crosswalk,jtg-gg/blink,kurli/blink-crosswalk,XiaosongWei/blink-crosswalk,smishenk/blink-crosswalk,modulexcite/blink,nwjs/blink,XiaosongWei/blink-crosswalk,nwjs/blink,nwjs/blink,XiaosongWei/blink-crosswalk,PeterWangIntel/blink-crosswalk,kurli/blink-crosswalk,smishenk/blink-crosswalk,PeterWangIntel/blink-crosswalk,nwjs/blink,Pluto-tv/blink-crosswalk,XiaosongWei/blink-crosswalk,smishenk/blink-crosswalk,Bysmyyr/blink-crosswalk,jtg-gg/blink,Pluto-tv/blink-crosswalk,XiaosongWei/blink-crosswalk,Pluto-tv/blink-crosswalk
/* * Copyright (C) 2012 Research In Motion Limited. All rights reserved. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ /** * @constructor * @extends {WebInspector.VBox} * @param {!WebInspector.NetworkRequest} request */ WebInspector.ResourceWebSocketFrameView = function(request) { WebInspector.VBox.call(this); this.registerRequiredCSS("webSocketFrameView.css"); this.element.classList.add("websocket-frame-view"); this._request = request; var columns = [ {id: "data", title: WebInspector.UIString("Data"), sortable: false, weight: 88, longText: true}, {id: "length", title: WebInspector.UIString("Length"), sortable: false, align: WebInspector.DataGrid.Align.Right, weight: 5}, {id: "time", title: WebInspector.UIString("Time"), weight: 7} ] this._dataGrid = new WebInspector.SortableDataGrid(columns, undefined, undefined, undefined, this._onContextMenu.bind(this)); this._dataGrid.setCellClass("websocket-frame-view-td"); var comparator = /** @type {!WebInspector.SortableDataGrid.NodeComparator} */ (WebInspector.ResourceWebSocketFrameNodeTimeComparator); this._dataGrid.sortNodes(comparator, true); this.refresh(); this._dataGrid.setName("ResourceWebSocketFrameView"); this._dataGrid.show(this.element); } /** @enum {number} */ WebInspector.ResourceWebSocketFrameView.OpCodes = { ContinuationFrame: 0, 
TextFrame: 1, BinaryFrame: 2, ConnectionCloseFrame: 8, PingFrame: 9, PongFrame: 10 }; /** @type {!Array.<string> } */ WebInspector.ResourceWebSocketFrameView.opCodeDescriptions = (function() { var opCodes = WebInspector.ResourceWebSocketFrameView.OpCodes; var map = []; map[opCodes.ContinuationFrame] = "Continuation Frame"; map[opCodes.TextFrame] = "Text Frame"; map[opCodes.BinaryFrame] = "Binary Frame"; map[opCodes.ContinuationFrame] = "Connection Close Frame"; map[opCodes.PingFrame] = "Ping Frame"; map[opCodes.PongFrame] = "Pong Frame"; return map; })(); /** * @param {number} opCode * @param {boolean} mask * @return {string} */ WebInspector.ResourceWebSocketFrameView.opCodeDescription = function(opCode, mask) { var rawDescription = WebInspector.ResourceWebSocketFrameView.opCodeDescriptions[opCode] || ""; var localizedDescription = WebInspector.UIString(rawDescription); return WebInspector.UIString("%s (Opcode %d%s)", localizedDescription, opCode, (mask ? ", mask" : "")); } WebInspector.ResourceWebSocketFrameView.prototype = { refresh: function() { this._dataGrid.rootNode().removeChildren(); var frames = this._request.frames(); for (var i = frames.length - 1; i >= 0; --i) this._dataGrid.insertChild(new WebInspector.ResourceWebSocketFrameNode(frames[i])); }, show: function(parentElement, insertBefore) { this.refresh(); WebInspector.View.prototype.show.call(this, parentElement, insertBefore); }, /** * @param {!WebInspector.ContextMenu} contextMenu * @param {!WebInspector.DataGridNode} node */ _onContextMenu: function(contextMenu, node) { contextMenu.appendItem(WebInspector.UIString(WebInspector.useLowerCaseMenuTitles() ? 
"Copy message" : "Copy Message"), this._copyMessage.bind(this, node.data)); }, /** * @param {!Object} row */ _copyMessage: function(row) { InspectorFrontendHost.copyText(row.data); }, __proto__: WebInspector.VBox.prototype } /** * @constructor * @extends {WebInspector.SortableDataGridNode} * @param {!WebInspector.NetworkRequest.WebSocketFrame} frame */ WebInspector.ResourceWebSocketFrameNode = function(frame) { this._frame = frame; this._dataText = frame.text; this._length = frame.text.length; this._timeText = (new Date(frame.time * 1000)).toLocaleTimeString(); this._isTextFrame = frame.opCode === WebInspector.ResourceWebSocketFrameView.OpCodes.TextFrame; if (!this._isTextFrame) this._dataText = WebInspector.ResourceWebSocketFrameView.opCodeDescription(frame.opCode, frame.mask); WebInspector.SortableDataGridNode.call(this, {data: this._dataText, length: this._length, time: this._timeText}); } WebInspector.ResourceWebSocketFrameNode.prototype = { /** override */ createCells: function() { var element = this._element; element.classList.toggle("websocket-frame-view-row-error", this._frame.type === WebInspector.NetworkRequest.WebSocketFrameType.Error); element.classList.toggle("websocket-frame-view-row-outcoming", this._frame.type === WebInspector.NetworkRequest.WebSocketFrameType.Send); element.classList.toggle("websocket-frame-view-row-opcode", !this._isTextFrame); WebInspector.SortableDataGridNode.prototype.createCells.call(this); }, __proto__: WebInspector.SortableDataGridNode.prototype } /** * @param {!WebInspector.ResourceWebSocketFrameNode} a * @param {!WebInspector.ResourceWebSocketFrameNode} b * @return {number} */ WebInspector.ResourceWebSocketFrameNodeTimeComparator = function(a, b) { return a._frame.time - b._frame.time; }
Source/devtools/front_end/network/ResourceWebSocketFrameView.js
/* * Copyright (C) 2012 Research In Motion Limited. All rights reserved. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ /** * @constructor * @extends {WebInspector.VBox} * @param {!WebInspector.NetworkRequest} request */ WebInspector.ResourceWebSocketFrameView = function(request) { WebInspector.VBox.call(this); this.registerRequiredCSS("webSocketFrameView.css"); this.element.classList.add("websocket-frame-view"); this._request = request; this.element.removeChildren(); var columns = [ {id: "data", title: WebInspector.UIString("Data"), sortable: false, weight: 88, longText: true}, {id: "length", title: WebInspector.UIString("Length"), sortable: false, align: WebInspector.DataGrid.Align.Right, weight: 5}, {id: "time", title: WebInspector.UIString("Time"), weight: 7} ] this._dataGrid = new WebInspector.SortableDataGrid(columns, undefined, undefined, undefined, this._onContextMenu.bind(this)); this._dataGrid.setCellClass("websocket-frame-view-td"); var comparator = /** @type {!WebInspector.SortableDataGrid.NodeComparator} */ (WebInspector.ResourceWebSocketFrameNodeTimeComparator); this._dataGrid.sortNodes(comparator, true); this.refresh(); this._dataGrid.setName("ResourceWebSocketFrameView"); this._dataGrid.show(this.element); } /** @enum {number} */ WebInspector.ResourceWebSocketFrameView.OpCodes = { 
ContinuationFrame: 0, TextFrame: 1, BinaryFrame: 2, ConnectionCloseFrame: 8, PingFrame: 9, PongFrame: 10 }; /** @type {!Array.<string> } */ WebInspector.ResourceWebSocketFrameView.opCodeDescriptions = (function() { var opCodes = WebInspector.ResourceWebSocketFrameView.OpCodes; var map = []; map[opCodes.ContinuationFrame] = "Continuation Frame"; map[opCodes.TextFrame] = "Text Frame"; map[opCodes.BinaryFrame] = "Binary Frame"; map[opCodes.ContinuationFrame] = "Connection Close Frame"; map[opCodes.PingFrame] = "Ping Frame"; map[opCodes.PongFrame] = "Pong Frame"; return map; })(); /** * @param {number} opCode * @param {boolean} mask * @return {string} */ WebInspector.ResourceWebSocketFrameView.opCodeDescription = function(opCode, mask) { var rawDescription = WebInspector.ResourceWebSocketFrameView.opCodeDescriptions[opCode] || ""; var localizedDescription = WebInspector.UIString(rawDescription); return WebInspector.UIString("%s (Opcode %d%s)", localizedDescription, opCode, (mask ? ", mask" : "")); } WebInspector.ResourceWebSocketFrameView.prototype = { refresh: function() { this._dataGrid.rootNode().removeChildren(); var frames = this._request.frames(); for (var i = frames.length - 1; i >= 0; --i) this._dataGrid.insertChild(new WebInspector.ResourceWebSocketFrameNode(frames[i])); }, show: function(parentElement, insertBefore) { this.refresh(); WebInspector.View.prototype.show.call(this, parentElement, insertBefore); }, /** * @param {!WebInspector.ContextMenu} contextMenu * @param {!WebInspector.DataGridNode} node */ _onContextMenu: function(contextMenu, node) { contextMenu.appendItem(WebInspector.UIString(WebInspector.useLowerCaseMenuTitles() ? 
"Copy message" : "Copy Message"), this._copyMessage.bind(this, node.data)); }, /** * @param {!Object} row */ _copyMessage: function(row) { InspectorFrontendHost.copyText(row.data); }, __proto__: WebInspector.VBox.prototype } /** * @constructor * @extends {WebInspector.SortableDataGridNode} * @param {!WebInspector.NetworkRequest.WebSocketFrame} frame */ WebInspector.ResourceWebSocketFrameNode = function(frame) { this._frame = frame; this._dataText = frame.text; this._length = frame.text.length; this._timeText = (new Date(frame.time * 1000)).toLocaleTimeString(); this._isTextFrame = frame.opCode === WebInspector.ResourceWebSocketFrameView.OpCodes.TextFrame; if (!this._isTextFrame) this._dataText = WebInspector.ResourceWebSocketFrameView.opCodeDescription(frame.opCode, frame.mask); WebInspector.SortableDataGridNode.call(this, {data: this._dataText, length: this._length, time: this._timeText}); } WebInspector.ResourceWebSocketFrameNode.prototype = { /** override */ createCells: function() { var element = this._element; element.classList.toggle("websocket-frame-view-row-error", this._frame.type === WebInspector.NetworkRequest.WebSocketFrameType.Error); element.classList.toggle("websocket-frame-view-row-outcoming", this._frame.type === WebInspector.NetworkRequest.WebSocketFrameType.Send); element.classList.toggle("websocket-frame-view-row-opcode", !this._isTextFrame); WebInspector.SortableDataGridNode.prototype.createCells.call(this); }, __proto__: WebInspector.SortableDataGridNode.prototype } /** * @param {!WebInspector.ResourceWebSocketFrameNode} a * @param {!WebInspector.ResourceWebSocketFrameNode} b * @return {number} */ WebInspector.ResourceWebSocketFrameNodeTimeComparator = function(a, b) { return a._frame.time - b._frame.time; }
DevTools: NetworkPanel: fix - WS frames are not shown. This is because injected stylesheet is removed by obsolete code. BUG= Review URL: https://codereview.chromium.org/660443002 git-svn-id: bf5cd6ccde378db821296732a091cfbcf5285fbd@183739 bbb929c8-8fbe-4397-9dbb-9b2b20218538
Source/devtools/front_end/network/ResourceWebSocketFrameView.js
DevTools: NetworkPanel: fix - WS frames are not shown.
<ide><path>ource/devtools/front_end/network/ResourceWebSocketFrameView.js <ide> this.registerRequiredCSS("webSocketFrameView.css"); <ide> this.element.classList.add("websocket-frame-view"); <ide> this._request = request; <del> this.element.removeChildren(); <ide> <ide> var columns = [ <ide> {id: "data", title: WebInspector.UIString("Data"), sortable: false, weight: 88, longText: true},
JavaScript
mit
7744d5808e0a0f873033c3833eeaa4857921e10f
0
panxzz/NN-blackout,panxzz/NN-blackout,panxzz/NN-blackout
function displayMessage() { $(document.body).append("<div id='black-background' style='background:rgba(0, 0, 0, 0.6); position: fixed; top:0; left:0; width: 100%; height: 100%;'></div>"); $(document.body).append("<div id='net-message' style='font-family: Arial, sans serif; text-align:center; padding:20px; width:80%; background:#eee; position:fixed; left:10%; top:25%; border-radius:5px; border:5px solid black;'></div>") $("#net-message").append("<h1 style='margin-top:5px;'>This is the internet <strong>without</strong> Net Neutrality!</h1>"); $("#net-message").append("<h4>Without Net Neutrality, Internet Service Providers (ISPs) like Comcast and AT&T will be able to control exactly how you access the internet, which includes slowing or blocking your connection to websites they don't like.</h4>"); $("#net-message").append("<p>Can you imagine if you wanted to switch from Comcast to one of their competitors, but you were unable to look into alternatives from your home network? This is a real possibility if Net Neutrality didn't exist because Comcast would have control over what you can browse on the internet.</p>"); $("#net-message").append("<h2>What happens without Net Neutrality?</h2>"); $("#net-message").append("<ul>"); $("#net-message").append("<li style='padding:5px 50px 5px 50px;'>Comcast wants us to watch their own video content... They would have the ability to break or cripple Netflix forcing us to use Comcast's offerings.</li>"); $("#net-message").append("<li style='padding:5px 50px 5px 50px;'>If you use Facebook or Instagram you would have to purchase the 'Social Bundle' for $15 extra per month. 
To use Netflix and Hulu you need to pay $20 for the 'Entertainment Bundle'.</li>"); $("#net-message").append("<li style='padding:5px 50px 5px 50px;'>Entrepreneurs with new ideas would be stifled by the extreme costs of purchasing a 'Fast Lane' or else be inaccessible due to throttled speeds.</li>"); $("#net-message").append("</ul>"); $("#net-message").append("<p><h4>In August there will be a vote to remove Net Neutrality and we need you to join our efforts to protect it!</h4></p>"); $("#net-message").append("<h1><a style='text-decoration: none;' href='https://dearfcc.org/' target='_blank'>Tell the FCC you support Net Neutrality!</a></h1>"); $("net-message").append("<p>Click anywhere outside of this popup to close</p>"); //$("#net-message").append("<h1><a style='text-decoration: none;' href='http://www.house.gov/representatives/find/' target='_blank'>Tell your representative you support Net Neutrality!</a></h1>"); $(document.body).click(function(){ $('#net-message').remove(); $('#black-background').remove(); }); }
lib/modal-display.js
function displayMessage() { $(document.body).append("<div id='black-background' style='background:rgba(0, 0, 0, 0.6); position: fixed; top:0; left:0; width: 100%; height: 100%;'></div>"); $(document.body).append("<div id='net-message' style='font-family: Arial, sans serif; text-align:center; padding:20px; width:80%; background:#eee; position:fixed; left:10%; top:25%; border-radius:5px; border:5px solid black;'></div>") $("#net-message").append("<h1 style='margin-top:5px;'>This is the internet <strong>without</strong> Net Neutrality!</h1>"); $("#net-message").append("<h4>Without Net Neutrality, Internet Service Providers (ISPs) like Comcast and AT&T will be able to control exactly how you access the internet, which includes slowing or blocking your connection to websites they don't like.</h4>"); $("#net-message").append("<p>Can you imagine if you wanted to switch from Comcast to one of their competitors, but you were unable to look into alternatives from your home network? This is a real possibility if Net Neutrality didn't exist because Comcast would have control over what you can browse on the internet.</p>"); $("#net-message").append("<h2>What happens without Net Neutrality?</h2>"); $("#net-message").append("<ul>"); $("#net-message").append("<li style='padding:5px 50px 5px 50px;'>Comcast wants us to watch their own video content... They would have the ability to break or cripple Netflix forcing us to use Comcast's offerings.</li>"); $("#net-message").append("<li style='padding:5px 50px 5px 50px;'>If you use Facebook or Instagram you would have to purchase the 'Social Bundle' for $15 extra per month. 
To use Netflix and Hulu you need to pay $20 for the 'Entertainment Bundle'.</li>"); $("#net-message").append("<li style='padding:5px 50px 5px 50px;'>Entrepreneurs with new ideas would be stifled by the extreme costs of purchasing a 'Fast Lane' or else be inaccessible due to throttled speeds.</li>"); $("#net-message").append("</ul>"); $("#net-message").append("<p><h4>In August there will be a vote to remove Net Neutrality and we need you to join our efforts to protect it!</h4></p>"); $("#net-message").append("<h1><a style='text-decoration: none;' href='https://dearfcc.org/' target='_blank'>Tell the FCC you support Net Neutrality!</a></h1>"); $("net-message").append("<p>Click anywhere outside of this popup to close"); //$("#net-message").append("<h1><a style='text-decoration: none;' href='http://www.house.gov/representatives/find/' target='_blank'>Tell your representative you support Net Neutrality!</a></h1>"); $(document.body).click(function(){ $('#net-message').remove(); $('#black-background').remove(); }); }
forgot to add closing tag
lib/modal-display.js
forgot to add closing tag
<ide><path>ib/modal-display.js <ide> $("#net-message").append("</ul>"); <ide> $("#net-message").append("<p><h4>In August there will be a vote to remove Net Neutrality and we need you to join our efforts to protect it!</h4></p>"); <ide> $("#net-message").append("<h1><a style='text-decoration: none;' href='https://dearfcc.org/' target='_blank'>Tell the FCC you support Net Neutrality!</a></h1>"); <del> $("net-message").append("<p>Click anywhere outside of this popup to close"); <add> $("net-message").append("<p>Click anywhere outside of this popup to close</p>"); <ide> //$("#net-message").append("<h1><a style='text-decoration: none;' href='http://www.house.gov/representatives/find/' target='_blank'>Tell your representative you support Net Neutrality!</a></h1>"); <ide> <ide> $(document.body).click(function(){
Java
apache-2.0
2f257166cf61a9066a92370b025163c5981e0165
0
lucafavatella/intellij-community,clumsy/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,kool79/intellij-community,jagguli/intellij-community,fnouama/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,signed/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,Distrotech/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,dslomov/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,izonder/intellij-community,Distrotech/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,allotria/intellij-community,samthor/intellij-community,diorcety/intellij-community,dslomov/intellij-community,da1z/intellij-community,robovm/robovm-studio,petteyg/intellij-community,gnuhub/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,akosyakov/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,samthor/intellij-community,apixandru/intellij-community,izonder/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,fnouama/intellij-community,diorcety/intellij-community,jagguli/intellij-community,signed/intellij-community,semonte/intellij-community,ryano144/intellij-community,izonder/intellij-community,kdwink/intellij-community,caot/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,fno
uama/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,signed/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,FHannes/intellij-community,ernestp/consulo,akosyakov/intellij-community,supersven/intellij-community,holmes/intellij-community,caot/intellij-community,slisson/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,diorcety/intellij-community,da1z/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,kool79/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,ryano144/intellij-community,hurricup/intellij-community,kdwink/intellij-community,adedayo/intellij-community,slisson/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,caot/intellij-community,wreckJ/intellij-community,kool79/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,consulo/consulo,s
igned/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,ernestp/consulo,MichaelNedzelsky/intellij-community,jagguli/intellij-community,da1z/intellij-community,kdwink/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,clumsy/intellij-community,xfournet/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,caot/intellij-community,holmes/intellij-community,allotria/intellij-community,holmes/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,supersven/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,asedunov/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,gnuhub/intellij-community,supersven/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,MER-GROUP/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,xfournet/intellij-community,kdwink/intellij-community,xfournet/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,asedunov/intellij-community,FHannes/intellij-community,semonte/int
ellij-community,wreckJ/intellij-community,supersven/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,petteyg/intellij-community,jagguli/intellij-community,amith01994/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,supersven/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,ivan-fedorov/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,consulo/consulo,mglukhikh/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,retomerz/intellij-community,holmes/intellij-community,robovm/robovm-studio,supersven/intellij-community,ryano144/intellij-community,tmpgit/intellij-community,consulo/consulo,ibinti/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,holmes/intellij-community,ahb0327/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,caot/intellij-community,supersven/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,signed/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,allotria/intellij-community,adedayo/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,fnouama/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,ftomassetti/intellij-communi
ty,izonder/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,signed/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,amith01994/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,adedayo/intellij-community,clumsy/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,jagguli/intellij-community,petteyg/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,da1z/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,TangHao1987/intellij-community,gnuhub/intellij-community,holmes/intellij-community,diorcety/intellij-community,tmpgit/intellij-community,slisson/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,semonte/intellij-community,hurricup/intellij-community,petteyg/intellij-community,ryano144/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,kdwink/intellij-community,akosy
akov/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,vladmm/intellij-community,kool79/intellij-community,suncycheng/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,xfournet/intellij-community,amith01994/intellij-community,ernestp/consulo,idea4bsd/idea4bsd,vvv1559/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,da1z/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,izonder/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,kdwink/intellij-community,da1z/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,xfournet/intellij-community,blademainer/intellij-community,vladmm/intellij-community,ernestp/consulo,holmes/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,caot/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,caot/intellij-community,TangHao1987/intellij-co
mmunity,idea4bsd/idea4bsd,amith01994/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,MER-GROUP/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,hurricup/intellij-community,petteyg/intellij-community,slisson/intellij-community,apixandru/intellij-community,petteyg/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,vladmm/intellij-community,da1z/intellij-community,wreckJ/intellij-community,semonte/intellij-community,pwoodworth/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,signed/intellij-community,fnouama/intellij-community,FHannes/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,semonte/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,apixandru/intellij-community,caot/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,Distrotech/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,blademainer/intellij-community,asedunov/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,fengbaicanhe/intellij-communi
ty,amith01994/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,dslomov/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,consulo/consulo,jagguli/intellij-community,diorcety/intellij-community,hurricup/intellij-community,amith01994/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,holmes/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,blademainer/intellij-community,adedayo/intellij-community,slisson/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,holmes/intellij-community,caot/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,clumsy/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,consulo/consulo,suncycheng/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,ivan-fedorov/inte
llij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,ibinti/intellij-community,apixandru/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,signed/intellij-community,Distrotech/intellij-community,izonder/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,ahb0327/intellij-community,allotria/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,ernestp/consulo,ivan-fedorov/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,ibinti/intellij-community,dslomov/intellij-community,vladmm/intellij-community,samthor/intellij-community,kool79/intellij-community,supersven/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,salguarnieri/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,xfournet/intellij-comm
unity,kool79/intellij-community,ryano144/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,caot/intellij-community,samthor/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,fnouama/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,ryano144/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,blademainer/intellij-community,hurricup/intellij-community,kool79/intellij-community,signed/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,SerCeMan/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,adedayo/intellij-community,robovm/robovm-studio,ryano144/intellij-community,supersven/intellij-community,ryano144/intellij-community,caot/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,ernestp/consulo,gnuhub/intellij-community,ahb0327/intellij-community,consulo/consulo,da1z/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,retomerz/intellij-community,blademainer/intellij-community,Distrotech/intellij-community,kool79/intellij-community,samthor/intellij-community
,apixandru/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,izonder/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,samthor/intellij-community,da1z/intellij-community,jagguli/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,vladmm/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,samthor/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,adedayo/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,holmes/intellij-community,gnuhub/intellij-community,da1z/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,dslomov/intellij-community,xfournet/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,slisson/intellij-community,fitermay/intellij-community,robovm/robovm-studio,retomerz/intellij-community,tmpgit/intellij-community,slisson/intellij-community,jagguli/intellij-community,asedunov/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,ftomassetti/intellij-community
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.impl; import com.intellij.openapi.application.ReadActionProcessor; import com.intellij.openapi.progress.ProgressIndicatorProvider; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.PackageIndex; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileFilter; import com.intellij.psi.*; import com.intellij.psi.impl.file.PsiPackageImpl; import com.intellij.psi.impl.file.impl.JavaFileManager; import com.intellij.psi.impl.source.DummyHolderFactory; import com.intellij.psi.impl.source.JavaDummyHolder; import com.intellij.psi.impl.source.JavaDummyHolderFactory; import com.intellij.psi.impl.source.resolve.FileContextUtil; import com.intellij.psi.impl.source.tree.JavaElementType; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.PsiModificationTracker; import com.intellij.util.ConcurrencyUtil; import com.intellij.util.Processor; import com.intellij.util.SmartList; import com.intellij.util.containers.ConcurrentHashMap; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.HashMap; import com.intellij.util.messages.MessageBus; import org.jetbrains.annotations.NotNull; import java.util.*; import 
java.util.concurrent.ConcurrentMap; /** * @author max */ public class JavaPsiFacadeImpl extends JavaPsiFacadeEx { private final PsiElementFinder[] myElementFinders; private final PsiNameHelper myNameHelper; private final PsiConstantEvaluationHelper myConstantEvaluationHelper; private final ConcurrentMap<String, PsiPackage> myPackageCache = new ConcurrentHashMap<String, PsiPackage>(); private final Project myProject; private final JavaFileManager myFileManager; public JavaPsiFacadeImpl(Project project, PsiManagerImpl psiManager, JavaFileManager javaFileManager, MessageBus bus) { myProject = project; myFileManager = javaFileManager; myNameHelper = new PsiNameHelperImpl(this); myConstantEvaluationHelper = new PsiConstantEvaluationHelperImpl(); List<PsiElementFinder> elementFinders = new ArrayList<PsiElementFinder>(); elementFinders.add(new PsiElementFinderImpl()); ContainerUtil.addAll(elementFinders, myProject.getExtensions(PsiElementFinder.EP_NAME)); myElementFinders = elementFinders.toArray(new PsiElementFinder[elementFinders.size()]); final PsiModificationTracker modificationTracker = psiManager.getModificationTracker(); bus.connect().subscribe(PsiModificationTracker.TOPIC, new PsiModificationTracker.Listener() { private long lastTimeSeen = -1L; public void modificationCountChanged() { final long now = modificationTracker.getJavaStructureModificationCount(); if (lastTimeSeen != now) { lastTimeSeen = now; myPackageCache.clear(); } } }); DummyHolderFactory.setFactory(new JavaDummyHolderFactory()); JavaElementType.ANNOTATION.getIndex(); // Initialize stubs. 
} /** * @deprecated */ public PsiClass findClass(@NotNull String qualifiedName) { return findClass(qualifiedName, GlobalSearchScope.allScope(myProject)); } public PsiClass findClass(@NotNull final String qualifiedName, @NotNull GlobalSearchScope scope) { ProgressIndicatorProvider.checkCanceled(); // We hope this method is being called often enough to cancel daemon processes smoothly if (DumbService.getInstance(getProject()).isDumb()) { final List<PsiClass> classes = findClassesInDumbMode(qualifiedName, scope); if (!classes.isEmpty()) { return classes.get(0); } return null; } for (PsiElementFinder finder : myElementFinders) { PsiClass aClass = finder.findClass(qualifiedName, scope); if (aClass != null) return aClass; } return null; } @NotNull private List<PsiClass> findClassesInDumbMode(String qualifiedName, GlobalSearchScope scope) { final String packageName = StringUtil.getPackageName(qualifiedName); final PsiPackage pkg = findPackage(packageName); final String className = StringUtil.getShortName(qualifiedName); if (pkg == null && packageName.length() < qualifiedName.length()) { final List<PsiClass> containingClasses = findClassesInDumbMode(packageName, scope); if (containingClasses.size() == 1) { return filterByName(className, containingClasses.get(0).getInnerClasses()); } return Collections.emptyList(); } if (pkg == null || pkg instanceof PsiPackageImpl && !((PsiPackageImpl)pkg).containsClassNamed(className)) { return Collections.emptyList(); } return filterByName(className, pkg.getClasses(scope)); } private static List<PsiClass> filterByName(String className, PsiClass[] classes) { final List<PsiClass> foundClasses = new SmartList<PsiClass>(); for (PsiClass psiClass : classes) { if (className.equals(psiClass.getName())) { foundClasses.add(psiClass); } } return foundClasses; } @NotNull public PsiClass[] findClasses(@NotNull String qualifiedName, @NotNull GlobalSearchScope scope) { if (DumbService.getInstance(getProject()).isDumb()) { final List<PsiClass> classes 
= findClassesInDumbMode(qualifiedName, scope); return classes.toArray(new PsiClass[classes.size()]); } List<PsiClass> classes = new SmartList<PsiClass>(); for (PsiElementFinder finder : myElementFinders) { PsiClass[] finderClasses = finder.findClasses(qualifiedName, scope); ContainerUtil.addAll(classes, finderClasses); } return classes.toArray(new PsiClass[classes.size()]); } @NotNull public PsiConstantEvaluationHelper getConstantEvaluationHelper() { return myConstantEvaluationHelper; } public PsiPackage findPackage(@NotNull String qualifiedName) { PsiPackage aPackage = myPackageCache.get(qualifiedName); if (aPackage != null) { return aPackage; } DumbService dumbService = DumbService.getInstance(getProject()); List<PsiElementFinder> finders = Arrays.asList(myElementFinders); if (dumbService.isDumb()) { finders = dumbService.filterByDumbAwareness(finders); } for (PsiElementFinder finder : finders) { aPackage = finder.findPackage(qualifiedName); if (aPackage != null) { return ConcurrencyUtil.cacheOrGet(myPackageCache, qualifiedName, aPackage); } } return null; } @NotNull public PsiJavaParserFacade getParserFacade() { return getElementFactory(); // TODO: lighter implementation which doesn't mark all the elements as generated. 
} @NotNull public PsiResolveHelper getResolveHelper() { return PsiResolveHelper.SERVICE.getInstance(myProject); } @NotNull public PsiNameHelper getNameHelper() { return myNameHelper; } public Set<String> getClassNames(PsiPackage psiPackage, GlobalSearchScope scope) { Set<String> result = new HashSet<String>(); for (PsiElementFinder finder : myElementFinders) { result.addAll(finder.getClassNames(psiPackage, scope)); } return result; } public PsiClass[] getClasses(PsiPackage psiPackage, GlobalSearchScope scope) { List<PsiClass> result = null; for (PsiElementFinder finder : myElementFinders) { PsiClass[] classes = finder.getClasses(psiPackage, scope); if (classes.length == 0) continue; if (result == null) result = new ArrayList<PsiClass>(); ContainerUtil.addAll(result, classes); } return result == null ? PsiClass.EMPTY_ARRAY : result.toArray(new PsiClass[result.size()]); } public boolean processPackageDirectories(@NotNull PsiPackage psiPackage, @NotNull GlobalSearchScope scope, Processor<PsiDirectory> consumer) { for (PsiElementFinder finder : myElementFinders) { if (!finder.processPackageDirectories(psiPackage, scope, consumer)) { return false; } } return true; } public PsiPackage[] getSubPackages(PsiPackage psiPackage, GlobalSearchScope scope) { List<PsiPackage> result = new ArrayList<PsiPackage>(); for (PsiElementFinder finder : myElementFinders) { PsiPackage[] packages = finder.getSubPackages(psiPackage, scope); ContainerUtil.addAll(result, packages); } return result.toArray(new PsiPackage[result.size()]); } private class PsiElementFinderImpl extends PsiElementFinder { public PsiClass findClass(@NotNull String qualifiedName, @NotNull GlobalSearchScope scope) { return myFileManager.findClass(qualifiedName, scope); } @NotNull public PsiClass[] findClasses(@NotNull String qualifiedName, @NotNull GlobalSearchScope scope) { return myFileManager.findClasses(qualifiedName, scope); } public PsiPackage findPackage(@NotNull String qualifiedName) { return 
myFileManager.findPackage(qualifiedName); } @NotNull public PsiPackage[] getSubPackages(@NotNull PsiPackage psiPackage, @NotNull GlobalSearchScope scope) { final Map<String, PsiPackage> packagesMap = new HashMap<String, PsiPackage>(); final String qualifiedName = psiPackage.getQualifiedName(); for (PsiDirectory dir : psiPackage.getDirectories(scope)) { PsiDirectory[] subDirs = dir.getSubdirectories(); for (PsiDirectory subDir : subDirs) { final PsiPackage aPackage = JavaDirectoryService.getInstance().getPackage(subDir); if (aPackage != null) { final String subQualifiedName = aPackage.getQualifiedName(); if (subQualifiedName.startsWith(qualifiedName) && !packagesMap.containsKey(subQualifiedName)) { packagesMap.put(aPackage.getQualifiedName(), aPackage); } } } } packagesMap.remove(qualifiedName); // avoid SOE caused by returning a package as a subpackage of itself return packagesMap.values().toArray(new PsiPackage[packagesMap.size()]); } @NotNull public PsiClass[] getClasses(@NotNull PsiPackage psiPackage, @NotNull GlobalSearchScope scope) { List<PsiClass> list = null; String packageName = psiPackage.getQualifiedName(); for (PsiDirectory dir : psiPackage.getDirectories(scope)) { PsiClass[] classes = JavaDirectoryService.getInstance().getClasses(dir); if (classes.length == 0) continue; if (list == null) list = new ArrayList<PsiClass>(); for (PsiClass aClass : classes) { // class file can be located in wrong place inside file system String qualifiedName = aClass.getQualifiedName(); if (qualifiedName != null) qualifiedName = StringUtil.getPackageName(qualifiedName); if (Comparing.strEqual(qualifiedName, packageName)) { list.add(aClass); } } } return list == null ? 
PsiClass.EMPTY_ARRAY : list.toArray(new PsiClass[list.size()]); } @Override public Set<String> getClassNames(@NotNull PsiPackage psiPackage, @NotNull GlobalSearchScope scope) { Set<String> names = null; for (PsiDirectory dir : psiPackage.getDirectories(scope)) { for (PsiFile file : dir.getFiles()) { FileViewProvider viewProvider = file.getViewProvider(); if (file instanceof PsiClassOwner && file == viewProvider.getPsi(viewProvider.getBaseLanguage())) { Set<String> inFile = file instanceof PsiClassOwnerEx ? ((PsiClassOwnerEx)file).getClassNames() : getClassNames(((PsiClassOwner)file).getClasses()); if (inFile.isEmpty()) continue; if (names == null) names = new HashSet<String>(); names.addAll(inFile); } } } return names == null ? Collections.<String>emptySet() : names; } @Override public boolean processPackageDirectories(@NotNull PsiPackage psiPackage, @NotNull final GlobalSearchScope scope, final Processor<PsiDirectory> consumer) { final PsiManager psiManager = PsiManager.getInstance(getProject()); PackageIndex.getInstance(getProject()).getDirsByPackageName(psiPackage.getQualifiedName(), false).forEach(new ReadActionProcessor<VirtualFile>() { public boolean processInReadAction(final VirtualFile dir) { if (!scope.contains(dir)) return true; PsiDirectory psiDir = psiManager.findDirectory(dir); assert psiDir != null; return consumer.process(psiDir); } }); return true; } } public boolean isPartOfPackagePrefix(String packageName) { final Collection<String> packagePrefixes = myFileManager.getNonTrivialPackagePrefixes(); for (final String subpackageName : packagePrefixes) { if (isSubpackageOf(subpackageName, packageName)) return true; } return false; } private static boolean isSubpackageOf(final String subpackageName, String packageName) { return subpackageName.equals(packageName) || subpackageName.startsWith(packageName) && subpackageName.charAt(packageName.length()) == '.'; } public boolean isInPackage(@NotNull PsiElement element, @NotNull PsiPackage aPackage) { final 
PsiFile file = FileContextUtil.getContextFile(element); if (file instanceof JavaDummyHolder) { return ((JavaDummyHolder) file).isInPackage(aPackage); } if (file instanceof PsiJavaFile) { final String packageName = ((PsiJavaFile) file).getPackageName(); return packageName.equals(aPackage.getQualifiedName()); } return false; } public boolean arePackagesTheSame(@NotNull PsiElement element1, @NotNull PsiElement element2) { PsiFile file1 = FileContextUtil.getContextFile(element1); PsiFile file2 = FileContextUtil.getContextFile(element2); if (Comparing.equal(file1, file2)) return true; if (file1 instanceof JavaDummyHolder && file2 instanceof JavaDummyHolder) return true; if (file1 instanceof JavaDummyHolder || file2 instanceof JavaDummyHolder) { JavaDummyHolder dummyHolder = (JavaDummyHolder) (file1 instanceof JavaDummyHolder ? file1 : file2); PsiElement other = file1 instanceof JavaDummyHolder ? file2 : file1; return dummyHolder.isSamePackage(other); } if (!(file1 instanceof PsiClassOwner)) return false; if (!(file2 instanceof PsiClassOwner)) return false; String package1 = ((PsiClassOwner) file1).getPackageName(); String package2 = ((PsiClassOwner) file2).getPackageName(); return Comparing.equal(package1, package2); } public Project getProject() { return myProject; } @NotNull public PsiElementFactory getElementFactory() { return PsiElementFactory.SERVICE.getInstance(myProject); } public void setAssertOnFileLoadingFilter(final VirtualFileFilter filter) { ((PsiManagerImpl)PsiManager.getInstance(myProject)).setAssertOnFileLoadingFilter(filter); } }
java/java-impl/src/com/intellij/psi/impl/JavaPsiFacadeImpl.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.impl; import com.intellij.openapi.application.ReadActionProcessor; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.PackageIndex; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileFilter; import com.intellij.psi.*; import com.intellij.psi.impl.file.PsiPackageImpl; import com.intellij.psi.impl.file.impl.JavaFileManager; import com.intellij.psi.impl.source.DummyHolderFactory; import com.intellij.psi.impl.source.JavaDummyHolder; import com.intellij.psi.impl.source.JavaDummyHolderFactory; import com.intellij.psi.impl.source.resolve.FileContextUtil; import com.intellij.psi.impl.source.tree.JavaElementType; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.PsiModificationTracker; import com.intellij.util.ConcurrencyUtil; import com.intellij.util.Processor; import com.intellij.util.SmartList; import com.intellij.util.containers.ConcurrentHashMap; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.HashMap; import com.intellij.util.messages.MessageBus; import org.jetbrains.annotations.NotNull; import 
java.util.*; import java.util.concurrent.ConcurrentMap; /** * @author max */ public class JavaPsiFacadeImpl extends JavaPsiFacadeEx { private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.JavaPsiFacadeImpl"); private final PsiElementFinder[] myElementFinders; private final PsiNameHelper myNameHelper; private final PsiConstantEvaluationHelper myConstantEvaluationHelper; private final ConcurrentMap<String, PsiPackage> myPackageCache = new ConcurrentHashMap<String, PsiPackage>(); private final Project myProject; public JavaPsiFacadeImpl(Project project, PsiManagerImpl psiManager, MessageBus bus) { myProject = project; myNameHelper = new PsiNameHelperImpl(this); myConstantEvaluationHelper = new PsiConstantEvaluationHelperImpl(); List<PsiElementFinder> elementFinders = new ArrayList<PsiElementFinder>(); elementFinders.add(new PsiElementFinderImpl()); ContainerUtil.addAll(elementFinders, myProject.getExtensions(PsiElementFinder.EP_NAME)); myElementFinders = elementFinders.toArray(new PsiElementFinder[elementFinders.size()]); final PsiModificationTracker modificationTracker = psiManager.getModificationTracker(); bus.connect().subscribe(PsiModificationTracker.TOPIC, new PsiModificationTracker.Listener() { private long lastTimeSeen = -1L; public void modificationCountChanged() { final long now = modificationTracker.getJavaStructureModificationCount(); if (lastTimeSeen != now) { lastTimeSeen = now; myPackageCache.clear(); } } }); DummyHolderFactory.setFactory(new JavaDummyHolderFactory()); JavaElementType.ANNOTATION.getIndex(); // Initialize stubs. 
} /** * @deprecated */ public PsiClass findClass(@NotNull String qualifiedName) { return findClass(qualifiedName, GlobalSearchScope.allScope(myProject)); } public PsiClass findClass(@NotNull final String qualifiedName, @NotNull GlobalSearchScope scope) { ProgressManager.checkCanceled(); // We hope this method is being called often enough to cancel daemon processes smoothly if (DumbService.getInstance(getProject()).isDumb()) { final List<PsiClass> classes = findClassesInDumbMode(qualifiedName, scope); if (!classes.isEmpty()) { return classes.get(0); } return null; } for (PsiElementFinder finder : myElementFinders) { PsiClass aClass = finder.findClass(qualifiedName, scope); if (aClass != null) return aClass; } return null; } @NotNull private List<PsiClass> findClassesInDumbMode(String qualifiedName, GlobalSearchScope scope) { final String packageName = StringUtil.getPackageName(qualifiedName); final PsiPackage pkg = findPackage(packageName); final String className = StringUtil.getShortName(qualifiedName); if (pkg == null && packageName.length() < qualifiedName.length()) { final List<PsiClass> containingClasses = findClassesInDumbMode(packageName, scope); if (containingClasses.size() == 1) { return filterByName(className, containingClasses.get(0).getInnerClasses()); } return Collections.emptyList(); } if (pkg == null || pkg instanceof PsiPackageImpl && !((PsiPackageImpl)pkg).containsClassNamed(className)) { return Collections.emptyList(); } return filterByName(className, pkg.getClasses(scope)); } private static List<PsiClass> filterByName(String className, PsiClass[] classes) { final List<PsiClass> foundClasses = new SmartList<PsiClass>(); for (PsiClass psiClass : classes) { if (className.equals(psiClass.getName())) { foundClasses.add(psiClass); } } return foundClasses; } @NotNull public PsiClass[] findClasses(@NotNull String qualifiedName, @NotNull GlobalSearchScope scope) { if (DumbService.getInstance(getProject()).isDumb()) { final List<PsiClass> classes = 
findClassesInDumbMode(qualifiedName, scope); return classes.toArray(new PsiClass[classes.size()]); } List<PsiClass> classes = new SmartList<PsiClass>(); for (PsiElementFinder finder : myElementFinders) { PsiClass[] finderClasses = finder.findClasses(qualifiedName, scope); ContainerUtil.addAll(classes, finderClasses); } return classes.toArray(new PsiClass[classes.size()]); } @NotNull public PsiConstantEvaluationHelper getConstantEvaluationHelper() { return myConstantEvaluationHelper; } public PsiPackage findPackage(@NotNull String qualifiedName) { PsiPackage aPackage = myPackageCache.get(qualifiedName); if (aPackage != null) { return aPackage; } DumbService dumbService = DumbService.getInstance(getProject()); List<PsiElementFinder> finders = Arrays.asList(myElementFinders); if (dumbService.isDumb()) { finders = dumbService.filterByDumbAwareness(finders); } for (PsiElementFinder finder : finders) { aPackage = finder.findPackage(qualifiedName); if (aPackage != null) { return ConcurrencyUtil.cacheOrGet(myPackageCache, qualifiedName, aPackage); } } return null; } @NotNull public PsiJavaParserFacade getParserFacade() { return getElementFactory(); // TODO: lighter implementation which doesn't mark all the elements as generated. 
} @NotNull public PsiResolveHelper getResolveHelper() { return PsiResolveHelper.SERVICE.getInstance(myProject); } @NotNull public PsiNameHelper getNameHelper() { return myNameHelper; } public Set<String> getClassNames(PsiPackageImpl psiPackage, GlobalSearchScope scope) { Set<String> result = new HashSet<String>(); for (PsiElementFinder finder : myElementFinders) { result.addAll(finder.getClassNames(psiPackage, scope)); } return result; } public PsiClass[] getClasses(PsiPackageImpl psiPackage, GlobalSearchScope scope) { List<PsiClass> result = null; for (PsiElementFinder finder : myElementFinders) { PsiClass[] classes = finder.getClasses(psiPackage, scope); if (classes.length == 0) continue; if (result == null) result = new ArrayList<PsiClass>(); ContainerUtil.addAll(result, classes); } return result == null ? PsiClass.EMPTY_ARRAY : result.toArray(new PsiClass[result.size()]); } public boolean processPackageDirectories(@NotNull PsiPackage psiPackage, @NotNull GlobalSearchScope scope, Processor<PsiDirectory> consumer) { for (PsiElementFinder finder : myElementFinders) { if (!finder.processPackageDirectories(psiPackage, scope, consumer)) { return false; } } return true; } public PsiPackage[] getSubPackages(PsiPackageImpl psiPackage, GlobalSearchScope scope) { List<PsiPackage> result = new ArrayList<PsiPackage>(); for (PsiElementFinder finder : myElementFinders) { PsiPackage[] packages = finder.getSubPackages(psiPackage, scope); ContainerUtil.addAll(result, packages); } return result.toArray(new PsiPackage[result.size()]); } private class PsiElementFinderImpl extends PsiElementFinder { private final JavaFileManager myFileManager; private PsiElementFinderImpl() { myFileManager = myProject.getComponent(JavaFileManager.class); } public PsiClass findClass(@NotNull String qualifiedName, @NotNull GlobalSearchScope scope) { return myFileManager.findClass(qualifiedName, scope); } @NotNull public PsiClass[] findClasses(@NotNull String qualifiedName, @NotNull GlobalSearchScope 
scope) { return myFileManager.findClasses(qualifiedName, scope); } public PsiPackage findPackage(@NotNull String qualifiedName) { return myFileManager.findPackage(qualifiedName); } @NotNull public PsiPackage[] getSubPackages(@NotNull PsiPackage psiPackage, @NotNull GlobalSearchScope scope) { final Map<String, PsiPackage> packagesMap = new HashMap<String, PsiPackage>(); final String qualifiedName = psiPackage.getQualifiedName(); for (PsiDirectory dir : psiPackage.getDirectories(scope)) { PsiDirectory[] subDirs = dir.getSubdirectories(); for (PsiDirectory subDir : subDirs) { final PsiPackage aPackage = JavaDirectoryService.getInstance().getPackage(subDir); if (aPackage != null) { final String subQualifiedName = aPackage.getQualifiedName(); if (subQualifiedName.startsWith(qualifiedName) && !packagesMap.containsKey(subQualifiedName)) { packagesMap.put(aPackage.getQualifiedName(), aPackage); } } } } packagesMap.remove(qualifiedName); // avoid SOE caused by returning a package as a subpackage of itself return packagesMap.values().toArray(new PsiPackage[packagesMap.size()]); } @NotNull public PsiClass[] getClasses(@NotNull PsiPackage psiPackage, @NotNull GlobalSearchScope scope) { List<PsiClass> list = null; String packageName = psiPackage.getQualifiedName(); for (PsiDirectory dir : psiPackage.getDirectories(scope)) { PsiClass[] classes = JavaDirectoryService.getInstance().getClasses(dir); if (classes.length == 0) continue; if (list == null) list = new ArrayList<PsiClass>(); for (PsiClass aClass : classes) { // class file can be located in wrong place inside file system String qualifiedName = aClass.getQualifiedName(); if (qualifiedName != null) qualifiedName = StringUtil.getPackageName(qualifiedName); if (Comparing.strEqual(qualifiedName, packageName)) { list.add(aClass); } } } return list == null ? 
PsiClass.EMPTY_ARRAY : list.toArray(new PsiClass[list.size()]); } @Override public Set<String> getClassNames(@NotNull PsiPackage psiPackage, @NotNull GlobalSearchScope scope) { Set<String> names = null; for (PsiDirectory dir : psiPackage.getDirectories(scope)) { for (PsiFile file : dir.getFiles()) { FileViewProvider viewProvider = file.getViewProvider(); if (file instanceof PsiClassOwner && file == viewProvider.getPsi(viewProvider.getBaseLanguage())) { Set<String> inFile = file instanceof PsiClassOwnerEx ? ((PsiClassOwnerEx)file).getClassNames() : getClassNames(((PsiClassOwner)file).getClasses()); if (inFile.isEmpty()) continue; if (names == null) names = new HashSet<String>(); names.addAll(inFile); } } } return names == null ? Collections.<String>emptySet() : names; } @Override public boolean processPackageDirectories(@NotNull PsiPackage psiPackage, @NotNull final GlobalSearchScope scope, final Processor<PsiDirectory> consumer) { final PsiManager psiManager = PsiManager.getInstance(getProject()); PackageIndex.getInstance(getProject()).getDirsByPackageName(psiPackage.getQualifiedName(), false).forEach(new ReadActionProcessor<VirtualFile>() { public boolean processInReadAction(final VirtualFile dir) { if (!scope.contains(dir)) return true; PsiDirectory psiDir = psiManager.findDirectory(dir); assert psiDir != null; return consumer.process(psiDir); } }); return true; } } public boolean isPartOfPackagePrefix(String packageName) { final Collection<String> packagePrefixes = myProject.getComponent(JavaFileManager.class).getNonTrivialPackagePrefixes(); for (final String subpackageName : packagePrefixes) { if (isSubpackageOf(subpackageName, packageName)) return true; } return false; } private static boolean isSubpackageOf(final String subpackageName, String packageName) { return subpackageName.equals(packageName) || subpackageName.startsWith(packageName) && subpackageName.charAt(packageName.length()) == '.'; } public boolean isInPackage(@NotNull PsiElement element, @NotNull 
PsiPackage aPackage) { final PsiFile file = FileContextUtil.getContextFile(element); if (file instanceof JavaDummyHolder) { return ((JavaDummyHolder) file).isInPackage(aPackage); } if (file instanceof PsiJavaFile) { final String packageName = ((PsiJavaFile) file).getPackageName(); return packageName.equals(aPackage.getQualifiedName()); } return false; } public boolean arePackagesTheSame(@NotNull PsiElement element1, @NotNull PsiElement element2) { PsiFile file1 = FileContextUtil.getContextFile(element1); PsiFile file2 = FileContextUtil.getContextFile(element2); if (Comparing.equal(file1, file2)) return true; if (file1 instanceof JavaDummyHolder && file2 instanceof JavaDummyHolder) return true; if (file1 instanceof JavaDummyHolder || file2 instanceof JavaDummyHolder) { JavaDummyHolder dummyHolder = (JavaDummyHolder) (file1 instanceof JavaDummyHolder ? file1 : file2); PsiElement other = file1 instanceof JavaDummyHolder ? file2 : file1; return dummyHolder.isSamePackage(other); } if (!(file1 instanceof PsiClassOwner)) return false; if (!(file2 instanceof PsiClassOwner)) return false; String package1 = ((PsiClassOwner) file1).getPackageName(); String package2 = ((PsiClassOwner) file2).getPackageName(); return Comparing.equal(package1, package2); } public Project getProject() { return myProject; } @NotNull public PsiElementFactory getElementFactory() { return PsiElementFactory.SERVICE.getInstance(myProject); } public void setAssertOnFileLoadingFilter(final VirtualFileFilter filter) { ((PsiManagerImpl)PsiManager.getInstance(myProject)).setAssertOnFileLoadingFilter(filter); } }
inject JavaFileManager; remove unnecessary PsiPackageImpl usages; ProgressManager -> ProgressIndicatorProvider
java/java-impl/src/com/intellij/psi/impl/JavaPsiFacadeImpl.java
inject JavaFileManager; remove unnecessary PsiPackageImpl usages; ProgressManager -> ProgressIndicatorProvider
<ide><path>ava/java-impl/src/com/intellij/psi/impl/JavaPsiFacadeImpl.java <ide> package com.intellij.psi.impl; <ide> <ide> import com.intellij.openapi.application.ReadActionProcessor; <del>import com.intellij.openapi.diagnostic.Logger; <del>import com.intellij.openapi.progress.ProgressManager; <add>import com.intellij.openapi.progress.ProgressIndicatorProvider; <ide> import com.intellij.openapi.project.DumbService; <ide> import com.intellij.openapi.project.Project; <ide> import com.intellij.openapi.roots.PackageIndex; <ide> * @author max <ide> */ <ide> public class JavaPsiFacadeImpl extends JavaPsiFacadeEx { <del> private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.JavaPsiFacadeImpl"); <del> <ide> private final PsiElementFinder[] myElementFinders; <ide> private final PsiNameHelper myNameHelper; <ide> private final PsiConstantEvaluationHelper myConstantEvaluationHelper; <ide> private final ConcurrentMap<String, PsiPackage> myPackageCache = new ConcurrentHashMap<String, PsiPackage>(); <ide> private final Project myProject; <add> private final JavaFileManager myFileManager; <ide> <ide> <ide> public JavaPsiFacadeImpl(Project project, <ide> PsiManagerImpl psiManager, <add> JavaFileManager javaFileManager, <ide> MessageBus bus) { <ide> myProject = project; <add> myFileManager = javaFileManager; <ide> myNameHelper = new PsiNameHelperImpl(this); <ide> myConstantEvaluationHelper = new PsiConstantEvaluationHelperImpl(); <ide> <ide> } <ide> <ide> public PsiClass findClass(@NotNull final String qualifiedName, @NotNull GlobalSearchScope scope) { <del> ProgressManager.checkCanceled(); // We hope this method is being called often enough to cancel daemon processes smoothly <add> ProgressIndicatorProvider.checkCanceled(); // We hope this method is being called often enough to cancel daemon processes smoothly <ide> <ide> if (DumbService.getInstance(getProject()).isDumb()) { <ide> final List<PsiClass> classes = findClassesInDumbMode(qualifiedName, scope); 
<ide> return myNameHelper; <ide> } <ide> <del> public Set<String> getClassNames(PsiPackageImpl psiPackage, GlobalSearchScope scope) { <add> public Set<String> getClassNames(PsiPackage psiPackage, GlobalSearchScope scope) { <ide> Set<String> result = new HashSet<String>(); <ide> for (PsiElementFinder finder : myElementFinders) { <ide> result.addAll(finder.getClassNames(psiPackage, scope)); <ide> } <ide> return result; <ide> } <del> public PsiClass[] getClasses(PsiPackageImpl psiPackage, GlobalSearchScope scope) { <add> public PsiClass[] getClasses(PsiPackage psiPackage, GlobalSearchScope scope) { <ide> List<PsiClass> result = null; <ide> for (PsiElementFinder finder : myElementFinders) { <ide> PsiClass[] classes = finder.getClasses(psiPackage, scope); <ide> return true; <ide> } <ide> <del> public PsiPackage[] getSubPackages(PsiPackageImpl psiPackage, GlobalSearchScope scope) { <add> public PsiPackage[] getSubPackages(PsiPackage psiPackage, GlobalSearchScope scope) { <ide> List<PsiPackage> result = new ArrayList<PsiPackage>(); <ide> for (PsiElementFinder finder : myElementFinders) { <ide> PsiPackage[] packages = finder.getSubPackages(psiPackage, scope); <ide> } <ide> <ide> private class PsiElementFinderImpl extends PsiElementFinder { <del> private final JavaFileManager myFileManager; <del> <del> private PsiElementFinderImpl() { <del> myFileManager = myProject.getComponent(JavaFileManager.class); <del> } <del> <ide> public PsiClass findClass(@NotNull String qualifiedName, @NotNull GlobalSearchScope scope) { <ide> return myFileManager.findClass(qualifiedName, scope); <ide> } <ide> <ide> <ide> public boolean isPartOfPackagePrefix(String packageName) { <del> final Collection<String> packagePrefixes = myProject.getComponent(JavaFileManager.class).getNonTrivialPackagePrefixes(); <add> final Collection<String> packagePrefixes = myFileManager.getNonTrivialPackagePrefixes(); <ide> for (final String subpackageName : packagePrefixes) { <ide> if (isSubpackageOf(subpackageName, 
packageName)) return true; <ide> }
Java
mit
ac9dbdd11d9df559c03d53c00c9521cfba3a83f5
0
dr0pthedoge/unitwallet-android,litecoin-foundation/loafwallet-android,breadwallet/breadwallet-android,breadwallet/breadwallet-android,litecoin-foundation/loafwallet-android,breadwallet/breadwallet-android,breadwallet/breadwallet-android,dr0pthedoge/unitwallet-android
package com.breadwallet.presenter.fragments; import android.app.Activity; import android.app.Fragment; import android.os.Bundle; import android.os.Handler; import android.util.Log; import android.view.Gravity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.ViewTreeObserver; import android.widget.Button; import android.widget.ImageButton; import android.widget.RelativeLayout; import android.widget.TextView; import android.graphics.Typeface; import com.breadwallet.R; import com.breadwallet.BreadWalletApp; import com.breadwallet.presenter.activities.MainActivity; import com.breadwallet.tools.animation.BRAnimator; import com.breadwallet.tools.util.BRConstants; import com.breadwallet.tools.manager.SharedPreferencesManager; import com.breadwallet.tools.animation.SpringAnimator; import java.math.BigDecimal; import java.math.RoundingMode; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.util.Currency; import java.util.Locale; import java.util.Objects; import static com.breadwallet.tools.util.BRConstants.CURRENT_UNIT_BITS; import static com.breadwallet.tools.util.BRStringFormatter.getNumberOfDecimalPlaces; /** * BreadWallet * <p> * Created by Mihail Gutan <[email protected]> on 7/14/15. * Copyright (c) 2016 breadwallet LLC * <p> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p> * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. 
* <p> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ public class FragmentScanResult extends Fragment implements View.OnClickListener { private static final String TAG = FragmentScanResult.class.getName(); private TextView scanResult; private RelativeLayout customKeyboardLayout; private TextView rightTextView; private TextView leftTextView; public static String address; //amount stuff // private boolean comaHasBeenInserted = false; private boolean isTextColorGrey = true; private ValueItem rightValue; private ValueItem leftValue; private int buttonCode = BRConstants.PAY_BUTTON; private boolean pressAvailable = true; private int unit = BRConstants.CURRENT_UNIT_BITS; public static FragmentScanResult instance; private String ISO; public double rate = -1; public static boolean isARequest = false; public FragmentScanResult() { instance = this; } @Override public View onCreateView(LayoutInflater inflater, final ViewGroup container, Bundle savedInstanceState) { // The last two arguments ensure LayoutParams are inflated // properly. 
final View rootView = inflater.inflate(R.layout.fragment_scan_result, container, false); scanResult = (TextView) rootView.findViewById(R.id.scan_result); customKeyboardLayout = (RelativeLayout) rootView.findViewById(R.id.custom_keyboard_layout); rightTextView = (TextView) rootView.findViewById(R.id.right_textview); leftTextView = (TextView) rootView.findViewById(R.id.left_textview); TextView doubleArrow = (TextView) rootView.findViewById(R.id.double_arrow_text); rightValue = new ValueItem("0", true); leftValue = new ValueItem("0", false); /** * This mess is for the custom keyboard to be created after the soft keyboard is hidden * (if it was previously shown) to prevent the wrong position of the keyboard layout placement */ customKeyboardLayout.getViewTreeObserver().addOnGlobalLayoutListener( new ViewTreeObserver.OnGlobalLayoutListener() { public void onGlobalLayout() { MainActivity app = MainActivity.app; if (app != null) if (!app.isSoftKeyboardShown()) { int[] locations = new int[2]; customKeyboardLayout.getLocationOnScreen(locations); customKeyboardLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this); createCustomKeyboardButtons(locations[1]); } } }); View.OnClickListener listener = new View.OnClickListener() { @Override public void onClick(View v) { switchCurrencies(); SpringAnimator.showAnimation(leftTextView); SpringAnimator.showAnimation(rightTextView); } }; updateBothTextValues("0", "0"); doubleArrow.setText(BRConstants.DOUBLE_ARROW); doubleArrow.setOnClickListener(listener); leftTextView.setOnClickListener(listener); rightTextView.setOnClickListener(listener); return rootView; } @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); } @Override public void onResume() { if (!isARequest && (address == null || address.length() < 20)) throw new NullPointerException("address is corrupted"); updateRateAndISO(); calculateAndPassValuesToFragment("0"); scanResult.setText(isARequest ? 
"" : getString(R.string.to) + address); super.onResume(); } @Override public void onPause() { super.onPause(); resetKeyboard(); ((BreadWalletApp) getActivity().getApplication()).setLockerPayButton(BRConstants.LOCKER_BUTTON); isARequest = false; BRAnimator.hideScanResultFragment(); } @Override public void onDestroyView() { super.onDestroyView(); } private void createCustomKeyboardButtons(int y) { int availableWidth = MainActivity.screenParametersPoint.x; int availableHeight = MainActivity.screenParametersPoint.y; int spaceNeededForRest = availableHeight / 14; float gapRate = 0.2f; float gap = (availableWidth * gapRate); float interButtonGap = gap / 5; float buttonWidth = (availableWidth - gap) / 3; float buttonHeight = buttonWidth; float spaceNeeded = buttonHeight * 4 + gap; int buttonTextSize = 45; if (spaceNeeded > (availableHeight - (spaceNeededForRest + y))) { buttonHeight = ((availableHeight - (spaceNeededForRest + y)) - gap) / 4; buttonTextSize = (int) ((buttonHeight / 7)); } int minimumHeight = (int) (buttonHeight * 4 + interButtonGap * 4); if (customKeyboardLayout == null) { customKeyboardLayout = (RelativeLayout) getActivity().findViewById(R.id.custom_keyboard_layout); } customKeyboardLayout.setMinimumHeight(minimumHeight); int childCount = 12; for (int i = 0; i < childCount; i++) { Button b = new Button(getActivity()); b.setWidth((int) buttonWidth); b.setHeight((int) buttonHeight); b.setTextSize(buttonTextSize); b.setTypeface(Typeface.create("sans-serif-thin", Typeface.NORMAL)); //noinspection deprecation b.setTextColor(getResources().getColor(R.color.dark_blue)); b.setBackgroundResource(R.drawable.button_regular_blue); b.setOnClickListener(this); b.setGravity(Gravity.CENTER); b.setTextAlignment(View.TEXT_ALIGNMENT_GRAVITY); ImageButton imageB = null; if (i < 9) b.setText(String.valueOf(i + 1)); switch (i) { case 0: b.setX(interButtonGap / 2 + interButtonGap); break; case 1: b.setX(interButtonGap / 2 + interButtonGap * 2 + buttonWidth); break; case 2: 
b.setX(interButtonGap / 2 + interButtonGap * 3 + buttonWidth * 2); break; case 3: b.setX(interButtonGap / 2 + interButtonGap); b.setY(buttonHeight + interButtonGap); break; case 4: b.setX(interButtonGap / 2 + interButtonGap * 2 + buttonWidth); b.setY(buttonHeight + interButtonGap); break; case 5: b.setY(buttonHeight + interButtonGap); b.setX(interButtonGap / 2 + interButtonGap * 3 + buttonWidth * 2); break; case 6: b.setY(buttonHeight * 2 + interButtonGap * 2); b.setX(interButtonGap / 2 + interButtonGap); break; case 7: b.setX(interButtonGap / 2 + interButtonGap * 2 + buttonWidth); b.setY(buttonHeight * 2 + interButtonGap * 2); break; case 8: b.setX(interButtonGap / 2 + interButtonGap * 3 + buttonWidth * 2); b.setY(buttonHeight * 2 + interButtonGap * 2); break; case 9: b.setText("."); b.setY(buttonHeight * 3 + interButtonGap * 3); b.setX(interButtonGap / 2 + interButtonGap); break; case 10: b.setX(interButtonGap / 2 + interButtonGap * 2 + buttonWidth); b.setText("0"); b.setY(buttonHeight * 3 + interButtonGap * 3); break; case 11: imageB = new ImageButton(getActivity()); imageB.setBackgroundResource(R.drawable.button_regular_blue); imageB.setImageResource(R.drawable.deletetoleft); imageB.setOnClickListener(this); imageB.setTextAlignment(View.TEXT_ALIGNMENT_CENTER); imageB.setLongClickable(true); imageB.setOnClickListener(this); imageB.setId(R.id.keyboard_back_button); //noinspection deprecation imageB.setX(interButtonGap / 2 + interButtonGap * 3 + buttonWidth * 2); imageB.setY(buttonHeight * 3 + interButtonGap * 3); imageB.setMinimumWidth((int) buttonWidth); imageB.setMinimumHeight((int) buttonHeight); break; } customKeyboardLayout.addView(imageB != null ? 
imageB : b); } } @Override public void onClick(View v) { String tmp; try { tmp = ((Button) v).getText().toString(); } catch (ClassCastException ex) { tmp = ""; } preConditions(tmp); } public void updateBothTextValues(String left, String right) { Log.e(TAG, "updateBothTextValues: " + left + " : " + right); if (ISO == null) updateRateAndISO(); if (ISO == null) ISO = "USD"; leftValue.value = left; rightValue.value = right; final String btcISO = "BTC"; String formattedRightVal = getFormattedCurrencyStringForKeyboard(rightValue.isBitcoin ? btcISO : ISO, rightValue.value, true); String formattedLeftVal = getFormattedCurrencyStringForKeyboard(leftValue.isBitcoin ? btcISO : ISO, leftValue.value, false); Log.e(TAG, "formatted: " + formattedLeftVal + " : " + formattedRightVal); rightTextView.setText(formattedRightVal); leftTextView.setText(formattedLeftVal); } private String getCleanValue(String value) { StringBuilder builder = new StringBuilder(); for (int i = 0; i < value.length(); i++) { char c = value.charAt(i); if (c == '.' 
|| Character.isDigit(c)) builder.append(c); } return builder.toString(); } private void updateRateAndISO() { MainActivity app = MainActivity.app; if (app == null) return; ISO = SharedPreferencesManager.getIso(app); getOtherValue().iso = ISO; rate = SharedPreferencesManager.getRate(app); } public void preConditions(String tmp) { Activity context = MainActivity.app; if (context != null) unit = SharedPreferencesManager.getCurrencyUnit(context); if (FragmentScanResult.isARequest) { buttonCode = BRConstants.REQUEST_BUTTON; } else { buttonCode = BRConstants.PAY_BUTTON; } switch (tmp) { case "": doBackSpace(); break; case ".": insertSeparator(); break; default: insertDigit(tmp); break; } } private void doBackSpace() { MainActivity app = MainActivity.app; String amount = rightValue.value; int length = amount.length(); if (length > 1) { calculateAndPassValuesToFragment(rightValue.value.substring(0, length - 1)); } else { ((BreadWalletApp) app.getApplication()).setLockerPayButton(BRConstants.LOCKER_BUTTON); changeTextColor(2); calculateAndPassValuesToFragment("0"); } } private void insertSeparator() { MainActivity app = MainActivity.app; if (isTextColorGrey) { changeTextColor(1); ((BreadWalletApp) app.getApplication()).setLockerPayButton(buttonCode); } String amount = rightValue.value; int maxDigit = getMaxFractionDigits(); if (!amount.contains(".") && maxDigit != 0) calculateAndPassValuesToFragment(amount + "."); } private void insertDigit(String tmp) { MainActivity app = MainActivity.app; String amount = rightValue.value; int length = amount.length(); if (isTextColorGrey) { changeTextColor(1); ((BreadWalletApp) app.getApplication()).setLockerPayButton(buttonCode); } if (isDigitInsertingLegal(tmp)) { if (length == 1 && amount.equals("0")) { calculateAndPassValuesToFragment(tmp); } else { calculateAndPassValuesToFragment(rightValue.value + tmp); } } } private boolean isDigitInsertingLegal(String tmp) { int maxDig = getMaxFractionDigits(); long limit = 21000000000000L; if 
(unit == BRConstants.CURRENT_UNIT_MBITS) limit = 21000000000L; if (unit == BRConstants.CURRENT_UNIT_BITCOINS) limit = 21000000L; if (rightValue.isBitcoin) { maxDig = BRConstants.MAX_DIGITS_AFTER_SEPARATOR_BITS; if (unit == BRConstants.CURRENT_UNIT_MBITS) maxDig = BRConstants.MAX_DIGITS_AFTER_SEPARATOR_MBITS; if (unit == BRConstants.CURRENT_UNIT_BITCOINS) maxDig = BRConstants.MAX_DIGITS_AFTER_SEPARATOR_BITCOINS; } boolean isFractionStarted = rightValue.value.contains("."); int nrOfDecimals = getNumberOfDecimalPlaces(rightValue.value); if (isFractionStarted) return nrOfDecimals < maxDig; long l = 0; try { l = Long.valueOf(rightValue.value + tmp); } catch (Exception e) { e.printStackTrace(); } return l < limit; } private int getMaxFractionDigits() { try { Currency currency = Currency.getInstance(rightValue.iso); return currency.getDefaultFractionDigits(); } catch (Exception e) { e.printStackTrace(); return 2; } } /** * Sets the textColor of the amount TextView to black or grey * * @param color the color of the textView: 1 Black, 2 Grey. */ private void changeTextColor(int color) { Activity context = MainActivity.app; isTextColorGrey = color != 1; rightTextView.setTextColor((color == 1) ? 
context.getColor(R.color.black) : context.getColor(android.R.color.darker_gray)); } public void resetKeyboard() { isTextColorGrey = true; rightValue.value = "0"; leftValue.value = "0"; } public void calculateAndPassValuesToFragment(String valuePassed) { String divideBy = "1000000"; Log.e(TAG, "calculateAndPassValuesToFragment: valuePassed: " + valuePassed); if (unit == BRConstants.CURRENT_UNIT_MBITS) divideBy = "1000"; if (unit == BRConstants.CURRENT_UNIT_BITCOINS) divideBy = "1"; rightValue.value = valuePassed; BigDecimal rightValueObject = new BigDecimal(valuePassed); BigDecimal leftValueObject; BigDecimal theRate = new BigDecimal(rate); if (rightValue.isBitcoin) { //from bits to other currency using rate if (theRate.intValue() > 1) { leftValueObject = theRate.multiply(rightValueObject.divide(new BigDecimal(divideBy))); } else { leftValueObject = new BigDecimal("0"); } } else { //from other currency to bits using rate if (theRate.intValue() > 1) { leftValueObject = rightValueObject.multiply(new BigDecimal(divideBy)). divide(theRate, 8, RoundingMode.HALF_UP); } else { leftValueObject = new BigDecimal("0"); } } updateBothTextValues(leftValueObject.toString(), valuePassed); } public void switchCurrencies() { if (checkPressingAvailability()) { rightValue.value = getCleanValue(rightTextView.getText().toString()); leftValue.value = getCleanValue(leftTextView.getText().toString()); ValueItem tmp = rightValue; rightValue = leftValue; leftValue = tmp; updateBothTextValues(leftValue.value, rightValue.value); } } public boolean checkPressingAvailability() { if (pressAvailable) { pressAvailable = false; new Handler().postDelayed(new Runnable() { @Override public void run() { pressAvailable = true; } }, 100); return true; } else { return false; } } public String getFormattedCurrencyStringForKeyboard(String isoCurrencyCode, String amount, boolean rightItem) { MainActivity app = MainActivity.app; int unit = app == null ? 
CURRENT_UNIT_BITS : SharedPreferencesManager.getCurrencyUnit(app); if (amount == null) { // Log.e(TAG, "getFormattedCurrencyStringForKeyboard: AMOUNT == null"); return "0"; } String multiplyBy = "100"; if (unit == BRConstants.CURRENT_UNIT_MBITS) multiplyBy = "100000"; if (unit == BRConstants.CURRENT_UNIT_BITCOINS) multiplyBy = "100000000"; BigDecimal result; if (isoCurrencyCode.equals("BTC")) { result = new BigDecimal(amount).multiply(new BigDecimal(multiplyBy)); } else { result = new BigDecimal(amount).multiply(new BigDecimal("100")); } DecimalFormat currencyFormat; result = result.divide(new BigDecimal("100")); currencyFormat = (DecimalFormat) DecimalFormat.getCurrencyInstance(Locale.getDefault()); DecimalFormatSymbols decimalFormatSymbols; Currency currency; String symbol = null; decimalFormatSymbols = currencyFormat.getDecimalFormatSymbols(); int decimalPoints = 0; if (Objects.equals(isoCurrencyCode, "BTC")) { String currencySymbolString = BRConstants.bitcoinLowercase; if (app != null) { currencyFormat.setMinimumFractionDigits(0); switch (unit) { case CURRENT_UNIT_BITS: currencySymbolString = BRConstants.bitcoinLowercase; decimalPoints = 2; if (getNumberOfDecimalPlaces(result.toPlainString()) == 1) currencyFormat.setMinimumFractionDigits(1); break; case BRConstants.CURRENT_UNIT_MBITS: currencySymbolString = "m" + BRConstants.bitcoinUppercase; decimalPoints = 5; result = new BigDecimal(String.valueOf(amount)).divide(new BigDecimal("100000")); break; case BRConstants.CURRENT_UNIT_BITCOINS: currencySymbolString = BRConstants.bitcoinUppercase; decimalPoints = 8; result = new BigDecimal(String.valueOf(amount)).divide(new BigDecimal("100000000")); break; } } symbol = currencySymbolString; } else { try { currency = Currency.getInstance(isoCurrencyCode); } catch (IllegalArgumentException e) { currency = Currency.getInstance(Locale.getDefault()); } symbol = currency.getSymbol(); decimalPoints = currency.getDefaultFractionDigits(); } 
decimalFormatSymbols.setCurrencySymbol(symbol); currencyFormat.setMaximumFractionDigits(decimalPoints); int currNrOfDecimal = getNumberOfDecimalPlaces(amount); currencyFormat.setMinimumFractionDigits(currNrOfDecimal > decimalPoints ? decimalPoints : currNrOfDecimal); currencyFormat.setGroupingUsed(true); if (rightItem && amount.endsWith(".")) currencyFormat.setDecimalSeparatorAlwaysShown(true); currencyFormat.setDecimalFormatSymbols(decimalFormatSymbols); currencyFormat.setNegativePrefix(decimalFormatSymbols.getCurrencySymbol() + "-"); currencyFormat.setNegativeSuffix(""); return currencyFormat.format(result.doubleValue()); } public ValueItem getBitcoinValue() { if (rightValue.isBitcoin) return rightValue; else return leftValue; } public ValueItem getOtherValue() { if (!rightValue.isBitcoin) return rightValue; else return leftValue; } public class ValueItem { public String value; public boolean isBitcoin; public String iso; public ValueItem(String value, boolean isBitcoin) { this.value = value; this.isBitcoin = isBitcoin; } } }
app/src/main/java/com/breadwallet/presenter/fragments/FragmentScanResult.java
package com.breadwallet.presenter.fragments; import android.app.Activity; import android.app.Fragment; import android.os.Bundle; import android.os.Handler; import android.util.Log; import android.view.Gravity; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.ViewTreeObserver; import android.widget.Button; import android.widget.ImageButton; import android.widget.RelativeLayout; import android.widget.TextView; import android.graphics.Typeface; import com.breadwallet.R; import com.breadwallet.BreadWalletApp; import com.breadwallet.presenter.activities.MainActivity; import com.breadwallet.tools.animation.BRAnimator; import com.breadwallet.tools.util.BRConstants; import com.breadwallet.tools.util.BRStringFormatter; import com.breadwallet.tools.manager.SharedPreferencesManager; import com.breadwallet.tools.animation.SpringAnimator; import java.math.BigDecimal; import java.math.RoundingMode; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.util.Currency; import java.util.Locale; import java.util.Objects; import static android.R.attr.value; import static com.breadwallet.tools.util.BRConstants.CURRENT_UNIT_BITS; import static com.breadwallet.tools.util.BRStringFormatter.getNumberOfDecimalPlaces; /** * BreadWallet * <p> * Created by Mihail Gutan <[email protected]> on 7/14/15. 
* Copyright (c) 2016 breadwallet LLC * <p> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p> * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * <p> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ public class FragmentScanResult extends Fragment implements View.OnClickListener { private static final String TAG = FragmentScanResult.class.getName(); private TextView scanResult; private RelativeLayout customKeyboardLayout; private TextView rightTextView; private TextView leftTextView; public static String address; //amount stuff // private boolean comaHasBeenInserted = false; private boolean isTextColorGrey = true; private ValueItem rightValue; private ValueItem leftValue; private int buttonCode = BRConstants.PAY_BUTTON; private boolean pressAvailable = true; private int unit = BRConstants.CURRENT_UNIT_BITS; public static FragmentScanResult instance; private String ISO; public double rate = -1; public static boolean isARequest = false; public FragmentScanResult() { instance = this; } @Override public View onCreateView(LayoutInflater inflater, final ViewGroup container, Bundle savedInstanceState) { // The last two arguments ensure LayoutParams are inflated // properly. final View rootView = inflater.inflate(R.layout.fragment_scan_result, container, false); scanResult = (TextView) rootView.findViewById(R.id.scan_result); customKeyboardLayout = (RelativeLayout) rootView.findViewById(R.id.custom_keyboard_layout); rightTextView = (TextView) rootView.findViewById(R.id.right_textview); leftTextView = (TextView) rootView.findViewById(R.id.left_textview); TextView doubleArrow = (TextView) rootView.findViewById(R.id.double_arrow_text); rightValue = new ValueItem("0", true); leftValue = new ValueItem("0", false); /** * This mess is for the custom keyboard to be created after the soft keyboard is hidden * (if it was previously shown) to prevent the wrong position of the keyboard layout placement */ customKeyboardLayout.getViewTreeObserver().addOnGlobalLayoutListener( new ViewTreeObserver.OnGlobalLayoutListener() { public void onGlobalLayout() { MainActivity app = MainActivity.app; if (app != null) if (!app.isSoftKeyboardShown()) { int[] locations = new int[2]; 
customKeyboardLayout.getLocationOnScreen(locations); customKeyboardLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this); createCustomKeyboardButtons(locations[1]); } } }); View.OnClickListener listener = new View.OnClickListener() { @Override public void onClick(View v) { switchCurrencies(); SpringAnimator.showAnimation(leftTextView); SpringAnimator.showAnimation(rightTextView); } }; updateBothTextValues("0", "0"); doubleArrow.setText(BRConstants.DOUBLE_ARROW); doubleArrow.setOnClickListener(listener); leftTextView.setOnClickListener(listener); rightTextView.setOnClickListener(listener); return rootView; } @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); } @Override public void onResume() { if (!isARequest && (address == null || address.length() < 20)) throw new NullPointerException("address is corrupted"); updateRateAndISO(); calculateAndPassValuesToFragment("0"); scanResult.setText(isARequest ? "" : getString(R.string.to) + address); super.onResume(); } @Override public void onPause() { super.onPause(); resetKeyboard(); ((BreadWalletApp) getActivity().getApplication()).setLockerPayButton(BRConstants.LOCKER_BUTTON); isARequest = false; BRAnimator.hideScanResultFragment(); } @Override public void onDestroyView() { super.onDestroyView(); } private void createCustomKeyboardButtons(int y) { int availableWidth = MainActivity.screenParametersPoint.x; int availableHeight = MainActivity.screenParametersPoint.y; int spaceNeededForRest = availableHeight / 14; float gapRate = 0.2f; float gap = (availableWidth * gapRate); float interButtonGap = gap / 5; float buttonWidth = (availableWidth - gap) / 3; float buttonHeight = buttonWidth; float spaceNeeded = buttonHeight * 4 + gap; int buttonTextSize = 45; if (spaceNeeded > (availableHeight - (spaceNeededForRest + y))) { buttonHeight = ((availableHeight - (spaceNeededForRest + y)) - gap) / 4; buttonTextSize = (int) ((buttonHeight / 7)); } int minimumHeight 
= (int) (buttonHeight * 4 + interButtonGap * 4); if (customKeyboardLayout == null) { customKeyboardLayout = (RelativeLayout) getActivity().findViewById(R.id.custom_keyboard_layout); } customKeyboardLayout.setMinimumHeight(minimumHeight); int childCount = 12; for (int i = 0; i < childCount; i++) { Button b = new Button(getActivity()); b.setWidth((int) buttonWidth); b.setHeight((int) buttonHeight); b.setTextSize(buttonTextSize); b.setTypeface(Typeface.create("sans-serif-thin", Typeface.NORMAL)); //noinspection deprecation b.setTextColor(getResources().getColor(R.color.dark_blue)); b.setBackgroundResource(R.drawable.button_regular_blue); b.setOnClickListener(this); b.setGravity(Gravity.CENTER); b.setTextAlignment(View.TEXT_ALIGNMENT_GRAVITY); ImageButton imageB = null; if (i < 9) b.setText(String.valueOf(i + 1)); switch (i) { case 0: b.setX(interButtonGap / 2 + interButtonGap); break; case 1: b.setX(interButtonGap / 2 + interButtonGap * 2 + buttonWidth); break; case 2: b.setX(interButtonGap / 2 + interButtonGap * 3 + buttonWidth * 2); break; case 3: b.setX(interButtonGap / 2 + interButtonGap); b.setY(buttonHeight + interButtonGap); break; case 4: b.setX(interButtonGap / 2 + interButtonGap * 2 + buttonWidth); b.setY(buttonHeight + interButtonGap); break; case 5: b.setY(buttonHeight + interButtonGap); b.setX(interButtonGap / 2 + interButtonGap * 3 + buttonWidth * 2); break; case 6: b.setY(buttonHeight * 2 + interButtonGap * 2); b.setX(interButtonGap / 2 + interButtonGap); break; case 7: b.setX(interButtonGap / 2 + interButtonGap * 2 + buttonWidth); b.setY(buttonHeight * 2 + interButtonGap * 2); break; case 8: b.setX(interButtonGap / 2 + interButtonGap * 3 + buttonWidth * 2); b.setY(buttonHeight * 2 + interButtonGap * 2); break; case 9: b.setText("."); b.setY(buttonHeight * 3 + interButtonGap * 3); b.setX(interButtonGap / 2 + interButtonGap); break; case 10: b.setX(interButtonGap / 2 + interButtonGap * 2 + buttonWidth); b.setText("0"); b.setY(buttonHeight * 3 + 
interButtonGap * 3); break; case 11: imageB = new ImageButton(getActivity()); imageB.setBackgroundResource(R.drawable.button_regular_blue); imageB.setImageResource(R.drawable.deletetoleft); imageB.setOnClickListener(this); imageB.setTextAlignment(View.TEXT_ALIGNMENT_CENTER); imageB.setLongClickable(true); imageB.setOnClickListener(this); imageB.setId(R.id.keyboard_back_button); //noinspection deprecation imageB.setX(interButtonGap / 2 + interButtonGap * 3 + buttonWidth * 2); imageB.setY(buttonHeight * 3 + interButtonGap * 3); imageB.setMinimumWidth((int) buttonWidth); imageB.setMinimumHeight((int) buttonHeight); break; } customKeyboardLayout.addView(imageB != null ? imageB : b); } } @Override public void onClick(View v) { String tmp; try { tmp = ((Button) v).getText().toString(); } catch (ClassCastException ex) { tmp = ""; } preConditions(tmp); } public void updateBothTextValues(String left, String right) { Log.e(TAG, "updateBothTextValues: " + left + " : " + right); if (ISO == null) updateRateAndISO(); if (ISO == null) ISO = "USD"; leftValue.value = left; rightValue.value = right; final String btcISO = "BTC"; String formattedRightVal = getFormattedCurrencyStringForKeyboard(rightValue.isBitcoin ? btcISO : ISO, rightValue.value, true); String formattedLeftVal = getFormattedCurrencyStringForKeyboard(leftValue.isBitcoin ? btcISO : ISO, leftValue.value, false); Log.e(TAG, "formatted: " + formattedLeftVal + " : " + formattedRightVal); // String cleanRightValue = cleanRightValue(right, formattedRightVal); // String cleanLeftValue = cleanLeftValue(left, formattedLeftVal); // Log.e(TAG, "cleaned: " + cleanLeftValue + " : " + cleanRightValue); rightTextView.setText(formattedRightVal); leftTextView.setText(formattedLeftVal); } private String getCleanValue(String value) { StringBuilder builder = new StringBuilder(); for (int i = 0; i < value.length(); i++) { char c = value.charAt(i); if (c == '.' 
|| Character.isDigit(c)) builder.append(c); } return builder.toString(); } // private String cleanLeftValue(String value, String formattedValue) { // if (value.equalsIgnoreCase("0.00")) { // leftValue.value = "0"; // return formattedValue.replace("0.00", leftValue.value); // } // if (value.endsWith(".")) return formattedValue.replace(".", ""); //// Log.e(TAG, "getFormattedCurrencyStringForKeyboard: strResult: " + strResult); //// Pattern p = Pattern.compile("\\.\\d0"); //// Matcher m = p.matcher(strResult); //// if (m.find()) //// strResult = strResult.substring(0, strResult.indexOf(".") + 1). //// concat(strResult.substring(strResult.indexOf(".") + 1, strResult.length() - 1)); //// //// Log.e(TAG, "strResult: " + strResult); //// if (amount.endsWith(".")) //// return strResult + "."; //// if (amount.endsWith(".0")) //// return strResult + ".0"; // return formattedValue; // } // private String cleanRightValue(String value, String formattedValue) { // //clean: $12. to $12 // if (value.endsWith(".") || value.contains(".") && !Character.isDigit(value.charAt(value.indexOf(".") + 1))) // return formattedValue.replace(value.charAt(value.length() - 1), value); // return formattedValue; // } private void updateRateAndISO() { MainActivity app = MainActivity.app; if (app == null) return; ISO = SharedPreferencesManager.getIso(app); getOtherValue().iso = ISO; rate = SharedPreferencesManager.getRate(app); } public void preConditions(String tmp) { Activity context = MainActivity.app; if (context != null) unit = SharedPreferencesManager.getCurrencyUnit(context); if (FragmentScanResult.isARequest) { buttonCode = BRConstants.REQUEST_BUTTON; } else { buttonCode = BRConstants.PAY_BUTTON; } switch (tmp) { case "": doBackSpace(); break; case ".": insertSeparator(); break; default: insertDigit(tmp); break; } } private void doBackSpace() { MainActivity app = MainActivity.app; String amount = rightValue.value; int length = amount.length(); if (length > 1) { 
calculateAndPassValuesToFragment(rightValue.value.substring(0, length - 1)); } else { ((BreadWalletApp) app.getApplication()).setLockerPayButton(BRConstants.LOCKER_BUTTON); changeTextColor(2); calculateAndPassValuesToFragment("0"); } } private void insertSeparator() { MainActivity app = MainActivity.app; if (isTextColorGrey) { changeTextColor(1); ((BreadWalletApp) app.getApplication()).setLockerPayButton(buttonCode); } String amount = rightValue.value; int maxDigit = getMaxFractionDigits(); if (!amount.contains(".") && maxDigit != 0) calculateAndPassValuesToFragment(amount + "."); } private void insertDigit(String tmp) { MainActivity app = MainActivity.app; String amount = rightValue.value; int length = amount.length(); if (isTextColorGrey) { changeTextColor(1); ((BreadWalletApp) app.getApplication()).setLockerPayButton(buttonCode); } if (isDigitInsertingLegal(tmp)) { if (length == 1 && amount.equals("0")) { calculateAndPassValuesToFragment(tmp); } else { calculateAndPassValuesToFragment(rightValue.value + tmp); } } } private boolean isDigitInsertingLegal(String tmp) { int maxDig = getMaxFractionDigits(); long limit = 21000000000000L; if (unit == BRConstants.CURRENT_UNIT_MBITS) limit = 21000000000L; if (unit == BRConstants.CURRENT_UNIT_BITCOINS) limit = 21000000L; if (rightValue.isBitcoin) { maxDig = BRConstants.MAX_DIGITS_AFTER_SEPARATOR_BITS; if (unit == BRConstants.CURRENT_UNIT_MBITS) maxDig = BRConstants.MAX_DIGITS_AFTER_SEPARATOR_MBITS; if (unit == BRConstants.CURRENT_UNIT_BITCOINS) maxDig = BRConstants.MAX_DIGITS_AFTER_SEPARATOR_BITCOINS; } boolean isFractionStarted = rightValue.value.contains("."); int nrOfDecimals = getNumberOfDecimalPlaces(rightValue.value); if (isFractionStarted) return nrOfDecimals < maxDig; long l = 0; try { l = Long.valueOf(rightValue.value + tmp); } catch (Exception e) { e.printStackTrace(); } return l < limit; } private int getMaxFractionDigits() { try { Currency currency = Currency.getInstance(rightValue.iso); return 
currency.getDefaultFractionDigits(); } catch (Exception e) { e.printStackTrace(); return 2; } } /** * Sets the textColor of the amount TextView to black or grey * * @param color the color of the textView: 1 Black, 2 Grey. */ private void changeTextColor(int color) { Activity context = MainActivity.app; isTextColorGrey = color != 1; rightTextView.setTextColor((color == 1) ? context.getColor(R.color.black) : context.getColor(android.R.color.darker_gray)); } public void resetKeyboard() { isTextColorGrey = true; rightValue.value = "0"; leftValue.value = "0"; } public void calculateAndPassValuesToFragment(String valuePassed) { String divideBy = "1000000"; Log.e(TAG, "calculateAndPassValuesToFragment: valuePassed: " + valuePassed); if (unit == BRConstants.CURRENT_UNIT_MBITS) divideBy = "1000"; if (unit == BRConstants.CURRENT_UNIT_BITCOINS) divideBy = "1"; rightValue.value = valuePassed; BigDecimal rightValueObject = new BigDecimal(valuePassed); BigDecimal leftValueObject; BigDecimal theRate = new BigDecimal(rate); if (rightValue.isBitcoin) { //from bits to other currency using rate if (theRate.intValue() > 1) { leftValueObject = theRate.multiply(rightValueObject.divide(new BigDecimal(divideBy))); } else { leftValueObject = new BigDecimal("0"); } } else { //from other currency to bits using rate if (theRate.intValue() > 1) { leftValueObject = rightValueObject.multiply(new BigDecimal(divideBy)). 
divide(theRate, 8, RoundingMode.HALF_UP); } else { leftValueObject = new BigDecimal("0"); } } updateBothTextValues(leftValueObject.toString(), valuePassed); } public void switchCurrencies() { if (checkPressingAvailability()) { rightValue.value = getCleanValue(rightTextView.getText().toString()); leftValue.value = getCleanValue(leftTextView.getText().toString()); ValueItem tmp = rightValue; rightValue = leftValue; leftValue = tmp; updateBothTextValues(leftValue.value, rightValue.value); } } public boolean checkPressingAvailability() { if (pressAvailable) { pressAvailable = false; new Handler().postDelayed(new Runnable() { @Override public void run() { pressAvailable = true; } }, 100); return true; } else { return false; } } public String getFormattedCurrencyStringForKeyboard(String isoCurrencyCode, String amount, boolean rightItem) { MainActivity app = MainActivity.app; int unit = app == null ? CURRENT_UNIT_BITS : SharedPreferencesManager.getCurrencyUnit(app); if (amount == null) { // Log.e(TAG, "getFormattedCurrencyStringForKeyboard: AMOUNT == null"); return "0"; } String multiplyBy = "100"; if (unit == BRConstants.CURRENT_UNIT_MBITS) multiplyBy = "100000"; if (unit == BRConstants.CURRENT_UNIT_BITCOINS) multiplyBy = "100000000"; BigDecimal result; if (isoCurrencyCode.equals("BTC")) { result = new BigDecimal(amount).multiply(new BigDecimal(multiplyBy)); } else { result = new BigDecimal(amount).multiply(new BigDecimal("100")); } DecimalFormat currencyFormat; result = result.divide(new BigDecimal("100")); currencyFormat = (DecimalFormat) DecimalFormat.getCurrencyInstance(Locale.getDefault()); DecimalFormatSymbols decimalFormatSymbols; Currency currency; String symbol = null; decimalFormatSymbols = currencyFormat.getDecimalFormatSymbols(); int decimalPoints = 0; if (Objects.equals(isoCurrencyCode, "BTC")) { String currencySymbolString = BRConstants.bitcoinLowercase; if (app != null) { currencyFormat.setMinimumFractionDigits(0); switch (unit) { case CURRENT_UNIT_BITS: 
currencySymbolString = BRConstants.bitcoinLowercase; decimalPoints = 2; if (getNumberOfDecimalPlaces(result.toPlainString()) == 1) currencyFormat.setMinimumFractionDigits(1); break; case BRConstants.CURRENT_UNIT_MBITS: currencySymbolString = "m" + BRConstants.bitcoinUppercase; decimalPoints = 5; result = new BigDecimal(String.valueOf(amount)).divide(new BigDecimal("100000")); break; case BRConstants.CURRENT_UNIT_BITCOINS: currencySymbolString = BRConstants.bitcoinUppercase; decimalPoints = 8; result = new BigDecimal(String.valueOf(amount)).divide(new BigDecimal("100000000")); break; } } symbol = currencySymbolString; } else { try { currency = Currency.getInstance(isoCurrencyCode); } catch (IllegalArgumentException e) { currency = Currency.getInstance(Locale.getDefault()); } symbol = currency.getSymbol(); decimalPoints = currency.getDefaultFractionDigits(); } decimalFormatSymbols.setCurrencySymbol(symbol); currencyFormat.setMaximumFractionDigits(decimalPoints); int currNrOfDecimal = getNumberOfDecimalPlaces(amount); currencyFormat.setMinimumFractionDigits(currNrOfDecimal > decimalPoints ? decimalPoints : currNrOfDecimal); currencyFormat.setGroupingUsed(true); if (rightItem && amount.endsWith(".")) currencyFormat.setDecimalSeparatorAlwaysShown(true); currencyFormat.setDecimalFormatSymbols(decimalFormatSymbols); currencyFormat.setNegativePrefix(decimalFormatSymbols.getCurrencySymbol() + "-"); currencyFormat.setNegativeSuffix(""); return currencyFormat.format(result.doubleValue()); } public ValueItem getBitcoinValue() { if (rightValue.isBitcoin) return rightValue; else return leftValue; } public ValueItem getOtherValue() { if (!rightValue.isBitcoin) return rightValue; else return leftValue; } public class ValueItem { public String value; public boolean isBitcoin; public String iso; public ValueItem(String value, boolean isBitcoin) { this.value = value; this.isBitcoin = isBitcoin; } } }
clean up + automation tests (last commit)
app/src/main/java/com/breadwallet/presenter/fragments/FragmentScanResult.java
clean up + automation tests (last commit)
<ide><path>pp/src/main/java/com/breadwallet/presenter/fragments/FragmentScanResult.java <ide> import com.breadwallet.presenter.activities.MainActivity; <ide> import com.breadwallet.tools.animation.BRAnimator; <ide> import com.breadwallet.tools.util.BRConstants; <del>import com.breadwallet.tools.util.BRStringFormatter; <ide> import com.breadwallet.tools.manager.SharedPreferencesManager; <ide> import com.breadwallet.tools.animation.SpringAnimator; <ide> <ide> import java.util.Locale; <ide> import java.util.Objects; <ide> <del>import static android.R.attr.value; <ide> import static com.breadwallet.tools.util.BRConstants.CURRENT_UNIT_BITS; <ide> import static com.breadwallet.tools.util.BRStringFormatter.getNumberOfDecimalPlaces; <ide> <ide> String formattedLeftVal = getFormattedCurrencyStringForKeyboard(leftValue.isBitcoin ? btcISO : ISO, leftValue.value, false); <ide> Log.e(TAG, "formatted: " + formattedLeftVal + " : " + formattedRightVal); <ide> <del>// String cleanRightValue = cleanRightValue(right, formattedRightVal); <del>// String cleanLeftValue = cleanLeftValue(left, formattedLeftVal); <del> <del>// Log.e(TAG, "cleaned: " + cleanLeftValue + " : " + cleanRightValue); <del> <ide> rightTextView.setText(formattedRightVal); <ide> leftTextView.setText(formattedLeftVal); <ide> } <ide> } <ide> return builder.toString(); <ide> } <del> <del>// private String cleanLeftValue(String value, String formattedValue) { <del>// if (value.equalsIgnoreCase("0.00")) { <del>// leftValue.value = "0"; <del>// return formattedValue.replace("0.00", leftValue.value); <del>// } <del>// if (value.endsWith(".")) return formattedValue.replace(".", ""); <del>//// Log.e(TAG, "getFormattedCurrencyStringForKeyboard: strResult: " + strResult); <del>//// Pattern p = Pattern.compile("\\.\\d0"); <del>//// Matcher m = p.matcher(strResult); <del>//// if (m.find()) <del>//// strResult = strResult.substring(0, strResult.indexOf(".") + 1). 
<del>//// concat(strResult.substring(strResult.indexOf(".") + 1, strResult.length() - 1)); <del>//// <del>//// Log.e(TAG, "strResult: " + strResult); <del>//// if (amount.endsWith(".")) <del>//// return strResult + "."; <del>//// if (amount.endsWith(".0")) <del>//// return strResult + ".0"; <del>// return formattedValue; <del>// } <del> <del>// private String cleanRightValue(String value, String formattedValue) { <del>// //clean: $12. to $12 <del>// if (value.endsWith(".") || value.contains(".") && !Character.isDigit(value.charAt(value.indexOf(".") + 1))) <del>// return formattedValue.replace(value.charAt(value.length() - 1), value); <del>// return formattedValue; <del>// } <ide> <ide> private void updateRateAndISO() { <ide> MainActivity app = MainActivity.app;
Java
apache-2.0
3bab49a3210c857d599cb5706706d2c2a30ee63e
0
sankarh/hive,anishek/hive,vergilchiu/hive,vineetgarg02/hive,vineetgarg02/hive,vergilchiu/hive,alanfgates/hive,vineetgarg02/hive,vergilchiu/hive,alanfgates/hive,jcamachor/hive,vergilchiu/hive,b-slim/hive,nishantmonu51/hive,nishantmonu51/hive,alanfgates/hive,b-slim/hive,b-slim/hive,jcamachor/hive,vineetgarg02/hive,anishek/hive,b-slim/hive,lirui-apache/hive,nishantmonu51/hive,nishantmonu51/hive,anishek/hive,anishek/hive,sankarh/hive,alanfgates/hive,b-slim/hive,anishek/hive,alanfgates/hive,vineetgarg02/hive,vergilchiu/hive,alanfgates/hive,lirui-apache/hive,nishantmonu51/hive,vergilchiu/hive,lirui-apache/hive,nishantmonu51/hive,sankarh/hive,anishek/hive,lirui-apache/hive,vineetgarg02/hive,vineetgarg02/hive,jcamachor/hive,vergilchiu/hive,nishantmonu51/hive,lirui-apache/hive,anishek/hive,lirui-apache/hive,sankarh/hive,vergilchiu/hive,sankarh/hive,jcamachor/hive,vergilchiu/hive,b-slim/hive,vineetgarg02/hive,sankarh/hive,lirui-apache/hive,jcamachor/hive,jcamachor/hive,sankarh/hive,sankarh/hive,alanfgates/hive,nishantmonu51/hive,lirui-apache/hive,b-slim/hive,anishek/hive,lirui-apache/hive,b-slim/hive,jcamachor/hive,jcamachor/hive,alanfgates/hive,anishek/hive,vineetgarg02/hive,alanfgates/hive,jcamachor/hive,sankarh/hive,nishantmonu51/hive,b-slim/hive
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hive.jdbc; import static org.apache.hive.service.rpc.thrift.TCLIServiceConstants.TYPE_NAMES; import java.sql.Connection; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.concurrent.locks.ReentrantLock; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.RowSetFactory; import org.apache.hive.service.cli.TableSchema; import org.apache.hive.service.rpc.thrift.TCLIService; import org.apache.hive.service.rpc.thrift.TCLIServiceConstants; import org.apache.hive.service.rpc.thrift.TCloseOperationReq; import org.apache.hive.service.rpc.thrift.TCloseOperationResp; import org.apache.hive.service.rpc.thrift.TColumnDesc; import org.apache.hive.service.rpc.thrift.TFetchOrientation; import org.apache.hive.service.rpc.thrift.TFetchResultsReq; import org.apache.hive.service.rpc.thrift.TFetchResultsResp; import org.apache.hive.service.rpc.thrift.TGetResultSetMetadataReq; import org.apache.hive.service.rpc.thrift.TGetResultSetMetadataResp; import 
org.apache.hive.service.rpc.thrift.TOperationHandle; import org.apache.hive.service.rpc.thrift.TPrimitiveTypeEntry; import org.apache.hive.service.rpc.thrift.TProtocolVersion; import org.apache.hive.service.rpc.thrift.TRowSet; import org.apache.hive.service.rpc.thrift.TSessionHandle; import org.apache.hive.service.rpc.thrift.TTableSchema; import org.apache.hive.service.rpc.thrift.TTypeQualifierValue; import org.apache.hive.service.rpc.thrift.TTypeQualifiers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * HiveQueryResultSet. * */ public class HiveQueryResultSet extends HiveBaseResultSet { public static final Logger LOG = LoggerFactory.getLogger(HiveQueryResultSet.class); private TCLIService.Iface client; private TOperationHandle stmtHandle; private TSessionHandle sessHandle; private int maxRows; private int fetchSize; private int rowsFetched = 0; private RowSet fetchedRows; private Iterator<Object[]> fetchedRowsItr; private boolean isClosed = false; private boolean emptyResultSet = false; private boolean isScrollable = false; private boolean fetchFirst = false; private final TProtocolVersion protocol; public static class Builder { private final Connection connection; private final Statement statement; private TCLIService.Iface client = null; private TOperationHandle stmtHandle = null; private TSessionHandle sessHandle = null; /** * Sets the limit for the maximum number of rows that any ResultSet object produced by this * Statement can contain to the given number. If the limit is exceeded, the excess rows * are silently dropped. The value must be >= 0, and 0 means there is not limit. 
*/ private int maxRows = 0; private boolean retrieveSchema = true; private List<String> colNames; private List<String> colTypes; private List<JdbcColumnAttributes> colAttributes; private int fetchSize = 50; private boolean emptyResultSet = false; private boolean isScrollable = false; private ReentrantLock transportLock = null; public Builder(Statement statement) throws SQLException { this.statement = statement; this.connection = statement.getConnection(); } public Builder(Connection connection) { this.statement = null; this.connection = connection; } public Builder setClient(TCLIService.Iface client) { this.client = client; return this; } public Builder setStmtHandle(TOperationHandle stmtHandle) { this.stmtHandle = stmtHandle; return this; } public Builder setSessionHandle(TSessionHandle sessHandle) { this.sessHandle = sessHandle; return this; } public Builder setMaxRows(int maxRows) { this.maxRows = maxRows; return this; } public Builder setSchema(List<String> colNames, List<String> colTypes) { // no column attributes provided - create list of null attributes. 
List<JdbcColumnAttributes> colAttributes = new ArrayList<JdbcColumnAttributes>(); for (int idx = 0; idx < colTypes.size(); ++idx) { colAttributes.add(null); } return setSchema(colNames, colTypes, colAttributes); } public Builder setSchema(List<String> colNames, List<String> colTypes, List<JdbcColumnAttributes> colAttributes) { this.colNames = new ArrayList<String>(); this.colNames.addAll(colNames); this.colTypes = new ArrayList<String>(); this.colTypes.addAll(colTypes); this.colAttributes = new ArrayList<JdbcColumnAttributes>(); this.colAttributes.addAll(colAttributes); this.retrieveSchema = false; return this; } public Builder setFetchSize(int fetchSize) { this.fetchSize = fetchSize; return this; } public Builder setEmptyResultSet(boolean emptyResultSet) { this.emptyResultSet = emptyResultSet; return this; } public Builder setScrollable(boolean setScrollable) { this.isScrollable = setScrollable; return this; } public Builder setTransportLock(ReentrantLock transportLock) { this.transportLock = transportLock; return this; } public HiveQueryResultSet build() throws SQLException { return new HiveQueryResultSet(this); } public TProtocolVersion getProtocolVersion() throws SQLException { return ((HiveConnection)connection).getProtocol(); } } protected HiveQueryResultSet(Builder builder) throws SQLException { this.statement = builder.statement; this.client = builder.client; this.stmtHandle = builder.stmtHandle; this.sessHandle = builder.sessHandle; this.fetchSize = builder.fetchSize; columnNames = new ArrayList<String>(); normalizedColumnNames = new ArrayList<String>(); columnTypes = new ArrayList<String>(); columnAttributes = new ArrayList<JdbcColumnAttributes>(); if (builder.retrieveSchema) { retrieveSchema(); } else { this.setSchema(builder.colNames, builder.colTypes, builder.colAttributes); } this.emptyResultSet = builder.emptyResultSet; if (builder.emptyResultSet) { this.maxRows = 0; } else { this.maxRows = builder.maxRows; } this.isScrollable = builder.isScrollable; 
this.protocol = builder.getProtocolVersion(); } /** * Generate ColumnAttributes object from a TTypeQualifiers * @param primitiveTypeEntry primitive type * @return generated ColumnAttributes, or null */ private static JdbcColumnAttributes getColumnAttributes( TPrimitiveTypeEntry primitiveTypeEntry) { JdbcColumnAttributes ret = null; if (primitiveTypeEntry.isSetTypeQualifiers()) { TTypeQualifiers tq = primitiveTypeEntry.getTypeQualifiers(); switch (primitiveTypeEntry.getType()) { case CHAR_TYPE: case VARCHAR_TYPE: TTypeQualifierValue val = tq.getQualifiers().get(TCLIServiceConstants.CHARACTER_MAXIMUM_LENGTH); if (val != null) { // precision is char length ret = new JdbcColumnAttributes(val.getI32Value(), 0); } break; case DECIMAL_TYPE: TTypeQualifierValue prec = tq.getQualifiers().get(TCLIServiceConstants.PRECISION); TTypeQualifierValue scale = tq.getQualifiers().get(TCLIServiceConstants.SCALE); ret = new JdbcColumnAttributes(prec == null ? HiveDecimal.USER_DEFAULT_PRECISION : prec.getI32Value(), scale == null ? HiveDecimal.USER_DEFAULT_SCALE : scale.getI32Value()); break; default: break; } } return ret; } /** * Retrieve schema from the server */ private void retrieveSchema() throws SQLException { try { TGetResultSetMetadataReq metadataReq = new TGetResultSetMetadataReq(stmtHandle); // TODO need session handle TGetResultSetMetadataResp metadataResp; metadataResp = client.GetResultSetMetadata(metadataReq); Utils.verifySuccess(metadataResp.getStatus()); StringBuilder namesSb = new StringBuilder(); StringBuilder typesSb = new StringBuilder(); TTableSchema schema = metadataResp.getSchema(); if (schema == null || !schema.isSetColumns()) { // TODO: should probably throw an exception here. 
return; } setSchema(new TableSchema(schema)); List<TColumnDesc> columns = schema.getColumns(); for (int pos = 0; pos < schema.getColumnsSize(); pos++) { if (pos != 0) { namesSb.append(","); typesSb.append(","); } String columnName = columns.get(pos).getColumnName(); columnNames.add(columnName); normalizedColumnNames.add(columnName.toLowerCase()); TPrimitiveTypeEntry primitiveTypeEntry = columns.get(pos).getTypeDesc().getTypes().get(0).getPrimitiveEntry(); String columnTypeName = TYPE_NAMES.get(primitiveTypeEntry.getType()); columnTypes.add(columnTypeName); columnAttributes.add(getColumnAttributes(primitiveTypeEntry)); } } catch (SQLException eS) { throw eS; // rethrow the SQLException as is } catch (Exception ex) { ex.printStackTrace(); throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex); } } /** * Set the specified schema to the resultset * @param colNames * @param colTypes */ private void setSchema(List<String> colNames, List<String> colTypes, List<JdbcColumnAttributes> colAttributes) { columnNames.addAll(colNames); columnTypes.addAll(colTypes); columnAttributes.addAll(colAttributes); for (String colName : colNames) { normalizedColumnNames.add(colName.toLowerCase()); } } @Override public void close() throws SQLException { if (this.statement != null && (this.statement instanceof HiveStatement)) { HiveStatement s = (HiveStatement) this.statement; s.closeClientOperation(); } else { // for those stmtHandle passed from HiveDatabaseMetaData instead of Statement closeOperationHandle(stmtHandle); } // Need reset during re-open when needed client = null; stmtHandle = null; sessHandle = null; isClosed = true; } private void closeOperationHandle(TOperationHandle stmtHandle) throws SQLException { try { if (stmtHandle != null) { TCloseOperationReq closeReq = new TCloseOperationReq(stmtHandle); TCloseOperationResp closeResp = client.CloseOperation(closeReq); Utils.verifySuccessWithInfo(closeResp.getStatus()); } } catch (SQLException e) { throw e; } 
catch (Exception e) { throw new SQLException(e.toString(), "08S01", e); } } /** * Moves the cursor down one row from its current position. * * @see java.sql.ResultSet#next() * @throws SQLException * if a database access error occurs. */ public boolean next() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } if (emptyResultSet || (maxRows > 0 && rowsFetched >= maxRows)) { return false; } /** * Poll on the operation status, till the operation is complete. * We need to wait only for HiveStatement to complete. * HiveDatabaseMetaData which also uses this ResultSet returns only after the RPC is complete. */ if ((statement != null) && (statement instanceof HiveStatement)) { ((HiveStatement) statement).waitForOperationToComplete(); } try { TFetchOrientation orientation = TFetchOrientation.FETCH_NEXT; if (fetchFirst) { // If we are asked to start from begining, clear the current fetched resultset orientation = TFetchOrientation.FETCH_FIRST; fetchedRows = null; fetchedRowsItr = null; fetchFirst = false; } if (fetchedRows == null || !fetchedRowsItr.hasNext()) { TFetchResultsReq fetchReq = new TFetchResultsReq(stmtHandle, orientation, fetchSize); TFetchResultsResp fetchResp; fetchResp = client.FetchResults(fetchReq); Utils.verifySuccessWithInfo(fetchResp.getStatus()); TRowSet results = fetchResp.getResults(); fetchedRows = RowSetFactory.create(results, protocol); fetchedRowsItr = fetchedRows.iterator(); } if (fetchedRowsItr.hasNext()) { row = fetchedRowsItr.next(); } else { return false; } rowsFetched++; } catch (SQLException eS) { throw eS; } catch (Exception ex) { ex.printStackTrace(); throw new SQLException("Error retrieving next row", ex); } // NOTE: fetchOne dosn't throw new SQLException("Method not supported"). 
return true; } @Override public ResultSetMetaData getMetaData() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } return super.getMetaData(); } @Override public void setFetchSize(int rows) throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } fetchSize = rows; } @Override public int getType() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } if (isScrollable) { return ResultSet.TYPE_SCROLL_INSENSITIVE; } else { return ResultSet.TYPE_FORWARD_ONLY; } } @Override public int getFetchSize() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } return fetchSize; } public <T> T getObject(String columnLabel, Class<T> type) throws SQLException { //JDK 1.7 throw new SQLException("Method not supported"); } public <T> T getObject(int columnIndex, Class<T> type) throws SQLException { //JDK 1.7 throw new SQLException("Method not supported"); } /** * Moves the cursor before the first row of the resultset. * * @see java.sql.ResultSet#next() * @throws SQLException * if a database access error occurs. */ @Override public void beforeFirst() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } if (!isScrollable) { throw new SQLException("Method not supported for TYPE_FORWARD_ONLY resultset"); } fetchFirst = true; rowsFetched = 0; } @Override public boolean isBeforeFirst() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } return (rowsFetched == 0); } @Override public int getRow() throws SQLException { return rowsFetched; } @Override public boolean isClosed() { return isClosed; } }
jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hive.jdbc; import static org.apache.hive.service.rpc.thrift.TCLIServiceConstants.TYPE_NAMES; import java.sql.Connection; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.concurrent.locks.ReentrantLock; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hive.service.cli.RowSet; import org.apache.hive.service.cli.RowSetFactory; import org.apache.hive.service.cli.TableSchema; import org.apache.hive.service.rpc.thrift.TCLIService; import org.apache.hive.service.rpc.thrift.TCLIServiceConstants; import org.apache.hive.service.rpc.thrift.TCloseOperationReq; import org.apache.hive.service.rpc.thrift.TCloseOperationResp; import org.apache.hive.service.rpc.thrift.TColumnDesc; import org.apache.hive.service.rpc.thrift.TFetchOrientation; import org.apache.hive.service.rpc.thrift.TFetchResultsReq; import org.apache.hive.service.rpc.thrift.TFetchResultsResp; import org.apache.hive.service.rpc.thrift.TGetResultSetMetadataReq; import org.apache.hive.service.rpc.thrift.TGetResultSetMetadataResp; import 
org.apache.hive.service.rpc.thrift.TOperationHandle; import org.apache.hive.service.rpc.thrift.TPrimitiveTypeEntry; import org.apache.hive.service.rpc.thrift.TProtocolVersion; import org.apache.hive.service.rpc.thrift.TRowSet; import org.apache.hive.service.rpc.thrift.TSessionHandle; import org.apache.hive.service.rpc.thrift.TTableSchema; import org.apache.hive.service.rpc.thrift.TTypeQualifierValue; import org.apache.hive.service.rpc.thrift.TTypeQualifiers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * HiveQueryResultSet. * */ public class HiveQueryResultSet extends HiveBaseResultSet { public static final Logger LOG = LoggerFactory.getLogger(HiveQueryResultSet.class); private TCLIService.Iface client; private TOperationHandle stmtHandle; private TSessionHandle sessHandle; private int maxRows; private int fetchSize; private int rowsFetched = 0; private RowSet fetchedRows; private Iterator<Object[]> fetchedRowsItr; private boolean isClosed = false; private boolean emptyResultSet = false; private boolean isScrollable = false; private boolean fetchFirst = false; private final TProtocolVersion protocol; public static class Builder { private final Connection connection; private final Statement statement; private TCLIService.Iface client = null; private TOperationHandle stmtHandle = null; private TSessionHandle sessHandle = null; /** * Sets the limit for the maximum number of rows that any ResultSet object produced by this * Statement can contain to the given number. If the limit is exceeded, the excess rows * are silently dropped. The value must be >= 0, and 0 means there is not limit. 
*/ private int maxRows = 0; private boolean retrieveSchema = true; private List<String> colNames; private List<String> colTypes; private List<JdbcColumnAttributes> colAttributes; private int fetchSize = 50; private boolean emptyResultSet = false; private boolean isScrollable = false; private ReentrantLock transportLock = null; public Builder(Statement statement) throws SQLException { this.statement = statement; this.connection = statement.getConnection(); } public Builder(Connection connection) { this.statement = null; this.connection = connection; } public Builder setClient(TCLIService.Iface client) { this.client = client; return this; } public Builder setStmtHandle(TOperationHandle stmtHandle) { this.stmtHandle = stmtHandle; return this; } public Builder setSessionHandle(TSessionHandle sessHandle) { this.sessHandle = sessHandle; return this; } public Builder setMaxRows(int maxRows) { this.maxRows = maxRows; return this; } public Builder setSchema(List<String> colNames, List<String> colTypes) { // no column attributes provided - create list of null attributes. 
List<JdbcColumnAttributes> colAttributes = new ArrayList<JdbcColumnAttributes>(); for (int idx = 0; idx < colTypes.size(); ++idx) { colAttributes.add(null); } return setSchema(colNames, colTypes, colAttributes); } public Builder setSchema(List<String> colNames, List<String> colTypes, List<JdbcColumnAttributes> colAttributes) { this.colNames = new ArrayList<String>(); this.colNames.addAll(colNames); this.colTypes = new ArrayList<String>(); this.colTypes.addAll(colTypes); this.colAttributes = new ArrayList<JdbcColumnAttributes>(); this.colAttributes.addAll(colAttributes); this.retrieveSchema = false; return this; } public Builder setFetchSize(int fetchSize) { this.fetchSize = fetchSize; return this; } public Builder setEmptyResultSet(boolean emptyResultSet) { this.emptyResultSet = emptyResultSet; return this; } public Builder setScrollable(boolean setScrollable) { this.isScrollable = setScrollable; return this; } public Builder setTransportLock(ReentrantLock transportLock) { this.transportLock = transportLock; return this; } public HiveQueryResultSet build() throws SQLException { return new HiveQueryResultSet(this); } public TProtocolVersion getProtocolVersion() throws SQLException { return ((HiveConnection)connection).getProtocol(); } } protected HiveQueryResultSet(Builder builder) throws SQLException { this.statement = builder.statement; this.client = builder.client; this.stmtHandle = builder.stmtHandle; this.sessHandle = builder.sessHandle; this.fetchSize = builder.fetchSize; columnNames = new ArrayList<String>(); normalizedColumnNames = new ArrayList<String>(); columnTypes = new ArrayList<String>(); columnAttributes = new ArrayList<JdbcColumnAttributes>(); if (builder.retrieveSchema) { retrieveSchema(); } else { this.setSchema(builder.colNames, builder.colTypes, builder.colAttributes); } this.emptyResultSet = builder.emptyResultSet; if (builder.emptyResultSet) { this.maxRows = 0; } else { this.maxRows = builder.maxRows; } this.isScrollable = builder.isScrollable; 
this.protocol = builder.getProtocolVersion(); } /** * Generate ColumnAttributes object from a TTypeQualifiers * @param primitiveTypeEntry primitive type * @return generated ColumnAttributes, or null */ private static JdbcColumnAttributes getColumnAttributes( TPrimitiveTypeEntry primitiveTypeEntry) { JdbcColumnAttributes ret = null; if (primitiveTypeEntry.isSetTypeQualifiers()) { TTypeQualifiers tq = primitiveTypeEntry.getTypeQualifiers(); switch (primitiveTypeEntry.getType()) { case CHAR_TYPE: case VARCHAR_TYPE: TTypeQualifierValue val = tq.getQualifiers().get(TCLIServiceConstants.CHARACTER_MAXIMUM_LENGTH); if (val != null) { // precision is char length ret = new JdbcColumnAttributes(val.getI32Value(), 0); } break; case DECIMAL_TYPE: TTypeQualifierValue prec = tq.getQualifiers().get(TCLIServiceConstants.PRECISION); TTypeQualifierValue scale = tq.getQualifiers().get(TCLIServiceConstants.SCALE); ret = new JdbcColumnAttributes(prec == null ? HiveDecimal.USER_DEFAULT_PRECISION : prec.getI32Value(), scale == null ? HiveDecimal.USER_DEFAULT_SCALE : scale.getI32Value()); break; default: break; } } return ret; } /** * Retrieve schema from the server */ private void retrieveSchema() throws SQLException { try { TGetResultSetMetadataReq metadataReq = new TGetResultSetMetadataReq(stmtHandle); // TODO need session handle TGetResultSetMetadataResp metadataResp; metadataResp = client.GetResultSetMetadata(metadataReq); Utils.verifySuccess(metadataResp.getStatus()); StringBuilder namesSb = new StringBuilder(); StringBuilder typesSb = new StringBuilder(); TTableSchema schema = metadataResp.getSchema(); if (schema == null || !schema.isSetColumns()) { // TODO: should probably throw an exception here. 
return; } setSchema(new TableSchema(schema)); List<TColumnDesc> columns = schema.getColumns(); for (int pos = 0; pos < schema.getColumnsSize(); pos++) { if (pos != 0) { namesSb.append(","); typesSb.append(","); } String columnName = columns.get(pos).getColumnName(); columnNames.add(columnName); normalizedColumnNames.add(columnName.toLowerCase()); TPrimitiveTypeEntry primitiveTypeEntry = columns.get(pos).getTypeDesc().getTypes().get(0).getPrimitiveEntry(); String columnTypeName = TYPE_NAMES.get(primitiveTypeEntry.getType()); columnTypes.add(columnTypeName); columnAttributes.add(getColumnAttributes(primitiveTypeEntry)); } } catch (SQLException eS) { throw eS; // rethrow the SQLException as is } catch (Exception ex) { ex.printStackTrace(); throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex); } } /** * Set the specified schema to the resultset * @param colNames * @param colTypes */ private void setSchema(List<String> colNames, List<String> colTypes, List<JdbcColumnAttributes> colAttributes) { columnNames.addAll(colNames); columnTypes.addAll(colTypes); columnAttributes.addAll(colAttributes); for (String colName : colNames) { normalizedColumnNames.add(colName.toLowerCase()); } } @Override public void close() throws SQLException { if (this.statement != null && (this.statement instanceof HiveStatement)) { HiveStatement s = (HiveStatement) this.statement; s.closeClientOperation(); } else { // for those stmtHandle passed from HiveDatabaseMetaData instead of Statement closeOperationHandle(stmtHandle); } // Need reset during re-open when needed client = null; stmtHandle = null; sessHandle = null; isClosed = true; } private void closeOperationHandle(TOperationHandle stmtHandle) throws SQLException { try { if (stmtHandle != null) { TCloseOperationReq closeReq = new TCloseOperationReq(stmtHandle); TCloseOperationResp closeResp = client.CloseOperation(closeReq); Utils.verifySuccessWithInfo(closeResp.getStatus()); } } catch (SQLException e) { throw e; } 
catch (Exception e) { throw new SQLException(e.toString(), "08S01", e); } } /** * Moves the cursor down one row from its current position. * * @see java.sql.ResultSet#next() * @throws SQLException * if a database access error occurs. */ public boolean next() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } if (emptyResultSet || (maxRows > 0 && rowsFetched >= maxRows)) { return false; } /** * Poll on the operation status, till the operation is complete. * We need to wait only for HiveStatement to complete. * HiveDatabaseMetaData which also uses this ResultSet returns only after the RPC is complete. */ if ((statement != null) && (statement instanceof HiveStatement)) { ((HiveStatement) statement).waitForOperationToComplete(); } try { TFetchOrientation orientation = TFetchOrientation.FETCH_NEXT; if (fetchFirst) { // If we are asked to start from begining, clear the current fetched resultset orientation = TFetchOrientation.FETCH_FIRST; fetchedRows = null; fetchedRowsItr = null; fetchFirst = false; } if (fetchedRows == null || !fetchedRowsItr.hasNext()) { TFetchResultsReq fetchReq = new TFetchResultsReq(stmtHandle, orientation, fetchSize); TFetchResultsResp fetchResp; fetchResp = client.FetchResults(fetchReq); Utils.verifySuccessWithInfo(fetchResp.getStatus()); TRowSet results = fetchResp.getResults(); fetchedRows = RowSetFactory.create(results, protocol); fetchedRowsItr = fetchedRows.iterator(); } String rowStr = ""; if (fetchedRowsItr.hasNext()) { row = fetchedRowsItr.next(); } else { return false; } rowsFetched++; if (LOG.isDebugEnabled()) { LOG.debug("Fetched row string: " + rowStr); } } catch (SQLException eS) { throw eS; } catch (Exception ex) { ex.printStackTrace(); throw new SQLException("Error retrieving next row", ex); } // NOTE: fetchOne dosn't throw new SQLException("Method not supported"). 
return true; } @Override public ResultSetMetaData getMetaData() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } return super.getMetaData(); } @Override public void setFetchSize(int rows) throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } fetchSize = rows; } @Override public int getType() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } if (isScrollable) { return ResultSet.TYPE_SCROLL_INSENSITIVE; } else { return ResultSet.TYPE_FORWARD_ONLY; } } @Override public int getFetchSize() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } return fetchSize; } public <T> T getObject(String columnLabel, Class<T> type) throws SQLException { //JDK 1.7 throw new SQLException("Method not supported"); } public <T> T getObject(int columnIndex, Class<T> type) throws SQLException { //JDK 1.7 throw new SQLException("Method not supported"); } /** * Moves the cursor before the first row of the resultset. * * @see java.sql.ResultSet#next() * @throws SQLException * if a database access error occurs. */ @Override public void beforeFirst() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } if (!isScrollable) { throw new SQLException("Method not supported for TYPE_FORWARD_ONLY resultset"); } fetchFirst = true; rowsFetched = 0; } @Override public boolean isBeforeFirst() throws SQLException { if (isClosed) { throw new SQLException("Resultset is closed"); } return (rowsFetched == 0); } @Override public int getRow() throws SQLException { return rowsFetched; } @Override public boolean isClosed() { return isClosed; } }
HIVE-14991: JDBC result set iterator has useless DEBUG log (Prasanth Jayachandran reviewed by Vaibhav Gumashta)
jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
HIVE-14991: JDBC result set iterator has useless DEBUG log (Prasanth Jayachandran reviewed by Vaibhav Gumashta)
<ide><path>dbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java <ide> fetchedRowsItr = fetchedRows.iterator(); <ide> } <ide> <del> String rowStr = ""; <ide> if (fetchedRowsItr.hasNext()) { <ide> row = fetchedRowsItr.next(); <ide> } else { <ide> } <ide> <ide> rowsFetched++; <del> if (LOG.isDebugEnabled()) { <del> LOG.debug("Fetched row string: " + rowStr); <del> } <del> <ide> } catch (SQLException eS) { <ide> throw eS; <ide> } catch (Exception ex) {
Java
mit
cf78ea921b0ca84f08dbe3bebf147399fef554f1
0
CrypticStorm/LZLib
/* * Copyright (c) 2015 Legend Zero LLC * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.legendzero.lzlib.gui; import org.bukkit.entity.Player; import org.bukkit.event.inventory.InventoryClickEvent; import org.bukkit.event.inventory.InventoryType; import org.bukkit.inventory.Inventory; import org.bukkit.inventory.InventoryHolder; import org.bukkit.inventory.ItemStack; import org.bukkit.plugin.Plugin; import java.util.function.Consumer; import java.util.function.Function; public class GuiContents { private final Plugin plugin; private final InventoryType type; private final String name; private final int size; private final Consumer<? super Player> consumer; private GuiContents parent; private final GuiItem[] itemStackFunctions; private final GuiClickHandler[] clickHandlers; public GuiContents(Plugin plugin, InventoryType type, String name, int size, Consumer<? 
super Player> consumer) { this.plugin = plugin; this.type = type; this.name = name; this.size = size; this.consumer = consumer; this.parent = null; this.itemStackFunctions = new GuiItem[this.size]; this.clickHandlers = new GuiClickHandler[this.size]; } public GuiContents(Plugin plugin, InventoryType type, String name, int size) { this(plugin, type, name, size, null); } public Plugin getPlugin() { return this.plugin; } public InventoryType getType() { return this.type; } public String getName() { return this.name; } public int getSize() { return this.size; } public Consumer<? super Player> getConsumer() { return this.consumer; } public GuiContents getParent() { return this.parent; } public boolean hasItem(int slot) { return this.itemStackFunctions[slot] != null; } public boolean hasAction(int slot) { return this.clickHandlers[slot] != null; } public ItemStack[] getItems(Player player) { ItemStack[] contents = new ItemStack[this.size]; for (int i = 0; i < this.size; i++) { contents[i] = this.itemStackFunctions[i].apply(player); } return contents; } public void setParent(GuiContents parent) { this.parent = parent; } public void set(int slot, GuiItem item) { this.itemStackFunctions[slot] = item; } public void set(int slot, GuiClickHandler clickHandler) { this.clickHandlers[slot] = clickHandler; } public void set(int slot, GuiItem item, GuiClickHandler clickHandler) { this.set(slot, item); this.set(slot, clickHandler); } public void fillEmpty(GuiItem item) { for (int i = 0; i < this.size; i++) { if (this.itemStackFunctions[i] == null) { this.itemStackFunctions[i] = item; } } } public void onClick(InventoryClickEvent event) { GuiClickHandler clickHandler = this.clickHandlers[event.getSlot()]; if (clickHandler != null) { clickHandler.accept(event); } } public void open(Player player) { InventoryHolder inventoryHolder = new GuiInventoryHolder(this, player); Inventory inventory = inventoryHolder.getInventory(); player.openInventory(inventory); } public void update(Inventory 
inventory, Player player) { this.consumer.accept(player); inventory.setContents(this.getItems(player)); } public void back(Player player) { if (this.parent != null) { this.parent.open(player); } } public void close(Player player) { player.closeInventory(); } }
src/main/java/com/legendzero/lzlib/gui/GuiContents.java
/* * Copyright (c) 2015 Legend Zero LLC * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package com.legendzero.lzlib.gui; import org.bukkit.entity.Player; import org.bukkit.event.inventory.InventoryClickEvent; import org.bukkit.event.inventory.InventoryType; import org.bukkit.inventory.Inventory; import org.bukkit.inventory.InventoryHolder; import org.bukkit.inventory.ItemStack; import org.bukkit.plugin.Plugin; import java.util.function.Function; public class GuiContents { private final Plugin plugin; private final InventoryType type; private final String name; private final int size; private GuiContents parent; private final GuiItem[] itemStackFunctions; private final GuiClickHandler[] clickHandlers; public GuiContents(Plugin plugin, InventoryType type, String name, int size) { this.plugin = plugin; this.type = type; this.name = name; this.size = size; this.parent = null; this.itemStackFunctions = new GuiItem[this.size]; this.clickHandlers = new GuiClickHandler[this.size]; } public Plugin getPlugin() { return this.plugin; } public InventoryType getType() { return this.type; } public String getName() { return this.name; } public int getSize() { return this.size; } public GuiContents getParent() { return this.parent; } public boolean hasItem(int slot) { return this.itemStackFunctions[slot] != null; } public boolean hasAction(int slot) { return this.clickHandlers[slot] != null; } public ItemStack[] getItems(Player player) { ItemStack[] contents = new ItemStack[this.size]; for (int i = 0; i < this.size; i++) { contents[i] = this.itemStackFunctions[i].apply(player); } return contents; } public void setParent(GuiContents parent) { this.parent = parent; } public void set(int slot, GuiItem item) { this.itemStackFunctions[slot] = item; } public void set(int slot, GuiClickHandler clickHandler) { this.clickHandlers[slot] = clickHandler; } public void set(int slot, GuiItem item, GuiClickHandler clickHandler) { this.set(slot, item); this.set(slot, clickHandler); } public void fillEmpty(GuiItem item) { for (int i = 0; i < this.size; i++) { if 
(this.itemStackFunctions[i] == null) { this.itemStackFunctions[i] = item; } } } public void onClick(InventoryClickEvent event) { GuiClickHandler clickHandler = this.clickHandlers[event.getSlot()]; if (clickHandler != null) { clickHandler.accept(event); } } public void open(Player player) { InventoryHolder inventoryHolder = new GuiInventoryHolder(this, player); Inventory inventory = inventoryHolder.getInventory(); player.openInventory(inventory); } public void update(Inventory inventory, Player player) { inventory.setContents(this.getItems(player)); } public void back(Player player) { if (this.parent != null) { this.parent.open(player); } } public void close(Player player) { player.closeInventory(); } }
Add Consumer to GuiContents for pre-processing of actions before updating contents.
src/main/java/com/legendzero/lzlib/gui/GuiContents.java
Add Consumer to GuiContents for pre-processing of actions before updating contents.
<ide><path>rc/main/java/com/legendzero/lzlib/gui/GuiContents.java <ide> import org.bukkit.inventory.ItemStack; <ide> import org.bukkit.plugin.Plugin; <ide> <add>import java.util.function.Consumer; <ide> import java.util.function.Function; <ide> <ide> public class GuiContents { <ide> private final InventoryType type; <ide> private final String name; <ide> private final int size; <add> private final Consumer<? super Player> consumer; <ide> private GuiContents parent; <ide> private final GuiItem[] itemStackFunctions; <ide> private final GuiClickHandler[] clickHandlers; <ide> <del> public GuiContents(Plugin plugin, InventoryType type, String name, int size) { <add> public GuiContents(Plugin plugin, InventoryType type, String name, int size, Consumer<? super Player> consumer) { <ide> this.plugin = plugin; <ide> this.type = type; <ide> this.name = name; <ide> this.size = size; <add> this.consumer = consumer; <ide> this.parent = null; <ide> this.itemStackFunctions = new GuiItem[this.size]; <ide> this.clickHandlers = new GuiClickHandler[this.size]; <add> } <add> <add> public GuiContents(Plugin plugin, InventoryType type, String name, int size) { <add> this(plugin, type, name, size, null); <ide> } <ide> <ide> public Plugin getPlugin() { <ide> <ide> public int getSize() { <ide> return this.size; <add> } <add> <add> public Consumer<? super Player> getConsumer() { <add> return this.consumer; <ide> } <ide> <ide> public GuiContents getParent() { <ide> clickHandler.accept(event); <ide> } <ide> } <add> <ide> public void open(Player player) { <ide> InventoryHolder inventoryHolder = new GuiInventoryHolder(this, player); <ide> Inventory inventory = inventoryHolder.getInventory(); <ide> } <ide> <ide> public void update(Inventory inventory, Player player) { <add> this.consumer.accept(player); <ide> inventory.setContents(this.getItems(player)); <ide> } <ide>
Java
epl-1.0
b3038260984f50c079bd99e0053bd067e8ddd1bf
0
jtrfp/terminal-recall,jtrfp/terminal-recall,jtrfp/terminal-recall
/******************************************************************************* * This file is part of TERMINAL RECALL * Copyright (c) 2012, 2013 Chuck Ritola. * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Public License v3.0 * which accompanies this distribution, and is available at * http://www.gnu.org/licenses/gpl.html * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the COPYING and CREDITS files for more details. * * Contributors: * chuck - initial API and implementation ******************************************************************************/ package org.jtrfp.trcl.obj; import java.util.concurrent.Future; import org.apache.commons.math3.geometry.euclidean.threed.Vector3D; import org.jtrfp.trcl.DummyFuture; import org.jtrfp.trcl.Model; import org.jtrfp.trcl.PrimitiveList; import org.jtrfp.trcl.RenderMode; import org.jtrfp.trcl.SelectableTexture; import org.jtrfp.trcl.Texture; import org.jtrfp.trcl.TextureDescription; import org.jtrfp.trcl.Tickable; import org.jtrfp.trcl.Triangle; import org.jtrfp.trcl.core.TR; import org.jtrfp.trcl.file.TNLFile.Segment; import org.jtrfp.trcl.file.TNLFile.Segment.FlickerLightType; import org.jtrfp.trcl.math.IntRandomTransferFunction; public class TunnelSegment extends WorldObject { public static final int TUNNEL_DIA_SCALAR=128; public static final int TUNNEL_SEG_LEN=65535; Segment segment; private final double segmentLength; private final double endX,endY; public TunnelSegment(TR tr, Segment s, Future<TextureDescription>[] tunnelTexturePalette, double segLen, double endX, double endY) { super(tr, createModel(s,segLen, tunnelTexturePalette,endX,endY,tr)); segmentLength=segLen; this.endX=endX; this.endY=endY; this.segment=s; } public static double getStartWidth(Segment s) {return 
TR.legacy2Modern(s.getStartWidth()*TUNNEL_DIA_SCALAR*3);} public static double getEndWidth(Segment s) {return TR.legacy2Modern(s.getEndWidth()*TUNNEL_DIA_SCALAR*3);} public static double getStartHeight(Segment s) {return TR.legacy2Modern(s.getStartHeight()*TUNNEL_DIA_SCALAR*3);} public static double getEndHeight(Segment s) {return TR.legacy2Modern(s.getEndHeight()*TUNNEL_DIA_SCALAR*3);} private static final IntRandomTransferFunction flickerRandom = new IntRandomTransferFunction(); private static Model createModel(Segment s,double segLen, Future<TextureDescription>[] tunnelTexturePalette, double endX,double endY, final TR tr) { Model m = new Model(false,tr); final int numPolys=s.getNumPolygons(); double startWidth=getStartWidth(s); double startHeight=getStartHeight(s); double endWidth=getEndWidth(s); double endHeight=getEndHeight(s); //TODO: x,y, rotation double startAngle1=((double)s.getStartAngle1()/65535.)*2.*Math.PI; double startAngle2=((double)s.getStartAngle2()/65535.)*2.*Math.PI; double startAngle=startAngle1; final double endAngle1=((double)s.getEndAngle1()/65535.)*2.*Math.PI; final double endAngle2=((double)s.getEndAngle2()/65535.)*2.*Math.PI; double endAngle=endAngle1; final double dAngleStart=(startAngle2-startAngle1)/(double)numPolys; final double dAngleEnd=(endAngle2-endAngle1)/(double)numPolys; final double startX=0; final double startY=0; final double zStart=0; final double zEnd=segLen; final int numPolygonsMinusOne=s.getNumPolygons()-1; final int lightPoly = s.getLightPolygon(); final double [] u=new double[4]; final double [] v=new double[4]; u[0]=0;u[1]=0;u[2]=1;u[3]=1; v[0]=0;v[1]=1; v[2]=1;v[3]=0; //Poly quads for(int pi=0; pi<numPolygonsMinusOne; pi++){ Vector3D p0=segPoint(startAngle,zStart,startWidth,startHeight,startX,startY); Vector3D p1=segPoint(endAngle,zEnd,endWidth,endHeight,endX,endY); Vector3D p2=segPoint(endAngle+dAngleEnd,zEnd,endWidth,endHeight,endX,endY); Vector3D 
p3=segPoint(startAngle+dAngleStart,zStart,startWidth,startHeight,startX,startY); Future<TextureDescription> tex = tunnelTexturePalette[s.getPolyTextureIndices().get(pi)]; final FlickerLightType flt=s.getFlickerLightType(); if(pi==lightPoly&&flt!=FlickerLightType.noLight){ try{ final Texture t = (Texture)tex.get(); @SuppressWarnings("unchecked") Future<Texture> [] frames = new Future[] {//TODO: Figure out why dummies must be added new DummyFuture<Texture>(new Texture(t,0,.5,.5,.5)),//ON new DummyFuture<Texture>(new Texture(t,.505,.5,.501,.5)),//OFF new DummyFuture<Texture>(new Texture(t,0,0,0,0)),//DUMMY new DummyFuture<Texture>(new Texture(t,0,0,0,0))//DUMMY }; final SelectableTexture st=new SelectableTexture(frames); tex = new DummyFuture<TextureDescription>(st); final int flickerThresh= flt==FlickerLightType.off1p5Sec?(int)(-.3*(double)Integer.MAX_VALUE): flt==FlickerLightType.on1p5Sec?(int)(.4*(double)Integer.MAX_VALUE): flt==FlickerLightType.on1Sec?(int)(.25*(double)Integer.MAX_VALUE):Integer.MAX_VALUE; PrimitiveList.animators.add(new Tickable(){ @Override public void tick() { if(flickerRandom.transfer(Math.abs((int)System.currentTimeMillis()))>flickerThresh)st.setFrame(1); else st.setFrame(0); } }); }catch(Exception e){e.printStackTrace();} }else{}//No light m.addTriangles(Triangle.quad2Triangles( new double[]{p0.getX(),p1.getX(),p2.getX(),p3.getX()}, new double[]{p0.getY(),p1.getY(),p2.getY(),p3.getY()}, new double[]{p0.getZ(),p1.getZ(),p2.getZ(),p3.getZ()}, u, v, tex, RenderMode.DYNAMIC, new Vector3D [] { new Vector3D( -Math.cos(startAngle), -Math.sin(startAngle),0), new Vector3D( -Math.cos(endAngle), -Math.sin(endAngle),0), new Vector3D( -Math.cos(endAngle+dAngleEnd), -Math.sin(endAngle+dAngleEnd),0), new Vector3D( -Math.cos(startAngle+dAngleStart), -Math.sin(startAngle+dAngleStart),0) } )); startAngle+=dAngleStart; endAngle+=dAngleEnd; }//for(polygons) //The slice quad Vector3D p0=segPoint(startAngle,zStart,startWidth,startHeight,startX,startY); Vector3D 
p1=segPoint(endAngle,zEnd,endWidth,endHeight,endX,endY); Vector3D p2=segPoint(endAngle1,zEnd,endWidth,endHeight,endX,endY); Vector3D p3=segPoint(startAngle1,zStart,startWidth,startHeight,startX,startY); m.addTriangles(Triangle.quad2Triangles( new double[]{p0.getX(),p1.getX(),p2.getX(),p3.getX()}, new double[]{p0.getY(),p1.getY(),p2.getY(),p3.getY()}, new double[]{p0.getZ(),p1.getZ(),p2.getZ(),p3.getZ()}, new double[]{0,0,1,1}, new double[]{0,1,1,0}, tunnelTexturePalette[s.getPolyTextureIndices().get(numPolygonsMinusOne)], RenderMode.DYNAMIC, new Vector3D [] { new Vector3D( -Math.cos(startAngle), -Math.sin(startAngle),0), new Vector3D( -Math.cos(endAngle), -Math.sin(endAngle),0), new Vector3D( -Math.cos(endAngle1), -Math.sin(endAngle1),0), new Vector3D( -Math.cos(startAngle1), -Math.sin(startAngle1),0) })); return m.finalizeModel(); } private static Vector3D segPoint(double angle, double z, double w, double h, double x, double y) {return new Vector3D(Math.cos(angle)*w+x,Math.sin(angle)*h+y,z);} public Segment getSegmentData(){return segment;} public double getSegmentLength(){return segmentLength;} public double getEndX(){return endX;} public double getEndY(){return endY;} }//end TunnelSegment
src/main/java/org/jtrfp/trcl/obj/TunnelSegment.java
/******************************************************************************* * This file is part of TERMINAL RECALL * Copyright (c) 2012, 2013 Chuck Ritola. * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Public License v3.0 * which accompanies this distribution, and is available at * http://www.gnu.org/licenses/gpl.html * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the COPYING and CREDITS files for more details. * * Contributors: * chuck - initial API and implementation ******************************************************************************/ package org.jtrfp.trcl.obj; import java.util.concurrent.Future; import org.apache.commons.math3.geometry.euclidean.threed.Vector3D; import org.jtrfp.trcl.DummyFuture; import org.jtrfp.trcl.Model; import org.jtrfp.trcl.PrimitiveList; import org.jtrfp.trcl.RenderMode; import org.jtrfp.trcl.SelectableTexture; import org.jtrfp.trcl.Texture; import org.jtrfp.trcl.TextureDescription; import org.jtrfp.trcl.Tickable; import org.jtrfp.trcl.Triangle; import org.jtrfp.trcl.core.TR; import org.jtrfp.trcl.file.TNLFile.Segment; import org.jtrfp.trcl.file.TNLFile.Segment.FlickerLightType; import org.jtrfp.trcl.math.IntRandomTransferFunction; public class TunnelSegment extends WorldObject { public static final int TUNNEL_DIA_SCALAR=128; public static final int TUNNEL_SEG_LEN=65535; Segment segment; private final double segmentLength; private final double endX,endY; public TunnelSegment(TR tr, Segment s, Future<TextureDescription>[] tunnelTexturePalette, double segLen, double endX, double endY) { super(tr, createModel(s,segLen, tunnelTexturePalette,endX,endY,tr)); segmentLength=segLen; this.endX=endX; this.endY=endY; this.segment=s; } public static double getStartWidth(Segment s) {return 
TR.legacy2Modern(s.getStartWidth()*TUNNEL_DIA_SCALAR*3);} public static double getEndWidth(Segment s) {return TR.legacy2Modern(s.getEndWidth()*TUNNEL_DIA_SCALAR*3);} public static double getStartHeight(Segment s) {return TR.legacy2Modern(s.getStartHeight()*TUNNEL_DIA_SCALAR*3);} public static double getEndHeight(Segment s) {return TR.legacy2Modern(s.getEndHeight()*TUNNEL_DIA_SCALAR*3);} private static final IntRandomTransferFunction flickerRandom = new IntRandomTransferFunction(); private static Model createModel(Segment s,double segLen, Future<TextureDescription>[] tunnelTexturePalette, double endX,double endY, final TR tr) { Model m = new Model(false,tr); final int numPolys=s.getNumPolygons(); double startWidth=getStartWidth(s); double startHeight=getStartHeight(s); double endWidth=getEndWidth(s); double endHeight=getEndHeight(s); //TODO: x,y, rotation double startAngle1=((double)s.getStartAngle1()/65535.)*2.*Math.PI; double startAngle2=((double)s.getStartAngle2()/65535.)*2.*Math.PI; double startAngle=startAngle1; final double endAngle1=((double)s.getEndAngle1()/65535.)*2.*Math.PI; final double endAngle2=((double)s.getEndAngle2()/65535.)*2.*Math.PI; double endAngle=endAngle1; final double dAngleStart=(startAngle2-startAngle1)/(double)numPolys; final double dAngleEnd=(endAngle2-endAngle1)/(double)numPolys; final double startX=0; final double startY=0; final double zStart=0; final double zEnd=segLen; final int numPolygonsMinusOne=s.getNumPolygons()-1; final int lightPoly = s.getLightPolygon(); final double [] u=new double[4]; final double [] v=new double[4]; u[0]=0;u[1]=0;u[2]=1;u[3]=1; v[0]=0;v[1]=1; v[2]=1;v[3]=0; //Poly quads for(int pi=0; pi<numPolygonsMinusOne; pi++){ Vector3D p0=segPoint(startAngle,zStart,startWidth,startHeight,startX,startY); Vector3D p1=segPoint(endAngle,zEnd,endWidth,endHeight,endX,endY); Vector3D p2=segPoint(endAngle+dAngleEnd,zEnd,endWidth,endHeight,endX,endY); Vector3D 
p3=segPoint(startAngle+dAngleStart,zStart,startWidth,startHeight,startX,startY); Future<TextureDescription> tex = tunnelTexturePalette[s.getPolyTextureIndices().get(pi)]; final FlickerLightType flt=s.getFlickerLightType(); if(pi==lightPoly&&flt!=FlickerLightType.noLight){ try{ final Texture t = (Texture)tex.get(); @SuppressWarnings("unchecked") Future<Texture> [] frames = new Future[] {//TODO: Figure out why dummies must be added new DummyFuture<Texture>(new Texture(t,0,.5,.5,.5)),//ON new DummyFuture<Texture>(new Texture(t,.505,.5,.501,.5)),//OFF new DummyFuture<Texture>(new Texture(t,0,0,0,0)),//DUMMY new DummyFuture<Texture>(new Texture(t,0,0,0,0))//DUMMY }; final SelectableTexture st=new SelectableTexture(frames); tex = new DummyFuture<TextureDescription>(st); final int flickerThresh= flt==FlickerLightType.off1p5Sec?(int)(-.3*(double)Integer.MAX_VALUE): flt==FlickerLightType.on1p5Sec?(int)(.4*(double)Integer.MAX_VALUE): flt==FlickerLightType.on1Sec?(int)(.25*(double)Integer.MAX_VALUE):Integer.MAX_VALUE; PrimitiveList.animators.add(new Tickable(){ @Override public void tick() { if(flickerRandom.transfer(Math.abs((int)System.currentTimeMillis()))>flickerThresh)st.setFrame(1); else st.setFrame(0); } }); }catch(Exception e){e.printStackTrace();} }else{}//No light m.addTriangles(Triangle.quad2Triangles( new double[]{p0.getX(),p1.getX(),p2.getX(),p3.getX()}, new double[]{p0.getY(),p1.getY(),p2.getY(),p3.getY()}, new double[]{p0.getZ(),p1.getZ(),p2.getZ(),p3.getZ()}, u, v, tex, RenderMode.DYNAMIC,new Vector3D( -Math.cos((startAngle+endAngle)/2.), -Math.sin((startAngle+endAngle)/2.),0))); startAngle+=dAngleStart; endAngle+=dAngleEnd; }//for(polygons) //The slice quad Vector3D p0=segPoint(startAngle,zStart,startWidth,startHeight,startX,startY); Vector3D p1=segPoint(endAngle,zEnd,endWidth,endHeight,endX,endY); Vector3D p2=segPoint(endAngle1,zEnd,endWidth,endHeight,endX,endY); Vector3D p3=segPoint(startAngle1,zStart,startWidth,startHeight,startX,startY); 
m.addTriangles(Triangle.quad2Triangles( new double[]{p0.getX(),p1.getX(),p2.getX(),p3.getX()}, new double[]{p0.getY(),p1.getY(),p2.getY(),p3.getY()}, new double[]{p0.getZ(),p1.getZ(),p2.getZ(),p3.getZ()}, new double[]{0,0,1,1}, new double[]{0,1,1,0}, tunnelTexturePalette[s.getPolyTextureIndices().get(numPolygonsMinusOne)], RenderMode.DYNAMIC,new Vector3D( -Math.cos((startAngle+endAngle)/2.), -Math.sin((startAngle+endAngle)/2.),0))); return m.finalizeModel(); } private static Vector3D segPoint(double angle, double z, double w, double h, double x, double y) {return new Vector3D(Math.cos(angle)*w+x,Math.sin(angle)*h+y,z);} public Segment getSegmentData(){return segment;} public double getSegmentLength(){return segmentLength;} public double getEndX(){return endX;} public double getEndY(){return endY;} }//end TunnelSegment
Added vertex normal support to tunnel segments.
src/main/java/org/jtrfp/trcl/obj/TunnelSegment.java
Added vertex normal support to tunnel segments.
<ide><path>rc/main/java/org/jtrfp/trcl/obj/TunnelSegment.java <ide> <ide> u, <ide> v, <del> tex, RenderMode.DYNAMIC,new Vector3D( <del> -Math.cos((startAngle+endAngle)/2.), <del> -Math.sin((startAngle+endAngle)/2.),0))); <add> tex, RenderMode.DYNAMIC, <add> new Vector3D [] { <add> new Vector3D( <add> -Math.cos(startAngle), <add> -Math.sin(startAngle),0), <add> new Vector3D( <add> -Math.cos(endAngle), <add> -Math.sin(endAngle),0), <add> new Vector3D( <add> -Math.cos(endAngle+dAngleEnd), <add> -Math.sin(endAngle+dAngleEnd),0), <add> new Vector3D( <add> -Math.cos(startAngle+dAngleStart), <add> -Math.sin(startAngle+dAngleStart),0) <add> } <add> )); <ide> startAngle+=dAngleStart; <ide> endAngle+=dAngleEnd; <ide> }//for(polygons) <ide> new double[]{0,0,1,1}, <ide> new double[]{0,1,1,0}, <ide> tunnelTexturePalette[s.getPolyTextureIndices().get(numPolygonsMinusOne)], <del> RenderMode.DYNAMIC,new Vector3D( <del> -Math.cos((startAngle+endAngle)/2.), <del> -Math.sin((startAngle+endAngle)/2.),0))); <add> RenderMode.DYNAMIC, <add> new Vector3D [] { <add> new Vector3D( <add> -Math.cos(startAngle), <add> -Math.sin(startAngle),0), <add> new Vector3D( <add> -Math.cos(endAngle), <add> -Math.sin(endAngle),0), <add> new Vector3D( <add> -Math.cos(endAngle1), <add> -Math.sin(endAngle1),0), <add> new Vector3D( <add> -Math.cos(startAngle1), <add> -Math.sin(startAngle1),0) <add> })); <ide> <ide> return m.finalizeModel(); <ide> }
JavaScript
agpl-3.0
f99cbc7a79d36d8b14175417ff299ae18a0f2b8d
0
media-centre/makenews,media-centre/makenews,media-centre/makenews,media-centre/makenews
/* eslint max-nested-callbacks: [2, 5] max-len:0*/ "use strict"; import RssParser from "../../src/rss/RssParser"; import HttpResponseHandler from "../../../common/src/HttpResponseHandler.js"; import CryptUtil from "../../src/util/CryptUtil.js"; import LogTestHelper from "../helpers/LogTestHelper"; import Logger from "../../src/logging/Logger"; import { expect } from "chai"; import nock from "nock"; import restRequest from "request"; import sinon from "sinon"; describe("RssParser", () => { let sandbox = null; before("RssParser", () => { sinon.stub(Logger, "instance").returns(LogTestHelper.instance()); }); after("RssParser", () => { Logger.instance.restore(); }); beforeEach("RssParser", () => { sandbox = sinon.sandbox.create(); }); afterEach("RssParser", () => { sandbox.restore(); }); it("should reject if the url is not a feed", (done) => { let data = `<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8"> <TITLE>302 Moved</TITLE></HEAD><BODY> <H1>302 Moved</H1> The document has moved <A HREF="http://www.google.co.in/?gfe_rd=cr&amp;ei=h91eVqj4N-my8wexop6oAg">here</A>. 
</BODY></HTML>`; nock("http://www.google.com") .get("/users") .reply(HttpResponseHandler.codes.OK, data); let url = "http://www.google.com/users"; restRequest(url).on("response", function(res) { let rssParser = new RssParser(res); rssParser.parse().catch((error) => { expect(error).to.eq("Not a feed"); done(); }); }); }); it("should resolve with parsed items for proper url", (done) => { let data = `<?xml version="1.0" encoding="utf-8" ?> <rss version="2.0" xml:base="http://www.nasa.gov/" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:media="http://search.yahoo.com/mrss/"> <channel> <item> <title>NASA Administrator Remembers Apollo-Era Astronaut Edgar Mitchell</title> <link>http://www.nasa.gov/press-release/nasa-administrator-remembers-apollo-era-astronaut-edgar-mitchell</link> <description>The following is a statement from NASA Administrator Charles Bolden on the passing of NASA astronaut Edgar Mitchell:</description> </item> <item> <title>NASA Television to Air Russian Spacewalk</title> <link>http://www.nasa.gov/press-release/nasa-television-to-air-russian-spacewalk</link> <description>NASA Television will broadcast live coverage of a 5.5-hour spacewalk by two Russian cosmonauts aboard the International Space Station beginning at 7:30 a.m. EST Wednesday, Feb. 
3.</description> </item> </channel> </rss>`; let url = "http://www.nasa.com/images/?service=rss"; nock("http://www.nasa.com/images") .get("/?service=rss") .reply(HttpResponseHandler.codes.OK, data); let hmacStub = sandbox.stub(CryptUtil, "hmac"); hmacStub.withArgs("sha256", "appSecretKey", "hex", "http://www.nasa.gov/press-release/nasa-administrator-remembers-apollo-era-astronaut-edgar-mitchell").returns("test-guid-1"); hmacStub.withArgs("sha256", "appSecretKey", "hex", "http://www.nasa.gov/press-release/nasa-television-to-air-russian-spacewalk").returns("test-guid-2"); let expectedFeeds = { "items": [{ "guid": "test-guid-1", "title": "NASA Administrator Remembers Apollo-Era Astronaut Edgar Mitchell", "link": "http://www.nasa.gov/press-release/nasa-administrator-remembers-apollo-era-astronaut-edgar-mitchell", "description": "The following is a statement from NASA Administrator Charles Bolden on the passing of NASA astronaut Edgar Mitchell:", "pubDate": null, "enclosures": [], "image": {} }, { "guid": "test-guid-2", "title": "NASA Television to Air Russian Spacewalk", "link": "http://www.nasa.gov/press-release/nasa-television-to-air-russian-spacewalk", "description": "NASA Television will broadcast live coverage of a 5.5-hour spacewalk by two Russian cosmonauts aboard the International Space Station beginning at 7:30 a.m. EST Wednesday, Feb. 3.", "pubDate": null, "enclosures": [], "image": {} }] }; restRequest(url).on("response", function(res) { let rssParser = new RssParser(res); rssParser.parse().then((feedJson) => { expect(feedJson.items).deep.equal(expectedFeeds.items); done(); }); }); }); });
server/test/parsers/RssParserSpec.js
/* eslint max-nested-callbacks: [2, 5] max-len:0*/ "use strict"; import RssParser from "../../src/rss/RssParser"; import HttpResponseHandler from "../../../common/src/HttpResponseHandler.js"; import CryptUtil from "../../src/util/CryptUtil.js"; import LogTestHelper from "../helpers/LogTestHelper"; import Logger from "../../src/logging/Logger"; import { expect } from "chai"; import nock from "nock"; import restRequest from "request"; import sinon from "sinon"; describe("RssParser", () => { let sandbox = null; before("RssParser", () => { sinon.stub(Logger, "instance").returns(LogTestHelper.instance()); }); after("RssParser", () => { Logger.instance.restore(); }); beforeEach("RssParser", () => { sandbox = sinon.sandbox.create(); }); afterEach("RssParser", () => { sandbox.restore(); }); it("should reject if the url is not a feed", (done) => { let data = `<HTML><HEAD><meta http-equiv="content-type" content="text/html;charset=utf-8"> <TITLE>302 Moved</TITLE></HEAD><BODY> <H1>302 Moved</H1> The document has moved <A HREF="http://www.google.co.in/?gfe_rd=cr&amp;ei=h91eVqj4N-my8wexop6oAg">here</A>. 
</BODY></HTML>`; nock("http://www.google.com") .get("/users") .reply(HttpResponseHandler.codes.OK, data); let url = "http://www.google.com/users"; restRequest(url).on("response", function(res) { let rssParser = new RssParser(res); rssParser.parse().catch((error) => { expect(error).to.eq("Not a feed"); done(); }); }); }); it("should resolve with parsed items for proper url", (done) => { let data = `<?xml version="1.0" encoding="utf-8" ?> <rss version="2.0" xml:base="http://www.nasa.gov/" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:media="http://search.yahoo.com/mrss/"> <channel> <item> <title>NASA Administrator Remembers Apollo-Era Astronaut Edgar Mitchell</title> <link>http://www.nasa.gov/press-release/nasa-administrator-remembers-apollo-era-astronaut-edgar-mitchell</link> <description>The following is a statement from NASA Administrator Charles Bolden on the passing of NASA astronaut Edgar Mitchell:</description> </item> <item> <title>NASA Television to Air Russian Spacewalk</title> <link>http://www.nasa.gov/press-release/nasa-television-to-air-russian-spacewalk</link> <description>NASA Television will broadcast live coverage of a 5.5-hour spacewalk by two Russian cosmonauts aboard the International Space Station beginning at 7:30 a.m. EST Wednesday, Feb. 
3.</description> </item> </channel> </rss>`; let url = "http://www.nasa.com/images/?service=rss"; nock("http://www.nasa.com/images") .get("/?service=rss") .reply(HttpResponseHandler.codes.OK, data); let hmacStub = sandbox.stub(CryptUtil, "hmac"); hmacStub.withArgs("sha256", "appSecretKey", "hex", "http://www.nasa.gov/press-release/nasa-administrator-remembers-apollo-era-astronaut-edgar-mitchell").returns("test-guid-1"); hmacStub.withArgs("sha256", "appSecretKey", "hex", "http://www.nasa.gov/press-release/nasa-television-to-air-russian-spacewalk").returns("test-guid-2"); let expectedFeeds = { "items": [{ "guid": "test-guid-1", "title": "NASA Administrator Remembers Apollo-Era Astronaut Edgar Mitchell", "link": "http://www.nasa.gov/press-release/nasa-administrator-remembers-apollo-era-astronaut-edgar-mitchell", "description": "The following is a statement from NASA Administrator Charles Bolden on the passing of NASA astronaut Edgar Mitchell:", "pubDate": null, "enclosures": [], "image": {} }, { "guid": "test-guid-2", "title": "NASA Television to Air Russian Spacewalk", "link": "http://www.nasa.gov/press-release/nasa-television-to-air-russian-spacewalk", "description": "NASA Television will broadcast live coverage of a 5.5-hour spacewalk by two Russian cosmonauts aboard the International Space Station beginning at 7:30 a.m. EST Wednesday, Feb. 3.", "pubDate": null, "enclosures": [], "image": {} }] }; restRequest(url).on("response", function(res) { let rssParser = new RssParser(res); rssParser.parse().then((feedJson) => { expect(feedJson.items).deep.equal(expectedFeeds.items); done(); }).catch(error => { console.log(error); }); }); }); });
[Vik] fixed eslint
server/test/parsers/RssParserSpec.js
[Vik] fixed eslint
<ide><path>erver/test/parsers/RssParserSpec.js <ide> rssParser.parse().then((feedJson) => { <ide> expect(feedJson.items).deep.equal(expectedFeeds.items); <ide> done(); <del> }).catch(error => { <del> console.log(error); <ide> }); <ide> }); <ide> });
Java
apache-2.0
380ef3155142983dd5a74a139e2673f5bfc0e5f3
0
datacite/mds,ulbricht/mds,ulbricht/mds,datacite/mds,ulbricht/mds,datacite/mds,datacite/mds,ulbricht/mds,datacite/mds
package org.datacite.mds.util; import java.net.IDN; import java.net.URL; import javax.validation.ConstraintValidatorContext; import org.apache.commons.validator.UrlValidator; import org.springframework.validation.BindingResult; import org.springframework.validation.FieldError; import org.springframework.validation.ObjectError; /** * Util class with validation related static methods. */ public class ValidationUtils { /** * shortcut for building a constraint violation attached to a specific node * * @param context * ConstraintValidatorContext * @param message * message of the violation (e.g. template to be evaluated) * @param node * node to attach the violation to */ public static void addConstraintViolation(ConstraintValidatorContext context, String message, String node) { context.disableDefaultConstraintViolation(); context.buildConstraintViolationWithTemplate(message).addNode(node).addConstraintViolation(); } /** * shortcut for building a constraint violation * * @param context * ConstraintValidatorContext * @param message * message of the violation (e.g. template to be evaluated) */ public static void addConstraintViolation(ConstraintValidatorContext context, String message) { context.disableDefaultConstraintViolation(); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); } /** * <p> * Copy a validation error from a field to another field. This makes * sense for fields not rendered but might cause a validation error * (typically a assertTrue Annotation on method level) * </p> * * <p> * If there is no error in the specified field, this method does nothing. 
* </p> * * @param result * binding result * @param fromfield * name of field to be copied from * @param toField * name of field to be copied to */ public static void copyFieldErrorToField(BindingResult result, String fromField, String toField) { FieldError fieldError = result.getFieldError(fromField); if (fieldError != null) { FieldError newError = new FieldError(fieldError.getObjectName(), toField, fieldError.getDefaultMessage()); result.addError(newError); } } /** * * validation method to check a string, if it is a valid hostname. E.g. it * must not contain a path, port or schema. * * @param str * String to be checked * @return true if the given string is a valid hostname, false otherwise */ public static boolean isHostname(String str) { try { URL url = new URL("http://" + str); if (!url.getHost().equals(str)) { // domain should only consists of the pure host name return false; } str = IDN.toASCII(str); // convert international domain names (IDN) if (str.matches(".*\\.xn--[^.]*$")) { // UrlValidator doesn't handle top level IDNs // so we add .org if necessary str += ".org"; } UrlValidator urlValidator = new UrlValidator(); if (!urlValidator.isValid("http://" + str)) { // url should be valid, e.g. "test.t" or "com" should be fail return false; } } catch (Exception ex) { // url should be well formed return false; } return true; } }
src/main/java/org/datacite/mds/util/ValidationUtils.java
package org.datacite.mds.util; import java.net.IDN; import java.net.URL; import javax.validation.ConstraintValidatorContext; import org.apache.commons.validator.UrlValidator; import org.springframework.validation.BindingResult; import org.springframework.validation.FieldError; import org.springframework.validation.ObjectError; /** * Util class with validation related static methods. */ public class ValidationUtils { /** * shortcut for building a constraint violation attached to a specific node * * @param context * ConstraintValidatorContext * @param message * message of the violation (e.g. template to be evaluated) * @param node * node to attach the violation to */ public static void addConstraintViolation(ConstraintValidatorContext context, String message, String node) { context.disableDefaultConstraintViolation(); context.buildConstraintViolationWithTemplate(message).addNode(node).addConstraintViolation(); } /** * shortcut for building a constraint violation * * @param context * ConstraintValidatorContext * @param message * message of the violation (e.g. template to be evaluated) */ public static void addConstraintViolation(ConstraintValidatorContext context, String message) { context.disableDefaultConstraintViolation(); context.buildConstraintViolationWithTemplate(message).addConstraintViolation(); } /** * <p> * Copy a validation error from a field to the object itself. This makes * sense for fields not rendered but might cause a validation error * (typically a assertTrue Annotation on method level) * </p> * * <p> * If there is no error in the specified field, this method does nothing. 
* </p> * * @param result * binding result * @param field * name of field to be copied to object level */ public static void copyFieldErrorToObject(BindingResult result, String field) { FieldError fieldError = result.getFieldError(field); if (fieldError != null) { ObjectError error = new ObjectError(fieldError.getObjectName(), fieldError.getDefaultMessage()); result.addError(error); } } public static void copyFieldErrorToField(BindingResult result, String fromField, String toField) { FieldError fieldError = result.getFieldError(fromField); if (fieldError != null) { FieldError newError = new FieldError(fieldError.getObjectName(), toField, fieldError.getDefaultMessage()); result.addError(newError); } } /** * * validation method to check a string, if it is a valid hostname. E.g. it * must not contain a path, port or schema. * * @param str * String to be checked * @return true if the given string is a valid hostname, false otherwise */ public static boolean isHostname(String str) { try { URL url = new URL("http://" + str); if (!url.getHost().equals(str)) { // domain should only consists of the pure host name return false; } str = IDN.toASCII(str); // convert international domain names (IDN) if (str.matches(".*\\.xn--[^.]*$")) { // UrlValidator doesn't handle top level IDNs // so we add .org if necessary str += ".org"; } UrlValidator urlValidator = new UrlValidator(); if (!urlValidator.isValid("http://" + str)) { // url should be valid, e.g. "test.t" or "com" should be fail return false; } } catch (Exception ex) { // url should be well formed return false; } return true; } }
remove unsused method ValidationUtils.copyFieldErrorToObject
src/main/java/org/datacite/mds/util/ValidationUtils.java
remove unsused method ValidationUtils.copyFieldErrorToObject
<ide><path>rc/main/java/org/datacite/mds/util/ValidationUtils.java <ide> <ide> /** <ide> * <p> <del> * Copy a validation error from a field to the object itself. This makes <add> * Copy a validation error from a field to another field. This makes <ide> * sense for fields not rendered but might cause a validation error <ide> * (typically a assertTrue Annotation on method level) <ide> * </p> <ide> * <ide> * @param result <ide> * binding result <del> * @param field <del> * name of field to be copied to object level <add> * @param fromfield <add> * name of field to be copied from <add> * @param toField <add> * name of field to be copied to <ide> */ <del> public static void copyFieldErrorToObject(BindingResult result, String field) { <del> FieldError fieldError = result.getFieldError(field); <del> if (fieldError != null) { <del> ObjectError error = new ObjectError(fieldError.getObjectName(), fieldError.getDefaultMessage()); <del> result.addError(error); <del> } <del> } <del> <ide> public static void copyFieldErrorToField(BindingResult result, String fromField, String toField) { <ide> FieldError fieldError = result.getFieldError(fromField); <ide> if (fieldError != null) {
Java
mit
4dc2cbdd261177735d905effdf154d58f6e0925d
0
koustuvsinha/benchmarker
package com.koustuvsinha.benchmarker.views; import android.app.Activity; import android.net.Uri; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.koustuvsinha.benchmarker.R; import com.koustuvsinha.benchmarker.adaptors.DbResultAdaptor; import com.koustuvsinha.benchmarker.models.DbResultModel; import com.koustuvsinha.benchmarker.services.DbTestResultsReceiverService; import com.koustuvsinha.benchmarker.utils.BusProvider; import com.squareup.otto.Subscribe; import jp.wasabeef.recyclerview.animators.SlideInLeftAnimator; public class DbTestResultDetails extends Fragment { private DbTestResultsReceiverService testResultsReceiver; private RecyclerView mRecyclerView; private DbResultAdaptor mAdapter; private RecyclerView.LayoutManager mLayoutManager; private OnFragmentInteractionListener mListener; public static final String PAGE_NAME = "Logs"; public static DbTestResultDetails newInstance() { DbTestResultDetails fragment = new DbTestResultDetails(); return fragment; } public DbTestResultDetails() { // Required empty public constructor } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); BusProvider.getInstance().getBus().register(this); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // Inflate the layout for this fragment View v = inflater.inflate(R.layout.fragment_db_test_result_details, container, false); mRecyclerView = (RecyclerView) v.findViewById(R.id.resultListView); //mRecyclerView.setHasFixedSize(true); mRecyclerView.setItemAnimator(new SlideInLeftAnimator()); mRecyclerView.getItemAnimator().setAddDuration(500); mLayoutManager = new LinearLayoutManager(getActivity()); mRecyclerView.setLayoutManager(mLayoutManager); mAdapter = new 
DbResultAdaptor(); mRecyclerView.setAdapter(mAdapter); mAdapter.setResults(new DbResultModel("Starting Application...")); return v; } @Subscribe public void updateResultList(DbResultModel result) { mAdapter.setResults(result); mAdapter.notifyDataSetChanged(); mRecyclerView.scrollToPosition(mAdapter.getItemCount()-1); } // TODO: Rename method, update argument and hook method into UI event public void onButtonPressed(Uri uri) { if (mListener != null) { mListener.onFragmentInteraction(uri); } } @Override public void onAttach(Activity activity) { super.onAttach(activity); try { mListener = (OnFragmentInteractionListener) activity; } catch (ClassCastException e) { throw new ClassCastException(activity.toString() + " must implement OnFragmentInteractionListener"); } } @Override public void onDetach() { super.onDetach(); mListener = null; } /** * This interface must be implemented by activities that contain this * fragment to allow an interaction in this fragment to be communicated * to the activity and potentially other fragments contained in that * activity. * <p/> * See the Android Training lesson <a href= * "http://developer.android.com/training/basics/fragments/communicating.html" * >Communicating with Other Fragments</a> for more information. */ public interface OnFragmentInteractionListener { // TODO: Update argument type and name public void onFragmentInteraction(Uri uri); } }
app/src/main/java/com/koustuvsinha/benchmarker/views/DbTestResultDetails.java
package com.koustuvsinha.benchmarker.views; import android.app.Activity; import android.net.Uri; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.koustuvsinha.benchmarker.R; import com.koustuvsinha.benchmarker.adaptors.DbResultAdaptor; import com.koustuvsinha.benchmarker.models.DbResultModel; import com.koustuvsinha.benchmarker.services.DbTestResultsReceiverService; import com.koustuvsinha.benchmarker.utils.BusProvider; import com.squareup.otto.Subscribe; import jp.wasabeef.recyclerview.animators.SlideInLeftAnimator; public class DbTestResultDetails extends Fragment { private DbTestResultsReceiverService testResultsReceiver; private RecyclerView mRecyclerView; private DbResultAdaptor mAdapter; private RecyclerView.LayoutManager mLayoutManager; private OnFragmentInteractionListener mListener; public static final String PAGE_NAME = "Detailed Test Results"; public static DbTestResultDetails newInstance() { DbTestResultDetails fragment = new DbTestResultDetails(); return fragment; } public DbTestResultDetails() { // Required empty public constructor } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); BusProvider.getInstance().getBus().register(this); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // Inflate the layout for this fragment View v = inflater.inflate(R.layout.fragment_db_test_result_details, container, false); mRecyclerView = (RecyclerView) v.findViewById(R.id.resultListView); //mRecyclerView.setHasFixedSize(true); mRecyclerView.setItemAnimator(new SlideInLeftAnimator()); mRecyclerView.getItemAnimator().setAddDuration(500); mLayoutManager = new LinearLayoutManager(getActivity()); mRecyclerView.setLayoutManager(mLayoutManager); 
mAdapter = new DbResultAdaptor(); mRecyclerView.setAdapter(mAdapter); mAdapter.setResults(new DbResultModel("Starting Application...")); return v; } @Subscribe public void updateResultList(DbResultModel result) { mAdapter.setResults(result); mAdapter.notifyDataSetChanged(); mRecyclerView.scrollToPosition(mAdapter.getItemCount()-1); } // TODO: Rename method, update argument and hook method into UI event public void onButtonPressed(Uri uri) { if (mListener != null) { mListener.onFragmentInteraction(uri); } } @Override public void onAttach(Activity activity) { super.onAttach(activity); try { mListener = (OnFragmentInteractionListener) activity; } catch (ClassCastException e) { throw new ClassCastException(activity.toString() + " must implement OnFragmentInteractionListener"); } } @Override public void onDetach() { super.onDetach(); mListener = null; } /** * This interface must be implemented by activities that contain this * fragment to allow an interaction in this fragment to be communicated * to the activity and potentially other fragments contained in that * activity. * <p/> * See the Android Training lesson <a href= * "http://developer.android.com/training/basics/fragments/communicating.html" * >Communicating with Other Fragments</a> for more information. */ public interface OnFragmentInteractionListener { // TODO: Update argument type and name public void onFragmentInteraction(Uri uri); } }
Update fragment header name
app/src/main/java/com/koustuvsinha/benchmarker/views/DbTestResultDetails.java
Update fragment header name
<ide><path>pp/src/main/java/com/koustuvsinha/benchmarker/views/DbTestResultDetails.java <ide> private RecyclerView.LayoutManager mLayoutManager; <ide> <ide> private OnFragmentInteractionListener mListener; <del> public static final String PAGE_NAME = "Detailed Test Results"; <add> public static final String PAGE_NAME = "Logs"; <ide> <ide> <ide> public static DbTestResultDetails newInstance() {
Java
apache-2.0
47297fabf5409e2bd663b2555c2c9684138fa756
0
Gigaspaces/xap-openspaces,Gigaspaces/xap-openspaces,Gigaspaces/xap-openspaces
/******************************************************************************* * * Copyright (c) 2012 GigaSpaces Technologies Ltd. All rights reserved * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.openspaces.utest.core.util; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import junit.framework.TestCase; import org.junit.Before; import org.junit.Test; import org.openspaces.core.util.StringPropertiesUtils; public class StringPropertiesUtilsTest extends TestCase { private final String key = "key"; private final String missingkey = "missingkey"; private Map<String,String> map; @Before @Override public void setUp() { map = new HashMap<String,String>(); } @Test public void testBooleanTrue() { StringPropertiesUtils.putBoolean(map,key,true); assertTrue(StringPropertiesUtils.getBoolean(map, key, false)); map.put(key, "true"); assertTrue(StringPropertiesUtils.getBoolean(map, key, false)); map.put(key, "TRUE"); assertTrue(StringPropertiesUtils.getBoolean(map, key, false)); assertTrue(StringPropertiesUtils.getBoolean(map, missingkey, true)); } @Test public void testBooleanFalse() { StringPropertiesUtils.putBoolean(map,key,false); assertFalse(StringPropertiesUtils.getBoolean(map, key, true)); map.put(key, "false"); assertFalse(StringPropertiesUtils.getBoolean(map, key, true)); map.put(key, "FALSE"); assertFalse(StringPropertiesUtils.getBoolean(map, key, true)); 
assertFalse(StringPropertiesUtils.getBoolean(map, missingkey, false)); } @Test public void testInteger() { StringPropertiesUtils.putInteger(map,key,Integer.MAX_VALUE); assertEquals(Integer.MAX_VALUE,StringPropertiesUtils.getInteger(map, key, 0)); assertEquals(1,StringPropertiesUtils.getInteger(map, missingkey, 1)); } @Test public void testIntegerOverflowError() { try { StringPropertiesUtils.putLong(map,key,Long.MAX_VALUE); StringPropertiesUtils.getInteger(map, key, 0); fail(); } catch(NumberFormatException e) { /*expected result*/ } } @Test public void testIntegerUnderflowError() { try { StringPropertiesUtils.putLong(map,key,Long.MIN_VALUE); StringPropertiesUtils.getInteger(map, key, 0); fail(); } catch(NumberFormatException e) { /*expected result*/ } } @Test public void testIntegerParsingError() { try { map.put(key, "notaninteger"); assertEquals(1,StringPropertiesUtils.getIntegerIgnoreExceptions(map, key, 1)); StringPropertiesUtils.getInteger(map, key, 0); fail(); } catch(NumberFormatException e) { /*expected result*/ } } @Test public void testLong() { StringPropertiesUtils.putLong(map,key,Long.MAX_VALUE); assertEquals(Long.MAX_VALUE,StringPropertiesUtils.getLong(map, key, 0)); assertEquals(1,StringPropertiesUtils.getInteger(map, missingkey, 1)); } @Test public void testLongParsingError() { try { map.put(key, "notaninteger"); assertEquals(1,StringPropertiesUtils.getLongIgnoreExceptions(map, key, 1)); StringPropertiesUtils.getLong(map, key, 0); fail(); } catch(NumberFormatException e) { /*expected result*/ } } @Test public void testMap() { Map<String,String> inner = new HashMap<String,String>(); inner.put(key, "value"); StringPropertiesUtils.putMap(map, "prefix.", inner); assertEquals(inner,StringPropertiesUtils.getMap(map, "prefix.", inner)); } @Test public void testArray() { String[] inner = new String[] { "a","b","c"}; StringPropertiesUtils.putArray(map, key, inner," "); assertEquals(Arrays.asList(inner),Arrays.asList(StringPropertiesUtils.getArray(map, key, " 
", new String[]{}))); assertEquals(0,StringPropertiesUtils.getArray(map, missingkey, " ", new String[]{}).length); } @Test public void testArrayIllegalArgument() { try { StringPropertiesUtils.putArray(map, key, new String[] { "a b","c"}," "); fail(); } catch(IllegalArgumentException e) { /*expected result*/ } } @Test public void testArgumentsArray() { String[] inner = new String[] {"a b", "'b c'","\"c d\"", "\"'d e'\"", "'\"e f\"'"}; String[] expected= new String[] {"a b", "b c" , "c d" , "'d e'" , "\"e f\""}; StringPropertiesUtils.putArgumentsArray(map, key, inner); assertEquals(Arrays.asList(expected), Arrays.asList(StringPropertiesUtils.getArgumentsArray(map, key, new String[]{}))); assertEquals(0,StringPropertiesUtils.getArgumentsArray(map, missingkey, new String[]{}).length); } @Test public void testArgumentsArrayIllegalArgument() { try { StringPropertiesUtils.putArray(map, key, new String[] { "'a' b'"}," "); fail(); } catch(IllegalArgumentException e) { /*expected result*/ } } @Test public void testArgumentsArrayIllegalArgument2() { try { StringPropertiesUtils.putArray(map, key, new String[] { "\"a\" b\""}," "); fail(); } catch(IllegalArgumentException e) { /*expected result*/ } } public void testKeyValuePairs() { Map<String, String> inner = new HashMap<String,String>(); inner.put("a","1"); inner.put("b","2"); inner.put("c","x=3"); StringPropertiesUtils.putKeyValuePairs(map, key, inner, ",", "="); assertEquals(inner, StringPropertiesUtils.getKeyValuePairs(map, key, ",", "=", new HashMap<String,String>())); } @Test public void testMapWrapper() { Map<String,String> objectProperties = new HashMap<String, String>(); objectProperties.put("foo", "bar"); MapWrapperMock mapWrapper = new MapWrapperMock(objectProperties); StringPropertiesUtils.putMapWrapperObject(map, key, objectProperties, mapWrapper.getClass()); MapWrapperMock recoveredMapWrapper = (MapWrapperMock) StringPropertiesUtils.getMapWrapperObject(map, key, null); 
assertEquals(mapWrapper.getProperties(),recoveredMapWrapper.getProperties()); assertEquals(mapWrapper,recoveredMapWrapper); } }
src/main/test/org/openspaces/utest/core/util/StringPropertiesUtilsTest.java
/******************************************************************************* * * Copyright (c) 2012 GigaSpaces Technologies Ltd. All rights reserved * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.openspaces.utest.core.util; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import junit.framework.TestCase; import org.junit.Before; import org.junit.Test; import org.openspaces.core.util.StringPropertiesUtils; public class StringPropertiesUtilsTest extends TestCase { private final String key = "key"; private final String missingkey = "missingkey"; private Map<String,String> map; @Before @Override public void setUp() { map = new HashMap<String,String>(); } @Test public void testBooleanTrue() { StringPropertiesUtils.putBoolean(map,key,true); assertTrue(StringPropertiesUtils.getBoolean(map, key, false)); map.put(key, "true"); assertTrue(StringPropertiesUtils.getBoolean(map, key, false)); map.put(key, "TRUE"); assertTrue(StringPropertiesUtils.getBoolean(map, key, false)); assertTrue(StringPropertiesUtils.getBoolean(map, missingkey, true)); } @Test public void testBooleanFalse() { StringPropertiesUtils.putBoolean(map,key,false); assertFalse(StringPropertiesUtils.getBoolean(map, key, true)); map.put(key, "false"); assertFalse(StringPropertiesUtils.getBoolean(map, key, true)); map.put(key, "FALSE"); assertFalse(StringPropertiesUtils.getBoolean(map, key, true)); 
assertFalse(StringPropertiesUtils.getBoolean(map, missingkey, false)); } @Test public void testInteger() { StringPropertiesUtils.putInteger(map,key,Integer.MAX_VALUE); assertEquals(Integer.MAX_VALUE,StringPropertiesUtils.getInteger(map, key, 0)); assertEquals(1,StringPropertiesUtils.getInteger(map, missingkey, 1)); } @Test public void testIntegerOverflowError() { try { StringPropertiesUtils.putLong(map,key,Long.MAX_VALUE); StringPropertiesUtils.getInteger(map, key, 0); fail(); } catch(NumberFormatException e) { /*expected result*/ } } @Test public void testIntegerUnderflowError() { try { StringPropertiesUtils.putLong(map,key,Long.MIN_VALUE); StringPropertiesUtils.getInteger(map, key, 0); fail(); } catch(NumberFormatException e) { /*expected result*/ } } @Test public void testIntegerParsingError() { try { map.put(key, "notaninteger"); assertEquals(1,StringPropertiesUtils.getIntegerIgnoreExceptions(map, key, 1)); StringPropertiesUtils.getInteger(map, key, 0); fail(); } catch(NumberFormatException e) { /*expected result*/ } } @Test public void testLong() { StringPropertiesUtils.putLong(map,key,Long.MAX_VALUE); assertEquals(Long.MAX_VALUE,StringPropertiesUtils.getLong(map, key, 0)); assertEquals(1,StringPropertiesUtils.getInteger(map, missingkey, 1)); } @Test public void testLongParsingError() { try { map.put(key, "notaninteger"); assertEquals(1,StringPropertiesUtils.getLongIgnoreExceptions(map, key, 1)); StringPropertiesUtils.getLong(map, key, 0); fail(); } catch(NumberFormatException e) { /*expected result*/ } } @Test public void testMap() { Map<String,String> inner = new HashMap<String,String>(); inner.put(key, "value"); StringPropertiesUtils.putMap(map, "prefix.", inner); assertEquals(inner,StringPropertiesUtils.getMap(map, "prefix.", inner)); } @Test public void testArray() { String[] inner = new String[] { "a","b","c"}; StringPropertiesUtils.putArray(map, key, inner," "); assertEquals(Arrays.asList(inner),Arrays.asList(StringPropertiesUtils.getArray(map, key, " 
", new String[]{}))); assertEquals(0,StringPropertiesUtils.getArray(map, missingkey, " ", new String[]{}).length); } @Test public void testArrayIllegalArgument() { try { StringPropertiesUtils.putArray(map, key, new String[] { "a b","c"}," "); fail(); } catch(IllegalArgumentException e) { /*expected result*/ } } @Test public void testArgumentsArray() { String[] inner = new String[] {"a b", "'b c'","\"c d\"", "\"'d e'\"", "'\"e f\"'"}; String[] expected= new String[] {"a b", "b c" , "c d" , "'d e'" , "\"e f\""}; StringPropertiesUtils.putArgumentsArray(map, key, inner); assertEquals(Arrays.asList(expected), Arrays.asList(StringPropertiesUtils.getArgumentsArray(map, key, new String[]{}))); assertEquals(0,StringPropertiesUtils.getArgumentsArray(map, missingkey, new String[]{}).length); } @Test public void testArgumentsArrayIllegalArgument() { try { StringPropertiesUtils.putArray(map, key, new String[] { "'a' b'"}," "); fail(); } catch(IllegalArgumentException e) { /*expected result*/ } } @Test public void testArgumentsArrayIllegalArgument2() { try { StringPropertiesUtils.putArray(map, key, new String[] { "\"a\" b\""}," "); fail(); } catch(IllegalArgumentException e) { /*expected result*/ } } public void testKeyValuePairs() { Map<String, String> inner = new HashMap<String,String>(); inner.put("a","1"); inner.put("b","2"); inner.put("c","x=3"); StringPropertiesUtils.putKeyValuePairs(map, key, inner, ",", "="); assertEquals(inner, StringPropertiesUtils.getKeyValuePairs(map, key, ",", "=", new HashMap<String,String>())); } }
GS-9994 Added unit test for map wrapper objects svn path=/xap/trunk/openspaces/; revision=113677 Former-commit-id: 7914695ef536abaff6331e07614f18e91cf85957
src/main/test/org/openspaces/utest/core/util/StringPropertiesUtilsTest.java
GS-9994 Added unit test for map wrapper objects
<ide><path>rc/main/test/org/openspaces/utest/core/util/StringPropertiesUtilsTest.java <ide> StringPropertiesUtils.putKeyValuePairs(map, key, inner, ",", "="); <ide> assertEquals(inner, StringPropertiesUtils.getKeyValuePairs(map, key, ",", "=", new HashMap<String,String>())); <ide> } <add> <add> @Test <add> public void testMapWrapper() { <add> Map<String,String> objectProperties = new HashMap<String, String>(); <add> objectProperties.put("foo", "bar"); <add> <add> MapWrapperMock mapWrapper = new MapWrapperMock(objectProperties); <add> StringPropertiesUtils.putMapWrapperObject(map, key, objectProperties, mapWrapper.getClass()); <add> MapWrapperMock recoveredMapWrapper = (MapWrapperMock) StringPropertiesUtils.getMapWrapperObject(map, key, null); <add> assertEquals(mapWrapper.getProperties(),recoveredMapWrapper.getProperties()); <add> assertEquals(mapWrapper,recoveredMapWrapper); <add> } <add> <ide> }
Java
apache-2.0
1d6bacf1c897a1acc6bb31317826e49c99b9852f
0
spring-projects/spring-boot,chrylis/spring-boot,vpavic/spring-boot,philwebb/spring-boot,jxblum/spring-boot,mbenson/spring-boot,htynkn/spring-boot,scottfrederick/spring-boot,mdeinum/spring-boot,chrylis/spring-boot,philwebb/spring-boot,wilkinsona/spring-boot,Buzzardo/spring-boot,wilkinsona/spring-boot,spring-projects/spring-boot,aahlenst/spring-boot,mbenson/spring-boot,mdeinum/spring-boot,mbenson/spring-boot,dreis2211/spring-boot,htynkn/spring-boot,htynkn/spring-boot,mdeinum/spring-boot,michael-simons/spring-boot,scottfrederick/spring-boot,spring-projects/spring-boot,mdeinum/spring-boot,aahlenst/spring-boot,spring-projects/spring-boot,dreis2211/spring-boot,michael-simons/spring-boot,Buzzardo/spring-boot,chrylis/spring-boot,shakuzen/spring-boot,dreis2211/spring-boot,michael-simons/spring-boot,htynkn/spring-boot,spring-projects/spring-boot,mdeinum/spring-boot,philwebb/spring-boot,dreis2211/spring-boot,vpavic/spring-boot,wilkinsona/spring-boot,jxblum/spring-boot,Buzzardo/spring-boot,mbenson/spring-boot,philwebb/spring-boot,shakuzen/spring-boot,shakuzen/spring-boot,michael-simons/spring-boot,vpavic/spring-boot,wilkinsona/spring-boot,aahlenst/spring-boot,shakuzen/spring-boot,philwebb/spring-boot,mbenson/spring-boot,scottfrederick/spring-boot,jxblum/spring-boot,spring-projects/spring-boot,dreis2211/spring-boot,philwebb/spring-boot,mdeinum/spring-boot,dreis2211/spring-boot,aahlenst/spring-boot,scottfrederick/spring-boot,aahlenst/spring-boot,shakuzen/spring-boot,Buzzardo/spring-boot,michael-simons/spring-boot,htynkn/spring-boot,vpavic/spring-boot,chrylis/spring-boot,wilkinsona/spring-boot,jxblum/spring-boot,htynkn/spring-boot,jxblum/spring-boot,shakuzen/spring-boot,chrylis/spring-boot,mbenson/spring-boot,Buzzardo/spring-boot,scottfrederick/spring-boot,Buzzardo/spring-boot,michael-simons/spring-boot,aahlenst/spring-boot,chrylis/spring-boot,vpavic/spring-boot,vpavic/spring-boot,scottfrederick/spring-boot,jxblum/spring-boot,wilkinsona/spring-boot
/* * Copyright 2012-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.web.embedded.tomcat; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import javax.naming.NamingException; import org.apache.catalina.Container; import org.apache.catalina.Context; import org.apache.catalina.Engine; import org.apache.catalina.Lifecycle; import org.apache.catalina.LifecycleException; import org.apache.catalina.LifecycleState; import org.apache.catalina.Service; import org.apache.catalina.connector.Connector; import org.apache.catalina.startup.Tomcat; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.naming.ContextBindings; import org.springframework.boot.web.server.GracefulShutdownCallback; import org.springframework.boot.web.server.GracefulShutdownResult; import org.springframework.boot.web.server.PortInUseException; import org.springframework.boot.web.server.Shutdown; import org.springframework.boot.web.server.WebServer; import org.springframework.boot.web.server.WebServerException; import org.springframework.util.Assert; /** * {@link WebServer} that can be used to control a Tomcat web server. Usually this class * should be created using the {@link TomcatReactiveWebServerFactory} or * {@link TomcatServletWebServerFactory}, but not directly. 
* * @author Brian Clozel * @author Kristine Jetzke * @since 2.0.0 */ public class TomcatWebServer implements WebServer { private static final Log logger = LogFactory.getLog(TomcatWebServer.class); private static final AtomicInteger containerCounter = new AtomicInteger(-1); private final Object monitor = new Object(); private final Map<Service, Connector[]> serviceConnectors = new HashMap<>(); private final Tomcat tomcat; private final boolean autoStart; private final GracefulShutdown gracefulShutdown; private volatile boolean started; /** * Create a new {@link TomcatWebServer} instance. * @param tomcat the underlying Tomcat server */ public TomcatWebServer(Tomcat tomcat) { this(tomcat, true); } /** * Create a new {@link TomcatWebServer} instance. * @param tomcat the underlying Tomcat server * @param autoStart if the server should be started */ public TomcatWebServer(Tomcat tomcat, boolean autoStart) { this(tomcat, autoStart, Shutdown.IMMEDIATE); } /** * Create a new {@link TomcatWebServer} instance. * @param tomcat the underlying Tomcat server * @param autoStart if the server should be started * @param shutdown type of shutdown supported by the server * @since 2.3.0 */ public TomcatWebServer(Tomcat tomcat, boolean autoStart, Shutdown shutdown) { Assert.notNull(tomcat, "Tomcat Server must not be null"); this.tomcat = tomcat; this.autoStart = autoStart; this.gracefulShutdown = (shutdown == Shutdown.GRACEFUL) ? new GracefulShutdown(tomcat) : null; initialize(); } private void initialize() throws WebServerException { logger.info("Tomcat initialized with port(s): " + getPortsDescription(false)); synchronized (this.monitor) { try { addInstanceIdToEngineName(); Context context = findContext(); context.addLifecycleListener((event) -> { if (context.equals(event.getSource()) && Lifecycle.START_EVENT.equals(event.getType())) { // Remove service connectors so that protocol binding doesn't // happen when the service is started. 
removeServiceConnectors(); } }); // Start the server to trigger initialization listeners this.tomcat.start(); // We can re-throw failure exception directly in the main thread rethrowDeferredStartupExceptions(); try { ContextBindings.bindClassLoader(context, context.getNamingToken(), getClass().getClassLoader()); } catch (NamingException ex) { // Naming is not enabled. Continue } // Unlike Jetty, all Tomcat threads are daemon threads. We create a // blocking non-daemon to stop immediate shutdown startDaemonAwaitThread(); } catch (Exception ex) { stopSilently(); destroySilently(); throw new WebServerException("Unable to start embedded Tomcat", ex); } } } private Context findContext() { for (Container child : this.tomcat.getHost().findChildren()) { if (child instanceof Context) { return (Context) child; } } throw new IllegalStateException("The host does not contain a Context"); } private void addInstanceIdToEngineName() { int instanceId = containerCounter.incrementAndGet(); if (instanceId > 0) { Engine engine = this.tomcat.getEngine(); engine.setName(engine.getName() + "-" + instanceId); } } private void removeServiceConnectors() { for (Service service : this.tomcat.getServer().findServices()) { Connector[] connectors = service.findConnectors().clone(); this.serviceConnectors.put(service, connectors); for (Connector connector : connectors) { service.removeConnector(connector); } } } private void rethrowDeferredStartupExceptions() throws Exception { Container[] children = this.tomcat.getHost().findChildren(); for (Container container : children) { if (container instanceof TomcatEmbeddedContext) { TomcatStarter tomcatStarter = ((TomcatEmbeddedContext) container).getStarter(); if (tomcatStarter != null) { Exception exception = tomcatStarter.getStartUpException(); if (exception != null) { throw exception; } } } if (!LifecycleState.STARTED.equals(container.getState())) { throw new IllegalStateException(container + " failed to start"); } } } private void 
startDaemonAwaitThread() { Thread awaitThread = new Thread("container-" + (containerCounter.get())) { @Override public void run() { TomcatWebServer.this.tomcat.getServer().await(); } }; awaitThread.setContextClassLoader(getClass().getClassLoader()); awaitThread.setDaemon(false); awaitThread.start(); } @Override public void start() throws WebServerException { synchronized (this.monitor) { if (this.started) { return; } try { addPreviouslyRemovedConnectors(); Connector connector = this.tomcat.getConnector(); if (connector != null && this.autoStart) { performDeferredLoadOnStartup(); } checkThatConnectorsHaveStarted(); this.started = true; logger.info("Tomcat started on port(s): " + getPortsDescription(true) + " with context path '" + getContextPath() + "'"); } catch (ConnectorStartFailedException ex) { stopSilently(); throw ex; } catch (Exception ex) { PortInUseException.throwIfPortBindingException(ex, () -> this.tomcat.getConnector().getPort()); throw new WebServerException("Unable to start embedded Tomcat server", ex); } finally { Context context = findContext(); ContextBindings.unbindClassLoader(context, context.getNamingToken(), getClass().getClassLoader()); } } } private void checkThatConnectorsHaveStarted() { checkConnectorHasStarted(this.tomcat.getConnector()); for (Connector connector : this.tomcat.getService().findConnectors()) { checkConnectorHasStarted(connector); } } private void checkConnectorHasStarted(Connector connector) { if (LifecycleState.FAILED.equals(connector.getState())) { throw new ConnectorStartFailedException(connector.getPort()); } } private void stopSilently() { try { stopTomcat(); } catch (LifecycleException ex) { // Ignore } } private void destroySilently() { try { this.tomcat.destroy(); } catch (LifecycleException ex) { // Ignore } } private void stopTomcat() throws LifecycleException { if (Thread.currentThread().getContextClassLoader() instanceof TomcatEmbeddedWebappClassLoader) { 
Thread.currentThread().setContextClassLoader(getClass().getClassLoader()); } this.tomcat.stop(); } private void addPreviouslyRemovedConnectors() { Service[] services = this.tomcat.getServer().findServices(); for (Service service : services) { Connector[] connectors = this.serviceConnectors.get(service); if (connectors != null) { for (Connector connector : connectors) { service.addConnector(connector); if (!this.autoStart) { stopProtocolHandler(connector); } } this.serviceConnectors.remove(service); } } } private void stopProtocolHandler(Connector connector) { try { connector.getProtocolHandler().stop(); } catch (Exception ex) { logger.error("Cannot pause connector: ", ex); } } private void performDeferredLoadOnStartup() { try { for (Container child : this.tomcat.getHost().findChildren()) { if (child instanceof TomcatEmbeddedContext) { ((TomcatEmbeddedContext) child).deferredLoadOnStartup(); } } } catch (Exception ex) { if (ex instanceof WebServerException) { throw (WebServerException) ex; } throw new WebServerException("Unable to start embedded Tomcat connectors", ex); } } Map<Service, Connector[]> getServiceConnectors() { return this.serviceConnectors; } @Override public void stop() throws WebServerException { synchronized (this.monitor) { boolean wasStarted = this.started; try { this.started = false; try { if (this.gracefulShutdown != null) { this.gracefulShutdown.abort(); } stopTomcat(); this.tomcat.destroy(); } catch (LifecycleException ex) { // swallow and continue } } catch (Exception ex) { throw new WebServerException("Unable to stop embedded Tomcat", ex); } finally { if (wasStarted) { containerCounter.decrementAndGet(); } } } } private String getPortsDescription(boolean localPort) { StringBuilder ports = new StringBuilder(); for (Connector connector : this.tomcat.getService().findConnectors()) { if (ports.length() != 0) { ports.append(' '); } int port = localPort ? 
connector.getLocalPort() : connector.getPort(); ports.append(port).append(" (").append(connector.getScheme()).append(')'); } return ports.toString(); } @Override public int getPort() { Connector connector = this.tomcat.getConnector(); if (connector != null) { return connector.getLocalPort(); } return 0; } private String getContextPath() { return Arrays.stream(this.tomcat.getHost().findChildren()).filter(TomcatEmbeddedContext.class::isInstance) .map(TomcatEmbeddedContext.class::cast).map(TomcatEmbeddedContext::getPath) .collect(Collectors.joining(" ")); } /** * Returns access to the underlying Tomcat server. * @return the Tomcat server */ public Tomcat getTomcat() { return this.tomcat; } @Override public void shutDownGracefully(GracefulShutdownCallback callback) { if (this.gracefulShutdown == null) { callback.shutdownComplete(GracefulShutdownResult.IMMEDIATE); return; } this.gracefulShutdown.shutDownGracefully(callback); } }
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/embedded/tomcat/TomcatWebServer.java
/* * Copyright 2012-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.web.embedded.tomcat; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import javax.naming.NamingException; import org.apache.catalina.Container; import org.apache.catalina.Context; import org.apache.catalina.Engine; import org.apache.catalina.Lifecycle; import org.apache.catalina.LifecycleException; import org.apache.catalina.LifecycleState; import org.apache.catalina.Service; import org.apache.catalina.connector.Connector; import org.apache.catalina.startup.Tomcat; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.naming.ContextBindings; import org.springframework.boot.web.server.GracefulShutdownCallback; import org.springframework.boot.web.server.GracefulShutdownResult; import org.springframework.boot.web.server.PortInUseException; import org.springframework.boot.web.server.Shutdown; import org.springframework.boot.web.server.WebServer; import org.springframework.boot.web.server.WebServerException; import org.springframework.util.Assert; /** * {@link WebServer} that can be used to control a Tomcat web server. Usually this class * should be created using the {@link TomcatReactiveWebServerFactory} of * {@link TomcatServletWebServerFactory}, but not directly. 
* * @author Brian Clozel * @author Kristine Jetzke * @since 2.0.0 */ public class TomcatWebServer implements WebServer { private static final Log logger = LogFactory.getLog(TomcatWebServer.class); private static final AtomicInteger containerCounter = new AtomicInteger(-1); private final Object monitor = new Object(); private final Map<Service, Connector[]> serviceConnectors = new HashMap<>(); private final Tomcat tomcat; private final boolean autoStart; private final GracefulShutdown gracefulShutdown; private volatile boolean started; /** * Create a new {@link TomcatWebServer} instance. * @param tomcat the underlying Tomcat server */ public TomcatWebServer(Tomcat tomcat) { this(tomcat, true); } /** * Create a new {@link TomcatWebServer} instance. * @param tomcat the underlying Tomcat server * @param autoStart if the server should be started */ public TomcatWebServer(Tomcat tomcat, boolean autoStart) { this(tomcat, autoStart, Shutdown.IMMEDIATE); } /** * Create a new {@link TomcatWebServer} instance. * @param tomcat the underlying Tomcat server * @param autoStart if the server should be started * @param shutdown type of shutdown supported by the server * @since 2.3.0 */ public TomcatWebServer(Tomcat tomcat, boolean autoStart, Shutdown shutdown) { Assert.notNull(tomcat, "Tomcat Server must not be null"); this.tomcat = tomcat; this.autoStart = autoStart; this.gracefulShutdown = (shutdown == Shutdown.GRACEFUL) ? new GracefulShutdown(tomcat) : null; initialize(); } private void initialize() throws WebServerException { logger.info("Tomcat initialized with port(s): " + getPortsDescription(false)); synchronized (this.monitor) { try { addInstanceIdToEngineName(); Context context = findContext(); context.addLifecycleListener((event) -> { if (context.equals(event.getSource()) && Lifecycle.START_EVENT.equals(event.getType())) { // Remove service connectors so that protocol binding doesn't // happen when the service is started. 
removeServiceConnectors(); } }); // Start the server to trigger initialization listeners this.tomcat.start(); // We can re-throw failure exception directly in the main thread rethrowDeferredStartupExceptions(); try { ContextBindings.bindClassLoader(context, context.getNamingToken(), getClass().getClassLoader()); } catch (NamingException ex) { // Naming is not enabled. Continue } // Unlike Jetty, all Tomcat threads are daemon threads. We create a // blocking non-daemon to stop immediate shutdown startDaemonAwaitThread(); } catch (Exception ex) { stopSilently(); destroySilently(); throw new WebServerException("Unable to start embedded Tomcat", ex); } } } private Context findContext() { for (Container child : this.tomcat.getHost().findChildren()) { if (child instanceof Context) { return (Context) child; } } throw new IllegalStateException("The host does not contain a Context"); } private void addInstanceIdToEngineName() { int instanceId = containerCounter.incrementAndGet(); if (instanceId > 0) { Engine engine = this.tomcat.getEngine(); engine.setName(engine.getName() + "-" + instanceId); } } private void removeServiceConnectors() { for (Service service : this.tomcat.getServer().findServices()) { Connector[] connectors = service.findConnectors().clone(); this.serviceConnectors.put(service, connectors); for (Connector connector : connectors) { service.removeConnector(connector); } } } private void rethrowDeferredStartupExceptions() throws Exception { Container[] children = this.tomcat.getHost().findChildren(); for (Container container : children) { if (container instanceof TomcatEmbeddedContext) { TomcatStarter tomcatStarter = ((TomcatEmbeddedContext) container).getStarter(); if (tomcatStarter != null) { Exception exception = tomcatStarter.getStartUpException(); if (exception != null) { throw exception; } } } if (!LifecycleState.STARTED.equals(container.getState())) { throw new IllegalStateException(container + " failed to start"); } } } private void 
startDaemonAwaitThread() { Thread awaitThread = new Thread("container-" + (containerCounter.get())) { @Override public void run() { TomcatWebServer.this.tomcat.getServer().await(); } }; awaitThread.setContextClassLoader(getClass().getClassLoader()); awaitThread.setDaemon(false); awaitThread.start(); } @Override public void start() throws WebServerException { synchronized (this.monitor) { if (this.started) { return; } try { addPreviouslyRemovedConnectors(); Connector connector = this.tomcat.getConnector(); if (connector != null && this.autoStart) { performDeferredLoadOnStartup(); } checkThatConnectorsHaveStarted(); this.started = true; logger.info("Tomcat started on port(s): " + getPortsDescription(true) + " with context path '" + getContextPath() + "'"); } catch (ConnectorStartFailedException ex) { stopSilently(); throw ex; } catch (Exception ex) { PortInUseException.throwIfPortBindingException(ex, () -> this.tomcat.getConnector().getPort()); throw new WebServerException("Unable to start embedded Tomcat server", ex); } finally { Context context = findContext(); ContextBindings.unbindClassLoader(context, context.getNamingToken(), getClass().getClassLoader()); } } } private void checkThatConnectorsHaveStarted() { checkConnectorHasStarted(this.tomcat.getConnector()); for (Connector connector : this.tomcat.getService().findConnectors()) { checkConnectorHasStarted(connector); } } private void checkConnectorHasStarted(Connector connector) { if (LifecycleState.FAILED.equals(connector.getState())) { throw new ConnectorStartFailedException(connector.getPort()); } } private void stopSilently() { try { stopTomcat(); } catch (LifecycleException ex) { // Ignore } } private void destroySilently() { try { this.tomcat.destroy(); } catch (LifecycleException ex) { // Ignore } } private void stopTomcat() throws LifecycleException { if (Thread.currentThread().getContextClassLoader() instanceof TomcatEmbeddedWebappClassLoader) { 
Thread.currentThread().setContextClassLoader(getClass().getClassLoader()); } this.tomcat.stop(); } private void addPreviouslyRemovedConnectors() { Service[] services = this.tomcat.getServer().findServices(); for (Service service : services) { Connector[] connectors = this.serviceConnectors.get(service); if (connectors != null) { for (Connector connector : connectors) { service.addConnector(connector); if (!this.autoStart) { stopProtocolHandler(connector); } } this.serviceConnectors.remove(service); } } } private void stopProtocolHandler(Connector connector) { try { connector.getProtocolHandler().stop(); } catch (Exception ex) { logger.error("Cannot pause connector: ", ex); } } private void performDeferredLoadOnStartup() { try { for (Container child : this.tomcat.getHost().findChildren()) { if (child instanceof TomcatEmbeddedContext) { ((TomcatEmbeddedContext) child).deferredLoadOnStartup(); } } } catch (Exception ex) { if (ex instanceof WebServerException) { throw (WebServerException) ex; } throw new WebServerException("Unable to start embedded Tomcat connectors", ex); } } Map<Service, Connector[]> getServiceConnectors() { return this.serviceConnectors; } @Override public void stop() throws WebServerException { synchronized (this.monitor) { boolean wasStarted = this.started; try { this.started = false; try { if (this.gracefulShutdown != null) { this.gracefulShutdown.abort(); } stopTomcat(); this.tomcat.destroy(); } catch (LifecycleException ex) { // swallow and continue } } catch (Exception ex) { throw new WebServerException("Unable to stop embedded Tomcat", ex); } finally { if (wasStarted) { containerCounter.decrementAndGet(); } } } } private String getPortsDescription(boolean localPort) { StringBuilder ports = new StringBuilder(); for (Connector connector : this.tomcat.getService().findConnectors()) { if (ports.length() != 0) { ports.append(' '); } int port = localPort ? 
connector.getLocalPort() : connector.getPort(); ports.append(port).append(" (").append(connector.getScheme()).append(')'); } return ports.toString(); } @Override public int getPort() { Connector connector = this.tomcat.getConnector(); if (connector != null) { return connector.getLocalPort(); } return 0; } private String getContextPath() { return Arrays.stream(this.tomcat.getHost().findChildren()).filter(TomcatEmbeddedContext.class::isInstance) .map(TomcatEmbeddedContext.class::cast).map(TomcatEmbeddedContext::getPath) .collect(Collectors.joining(" ")); } /** * Returns access to the underlying Tomcat server. * @return the Tomcat server */ public Tomcat getTomcat() { return this.tomcat; } @Override public void shutDownGracefully(GracefulShutdownCallback callback) { if (this.gracefulShutdown == null) { callback.shutdownComplete(GracefulShutdownResult.IMMEDIATE); return; } this.gracefulShutdown.shutDownGracefully(callback); } }
Fix typo See gh-24245
spring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/embedded/tomcat/TomcatWebServer.java
Fix typo
<ide><path>pring-boot-project/spring-boot/src/main/java/org/springframework/boot/web/embedded/tomcat/TomcatWebServer.java <ide> <ide> /** <ide> * {@link WebServer} that can be used to control a Tomcat web server. Usually this class <del> * should be created using the {@link TomcatReactiveWebServerFactory} of <add> * should be created using the {@link TomcatReactiveWebServerFactory} or <ide> * {@link TomcatServletWebServerFactory}, but not directly. <ide> * <ide> * @author Brian Clozel
Java
apache-2.0
6b191e665328dfdf395a4df353e556fadbd5c7fa
0
crockpotveggies/deeplearning4j,crockpotveggies/deeplearning4j,crockpotveggies/deeplearning4j,crockpotveggies/deeplearning4j,crockpotveggies/deeplearning4j,crockpotveggies/deeplearning4j
/* * * * Copyright 2015 Skymind,Inc. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * */ package org.deeplearning4j.nn.multilayer; import org.deeplearning4j.base.MnistFetcher; import org.deeplearning4j.berkeley.Pair; import org.deeplearning4j.datasets.fetchers.MnistDataFetcher; import org.deeplearning4j.datasets.iterator.impl.CifarDataSetIterator; import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator; import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator; import org.deeplearning4j.datasets.mnist.MnistManager; import org.deeplearning4j.eval.Evaluation; import org.deeplearning4j.exception.DL4JException; import org.deeplearning4j.nn.api.Layer; import org.deeplearning4j.nn.api.OptimizationAlgorithm; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; import org.deeplearning4j.nn.conf.Updater; import org.deeplearning4j.nn.conf.distribution.NormalDistribution; import org.deeplearning4j.nn.conf.distribution.UniformDistribution; import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.conf.layers.*; import org.deeplearning4j.nn.gradient.DefaultGradient; import org.deeplearning4j.nn.gradient.Gradient; import org.deeplearning4j.nn.layers.BaseOutputLayer; import org.deeplearning4j.nn.params.DefaultParamInitializer; import org.deeplearning4j.nn.params.PretrainParamInitializer; import org.deeplearning4j.nn.weights.WeightInit; import 
org.deeplearning4j.optimize.api.IterationListener; import org.deeplearning4j.optimize.listeners.ScoreIterationListener; import org.deeplearning4j.util.ModelSerializer; import org.junit.Ignore; import org.junit.Test; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.dataset.DataSet; import org.nd4j.linalg.dataset.SplitTestAndTrain; import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import org.nd4j.linalg.factory.NDArrayFactory; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.heartbeat.Heartbeat; import org.nd4j.linalg.heartbeat.reports.Environment; import org.nd4j.linalg.heartbeat.reports.Event; import org.nd4j.linalg.heartbeat.reports.Task; import org.nd4j.linalg.heartbeat.utils.EnvironmentUtils; import org.nd4j.linalg.heartbeat.utils.TaskUtils; import org.nd4j.linalg.lossfunctions.LossFunctions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.*; import static org.junit.Assert.*; /** * Created by agibsonccc on 12/27/14. 
*/ public class MultiLayerTest { private static final Logger log = LoggerFactory.getLogger(MultiLayerTest.class); @Test public void testSetParams() { Nd4j.MAX_ELEMENTS_PER_SLICE = Integer.MAX_VALUE; Nd4j.MAX_SLICES_TO_PRINT = Integer.MAX_VALUE; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .list() .layer(0, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN) .nIn(4).nOut(3) .activation("tanh") .build()) .layer(1,new RBM.Builder(RBM.HiddenUnit.GAUSSIAN, RBM.VisibleUnit.GAUSSIAN).nIn(3).nOut(2) .build()) .build(); MultiLayerNetwork network3 = new MultiLayerNetwork(conf); network3.init(); INDArray params = network3.params(); INDArray weights = network3.getLayer(0).getParam(DefaultParamInitializer.WEIGHT_KEY).dup(); INDArray bias = network3.getLayer(0).getParam(DefaultParamInitializer.BIAS_KEY).dup(); network3.setParameters(params); assertEquals(weights, network3.getLayer(0).getParam(DefaultParamInitializer.WEIGHT_KEY)); assertEquals(bias,network3.getLayer(0).getParam(DefaultParamInitializer.BIAS_KEY)); INDArray params4 = network3.params(); assertEquals(params, params4); } @Test public void testBatchNorm() { Nd4j.getRandom().setSeed(123); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT) .iterations(5) .seed(123) .list() .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER).activation("tanh").build()) .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).weightInit(WeightInit.XAVIER).activation("tanh").build()) .layer(2, new BatchNormalization.Builder().nOut(2).build()) .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .weightInit(WeightInit.XAVIER) .activation("softmax") .nIn(2).nOut(3).build()) .backprop(true).pretrain(false) .build(); MultiLayerNetwork network = new MultiLayerNetwork(conf); network.init(); network.setListeners(new ScoreIterationListener(1)); DataSetIterator 
iter = new IrisDataSetIterator(150, 150); DataSet next = iter.next(); next.normalizeZeroMeanZeroUnitVariance(); SplitTestAndTrain trainTest = next.splitTestAndTrain(110); network.setLabels(trainTest.getTrain().getLabels()); network.init(); network.fit(trainTest.getTrain()); } @Test public void testBackProp() { Nd4j.getRandom().setSeed(123); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT) .iterations(5) .seed(123) .list() .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER).activation("tanh").build()) .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).weightInit(WeightInit.XAVIER).activation("tanh").build()) .layer(2, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .weightInit(WeightInit.XAVIER) .activation("softmax") .nIn(2).nOut(3).build()) .backprop(true).pretrain(false).build(); MultiLayerNetwork network = new MultiLayerNetwork(conf); network.init(); network.setListeners(new ScoreIterationListener(1)); DataSetIterator iter = new IrisDataSetIterator(150, 150); DataSet next = iter.next(); next.normalizeZeroMeanZeroUnitVariance(); SplitTestAndTrain trainTest = next.splitTestAndTrain(110); network.setInput(trainTest.getTrain().getFeatureMatrix()); network.setLabels(trainTest.getTrain().getLabels()); network.init(); network.fit(trainTest.getTrain()); DataSet test = trainTest.getTest(); Evaluation eval = new Evaluation(); INDArray output = network.output(test.getFeatureMatrix()); eval.eval(test.getLabels(), output); log.info("Score " + eval.stats()); } @Test public void testDbn() throws Exception { Nd4j.MAX_SLICES_TO_PRINT = -1; Nd4j.MAX_ELEMENTS_PER_SLICE = -1; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .iterations(100) .momentum(0.9) .optimizationAlgo(OptimizationAlgorithm.LBFGS) .regularization(true) .l2(2e-4) .list() .layer(0, new RBM.Builder(RBM.HiddenUnit.GAUSSIAN, 
RBM.VisibleUnit.GAUSSIAN) .nIn(4).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0, 1)) .activation("tanh") .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE).build()) .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .nIn(3).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0, 1)) .activation("softmax").build()) .build(); MultiLayerNetwork d = new MultiLayerNetwork(conf); DataSetIterator iter = new IrisDataSetIterator(150, 150); DataSet next = iter.next(); Nd4j.writeTxt(next.getFeatureMatrix(), "iris.txt", "\t"); next.normalizeZeroMeanZeroUnitVariance(); SplitTestAndTrain testAndTrain = next.splitTestAndTrain(110); DataSet train = testAndTrain.getTrain(); d.fit(train); DataSet test = testAndTrain.getTest(); Evaluation eval = new Evaluation(); INDArray output = d.output(test.getFeatureMatrix()); eval.eval(test.getLabels(), output); log.info("Score " + eval.stats()); } @Test public void testGradientWithAsList(){ MultiLayerNetwork net1 = new MultiLayerNetwork(getConf()); MultiLayerNetwork net2 = new MultiLayerNetwork(getConf()); net1.init(); net2.init(); DataSet x1 = new IrisDataSetIterator(1,150).next(); DataSet all = new IrisDataSetIterator(150,150).next(); DataSet x2 = all.asList().get(0); //x1 and x2 contain identical data assertArrayEquals(asFloat(x1.getFeatureMatrix()), asFloat(x2.getFeatureMatrix()), 0.0f); assertArrayEquals(asFloat(x1.getLabels()), asFloat(x2.getLabels()), 0.0f); assertEquals(x1, x2); //Set inputs/outputs so gradient can be calculated: net1.feedForward(x1.getFeatureMatrix()); net2.feedForward(x2.getFeatureMatrix()); ((BaseOutputLayer)net1.getLayer(1)).setLabels(x1.getLabels()); ((BaseOutputLayer)net2.getLayer(1)).setLabels(x2.getLabels()); net1.gradient(); net2.gradient(); } /** * This test intended only to test activateSelectedLayers method, it does not involves fully-working AutoEncoder. 
*/ @Test public void testSelectedActivations() { // Train DeepAutoEncoder on very limited trainset final int numRows = 28; final int numColumns = 28; int seed = 123; int numSamples = 3; int iterations = 1; int listenerFreq = iterations/5; log.info("Load data...."); float[][] trainingData = new float[numSamples][numColumns * numRows]; Arrays.fill(trainingData[0],0.95f); Arrays.fill(trainingData[1],0.5f); Arrays.fill(trainingData[2], 0.05f); log.info("Build model...."); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .seed(seed) .iterations(iterations) .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT) .list() .layer(0, new RBM.Builder().nIn(numRows * numColumns).nOut(1000).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(1, new RBM.Builder().nIn(1000).nOut(500).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(2, new RBM.Builder().nIn(500).nOut(250).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(3, new RBM.Builder().nIn(250).nOut(100).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(4, new RBM.Builder().nIn(100).nOut(30).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) //encoding stops .layer(5, new RBM.Builder().nIn(30).nOut(100).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) //decoding starts .layer(6, new RBM.Builder().nIn(100).nOut(250).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(7, new RBM.Builder().nIn(250).nOut(500).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(8, new RBM.Builder().nIn(500).nOut(1000).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(9, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nIn(1000).nOut(numRows*numColumns).build()) .pretrain(true).backprop(true) .build(); MultiLayerNetwork model = new MultiLayerNetwork(conf); 
model.init(); model.setListeners(Arrays.asList((IterationListener) new ScoreIterationListener(listenerFreq))); log.info("Train model...."); int cnt = 0; while(cnt < numSamples) { INDArray input = Nd4j.create(trainingData[cnt]); model.fit(new DataSet(input, input)); cnt++; } // Make two separate selective calls log.info("Testing full cycle..."); List<INDArray> comparableResult = model.feedForward(Nd4j.create(trainingData[0])); INDArray encodeResult = model.activateSelectedLayers(0,4, Nd4j.create(trainingData[0])); log.info("Compare feedForward results with selectedActivation"); assertEquals(comparableResult.get(5), encodeResult); INDArray decodeResults = model.activateSelectedLayers(5,9, encodeResult); log.info("Decode results: " + decodeResults.columns() + " " + decodeResults); log.info("Comparable results: " + comparableResult.get(10).columns() + " " + comparableResult.get(10)); assertEquals(comparableResult.get(10), decodeResults); } private static MultiLayerConfiguration getConf(){ MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .seed(12345L) .list() .layer(0, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN) .nIn(4).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1)) .build()) .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .activation("softmax") .nIn(3).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1)) .build()) .build(); return conf; } public static float[] asFloat( INDArray arr ){ int len = arr.length(); float[] f = new float[len]; for( int i=0; i<len; i++ ) f[i] = arr.getFloat(i); return f; } @Test public void testFeedForwardToLayer(){ int nIn = 30; int nOut = 25; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT) .iterations(5).learningRate(1e-3) .list() .layer(0, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN) 
.nIn(nIn).nOut(600) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1e-5)) .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(1, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN) .nIn(600).nOut(250) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1e-5)) .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(2, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN) .nIn(250).nOut(100) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1e-5)) .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .nIn(100).nOut(25) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1e-5)).build()) .build(); MultiLayerNetwork network = new MultiLayerNetwork(conf); network.init(); INDArray input = Nd4j.rand(5,nIn); List<INDArray> activations = network.feedForward(input); assertEquals(5,activations.size()); //4 layers + input List<INDArray> activationsAll = network.feedForwardToLayer(3,input); assertEquals(activations,activationsAll); for( int i=3; i>=0; i-- ){ List<INDArray> activationsPartial = network.feedForwardToLayer(i,input); assertEquals(i+2,activationsPartial.size()); //i+2: for layer 3: input + activations of {0,1,2,3} -> 5 total = 3+2 for( int j=0; j<=i; j++ ){ INDArray exp = activationsAll.get(j); INDArray act = activationsPartial.get(j); assertEquals(exp,act); } } } @Test public void testBackpropGradient(){ //Testing: MultiLayerNetwork.backpropGradient() //i.e., specifically without an output layer int nIn = 10; int nOut = 40; int miniBatch = 5; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .updater(org.deeplearning4j.nn.conf.Updater.SGD) .learningRate(0.1) .list() .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(20).activation("relu").weightInit(WeightInit.XAVIER).build()) 
.layer(1, new DenseLayer.Builder().nIn(20).nOut(30).activation("relu").weightInit(WeightInit.XAVIER).build()) .layer(2, new DenseLayer.Builder().nIn(30).nOut(nOut).activation("relu").weightInit(WeightInit.XAVIER).build()) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); Nd4j.getRandom().setSeed(12345); INDArray eps = Nd4j.rand(miniBatch,nOut); INDArray input = Nd4j.rand(miniBatch, nIn); net.feedForward(input); //Need to feed forward before backprop Pair<Gradient,INDArray> pair = net.backpropGradient(eps); INDArray epsOut = pair.getSecond(); assertNotNull(epsOut); assertArrayEquals(new int[]{miniBatch,nIn},epsOut.shape()); Gradient g = pair.getFirst(); Map<String,INDArray> gradMap = g.gradientForVariable(); assertEquals(6, gradMap.size()); //3 layers, weight + bias gradients for each String[] expKeys = {"0_"+DefaultParamInitializer.WEIGHT_KEY,"0_"+DefaultParamInitializer.BIAS_KEY, "1_"+DefaultParamInitializer.WEIGHT_KEY,"2_"+DefaultParamInitializer.BIAS_KEY, "2_"+DefaultParamInitializer.WEIGHT_KEY,"2_"+DefaultParamInitializer.BIAS_KEY}; Set<String> keys = gradMap.keySet(); for( String s : expKeys ){ assertTrue(keys.contains(s)); } /* System.out.println(pair); //Use updater to go from raw gradients -> updates //Apply learning rate, gradient clipping, adagrad/momentum/rmsprop etc Updater updater = UpdaterCreator.getUpdater(net); updater.update(net, g, 0, miniBatch); StepFunction stepFunction = new NegativeGradientStepFunction(); INDArray params = net.params(); System.out.println(Arrays.toString(params.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 10)).dup().data().asFloat())); stepFunction.step(params, g.gradient()); net.setParams(params); //params() may not be in-place System.out.println(Arrays.toString(params.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 10)).dup().data().asFloat())); */ } @Test public void testLayerNames(){ int nIn = 10; int nOut = 40; List<String> layerNameList = new ArrayList<>(); layerNameList.add("dnn1"); 
layerNameList.add("dnn2"); layerNameList.add("dnn3"); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .updater(org.deeplearning4j.nn.conf.Updater.SGD) .learningRate(0.1) .list() .layer(0, new DenseLayer.Builder().name("dnn1").nIn(nIn).nOut(20).activation("relu").weightInit(WeightInit.XAVIER).build()) .layer(1, new DenseLayer.Builder().name("dnn2").nIn(20).nOut(30).activation("relu").weightInit(WeightInit.XAVIER).build()) .layer(2, new DenseLayer.Builder().name("dnn3").nIn(30).nOut(nOut).activation("softmax").weightInit(WeightInit.XAVIER).build()) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); assertEquals(layerNameList.get(0), net.getLayer(0).conf().getLayer().getLayerName()); assertEquals(layerNameList, net.getLayerNames()); assertEquals("softmax", net.getLayer(layerNameList.get(2)).conf().getLayer().getActivationFunction()); } @Test public void testTranspose(){ MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .iterations(100) .momentum(0.9) .regularization(true) .l2(2e-4) .list() .layer(0, new RBM.Builder(RBM.HiddenUnit.GAUSSIAN, RBM.VisibleUnit.GAUSSIAN) .nIn(4).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0, 1)) .activation("tanh") .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .nIn(3).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0, 1)) .activation("softmax").build()) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); Layer layer = net.getLayer(0); int nParamsBackprop = layer.numParams(true); int nParamsBoth = layer.numParams(false); Layer transposed = layer.transpose(); assertArrayEquals(new int[]{4,3},layer.getParam(DefaultParamInitializer.WEIGHT_KEY).shape()); assertArrayEquals(new int[]{1,3},layer.getParam(DefaultParamInitializer.BIAS_KEY).shape()); assertArrayEquals(new 
int[]{1,4},layer.getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY).shape()); assertArrayEquals(new int[]{3, 4}, transposed.getParam(DefaultParamInitializer.WEIGHT_KEY).shape()); assertArrayEquals(new int[]{1, 4}, transposed.getParam(DefaultParamInitializer.BIAS_KEY).shape()); assertArrayEquals(new int[]{1, 3}, transposed.getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY).shape()); INDArray origWeights = layer.getParam(DefaultParamInitializer.WEIGHT_KEY); INDArray transposedWeights = transposed.getParam(DefaultParamInitializer.WEIGHT_KEY); assertEquals(origWeights.transpose(), transposedWeights); assertEquals(layer.getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY), transposed.getParam(DefaultParamInitializer.BIAS_KEY)); assertEquals(layer.getParam(DefaultParamInitializer.BIAS_KEY),transposed.getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY)); assertEquals(3,((FeedForwardLayer)transposed.conf().getLayer()).getNIn()); assertEquals(4,((FeedForwardLayer)transposed.conf().getLayer()).getNOut()); } @Test public void testScoreExamples(){ Nd4j.getRandom().setSeed(12345); int nIn = 5; int nOut = 6; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .seed(12345) .regularization(true).l1(0.01).l2(0.01) .learningRate(0.1).activation("tanh").weightInit(WeightInit.XAVIER) .list() .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(20).build()) .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build()) .layer(2, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut).build()) .build(); MultiLayerConfiguration confNoReg = new NeuralNetConfiguration.Builder() .seed(12345) .regularization(false) .learningRate(0.1).activation("tanh").weightInit(WeightInit.XAVIER) .list() .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(20).build()) .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build()) .layer(2, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut).build()) .build(); MultiLayerNetwork 
net = new MultiLayerNetwork(conf); net.init(); MultiLayerNetwork netNoReg = new MultiLayerNetwork(confNoReg); netNoReg.init(); netNoReg.setParameters(net.params().dup()); //Score single example, and compare to scoreExamples: INDArray input = Nd4j.rand(3,nIn); INDArray output = Nd4j.rand(3,nOut); DataSet ds = new DataSet(input,output); INDArray scoresWithRegularization = net.scoreExamples(ds,true); INDArray scoresNoRegularization = net.scoreExamples(ds,false); assertArrayEquals(new int[]{3,1},scoresWithRegularization.shape()); assertArrayEquals(new int[]{3,1},scoresNoRegularization.shape()); for( int i=0; i<3; i++ ){ DataSet singleEx = new DataSet(input.getRow(i),output.getRow(i)); double score = net.score(singleEx); double scoreNoReg = netNoReg.score(singleEx); double scoreUsingScoreExamples = scoresWithRegularization.getDouble(i); double scoreUsingScoreExamplesNoReg = scoresNoRegularization.getDouble(i); assertEquals(score,scoreUsingScoreExamples,1e-4); assertEquals(scoreNoReg,scoreUsingScoreExamplesNoReg,1e-4); assertTrue(scoreUsingScoreExamples > scoreUsingScoreExamplesNoReg); //Regularization term increases score // System.out.println(score + "\t" + scoreUsingScoreExamples + "\t|\t" + scoreNoReg + "\t" + scoreUsingScoreExamplesNoReg); } } @Test public void testDataSetScore(){ Nd4j.getRandom().setSeed(12345); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .regularization(false) .learningRate(1.0) .weightInit(WeightInit.XAVIER) .seed(12345L) .list() .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation("sigmoid").build()) .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nIn(3).nOut(3).build()) .pretrain(false).backprop(true) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); INDArray in = Nd4j.create(new double[]{1.0,2.0,3.0,4.0}); INDArray out = Nd4j.create(new double[]{1,0,0}); double score = net.score(new DataSet(in,out)); } @Test public void testDataSetScoreCNN(){ 
int miniBatch = 3; int depth = 2; int width = 3; int height = 3; int nOut = 2; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .regularization(false) .learningRate(1.0) .seed(12345L) .list() .layer(0, new ConvolutionLayer.Builder(2,2).nOut(1).build()) .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nOut(2).build()) .setInputType(InputType.convolutionalFlat(height,width,depth)) .pretrain(false).backprop(true).build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); Nd4j.getRandom().setSeed(12345); Random r = new Random(12345); INDArray input = Nd4j.rand(miniBatch,depth*width*height); INDArray labels = Nd4j.create(miniBatch,nOut); for( int i=0; i<miniBatch; i++ ){ labels.putScalar(new int[]{i,r.nextInt(nOut)},1.0); } double score = net.score(new DataSet(input,labels)); } @Test public void testPredict() throws Exception{ Nd4j.getRandom().setSeed(12345); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .regularization(false) .learningRate(1.0) .weightInit(WeightInit.XAVIER) .seed(12345L) .list() .layer(0, new DenseLayer.Builder().nIn(784).nOut(50).activation("relu").build()) .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nIn(50).nOut(10).build()) .pretrain(false).backprop(true) .setInputType(InputType.convolutional(28,28,1)) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); DataSetIterator ds = new MnistDataSetIterator(10,10); net.fit(ds); DataSetIterator testDs = new MnistDataSetIterator(1,1); DataSet testData = testDs.next(); testData.setLabelNames(Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); String actualLables = testData.getLabelName(0); List<String> prediction = net.predict(testData); assertTrue(actualLables != null); assertTrue(prediction.get(0) != null); } @Test @Ignore public void testCid() throws Exception { System.out.println(EnvironmentUtils.buildCId()); Environment 
environment = EnvironmentUtils.buildEnvironment(); environment.setSerialVersionID(EnvironmentUtils.buildCId()); Task task = TaskUtils.buildTask(Nd4j.create(new double[]{1,2,3,4,5,6})); Heartbeat.getInstance().reportEvent(Event.STANDALONE, environment, task); Thread.sleep(25000); } @Test public void testOutput() throws Exception{ Nd4j.getRandom().setSeed(12345); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .regularization(false) .learningRate(1.0) .weightInit(WeightInit.XAVIER) .seed(12345L) .list() .layer(0, new DenseLayer.Builder().nIn(784).nOut(50).activation("relu").build()) .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nIn(50).nOut(10).build()) .pretrain(false).backprop(true) .setInputType(InputType.convolutional(28,28,1)) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); DataSetIterator fullData = new MnistDataSetIterator(1,2); net.fit(fullData); fullData.reset(); DataSet expectedSet = fullData.next(2); INDArray expectedOut = net.output(expectedSet.getFeatureMatrix(), false); fullData.reset(); INDArray actualOut = net.output(fullData); assertEquals(expectedOut, actualOut); } @Test public void testGradientUpdate() throws Exception{ DataSetIterator iter = new IrisDataSetIterator(1,1); Gradient expectedGradient = new DefaultGradient(); expectedGradient.setGradientFor("0_W", Nd4j.ones(4,5)); expectedGradient.setGradientFor("0_b", Nd4j.ones(1,5)); expectedGradient.setGradientFor("1_W", Nd4j.ones(5,3)); expectedGradient.setGradientFor("1_b", Nd4j.ones(1,3)); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .updater(org.deeplearning4j.nn.conf.Updater.SGD) .learningRate(1) .activation("relu").weightInit(WeightInit.XAVIER) .list() .layer(0, new DenseLayer.Builder().name("dnn1").nIn(4).nOut(5).build()) .layer(1, new OutputLayer.Builder().name("output").nIn(5).nOut(3).activation("softmax").weightInit(WeightInit.XAVIER).build()) .backprop(true).pretrain(false) 
.build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); net.fit(iter.next()); // TODO validate actual layer gradientView - issue getting var out of BaseLayer w/o adding MLN getter that gets confused with local gradient vars Gradient actualGradient = net.gradient; assertNotEquals(expectedGradient.getGradientFor("0_W"), actualGradient.getGradientFor("0_W")); net.update(expectedGradient); actualGradient = net.gradient; assertEquals(expectedGradient.getGradientFor("0_W"), actualGradient.getGradientFor("0_W")); // Update params with set net.setParam("0_W", Nd4j.ones(4,5)); net.setParam("0_b", Nd4j.ones(1,5)); net.setParam("1_W", Nd4j.ones(5,3)); net.setParam("1_b", Nd4j.ones(1,3)); INDArray actualParams = net.params(); // Confirm params assertEquals(expectedGradient.gradient(), actualParams); net.update(expectedGradient); actualParams = net.params(); assertEquals(Nd4j.ones(1,43).addi(1), actualParams); } @Test(expected = DL4JException.class) public void testCnnInvalidData(){ int miniBatch = 3; int depth = 2; int width = 5; int height = 5; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .list() .layer(0, new ConvolutionLayer.Builder().kernelSize(2,2).stride(1,1).padding(0,0).nIn(2).nOut(2).build()) .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nOut(2).build()) .setInputType(InputType.convolutional(height,width,depth)) .pretrain(false).backprop(true).build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); INDArray inputWrongDepth = Nd4j.rand(new int[]{miniBatch,5,height,width}); //Order: examples, channels, height, width net.feedForward(inputWrongDepth); } @Test public void testApplyingPreTrainConfigAndParams(){ int nIn = 10; int nOut = 10; // Test pretrain true MultiLayerNetwork rbmPre = getRBMModel(true, nIn, nOut); assertTrue(rbmPre.conf().isPretrain()); // check on the network assertTrue(rbmPre.getLayer(0).conf().isPretrain()); // check pretrain layer 
assertFalse(rbmPre.getLayer(1).conf().isPretrain()); // check none pretrain layer int actualNP = rbmPre.numParams(); assertEquals(2 * (nIn * nOut + nOut) + nIn, actualNP); INDArray params = rbmPre.params(); assertEquals(params.length(), actualNP); // check num params Map<String, INDArray> paramTable = rbmPre.paramTable(); assertTrue(paramTable.containsKey("0_vb")); // check vb exists for pretrain layer rbmPre.setParam("0_vb", Nd4j.ones(10)); params = rbmPre.getParam("0_vb"); assertEquals(Nd4j.ones(10), params); // check set params for vb // Test pretrain false, expect same for true because its not changed when applying update MultiLayerNetwork rbmNoPre = getRBMModel(false, nIn, nOut); assertFalse(rbmNoPre.conf().isPretrain()); assertFalse(rbmNoPre.getLayer(0).conf().isPretrain()); assertFalse(rbmPre.getLayer(1).conf().isPretrain()); actualNP = rbmNoPre.numParams(); assertEquals(2 * (nIn * nOut + nOut) + nIn, actualNP); params = rbmNoPre.params(); assertEquals(params.length(), actualNP); paramTable = rbmPre.paramTable(); assertTrue(paramTable.containsKey("0_vb")); } @Test public void testLayerPreTrainSetFalseAfterPreTrain(){ INDArray input = Nd4j.linspace(1, 10, 10); int nIn = 10; int nOut = 10; MultiLayerNetwork rbmPre = getRBMModel(true, nIn, nOut); rbmPre.fit(input); assertTrue(rbmPre.conf().isPretrain()); // check on the network assertFalse(rbmPre.getLayer(0).conf().isPretrain()); // check pretrain layer assertFalse(rbmPre.getLayer(1).conf().isPretrain()); // check none pretrain layer } public MultiLayerNetwork getRBMModel(boolean preTrain, int nIn, int nOut){ MultiLayerConfiguration rbm = new NeuralNetConfiguration.Builder() .seed(42) .iterations(1) .updater(Updater.NONE) .epsilon(1) .weightInit(WeightInit.UNIFORM) .list( new org.deeplearning4j.nn.conf.layers.RBM.Builder() .lossFunction(LossFunctions.LossFunction.COSINE_PROXIMITY) .activation("identity") .nOut(nIn).build(), new 
org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.COSINE_PROXIMITY) .activation("identity") .nOut(nOut).build() ) .pretrain(preTrain) .setInputType(InputType.feedForward(nOut)) .build(); MultiLayerNetwork network = new MultiLayerNetwork(rbm); network.init(); return network; } @Test public void testIterationCountAndPresistence() throws IOException { Nd4j.getRandom().setSeed(123); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .iterations(1) .seed(123) .list() .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER).activation("tanh").build()) .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nIn(3).nOut(3).build()) .backprop(true).pretrain(false).build(); MultiLayerNetwork network = new MultiLayerNetwork(conf); network.init(); DataSetIterator iter = new IrisDataSetIterator(50, 150); assertEquals(0, network.getLayerWiseConfigurations().getIterationCount()); network.fit(iter); assertEquals(3, network.getLayerWiseConfigurations().getIterationCount()); iter.reset(); network.fit(iter); assertEquals(6, network.getLayerWiseConfigurations().getIterationCount()); iter.reset(); network.fit(iter.next()); assertEquals(7, network.getLayerWiseConfigurations().getIterationCount()); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ModelSerializer.writeModel(network, baos, true); byte[] asBytes = baos.toByteArray(); ByteArrayInputStream bais = new ByteArrayInputStream(asBytes); MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(bais, true); assertEquals(7, net.getLayerWiseConfigurations().getIterationCount()); } }
deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java
/* * * * Copyright 2015 Skymind,Inc. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * */ package org.deeplearning4j.nn.multilayer; import org.deeplearning4j.base.MnistFetcher; import org.deeplearning4j.berkeley.Pair; import org.deeplearning4j.datasets.fetchers.MnistDataFetcher; import org.deeplearning4j.datasets.iterator.impl.CifarDataSetIterator; import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator; import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator; import org.deeplearning4j.datasets.mnist.MnistManager; import org.deeplearning4j.eval.Evaluation; import org.deeplearning4j.exception.DL4JException; import org.deeplearning4j.nn.api.Layer; import org.deeplearning4j.nn.api.OptimizationAlgorithm; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; import org.deeplearning4j.nn.conf.Updater; import org.deeplearning4j.nn.conf.distribution.NormalDistribution; import org.deeplearning4j.nn.conf.distribution.UniformDistribution; import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.conf.layers.*; import org.deeplearning4j.nn.gradient.DefaultGradient; import org.deeplearning4j.nn.gradient.Gradient; import org.deeplearning4j.nn.layers.BaseOutputLayer; import org.deeplearning4j.nn.params.DefaultParamInitializer; import org.deeplearning4j.nn.params.PretrainParamInitializer; import org.deeplearning4j.nn.weights.WeightInit; import 
org.deeplearning4j.optimize.api.IterationListener; import org.deeplearning4j.optimize.listeners.ScoreIterationListener; import org.deeplearning4j.util.ModelSerializer; import org.junit.Ignore; import org.junit.Test; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.dataset.DataSet; import org.nd4j.linalg.dataset.SplitTestAndTrain; import org.nd4j.linalg.dataset.api.iterator.DataSetIterator; import org.nd4j.linalg.factory.NDArrayFactory; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.heartbeat.Heartbeat; import org.nd4j.linalg.heartbeat.reports.Environment; import org.nd4j.linalg.heartbeat.reports.Event; import org.nd4j.linalg.heartbeat.reports.Task; import org.nd4j.linalg.heartbeat.utils.EnvironmentUtils; import org.nd4j.linalg.heartbeat.utils.TaskUtils; import org.nd4j.linalg.lossfunctions.LossFunctions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.*; import static org.junit.Assert.*; /** * Created by agibsonccc on 12/27/14. 
*/ public class MultiLayerTest { private static final Logger log = LoggerFactory.getLogger(MultiLayerTest.class); @Test public void testSetParams() { Nd4j.MAX_ELEMENTS_PER_SLICE = Integer.MAX_VALUE; Nd4j.MAX_SLICES_TO_PRINT = Integer.MAX_VALUE; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .list() .layer(0, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN) .nIn(4).nOut(3) .activation("tanh") .build()) .layer(1,new RBM.Builder(RBM.HiddenUnit.GAUSSIAN, RBM.VisibleUnit.GAUSSIAN).nIn(3).nOut(2) .build()) .build(); MultiLayerNetwork network3 = new MultiLayerNetwork(conf); network3.init(); INDArray params = network3.params(); INDArray weights = network3.getLayer(0).getParam(DefaultParamInitializer.WEIGHT_KEY).dup(); INDArray bias = network3.getLayer(0).getParam(DefaultParamInitializer.BIAS_KEY).dup(); network3.setParameters(params); assertEquals(weights, network3.getLayer(0).getParam(DefaultParamInitializer.WEIGHT_KEY)); assertEquals(bias,network3.getLayer(0).getParam(DefaultParamInitializer.BIAS_KEY)); INDArray params4 = network3.params(); assertEquals(params, params4); } @Test public void testBatchNorm() { Nd4j.getRandom().setSeed(123); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT) .iterations(5) .seed(123) .list() .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER).activation("tanh").build()) .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).weightInit(WeightInit.XAVIER).activation("tanh").build()) .layer(2, new BatchNormalization.Builder().nOut(2).build()) .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .weightInit(WeightInit.XAVIER) .activation("softmax") .nIn(2).nOut(3).build()) .backprop(true).pretrain(false) .build(); MultiLayerNetwork network = new MultiLayerNetwork(conf); network.init(); network.setListeners(new ScoreIterationListener(1)); DataSetIterator 
iter = new IrisDataSetIterator(150, 150); DataSet next = iter.next(); next.normalizeZeroMeanZeroUnitVariance(); SplitTestAndTrain trainTest = next.splitTestAndTrain(110); network.setLabels(trainTest.getTrain().getLabels()); network.init(); network.fit(trainTest.getTrain()); } @Test public void testBackProp() { Nd4j.getRandom().setSeed(123); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT) .iterations(5) .seed(123) .list() .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER).activation("tanh").build()) .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).weightInit(WeightInit.XAVIER).activation("tanh").build()) .layer(2, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .weightInit(WeightInit.XAVIER) .activation("softmax") .nIn(2).nOut(3).build()) .backprop(true).pretrain(false).build(); MultiLayerNetwork network = new MultiLayerNetwork(conf); network.init(); network.setListeners(new ScoreIterationListener(1)); DataSetIterator iter = new IrisDataSetIterator(150, 150); DataSet next = iter.next(); next.normalizeZeroMeanZeroUnitVariance(); SplitTestAndTrain trainTest = next.splitTestAndTrain(110); network.setInput(trainTest.getTrain().getFeatureMatrix()); network.setLabels(trainTest.getTrain().getLabels()); network.init(); network.fit(trainTest.getTrain()); DataSet test = trainTest.getTest(); Evaluation eval = new Evaluation(); INDArray output = network.output(test.getFeatureMatrix()); eval.eval(test.getLabels(), output); log.info("Score " + eval.stats()); } @Test public void testDbn() throws Exception { Nd4j.MAX_SLICES_TO_PRINT = -1; Nd4j.MAX_ELEMENTS_PER_SLICE = -1; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .iterations(100) .momentum(0.9) .optimizationAlgo(OptimizationAlgorithm.LBFGS) .regularization(true) .l2(2e-4) .list() .layer(0, new RBM.Builder(RBM.HiddenUnit.GAUSSIAN, 
RBM.VisibleUnit.GAUSSIAN) .nIn(4).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0, 1)) .activation("tanh") .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE).build()) .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .nIn(3).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0, 1)) .activation("softmax").build()) .build(); MultiLayerNetwork d = new MultiLayerNetwork(conf); DataSetIterator iter = new IrisDataSetIterator(150, 150); DataSet next = iter.next(); Nd4j.writeTxt(next.getFeatureMatrix(), "iris.txt", "\t"); next.normalizeZeroMeanZeroUnitVariance(); SplitTestAndTrain testAndTrain = next.splitTestAndTrain(110); DataSet train = testAndTrain.getTrain(); d.fit(train); DataSet test = testAndTrain.getTest(); Evaluation eval = new Evaluation(); INDArray output = d.output(test.getFeatureMatrix()); eval.eval(test.getLabels(), output); log.info("Score " + eval.stats()); } @Test public void testGradientWithAsList(){ MultiLayerNetwork net1 = new MultiLayerNetwork(getConf()); MultiLayerNetwork net2 = new MultiLayerNetwork(getConf()); net1.init(); net2.init(); DataSet x1 = new IrisDataSetIterator(1,150).next(); DataSet all = new IrisDataSetIterator(150,150).next(); DataSet x2 = all.asList().get(0); //x1 and x2 contain identical data assertArrayEquals(asFloat(x1.getFeatureMatrix()), asFloat(x2.getFeatureMatrix()), 0.0f); assertArrayEquals(asFloat(x1.getLabels()), asFloat(x2.getLabels()), 0.0f); assertEquals(x1, x2); //Set inputs/outputs so gradient can be calculated: net1.feedForward(x1.getFeatureMatrix()); net2.feedForward(x2.getFeatureMatrix()); ((BaseOutputLayer)net1.getLayer(1)).setLabels(x1.getLabels()); ((BaseOutputLayer)net2.getLayer(1)).setLabels(x2.getLabels()); net1.gradient(); net2.gradient(); } /** * This test intended only to test activateSelectedLayers method, it does not involves fully-working AutoEncoder. 
*/ @Test public void testSelectedActivations() { // Train DeepAutoEncoder on very limited trainset final int numRows = 28; final int numColumns = 28; int seed = 123; int numSamples = 3; int iterations = 1; int listenerFreq = iterations/5; log.info("Load data...."); float[][] trainingData = new float[numSamples][numColumns * numRows]; Arrays.fill(trainingData[0],0.95f); Arrays.fill(trainingData[1],0.5f); Arrays.fill(trainingData[2], 0.05f); log.info("Build model...."); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .seed(seed) .iterations(iterations) .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT) .list() .layer(0, new RBM.Builder().nIn(numRows * numColumns).nOut(1000).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(1, new RBM.Builder().nIn(1000).nOut(500).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(2, new RBM.Builder().nIn(500).nOut(250).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(3, new RBM.Builder().nIn(250).nOut(100).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(4, new RBM.Builder().nIn(100).nOut(30).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) //encoding stops .layer(5, new RBM.Builder().nIn(30).nOut(100).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) //decoding starts .layer(6, new RBM.Builder().nIn(100).nOut(250).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(7, new RBM.Builder().nIn(250).nOut(500).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(8, new RBM.Builder().nIn(500).nOut(1000).lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(9, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nIn(1000).nOut(numRows*numColumns).build()) .pretrain(true).backprop(true) .build(); MultiLayerNetwork model = new MultiLayerNetwork(conf); 
model.init(); model.setListeners(Arrays.asList((IterationListener) new ScoreIterationListener(listenerFreq))); log.info("Train model...."); int cnt = 0; while(cnt < numSamples) { INDArray input = Nd4j.create(trainingData[cnt]); model.fit(new DataSet(input, input)); cnt++; } // Make two separate selective calls log.info("Testing full cycle..."); List<INDArray> comparableResult = model.feedForward(Nd4j.create(trainingData[0])); INDArray encodeResult = model.activateSelectedLayers(0,4, Nd4j.create(trainingData[0])); log.info("Compare feedForward results with selectedActivation"); assertEquals(comparableResult.get(5), encodeResult); INDArray decodeResults = model.activateSelectedLayers(5,9, encodeResult); log.info("Decode results: " + decodeResults.columns() + " " + decodeResults); log.info("Comparable results: " + comparableResult.get(10).columns() + " " + comparableResult.get(10)); assertEquals(comparableResult.get(10), decodeResults); } private static MultiLayerConfiguration getConf(){ MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .seed(12345L) .list() .layer(0, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN) .nIn(4).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1)) .build()) .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .activation("softmax") .nIn(3).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1)) .build()) .build(); return conf; } public static float[] asFloat( INDArray arr ){ int len = arr.length(); float[] f = new float[len]; for( int i=0; i<len; i++ ) f[i] = arr.getFloat(i); return f; } @Test public void testFeedForwardToLayer(){ int nIn = 30; int nOut = 25; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT) .iterations(5).learningRate(1e-3) .list() .layer(0, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN) 
.nIn(nIn).nOut(600) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1e-5)) .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(1, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN) .nIn(600).nOut(250) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1e-5)) .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(2, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN) .nIn(250).nOut(100) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1e-5)) .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .nIn(100).nOut(25) .weightInit(WeightInit.DISTRIBUTION).dist(new NormalDistribution(0, 1e-5)).build()) .build(); MultiLayerNetwork network = new MultiLayerNetwork(conf); network.init(); INDArray input = Nd4j.rand(5,nIn); List<INDArray> activations = network.feedForward(input); assertEquals(5,activations.size()); //4 layers + input List<INDArray> activationsAll = network.feedForwardToLayer(3,input); assertEquals(activations,activationsAll); for( int i=3; i>=0; i-- ){ List<INDArray> activationsPartial = network.feedForwardToLayer(i,input); assertEquals(i+2,activationsPartial.size()); //i+2: for layer 3: input + activations of {0,1,2,3} -> 5 total = 3+2 for( int j=0; j<=i; j++ ){ INDArray exp = activationsAll.get(j); INDArray act = activationsPartial.get(j); assertEquals(exp,act); } } } @Test public void testBackpropGradient(){ //Testing: MultiLayerNetwork.backpropGradient() //i.e., specifically without an output layer int nIn = 10; int nOut = 40; int miniBatch = 5; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .updater(org.deeplearning4j.nn.conf.Updater.SGD) .learningRate(0.1) .list() .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(20).activation("relu").weightInit(WeightInit.XAVIER).build()) 
.layer(1, new DenseLayer.Builder().nIn(20).nOut(30).activation("relu").weightInit(WeightInit.XAVIER).build()) .layer(2, new DenseLayer.Builder().nIn(30).nOut(nOut).activation("relu").weightInit(WeightInit.XAVIER).build()) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); Nd4j.getRandom().setSeed(12345); INDArray eps = Nd4j.rand(miniBatch,nOut); INDArray input = Nd4j.rand(miniBatch, nIn); net.feedForward(input); //Need to feed forward before backprop Pair<Gradient,INDArray> pair = net.backpropGradient(eps); INDArray epsOut = pair.getSecond(); assertNotNull(epsOut); assertArrayEquals(new int[]{miniBatch,nIn},epsOut.shape()); Gradient g = pair.getFirst(); Map<String,INDArray> gradMap = g.gradientForVariable(); assertEquals(6, gradMap.size()); //3 layers, weight + bias gradients for each String[] expKeys = {"0_"+DefaultParamInitializer.WEIGHT_KEY,"0_"+DefaultParamInitializer.BIAS_KEY, "1_"+DefaultParamInitializer.WEIGHT_KEY,"2_"+DefaultParamInitializer.BIAS_KEY, "2_"+DefaultParamInitializer.WEIGHT_KEY,"2_"+DefaultParamInitializer.BIAS_KEY}; Set<String> keys = gradMap.keySet(); for( String s : expKeys ){ assertTrue(keys.contains(s)); } /* System.out.println(pair); //Use updater to go from raw gradients -> updates //Apply learning rate, gradient clipping, adagrad/momentum/rmsprop etc Updater updater = UpdaterCreator.getUpdater(net); updater.update(net, g, 0, miniBatch); StepFunction stepFunction = new NegativeGradientStepFunction(); INDArray params = net.params(); System.out.println(Arrays.toString(params.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 10)).dup().data().asFloat())); stepFunction.step(params, g.gradient()); net.setParams(params); //params() may not be in-place System.out.println(Arrays.toString(params.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 10)).dup().data().asFloat())); */ } @Test public void testLayerNames(){ int nIn = 10; int nOut = 40; List<String> layerNameList = new ArrayList<>(); layerNameList.add("dnn1"); 
layerNameList.add("dnn2"); layerNameList.add("dnn3"); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .updater(org.deeplearning4j.nn.conf.Updater.SGD) .learningRate(0.1) .list() .layer(0, new DenseLayer.Builder().name("dnn1").nIn(nIn).nOut(20).activation("relu").weightInit(WeightInit.XAVIER).build()) .layer(1, new DenseLayer.Builder().name("dnn2").nIn(20).nOut(30).activation("relu").weightInit(WeightInit.XAVIER).build()) .layer(2, new DenseLayer.Builder().name("dnn3").nIn(30).nOut(nOut).activation("softmax").weightInit(WeightInit.XAVIER).build()) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); assertEquals(layerNameList.get(0), net.getLayer(0).conf().getLayer().getLayerName()); assertEquals(layerNameList, net.getLayerNames()); assertEquals("softmax", net.getLayer(layerNameList.get(2)).conf().getLayer().getActivationFunction()); } @Test public void testTranspose(){ MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .iterations(100) .momentum(0.9) .regularization(true) .l2(2e-4) .list() .layer(0, new RBM.Builder(RBM.HiddenUnit.GAUSSIAN, RBM.VisibleUnit.GAUSSIAN) .nIn(4).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0, 1)) .activation("tanh") .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).build()) .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT) .nIn(3).nOut(3) .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0, 1)) .activation("softmax").build()) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); Layer layer = net.getLayer(0); int nParamsBackprop = layer.numParams(true); int nParamsBoth = layer.numParams(false); Layer transposed = layer.transpose(); assertArrayEquals(new int[]{4,3},layer.getParam(DefaultParamInitializer.WEIGHT_KEY).shape()); assertArrayEquals(new int[]{1,3},layer.getParam(DefaultParamInitializer.BIAS_KEY).shape()); assertArrayEquals(new 
int[]{1,4},layer.getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY).shape()); assertArrayEquals(new int[]{3, 4}, transposed.getParam(DefaultParamInitializer.WEIGHT_KEY).shape()); assertArrayEquals(new int[]{1, 4}, transposed.getParam(DefaultParamInitializer.BIAS_KEY).shape()); assertArrayEquals(new int[]{1, 3}, transposed.getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY).shape()); INDArray origWeights = layer.getParam(DefaultParamInitializer.WEIGHT_KEY); INDArray transposedWeights = transposed.getParam(DefaultParamInitializer.WEIGHT_KEY); assertEquals(origWeights.transpose(), transposedWeights); assertEquals(layer.getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY), transposed.getParam(DefaultParamInitializer.BIAS_KEY)); assertEquals(layer.getParam(DefaultParamInitializer.BIAS_KEY),transposed.getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY)); assertEquals(3,((FeedForwardLayer)transposed.conf().getLayer()).getNIn()); assertEquals(4,((FeedForwardLayer)transposed.conf().getLayer()).getNOut()); } @Test public void testScoreExamples(){ Nd4j.getRandom().setSeed(12345); int nIn = 5; int nOut = 6; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .seed(12345) .regularization(true).l1(0.01).l2(0.01) .learningRate(0.1).activation("tanh").weightInit(WeightInit.XAVIER) .list() .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(20).build()) .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build()) .layer(2, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut).build()) .build(); MultiLayerConfiguration confNoReg = new NeuralNetConfiguration.Builder() .seed(12345) .regularization(false) .learningRate(0.1).activation("tanh").weightInit(WeightInit.XAVIER) .list() .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(20).build()) .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build()) .layer(2, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut).build()) .build(); MultiLayerNetwork 
net = new MultiLayerNetwork(conf); net.init(); MultiLayerNetwork netNoReg = new MultiLayerNetwork(confNoReg); netNoReg.init(); netNoReg.setParameters(net.params().dup()); //Score single example, and compare to scoreExamples: INDArray input = Nd4j.rand(3,nIn); INDArray output = Nd4j.rand(3,nOut); DataSet ds = new DataSet(input,output); INDArray scoresWithRegularization = net.scoreExamples(ds,true); INDArray scoresNoRegularization = net.scoreExamples(ds,false); assertArrayEquals(new int[]{3,1},scoresWithRegularization.shape()); assertArrayEquals(new int[]{3,1},scoresNoRegularization.shape()); for( int i=0; i<3; i++ ){ DataSet singleEx = new DataSet(input.getRow(i),output.getRow(i)); double score = net.score(singleEx); double scoreNoReg = netNoReg.score(singleEx); double scoreUsingScoreExamples = scoresWithRegularization.getDouble(i); double scoreUsingScoreExamplesNoReg = scoresNoRegularization.getDouble(i); assertEquals(score,scoreUsingScoreExamples,1e-4); assertEquals(scoreNoReg,scoreUsingScoreExamplesNoReg,1e-4); assertTrue(scoreUsingScoreExamples > scoreUsingScoreExamplesNoReg); //Regularization term increases score // System.out.println(score + "\t" + scoreUsingScoreExamples + "\t|\t" + scoreNoReg + "\t" + scoreUsingScoreExamplesNoReg); } } @Test public void testDataSetScore(){ Nd4j.getRandom().setSeed(12345); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .regularization(false) .learningRate(1.0) .weightInit(WeightInit.XAVIER) .seed(12345L) .list() .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation("sigmoid").build()) .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nIn(3).nOut(3).build()) .pretrain(false).backprop(true) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); INDArray in = Nd4j.create(new double[]{1.0,2.0,3.0,4.0}); INDArray out = Nd4j.create(new double[]{1,0,0}); double score = net.score(new DataSet(in,out)); } @Test public void testDataSetScoreCNN(){ 
int miniBatch = 3; int depth = 2; int width = 3; int height = 3; int nOut = 2; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .regularization(false) .learningRate(1.0) .seed(12345L) .list() .layer(0, new ConvolutionLayer.Builder(2,2).nOut(1).build()) .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nOut(2).build()) .setInputType(InputType.convolutionalFlat(height,width,depth)) .pretrain(false).backprop(true).build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); Nd4j.getRandom().setSeed(12345); Random r = new Random(12345); INDArray input = Nd4j.rand(miniBatch,depth*width*height); INDArray labels = Nd4j.create(miniBatch,nOut); for( int i=0; i<miniBatch; i++ ){ labels.putScalar(new int[]{i,r.nextInt(nOut)},1.0); } double score = net.score(new DataSet(input,labels)); } @Test public void testPredict() throws Exception{ Nd4j.getRandom().setSeed(12345); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .regularization(false) .learningRate(1.0) .weightInit(WeightInit.XAVIER) .seed(12345L) .list() .layer(0, new DenseLayer.Builder().nIn(784).nOut(50).activation("relu").build()) .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nIn(50).nOut(10).build()) .pretrain(false).backprop(true) .setInputType(InputType.convolutional(28,28,1)) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); DataSetIterator ds = new MnistDataSetIterator(10,10); net.fit(ds); DataSetIterator testDs = new MnistDataSetIterator(1,1); DataSet testData = testDs.next(); testData.setLabelNames(Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")); String actualLables = testData.getLabelName(0); List<String> prediction = net.predict(testData); assertTrue(actualLables != null); assertTrue(prediction.get(0) != null); } @Test @Ignore public void testCid() throws Exception { System.out.println(EnvironmentUtils.buildCId()); Environment 
environment = EnvironmentUtils.buildEnvironment(); environment.setSerialVersionID(EnvironmentUtils.buildCId()); Task task = TaskUtils.buildTask(Nd4j.create(new double[]{1,2,3,4,5,6})); Heartbeat.getInstance().reportEvent(Event.STANDALONE, environment, task); Thread.sleep(25000); } @Test public void testOutput() throws Exception{ Nd4j.getRandom().setSeed(12345); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .regularization(false) .learningRate(1.0) .weightInit(WeightInit.XAVIER) .seed(12345L) .list() .layer(0, new DenseLayer.Builder().nIn(784).nOut(50).activation("relu").build()) .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nIn(50).nOut(10).build()) .pretrain(false).backprop(true) .setInputType(InputType.convolutional(28,28,1)) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); DataSetIterator fullData = new MnistDataSetIterator(1,2); net.fit(fullData); fullData.reset(); DataSet expectedSet = fullData.next(2); INDArray expectedOut = net.output(expectedSet.getFeatureMatrix(), false); fullData.reset(); INDArray actualOut = net.output(fullData); assertEquals(expectedOut, actualOut); } @Test public void testGradientUpdate() throws Exception{ DataSetIterator iter = new IrisDataSetIterator(1,1); Gradient expectedGradient = new DefaultGradient(); expectedGradient.setGradientFor("0_W", Nd4j.ones(4,5)); expectedGradient.setGradientFor("0_b", Nd4j.ones(1,5)); expectedGradient.setGradientFor("1_W", Nd4j.ones(5,3)); expectedGradient.setGradientFor("1_b", Nd4j.ones(1,3)); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .updater(org.deeplearning4j.nn.conf.Updater.SGD) .learningRate(1) .activation("relu").weightInit(WeightInit.XAVIER) .list() .layer(0, new DenseLayer.Builder().name("dnn1").nIn(4).nOut(5).build()) .layer(1, new OutputLayer.Builder().name("output").nIn(5).nOut(3).activation("softmax").weightInit(WeightInit.XAVIER).build()) .backprop(true).pretrain(false) 
.build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); net.fit(iter.next()); // TODO validate actual layer gradientView - issue getting var out of BaseLayer w/o adding MLN getter that gets confused with local gradient vars Gradient actualGradient = net.gradient; assertNotEquals(expectedGradient.getGradientFor("0_W"), actualGradient.getGradientFor("0_W")); net.update(expectedGradient); actualGradient = net.gradient; assertEquals(expectedGradient.getGradientFor("0_W"), actualGradient.getGradientFor("0_W")); // Update params with set net.setParam("0_W", Nd4j.ones(4,5)); net.setParam("0_b", Nd4j.ones(1,5)); net.setParam("1_W", Nd4j.ones(5,3)); net.setParam("1_b", Nd4j.ones(1,3)); INDArray actualParams = net.params(); // Confirm params assertEquals(expectedGradient.gradient(), actualParams); net.update(expectedGradient); actualParams = net.params(); assertEquals(Nd4j.ones(1,43).addi(1), actualParams); } @Test(expected = DL4JException.class) public void testCnnInvalidData(){ int miniBatch = 3; int depth = 2; int width = 5; int height = 5; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .list() .layer(0, new ConvolutionLayer.Builder().kernelSize(2,2).stride(1,1).padding(0,0).nIn(2).nOut(2).build()) .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nOut(2).build()) .setInputType(InputType.convolutional(height,width,depth)) .pretrain(false).backprop(true).build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); INDArray inputWrongDepth = Nd4j.rand(new int[]{miniBatch,5,height,width}); //Order: examples, channels, height, width net.feedForward(inputWrongDepth); } @Test public void testApplyingPreTrainConfigAndParams(){ int nIn = 10; int nOut = 10; // Test pretrain true MultiLayerNetwork rbmPre = getRBMModel(true, nIn, nOut); assertTrue(rbmPre.conf().isPretrain()); // check on the network assertTrue(rbmPre.getLayer(0).conf().isPretrain()); // check pretrain layer 
assertFalse(rbmPre.getLayer(1).conf().isPretrain()); // check none pretrain layer int actualNP = rbmPre.numParams(); assertEquals(2 * (nIn * nOut + nOut) + nIn, actualNP); INDArray params = rbmPre.params(); assertEquals(params.length(), actualNP); // check num params Map<String, INDArray> paramTable = rbmPre.paramTable(); assertTrue(paramTable.containsKey("0_vb")); // check vb exists for pretrain layer rbmPre.setParam("0_vb", Nd4j.ones(10)); params = rbmPre.getParam("0_vb"); assertEquals(Nd4j.ones(10), params); // check set params for vb // Test pretrain false, expect same for true because its not changed when applying update MultiLayerNetwork rbmNoPre = getRBMModel(false, nIn, nOut); assertFalse(rbmNoPre.conf().isPretrain()); assertFalse(rbmNoPre.getLayer(0).conf().isPretrain()); assertFalse(rbmPre.getLayer(1).conf().isPretrain()); actualNP = rbmNoPre.numParams(); assertEquals(2 * (nIn * nOut + nOut) + nIn, actualNP); params = rbmNoPre.params(); assertEquals(params.length(), actualNP); paramTable = rbmPre.paramTable(); assertTrue(paramTable.containsKey("0_vb")); } @Test public void testLayerPreTrainSetFalseAfterPreTrain(){ INDArray input = Nd4j.linspace(1, 10, 10); int nIn = 10; int nOut = 10; MultiLayerNetwork rbmPre = getRBMModel(true, nIn, nOut); rbmPre.fit(input); assertTrue(rbmPre.conf().isPretrain()); // check on the network assertFalse(rbmPre.getLayer(0).conf().isPretrain()); // check pretrain layer assertFalse(rbmPre.getLayer(1).conf().isPretrain()); // check none pretrain layer } public MultiLayerNetwork getRBMModel(boolean preTrain, int nIn, int nOut){ MultiLayerConfiguration rbm = new NeuralNetConfiguration.Builder() .seed(42) .iterations(1) .updater(Updater.NONE) .epsilon(1) .weightInit(WeightInit.UNIFORM) .list( new org.deeplearning4j.nn.conf.layers.RBM.Builder() .lossFunction(LossFunctions.LossFunction.COSINE_PROXIMITY) .activation("identity") .nOut(nIn).build(), new 
org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.COSINE_PROXIMITY) .activation("identity") .nOut(nOut).build() ) .pretrain(preTrain) .setInputType(InputType.feedForward(nOut)) .build(); MultiLayerNetwork network = new MultiLayerNetwork(rbm); network.init(); return network; } @Test public void testIterationCountAndPresistence() throws IOException { Nd4j.getRandom().setSeed(123); MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .iterations(1) .seed(123) .list() .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).weightInit(WeightInit.XAVIER).activation("tanh").build()) .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax").nIn(3).nOut(3).build()) .backprop(true).pretrain(false).build(); MultiLayerNetwork network = new MultiLayerNetwork(conf); network.init(); DataSetIterator iter = new IrisDataSetIterator(50, 150); assertEquals(0, network.getLayerWiseConfigurations().getIterationCount()); network.fit(iter); assertEquals(3, network.getLayerWiseConfigurations().getIterationCount()); iter.reset(); network.fit(iter); assertEquals(6, network.getLayerWiseConfigurations().getIterationCount()); iter.reset(); network.fit(iter.next()); assertEquals(7, network.getLayerWiseConfigurations().getIterationCount()); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ModelSerializer.writeModel(network, baos, true); byte[] asBytes = baos.toByteArray(); ByteArrayInputStream bais = new ByteArrayInputStream(asBytes); MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(bais, true); assertEquals(7, net.getDefaultConfiguration().getIterationCount()); } }
Fix test for iteration count persistence
deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java
Fix test for iteration count persistence
<ide><path>eeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java <ide> <ide> ByteArrayInputStream bais = new ByteArrayInputStream(asBytes); <ide> MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(bais, true); <del> assertEquals(7, net.getDefaultConfiguration().getIterationCount()); <add> assertEquals(7, net.getLayerWiseConfigurations().getIterationCount()); <ide> } <ide> }
Java
bsd-2-clause
bf8f35c7c1585d06405972448839cde06abc09b7
0
archiecobbs/com4j,archiecobbs/com4j,kohsuke/com4j,kohsuke/com4j,kohsuke/com4j,archiecobbs/com4j,kohsuke/com4j,archiecobbs/com4j
package com4j; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map.Entry; import java.util.NoSuchElementException; /** * Collection of living objects of a {@link ComThread}. This collection does not hold any strong references to the objects. This way the garbage collector can * clean up the objects. This class uses a special combination of a {@link HashMap} and {@link LinkedList}s to provide a (almost) constant runtime performance. * @author Michael Schnell (ScM) */ /*package*/class LiveObjectCollection { /** * The objects of this collection are weakly referenced and stored in linked lists. One list for every pointer value. This guarantees to have constant times * for {@link #add(Com4jObject)} and {@link #remove(Com4jObject)}, assuming every COM object has a different pointer value. Since this is not necessarily the * case for all COM references ({@link Com4jObject#queryInterface(Class)} often returns the same pointer value) the runtime of {@link #remove(Com4jObject)} is * slightly slower, since we need to search the object in a (short) linked list. 
*/ private HashMap<Integer, LinkedList<WeakReference<Com4jObject>>> objects = new HashMap<Integer, LinkedList<WeakReference<Com4jObject>>>(20); /** The count of objects in this collection */ private int count = 0; /** * Adds the given object to the collection * @param object the object to add */ public synchronized void add(Com4jObject object) { LinkedList<WeakReference<Com4jObject>> list = objects.get(object.getPtr()); if (list == null) { list = new LinkedList<WeakReference<Com4jObject>>(); objects.put(object.getPtr(), list); } list.add(new WeakReference<Com4jObject>(object)); count++; } /** * Removes the given object from the collection * @param object the object to remove */ public synchronized void remove(Com4jObject object) { int key = object.getPtr(); List<WeakReference<Com4jObject>> list = objects.get(key); if (list == null) { throw new NoSuchElementException("The Com4jObject " + object + " is not in this collection!"); } Iterator<WeakReference<Com4jObject>> it = list.iterator(); while (it.hasNext()) { Com4jObject colObject = it.next().get(); if (colObject == null || colObject == object) { // if colObject == null, then colObject was already finalized! This is the object we want to remove! // There should be only one finalized object for every call of remove, because every finalization of a Wrapper calls dispose() -> calls // dispose0() -> calls thread.removeLiveObject() -> calls this method. it.remove(); count--; break; } } if (list.isEmpty()) objects.remove(key); // the list is now empty } /** * Returns the count of objects in this collection. * @return the count of objects in this collection */ public synchronized int getCount() { return count; } /** * Returns a snapshot of the collection as a list. 
* @return a snapshot of the collection as a list */ public synchronized List<WeakReference<Com4jObject>> getSnapshot() { ArrayList<WeakReference<Com4jObject>> snapshot = new ArrayList<WeakReference<Com4jObject>>(count); Iterator<Entry<Integer, LinkedList<WeakReference<Com4jObject>>>> i = objects.entrySet().iterator(); while (i.hasNext()) { Entry<Integer, LinkedList<WeakReference<Com4jObject>>> e = i.next(); // clean up the list since we are walking the list anyway for (Iterator<WeakReference<Com4jObject>> j = e.getValue().iterator(); j.hasNext();) { if (j.next().get() == null) { j.remove(); count--; } } if (e.getValue().isEmpty()) { i.remove(); continue; } snapshot.addAll(e.getValue()); } return snapshot; } /** * Returns whether this collection is empty. */ public boolean isEmpty() { return getCount()==0; } }
runtime/src/com4j/LiveObjectCollection.java
package com4j; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map.Entry; import java.util.NoSuchElementException; /** * Collection of living objects of a {@link ComThread}. This collection does not hold any strong references to the objects. This way the garbage collector can * clean up the objects. This class uses a special combination of a {@link HashMap} and {@link LinkedList}s to provide a (almost) constant runtime performance. * @author Michael Schnell (ScM) */ /*package*/class LiveObjectCollection { /** * The objects of this collection are weakly referenced and stored in linked lists. One list for every pointer value. This guarantees to have constant times * for {@link #add(Com4jObject)} and {@link #remove(Com4jObject)}, assuming every COM object has a different pointer value. Since this is not necessarily the * case for all COM references ({@link Com4jObject#queryInterface(Class)} often returns the same pointer value) the runtime of {@link #remove(Com4jObject)} is * slightly slower, since we need to search the object in a (short) linked list. 
*/ private HashMap<Integer, LinkedList<WeakReference<Com4jObject>>> objects = new HashMap<Integer, LinkedList<WeakReference<Com4jObject>>>(20); /** The count of objects in this collection */ private int count = 0; /** * Adds the given object to the collection * @param object the object to add */ public synchronized void add(Com4jObject object) { LinkedList<WeakReference<Com4jObject>> list = objects.get(object.getPtr()); if (list == null) { list = new LinkedList<WeakReference<Com4jObject>>(); objects.put(object.getPtr(), list); } list.add(new WeakReference<Com4jObject>(object)); count++; } /** * Removes the given object from the collection * @param object the object to remove */ public synchronized void remove(Com4jObject object) { int key = object.getPtr(); List<WeakReference<Com4jObject>> list = objects.get(key); if (list == null) { throw new NoSuchElementException("The Com4jObject " + object + " is not in this collection!"); } Iterator<WeakReference<Com4jObject>> it = list.iterator(); while (it.hasNext()) { Com4jObject colObject = it.next().get(); if (colObject == null || colObject == object) { // if colObject == null, then colObject was already finalized! This is the object we want to remove! // There should be only one finalized object for every call of remove, because every finalization of a Wrapper calls dispose() -> calls // dispose0() -> calls thread.removeLiveObject() -> calls this method. it.remove(); count--; break; } } if (list.isEmpty()) objects.remove(key); // the list is now empty } /** * Returns the count of objects in this collection. * @return the count of objects in this collection */ public synchronized int getCount() { return count; } /** * Returns a snapshot of the collection as a list. 
* @return a snapshot of the collection as a list */ public synchronized List<WeakReference<Com4jObject>> getSnapshot() { ArrayList<WeakReference<Com4jObject>> snapshot = new ArrayList<WeakReference<Com4jObject>>(count); Iterator<Entry<Integer, LinkedList<WeakReference<Com4jObject>>>> i = objects.entrySet().iterator(); while (i.hasNext()) { Entry<Integer, LinkedList<WeakReference<Com4jObject>>> e = i.next(); // clean up the list since we are walking the list anyway for (Iterator<WeakReference<Com4jObject>> j = e.getValue().iterator(); j.hasNext();) { if (j.next().get() == null) { j.remove(); count--; } } if (e.getValue().isEmpty()) { i.remove(); continue; } snapshot.addAll(e.getValue()); } return snapshot; } /** * Returns whether this collection is empty. * @return whether this collection is empty */ public boolean isEmpty() { return count == 0; } }
access to the count needs to be synchronized git-svn-id: 4ae09e382c2a76b6ebc3f2fe23f30bdcc02c4078@363 590aa308-3dd6-4388-9d27-cebafe3e2bdc
runtime/src/com4j/LiveObjectCollection.java
access to the count needs to be synchronized
<ide><path>untime/src/com4j/LiveObjectCollection.java <ide> } <ide> <ide> /** <del> * Returns whether this collection is empty. <del> * @return whether this collection is empty <del> */ <del> public boolean isEmpty() { <del> return count == 0; <del> } <add> * Returns whether this collection is empty. <add> */ <add> public boolean isEmpty() { <add> return getCount()==0; <add> } <ide> }
JavaScript
apache-2.0
618ff6201e478aeed9d222f1bfad98d34176bd45
0
monetate/closure-compiler,Pimm/closure-compiler,anomaly/closure-compiler,MatrixFrog/closure-compiler,GerHobbelt/closure-compiler,nawawi/closure-compiler,google/closure-compiler,shantanusharma/closure-compiler,MatrixFrog/closure-compiler,anomaly/closure-compiler,mprobst/closure-compiler,GerHobbelt/closure-compiler,monetate/closure-compiler,monetate/closure-compiler,google/closure-compiler,Dominator008/closure-compiler,vobruba-martin/closure-compiler,tdelmas/closure-compiler,tiobe/closure-compiler,monetate/closure-compiler,MatrixFrog/closure-compiler,mprobst/closure-compiler,Yannic/closure-compiler,Pimm/closure-compiler,GerHobbelt/closure-compiler,ChadKillingsworth/closure-compiler,GerHobbelt/closure-compiler,tiobe/closure-compiler,shantanusharma/closure-compiler,tdelmas/closure-compiler,google/closure-compiler,Yannic/closure-compiler,google/closure-compiler,ChadKillingsworth/closure-compiler,Yannic/closure-compiler,brad4d/closure-compiler,tiobe/closure-compiler,Dominator008/closure-compiler,anomaly/closure-compiler,nawawi/closure-compiler,MatrixFrog/closure-compiler,shantanusharma/closure-compiler,Yannic/closure-compiler,brad4d/closure-compiler,mprobst/closure-compiler,mprobst/closure-compiler,brad4d/closure-compiler,anomaly/closure-compiler,tiobe/closure-compiler,nawawi/closure-compiler,ChadKillingsworth/closure-compiler,ChadKillingsworth/closure-compiler,tdelmas/closure-compiler,Dominator008/closure-compiler,tdelmas/closure-compiler,nawawi/closure-compiler,vobruba-martin/closure-compiler,Pimm/closure-compiler,vobruba-martin/closure-compiler,shantanusharma/closure-compiler,vobruba-martin/closure-compiler
/* * Copyright 2016 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @fileoverview Externs definitions for the Sinon library, 1.17 branch. * * Note that this file is incomplete. * * This file defines some virtual types, please don't use these directly, but * follow the official API guidelines. * * @externs * @see http://sinonjs.org/docs/ */ var sinon; sinon.sandbox; /** * @param {!Object=} opt_config * @return {!SinonSandbox} */ sinon.sandbox.create = function(opt_config) {}; /** * @constructor */ var SinonSandbox = function() {}; SinonSandbox.prototype.restore = function() {}; /** * @type {!SinonFakeServer|undefined} */ SinonSandbox.prototype.server; /** * @return {!SinonStub} */ SinonSandbox.prototype.stub = function() {}; /** * @constructor */ var SinonStub = function() {}; /** * @param {*} obj * @return {!SinonStub} */ SinonStub.prototype.returns = function(obj) {}; sinon.fakeServer; /** * @param {!Object=} opt_config * @return {!SinonFakeServer} */ sinon.fakeServer.create = function(opt_config) {}; /** * @constructor */ var SinonFakeServer = function() {}; /** * @type {!Array<!SinonFakeXmlHttpRequest>} */ SinonFakeServer.prototype.requests; /** * @type {boolean|undefined} */ SinonFakeServer.prototype.respondImmediately; SinonFakeServer.prototype.restore = function() {}; /** * Note: incomplete definition because it is tricky. 
* @param {...*} var_args */ SinonFakeServer.prototype.respondWith = function(var_args) {}; /** * @constructor * @extends {XMLHttpRequest} */ var SinonFakeXmlHttpRequest = function() {}; /** * @type {?string} */ SinonFakeXmlHttpRequest.prototype.requestBody; /** * @param {?number} status * @param {?Object<string, string>} headers * @param {?string} body */ SinonFakeXmlHttpRequest.prototype.respond = function(status, headers, body) {};
contrib/externs/sinon-1.17.js
/* * Copyright 2016 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @fileoverview Externs definitions for the Sinon library, 1.17 branch. * * Note that this file is incomplete. * * This file defines some virtual types, please don't use these directly, but * follow the official API guidelines. * * @externs * @see http://sinonjs.org/docs/ */ var sinon; sinon.sandbox; /** * @param {!Object=} opt_config * @return {!SinonSandbox} */ sinon.sandbox.create = function(opt_config) {}; /** * @constructor */ var SinonSandbox = function() {}; SinonSandbox.prototype.restore = function() {}; /** * @type {!SinonFakeServer|undefined} */ SinonSandbox.prototype.server; /** * @return {!SinonStub} */ SinonSandbox.prototype.stub = function() {}; /** * @constructor */ var SinonStub = function() {}; sinon.fakeServer; /** * @param {!Object=} opt_config * @return {!SinonFakeServer} */ sinon.fakeServer.create = function(opt_config) {}; /** * @constructor */ var SinonFakeServer = function() {}; /** * @type {!Array<!SinonFakeXmlHttpRequest>} */ SinonFakeServer.prototype.requests; /** * @type {boolean|undefined} */ SinonFakeServer.prototype.respondImmediately; SinonFakeServer.prototype.restore = function() {}; /** * Note: incomplete definition because it is tricky. 
* @param {...*} var_args */ SinonFakeServer.prototype.respondWith = function(var_args) {}; /** * @constructor * @extends {XMLHttpRequest} */ var SinonFakeXmlHttpRequest = function() {}; /** * @type {?string} */ SinonFakeXmlHttpRequest.prototype.requestBody; /** * @param {?number} status * @param {?Object<string, string>} headers * @param {?string} body */ SinonFakeXmlHttpRequest.prototype.respond = function(status, headers, body) {};
Add returns method to the SinonStub type ------------- Created by MOE: https://github.com/google/moe MOE_MIGRATED_REVID=141454919
contrib/externs/sinon-1.17.js
Add returns method to the SinonStub type
<ide><path>ontrib/externs/sinon-1.17.js <ide> */ <ide> var SinonStub = function() {}; <ide> <add>/** <add> * @param {*} obj <add> * @return {!SinonStub} <add> */ <add>SinonStub.prototype.returns = function(obj) {}; <ide> <ide> <ide> sinon.fakeServer;
Java
mit
4a24a616c5439bd745e49b766363ed05d529653f
0
sqlancer/sqlancer,sqlancer/sqlancer
package lama.sqlite3; import java.io.FileWriter; import java.io.IOException; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import lama.DatabaseFacade; import lama.DatabaseProvider; import lama.IgnoreMeException; import lama.Main.QueryManager; import lama.Main.StateLogger; import lama.MainOptions; import lama.Query; import lama.QueryAdapter; import lama.Randomly; import lama.StateToReproduce; import lama.StateToReproduce.SQLite3StateToReproduce; import lama.sqlite3.dml.SQLite3DeleteGenerator; import lama.sqlite3.dml.SQLite3InsertGenerator; import lama.sqlite3.dml.SQLite3UpdateGenerator; import lama.sqlite3.gen.SQLite3AnalyzeGenerator; import lama.sqlite3.gen.SQLite3Common; import lama.sqlite3.gen.SQLite3CreateVirtualRtreeTabelGenerator; import lama.sqlite3.gen.SQLite3ExplainGenerator; import lama.sqlite3.gen.SQLite3PragmaGenerator; import lama.sqlite3.gen.SQLite3ReindexGenerator; import lama.sqlite3.gen.SQLite3TransactionGenerator; import lama.sqlite3.gen.SQLite3VacuumGenerator; import lama.sqlite3.gen.SQLite3VirtualFTSTableCommandGenerator; import lama.sqlite3.gen.ddl.SQLite3AlterTable; import lama.sqlite3.gen.ddl.SQLite3CreateTriggerGenerator; import lama.sqlite3.gen.ddl.SQLite3CreateVirtualFTSTableGenerator; import lama.sqlite3.gen.ddl.SQLite3DropIndexGenerator; import lama.sqlite3.gen.ddl.SQLite3DropTableGenerator; import lama.sqlite3.gen.ddl.SQLite3IndexGenerator; import lama.sqlite3.gen.ddl.SQLite3TableGenerator; import lama.sqlite3.gen.ddl.SQLite3ViewGenerator; import lama.sqlite3.queries.SQLite3MetamorphicQuerySynthesizer; import lama.sqlite3.schema.SQLite3Schema; import lama.sqlite3.schema.SQLite3Schema.Column; import lama.sqlite3.schema.SQLite3Schema.Table; import lama.sqlite3.schema.SQLite3Schema.Table.TableKind; public class SQLite3Provider implements DatabaseProvider { public 
static enum Action { PRAGMA { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3PragmaGenerator.insertPragma(g.getConnection(), g.getState(), g.getRandomly()); } }, INDEX { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3IndexGenerator.insertIndex(g.getSchema(), g.getState(), g.getRandomly()); } }, INSERT { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { Table randomTable = g.getSchema().getRandomTableOrBailout(t -> !t.isView()); return SQLite3InsertGenerator.insertRow(randomTable, g.getConnection(), g.getRandomly()); } }, VACUUM { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3VacuumGenerator.executeVacuum(); } }, REINDEX { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3ReindexGenerator.executeReindex(g.getConnection(), g.getState(), g.getSchema()); } }, ANALYZE { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3AnalyzeGenerator.generateAnalyze(g.getSchema()); } }, DELETE { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3DeleteGenerator.deleteContent(g.getSchema().getRandomTableNoViewOrBailout(), g.getConnection(), g.getRandomly()); } }, TRANSACTION_START { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3TransactionGenerator.generateBeginTransaction(g.getConnection(), g.getState()); } }, ALTER { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3AlterTable.alterTable(g.getSchema(), g.getConnection(), g.getState(), g.getRandomly()); } }, DROP_INDEX { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3DropIndexGenerator.dropIndex(g.getConnection(), g.getState(), g.getSchema(), g.getRandomly()); } }, UPDATE { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3UpdateGenerator.updateRow(g.getSchema().getRandomTableNoViewOrBailout(), g.getRandomly()); } }, 
ROLLBACK_TRANSACTION() { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3TransactionGenerator.generateRollbackTransaction(g.getConnection(), g.getState()); } }, COMMIT { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3TransactionGenerator.generateCommit(g.getConnection(), g.getState()); } }, DROP_TABLE { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3DropTableGenerator.dropTable(g.getSchema()); } }, DROP_VIEW { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3ViewGenerator.dropView(SQLite3Schema.fromConnection(g.getConnection())); } }, EXPLAIN { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3ExplainGenerator.explain(g); } }, CHECK_RTREE_TABLE { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { Table table = g.getSchema().getRandomTableOrBailout(t -> t.getName().startsWith("r")); return new QueryAdapter(String.format("SELECT rtreecheck('%s');", table.getName())); } }, VIRTUAL_TABLE_ACTION { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return new SQLite3VirtualFTSTableCommandGenerator(g.getSchema(), g.getRandomly()).generate(); } }, CREATE_VIEW { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3ViewGenerator.generate(g.getSchema(), g.getConnection(), g.getRandomly(), g.getState(), g); } }, CREATE_TRIGGER { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3CreateTriggerGenerator.create(g.getSchema(), g.getRandomly(), g.getConnection()); } }, MANIPULATE_STAT_TABLE { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { List<Column> columns = new ArrayList<>(); Table t = new Table("sqlite_stat1", columns, TableKind.MAIN, false, 1, false, false); if (Randomly.getBoolean()) { return SQLite3DeleteGenerator.deleteContent(t, g.getConnection(), g.getRandomly()); } else { 
StringBuilder sb = new StringBuilder(); sb.append("INSERT OR IGNORE INTO sqlite_stat1"); String indexName; try (Statement stat = g.getConnection().createStatement()) { try (ResultSet rs = stat.executeQuery( "SELECT name FROM sqlite_master WHERE type='index' ORDER BY RANDOM() LIMIT 1;")) { if (rs.isClosed()) { throw new IgnoreMeException(); } indexName = rs.getString("name"); } ; } sb.append(" VALUES"); sb.append("('"); sb.append(g.getSchema().getRandomTable().getName()); sb.append("', "); sb.append("'"); if (Randomly.getBoolean()) { sb.append(indexName); } else { sb.append(g.getSchema().getRandomTable().getName()); } sb.append("'"); sb.append(", '"); for (int i = 0; i < Randomly.smallNumber(); i++) { if (i != 0) { sb.append(" "); } if (Randomly.getBoolean()) { sb.append(g.getRandomly().getInteger()); } else { sb.append(Randomly.smallNumber()); } } if (Randomly.getBoolean()) { sb.append(" sz="); sb.append(g.getRandomly().getInteger()); } if (Randomly.getBoolean()) { sb.append(" unordered"); } if (Randomly.getBoolean()) { sb.append(" noskipscan"); } sb.append("')"); return new QueryAdapter(sb.toString(), Arrays.asList("no such table")); } } }; public abstract Query getQuery(SQLite3GlobalState state) throws SQLException; } public static final int NR_INSERT_ROW_TRIES = 30; private static final int NR_QUERIES_PER_TABLE = 100000; public static final int EXPRESSION_MAX_DEPTH = 3; public static final boolean ALLOW_FLOATING_POINT_FP = true; public static final boolean MUST_KNOW_RESULT = false; private SQLite3StateToReproduce state; private String databaseName; public static class SQLite3GlobalState { private Connection con; private SQLite3Schema schema; private SQLite3StateToReproduce state; private Randomly r; public Connection getConnection() { return con; } public SQLite3Schema getSchema() { return schema; } public void setConnection(Connection con) { this.con = con; } public void setSchema(SQLite3Schema schema) { this.schema = schema; } public void 
setState(SQLite3StateToReproduce state) { this.state = state; } public SQLite3StateToReproduce getState() { return state; } public Randomly getRandomly() { return r; } public void setRandomly(Randomly r) { this.r = r; } } public static class SQLite3SpecialStringGenerator { private enum Options { TIME_DATE_REGEX, NOW, DATE_TIME, TIME_MODIFIER } public static String generate() { StringBuilder sb = new StringBuilder(); switch (Randomly.fromOptions(Options.values())) { case TIME_DATE_REGEX: // https://www.sqlite.org/lang_datefunc.html return Randomly.fromOptions("%d", "%f", "%H", "%j", "%J", "%m", "%M", "%s", "%S", "%w", "%W", "%Y", "%%"); case NOW: return "now"; case DATE_TIME: long notCachedInteger = Randomly.getNotCachedInteger(1, 10); for (int i = 0; i < notCachedInteger; i++) { if (Randomly.getBoolean()) { sb.append(Randomly.getNonCachedInteger()); } else { sb.append(Randomly.getNotCachedInteger(0, 2000)); } sb.append(Randomly.fromOptions(":", "-", " ", "T")); } return sb.toString(); case TIME_MODIFIER: sb.append(Randomly.fromOptions("days", "hours", "minutes", "seconds", "months", "years", "start of month", "start of year", "start of day", "weekday", "unixepoch", "utc")); return sb.toString(); default: throw new AssertionError(); } } } private final SQLite3GlobalState globalState = new SQLite3GlobalState(); private enum TableType { NORMAL, FTS, RTREE } @Override public void generateAndTestDatabase(String databaseName, Connection con, StateLogger logger, StateToReproduce state, QueryManager manager, MainOptions options) throws SQLException { this.databaseName = databaseName; Randomly r = new Randomly(SQLite3SpecialStringGenerator::generate); globalState.setRandomly(r); SQLite3Schema newSchema = null; this.state = (SQLite3StateToReproduce) state; globalState.setConnection(con); globalState.setState((SQLite3StateToReproduce) state); addSensiblePragmaDefaults(con); int nrTablesToCreate = 1; if (Randomly.getBoolean()) { nrTablesToCreate++; } while 
(Randomly.getBooleanWithSmallProbability()) { nrTablesToCreate++; } int i = 0; newSchema = SQLite3Schema.fromConnection(con); do { globalState.setSchema(newSchema); Query tableQuery = getTableQuery(state, r, newSchema, i); manager.execute(tableQuery); i++; newSchema = SQLite3Schema.fromConnection(con); } while (newSchema.getDatabaseTables().size() != nrTablesToCreate); assert newSchema.getTables().getTables().size() == nrTablesToCreate; for (Table table : newSchema.getDatabaseTables()) { Query q = new QueryAdapter("SELECT * FROM " + table.getName(), Arrays.asList("generated column loop", "integer overflow")); if (!q.execute(con)) { throw new IgnoreMeException(); } } int[] nrRemaining = new int[Action.values().length]; List<Action> actions = new ArrayList<>(); int total = 0; for (i = 0; i < Action.values().length; i++) { Action action = Action.values()[i]; int nrPerformed = 0; switch (action) { case DROP_VIEW: case CREATE_VIEW: nrPerformed = r.getInteger(0, 0); break; case CREATE_TRIGGER: case DELETE: case DROP_INDEX: nrPerformed = r.getInteger(0, 2); break; case ALTER: case EXPLAIN: case DROP_TABLE: nrPerformed = r.getInteger(0, 0); break; case VACUUM: case CHECK_RTREE_TABLE: nrPerformed = r.getInteger(0, 3); break; case INSERT: nrPerformed = r.getInteger(0, NR_INSERT_ROW_TRIES); break; case MANIPULATE_STAT_TABLE: nrPerformed = r.getInteger(0, 5); break; case INDEX: nrPerformed = r.getInteger(0, 20); break; case VIRTUAL_TABLE_ACTION: case UPDATE: nrPerformed = r.getInteger(0, 30); break; case PRAGMA: nrPerformed = r.getInteger(0, 100); break; case COMMIT: case TRANSACTION_START: case REINDEX: case ANALYZE: case ROLLBACK_TRANSACTION: default: nrPerformed = r.getInteger(1, 10); break; } if (nrPerformed != 0) { actions.add(action); } nrRemaining[action.ordinal()] = nrPerformed; total += nrPerformed; } if (options.logEachSelect()) { logger.writeCurrent(state); } while (total != 0) { Action nextAction = null; int selection = r.getInteger(0, total); int previousRange = 
0; for (i = 0; i < nrRemaining.length; i++) { if (previousRange <= selection && selection < previousRange + nrRemaining[i]) { nextAction = Action.values()[i]; break; } else { previousRange += nrRemaining[i]; } } assert nextAction != null; assert nrRemaining[nextAction.ordinal()] > 0; nrRemaining[nextAction.ordinal()]--; globalState.setSchema(newSchema); Query query = nextAction.getQuery(globalState); try { if (options.logEachSelect()) { logger.writeCurrent(query.getQueryString()); } manager.execute(query); if (query.couldAffectSchema()) { newSchema = SQLite3Schema.fromConnection(con); globalState.setSchema(newSchema); } } catch (IgnoreMeException e) { } total--; } Query query = SQLite3TransactionGenerator.generateCommit(con, state); manager.execute(query); // also do an abort for DEFERRABLE INITIALLY DEFERRED query = SQLite3TransactionGenerator.generateRollbackTransaction(con, state); manager.execute(query); newSchema = SQLite3Schema.fromConnection(con); if (Randomly.getBoolean()) { query = new QueryAdapter("PRAGMA integrity_check;"); manager.execute(query); } newSchema = SQLite3Schema.fromConnection(con); // SQLite3PivotedQuerySynthesizer queryGenerator = new SQLite3PivotedQuerySynthesizer(con, r); SQLite3MetamorphicQuerySynthesizer or = new SQLite3MetamorphicQuerySynthesizer(newSchema, r, con, (SQLite3StateToReproduce) state, logger, options, globalState); for (i = 0; i < NR_QUERIES_PER_TABLE; i++) { try { // if (Randomly.getBoolean()) { or.generateAndCheck(); manager.incrementSelectQueryCount(); // } else { // queryGenerator.generateAndCheckQuery(this.state, logger, options); // } } catch (IgnoreMeException e) { } } try { if (options.logEachSelect()) { logger.getCurrentFileWriter().close(); logger.currentFileWriter = null; } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } System.gc(); } private Query getTableQuery(StateToReproduce state, Randomly r, SQLite3Schema newSchema, int i) throws AssertionError { Query tableQuery; 
switch (Randomly.fromOptions(TableType.values())) { case NORMAL: String tableName = SQLite3Common.createTableName(i); tableQuery = SQLite3TableGenerator.createTableStatement(tableName, state, newSchema, r); break; case FTS: String ftsTableName = "v" + SQLite3Common.createTableName(i); tableQuery = SQLite3CreateVirtualFTSTableGenerator.createTableStatement(ftsTableName, r); break; case RTREE: String rTreeTableName = "rt" + i; tableQuery = SQLite3CreateVirtualRtreeTabelGenerator.createTableStatement(rTreeTableName, r); break; default: throw new AssertionError(); } return tableQuery; } // PRAGMAS to achieve good performance private final static List<String> DEFAULT_PRAGMAS = Arrays.asList("PRAGMA cache_size = 50000;", "PRAGMA temp_store=MEMORY;", "PRAGMA synchronous=off;"); private void addSensiblePragmaDefaults(Connection con) throws SQLException { List<String> pragmasToExecute = new ArrayList<>(); if (!Randomly.getBooleanWithSmallProbability()) { pragmasToExecute.addAll(DEFAULT_PRAGMAS); } if (Randomly.getBoolean() && !MUST_KNOW_RESULT) { pragmasToExecute.add("PRAGMA case_sensitive_like=ON;"); } if (Randomly.getBoolean()) { pragmasToExecute.add(String.format("PRAGMA encoding = '%s';", Randomly.fromOptions("UTF-8", "UTF-16", "UTF-16le", "UTF-16be"))); } for (String s : pragmasToExecute) { Query q = new QueryAdapter(s); state.statements.add(q); q.execute(con); } } @Override public Connection createDatabase(String databaseName, StateToReproduce state) throws SQLException { return DatabaseFacade.createDatabase(databaseName); } @Override public String getLogFileSubdirectoryName() { return "sqlite3"; } @Override public String toString() { return String.format("SQLite3Provider [database: %s]", databaseName); } @Override public void printDatabaseSpecificState(FileWriter writer, StateToReproduce state) { StringBuilder sb = new StringBuilder(); SQLite3StateToReproduce specificState = (SQLite3StateToReproduce) state; if (specificState.getRandomRowValues() != null) { 
List<Column> columnList = specificState.getRandomRowValues().keySet().stream().collect(Collectors.toList()); List<Table> tableList = columnList.stream().map(c -> c.getTable()).distinct().sorted() .collect(Collectors.toList()); for (Table t : tableList) { sb.append("-- " + t.getName() + "\n"); List<Column> columnsForTable = columnList.stream().filter(c -> c.getTable().equals(t)) .collect(Collectors.toList()); for (Column c : columnsForTable) { sb.append("--\t"); sb.append(c); sb.append("="); sb.append(specificState.getRandomRowValues().get(c)); sb.append("\n"); } } sb.append("-- expected values: \n"); String asExpectedValues = "-- " + SQLite3Visitor.asExpectedValues(specificState.getWhereClause()).replace("\n", "\n-- "); sb.append(asExpectedValues); } try { writer.write(sb.toString()); writer.flush(); } catch (IOException e) { throw new AssertionError(); } } @Override public StateToReproduce getStateToReproduce(String databaseName) { return new SQLite3StateToReproduce(databaseName); } @Override public Query checkIfRowIsStillContained(StateToReproduce state) { String checkRowIsInside = "SELECT " + state.queryTargetedColumnsString + " FROM " + state.queryTargetedTablesString + " INTERSECT SELECT " + state.values; return new QueryAdapter(checkRowIsInside); } }
src/lama/sqlite3/SQLite3Provider.java
package lama.sqlite3; import java.io.FileWriter; import java.io.IOException; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import lama.DatabaseFacade; import lama.DatabaseProvider; import lama.IgnoreMeException; import lama.Main.QueryManager; import lama.Main.StateLogger; import lama.MainOptions; import lama.Query; import lama.QueryAdapter; import lama.Randomly; import lama.StateToReproduce; import lama.StateToReproduce.SQLite3StateToReproduce; import lama.sqlite3.dml.SQLite3DeleteGenerator; import lama.sqlite3.dml.SQLite3InsertGenerator; import lama.sqlite3.dml.SQLite3UpdateGenerator; import lama.sqlite3.gen.SQLite3AnalyzeGenerator; import lama.sqlite3.gen.SQLite3Common; import lama.sqlite3.gen.SQLite3CreateVirtualRtreeTabelGenerator; import lama.sqlite3.gen.SQLite3ExplainGenerator; import lama.sqlite3.gen.SQLite3PragmaGenerator; import lama.sqlite3.gen.SQLite3ReindexGenerator; import lama.sqlite3.gen.SQLite3TransactionGenerator; import lama.sqlite3.gen.SQLite3VacuumGenerator; import lama.sqlite3.gen.SQLite3VirtualFTSTableCommandGenerator; import lama.sqlite3.gen.ddl.SQLite3AlterTable; import lama.sqlite3.gen.ddl.SQLite3CreateTriggerGenerator; import lama.sqlite3.gen.ddl.SQLite3CreateVirtualFTSTableGenerator; import lama.sqlite3.gen.ddl.SQLite3DropIndexGenerator; import lama.sqlite3.gen.ddl.SQLite3DropTableGenerator; import lama.sqlite3.gen.ddl.SQLite3IndexGenerator; import lama.sqlite3.gen.ddl.SQLite3TableGenerator; import lama.sqlite3.gen.ddl.SQLite3ViewGenerator; import lama.sqlite3.queries.SQLite3MetamorphicQuerySynthesizer; import lama.sqlite3.queries.SQLite3PivotedQuerySynthesizer; import lama.sqlite3.schema.SQLite3Schema; import lama.sqlite3.schema.SQLite3Schema.Column; import lama.sqlite3.schema.SQLite3Schema.Table; import lama.sqlite3.schema.SQLite3Schema.Table.TableKind; public 
class SQLite3Provider implements DatabaseProvider { public static enum Action { PRAGMA { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3PragmaGenerator.insertPragma(g.getConnection(), g.getState(), g.getRandomly()); } }, INDEX { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3IndexGenerator.insertIndex(g.getSchema(), g.getState(), g.getRandomly()); } }, INSERT { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { Table randomTable = g.getSchema().getRandomTableOrBailout(t -> !t.isView()); return SQLite3InsertGenerator.insertRow(randomTable, g.getConnection(), g.getRandomly()); } }, VACUUM { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3VacuumGenerator.executeVacuum(); } }, REINDEX { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3ReindexGenerator.executeReindex(g.getConnection(), g.getState(), g.getSchema()); } }, ANALYZE { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3AnalyzeGenerator.generateAnalyze(g.getSchema()); } }, DELETE { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3DeleteGenerator.deleteContent(g.getSchema().getRandomTableNoViewOrBailout(), g.getConnection(), g.getRandomly()); } }, TRANSACTION_START { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3TransactionGenerator.generateBeginTransaction(g.getConnection(), g.getState()); } }, ALTER { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3AlterTable.alterTable(g.getSchema(), g.getConnection(), g.getState(), g.getRandomly()); } }, DROP_INDEX { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3DropIndexGenerator.dropIndex(g.getConnection(), g.getState(), g.getSchema(), g.getRandomly()); } }, UPDATE { @Override public Query getQuery(SQLite3GlobalState g) { return 
SQLite3UpdateGenerator.updateRow(g.getSchema().getRandomTableNoViewOrBailout(), g.getRandomly()); } }, ROLLBACK_TRANSACTION() { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3TransactionGenerator.generateRollbackTransaction(g.getConnection(), g.getState()); } }, COMMIT { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3TransactionGenerator.generateCommit(g.getConnection(), g.getState()); } }, DROP_TABLE { @Override public Query getQuery(SQLite3GlobalState g) { return SQLite3DropTableGenerator.dropTable(g.getSchema()); } }, DROP_VIEW { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3ViewGenerator.dropView(SQLite3Schema.fromConnection(g.getConnection())); } }, EXPLAIN { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3ExplainGenerator.explain(g); } }, CHECK_RTREE_TABLE { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { Table table = g.getSchema().getRandomTableOrBailout(t -> t.getName().startsWith("r")); return new QueryAdapter(String.format("SELECT rtreecheck('%s');", table.getName())); } }, // TARGETED_SELECT { // // @Override // public Query getQuery(SQLite3Schema newSchema, Connection con, StateToReproduce state, Randomly r) // throws SQLException { // return new SQLite3PivotedQuerySynthesizer(con, r).getQueryThatContainsAtLeastOneRow((SQLite3StateToReproduce) state); // } // // }, VIRTUAL_TABLE_ACTION { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return new SQLite3VirtualFTSTableCommandGenerator(g.getSchema(), g.getRandomly()).generate(); } }, CREATE_VIEW { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return SQLite3ViewGenerator.generate(g.getSchema(), g.getConnection(), g.getRandomly(), g.getState(), g); } }, CREATE_TRIGGER { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { return 
SQLite3CreateTriggerGenerator.create(g.getSchema(), g.getRandomly(), g.getConnection()); } }, MANIPULATE_STAT_TABLE { @Override public Query getQuery(SQLite3GlobalState g) throws SQLException { List<Column> columns = new ArrayList<>(); Table t = new Table("sqlite_stat1", columns, TableKind.MAIN, false, 1, false, false); if (Randomly.getBoolean()) { return SQLite3DeleteGenerator.deleteContent(t, g.getConnection(), g.getRandomly()); } else { StringBuilder sb = new StringBuilder(); sb.append("INSERT OR IGNORE INTO sqlite_stat1"); String indexName; try (Statement stat = g.getConnection().createStatement()) { try (ResultSet rs = stat.executeQuery( "SELECT name FROM sqlite_master WHERE type='index' ORDER BY RANDOM() LIMIT 1;")) { if (rs.isClosed()) { throw new IgnoreMeException(); } indexName = rs.getString("name"); } ; } sb.append(" VALUES"); sb.append("('"); sb.append(g.getSchema().getRandomTable().getName()); sb.append("', "); sb.append("'"); if (Randomly.getBoolean()) { sb.append(indexName); } else { sb.append(g.getSchema().getRandomTable().getName()); } sb.append("'"); sb.append(", '"); for (int i = 0; i < Randomly.smallNumber(); i++) { if (i != 0) { sb.append(" "); } if (Randomly.getBoolean()) { sb.append(g.getRandomly().getInteger()); } else { sb.append(Randomly.smallNumber()); } } if (Randomly.getBoolean()) { sb.append(" sz="); sb.append(g.getRandomly().getInteger()); } if (Randomly.getBoolean()) { sb.append(" unordered"); } if (Randomly.getBoolean()) { sb.append(" noskipscan"); } sb.append("')"); return new QueryAdapter(sb.toString(), Arrays.asList("no such table")); } } }; public abstract Query getQuery(SQLite3GlobalState state) throws SQLException; } public static final int NR_INSERT_ROW_TRIES = 30; private static final int NR_QUERIES_PER_TABLE = 100000; private static final int MAX_INSERT_ROW_TRIES = 0; public static final int EXPRESSION_MAX_DEPTH = 3; public static final boolean ALLOW_FLOATING_POINT_FP = true; public static final boolean MUST_KNOW_RESULT = 
false; private SQLite3StateToReproduce state; private String databaseName; public static class SQLite3GlobalState { private Connection con; private SQLite3Schema schema; private SQLite3StateToReproduce state; private Randomly r; public Connection getConnection() { return con; } public SQLite3Schema getSchema() { return schema; } public void setConnection(Connection con) { this.con = con; } public void setSchema(SQLite3Schema schema) { this.schema = schema; } public void setState(SQLite3StateToReproduce state) { this.state = state; } public SQLite3StateToReproduce getState() { return state; } public Randomly getRandomly() { return r; } public void setRandomly(Randomly r) { this.r = r; } } public static class SQLite3SpecialStringGenerator { private enum Options { TIME_DATE_REGEX, NOW, DATE_TIME, TIME_MODIFIER } public static String generate() { StringBuilder sb = new StringBuilder(); switch (Randomly.fromOptions(Options.values())) { case TIME_DATE_REGEX: // https://www.sqlite.org/lang_datefunc.html return Randomly.fromOptions("%d", "%f", "%H", "%j", "%J", "%m", "%M", "%s", "%S", "%w", "%W", "%Y", "%%"); case NOW: return "now"; case DATE_TIME: long notCachedInteger = Randomly.getNotCachedInteger(1, 10); for (int i = 0; i < notCachedInteger; i++) { if (Randomly.getBoolean()) { sb.append(Randomly.getNonCachedInteger()); } else { sb.append(Randomly.getNotCachedInteger(0, 2000)); } sb.append(Randomly.fromOptions(":", "-", " ", "T")); } return sb.toString(); case TIME_MODIFIER: sb.append(Randomly.fromOptions("days", "hours", "minutes", "seconds", "months", "years", "start of month", "start of year", "start of day", "weekday", "unixepoch", "utc")); return sb.toString(); default: throw new AssertionError(); } } } private final SQLite3GlobalState globalState = new SQLite3GlobalState(); private enum TableType { NORMAL, FTS, RTREE } @Override public void generateAndTestDatabase(String databaseName, Connection con, StateLogger logger, StateToReproduce state, QueryManager 
manager, MainOptions options) throws SQLException { this.databaseName = databaseName; Randomly r = new Randomly(SQLite3SpecialStringGenerator::generate); globalState.setRandomly(r); SQLite3Schema newSchema = null; this.state = (SQLite3StateToReproduce) state; globalState.setConnection(con); globalState.setState((SQLite3StateToReproduce) state); addSensiblePragmaDefaults(con); int nrTablesToCreate = 1; if (Randomly.getBoolean()) { nrTablesToCreate++; } while (Randomly.getBooleanWithSmallProbability()) { nrTablesToCreate++; } int i = 0; newSchema = SQLite3Schema.fromConnection(con); do { globalState.setSchema(newSchema); Query tableQuery = getTableQuery(state, r, newSchema, i); manager.execute(tableQuery); i++; newSchema = SQLite3Schema.fromConnection(con); } while (newSchema.getDatabaseTables().size() != nrTablesToCreate); assert newSchema.getTables().getTables().size() == nrTablesToCreate; for (Table table : newSchema.getDatabaseTables()) { Query q = new QueryAdapter("SELECT * FROM " + table.getName(), Arrays.asList("generated column loop", "integer overflow")); if (!q.execute(con)) { throw new IgnoreMeException(); } } int[] nrRemaining = new int[Action.values().length]; List<Action> actions = new ArrayList<>(); int total = 0; for (i = 0; i < Action.values().length; i++) { Action action = Action.values()[i]; int nrPerformed = 0; switch (action) { case DROP_VIEW: case CREATE_VIEW: nrPerformed = r.getInteger(0, 0); break; case CREATE_TRIGGER: case DELETE: case DROP_INDEX: nrPerformed = r.getInteger(0, 2); break; case ALTER: case EXPLAIN: case DROP_TABLE: nrPerformed = r.getInteger(0, 0); break; case VACUUM: case CHECK_RTREE_TABLE: nrPerformed = r.getInteger(0, 3); break; case INSERT: nrPerformed = r.getInteger(0, NR_INSERT_ROW_TRIES); break; case MANIPULATE_STAT_TABLE: nrPerformed = r.getInteger(0, 5); break; // case TARGETED_SELECT: // nrPerformed = 0; // r.getInteger(0, 100); // break; case INDEX: nrPerformed = r.getInteger(0, 20); break; case VIRTUAL_TABLE_ACTION: 
case UPDATE: nrPerformed = r.getInteger(0, 30); break; case PRAGMA: nrPerformed = r.getInteger(0, 100); break; case COMMIT: case TRANSACTION_START: case REINDEX: case ANALYZE: case ROLLBACK_TRANSACTION: default: nrPerformed = r.getInteger(1, 10); break; } if (nrPerformed != 0) { actions.add(action); } nrRemaining[action.ordinal()] = nrPerformed; total += nrPerformed; } if (options.logEachSelect()) { logger.writeCurrent(state); } while (total != 0) { Action nextAction = null; int selection = r.getInteger(0, total); int previousRange = 0; for (i = 0; i < nrRemaining.length; i++) { if (previousRange <= selection && selection < previousRange + nrRemaining[i]) { nextAction = Action.values()[i]; break; } else { previousRange += nrRemaining[i]; } } assert nextAction != null; assert nrRemaining[nextAction.ordinal()] > 0; nrRemaining[nextAction.ordinal()]--; globalState.setSchema(newSchema); Query query = nextAction.getQuery(globalState); try { if (options.logEachSelect()) { logger.writeCurrent(query.getQueryString()); } manager.execute(query); if (query.couldAffectSchema()) { newSchema = SQLite3Schema.fromConnection(con); globalState.setSchema(newSchema); } } catch (IgnoreMeException e) { } total--; } Query query = SQLite3TransactionGenerator.generateCommit(con, state); manager.execute(query); // also do an abort for DEFERRABLE INITIALLY DEFERRED query = SQLite3TransactionGenerator.generateRollbackTransaction(con, state); manager.execute(query); newSchema = SQLite3Schema.fromConnection(con); if (Randomly.getBoolean()) { query = new QueryAdapter("PRAGMA integrity_check;"); manager.execute(query); } // for (Table t : newSchema.getDatabaseTables()) { // if (t.getNrRows() == 0) { // throw new IgnoreMeException(); // } // } newSchema = SQLite3Schema.fromConnection(con); // SQLite3PivotedQuerySynthesizer queryGenerator = new SQLite3PivotedQuerySynthesizer(con, r); SQLite3MetamorphicQuerySynthesizer or = new SQLite3MetamorphicQuerySynthesizer(newSchema, r, con, 
(SQLite3StateToReproduce) state, logger, options, globalState); for (i = 0; i < NR_QUERIES_PER_TABLE; i++) { try { // if (Randomly.getBoolean()) { or.generateAndCheck(); manager.incrementSelectQueryCount(); // } else { // queryGenerator.generateAndCheckQuery(this.state, logger, options); // } } catch (IgnoreMeException e) { } } try { if (options.logEachSelect()) { logger.getCurrentFileWriter().close(); logger.currentFileWriter = null; } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } System.gc(); } private Query getTableQuery(StateToReproduce state, Randomly r, SQLite3Schema newSchema, int i) throws AssertionError { Query tableQuery; switch (Randomly.fromOptions(TableType.values())) { case NORMAL: String tableName = SQLite3Common.createTableName(i); tableQuery = SQLite3TableGenerator.createTableStatement(tableName, state, newSchema, r); break; case FTS: String ftsTableName = "v" + SQLite3Common.createTableName(i); tableQuery = SQLite3CreateVirtualFTSTableGenerator.createTableStatement(ftsTableName, r); break; case RTREE: String rTreeTableName = "rt" + i; tableQuery = SQLite3CreateVirtualRtreeTabelGenerator.createTableStatement(rTreeTableName, r); break; default: throw new AssertionError(); } return tableQuery; } // PRAGMAS to achieve good performance private final static List<String> DEFAULT_PRAGMAS = Arrays.asList("PRAGMA cache_size = 50000;", "PRAGMA temp_store=MEMORY;", "PRAGMA synchronous=off;"); private void addSensiblePragmaDefaults(Connection con) throws SQLException { List<String> pragmasToExecute = new ArrayList<>(); if (!Randomly.getBooleanWithSmallProbability()) { pragmasToExecute.addAll(DEFAULT_PRAGMAS); } if (Randomly.getBoolean() && !MUST_KNOW_RESULT) { pragmasToExecute.add("PRAGMA case_sensitive_like=ON;"); } if (Randomly.getBoolean()) { pragmasToExecute.add(String.format("PRAGMA encoding = '%s';", Randomly.fromOptions("UTF-8", "UTF-16", "UTF-16le", "UTF-16be"))); } for (String s : pragmasToExecute) { Query q = new 
QueryAdapter(s); state.statements.add(q); q.execute(con); } } private boolean ensureTableHasRows(Connection con, Table randomTable, Randomly r) throws AssertionError, SQLException { int nrRows; int counter = MAX_INSERT_ROW_TRIES; do { try { Query q = SQLite3InsertGenerator.insertRow(randomTable, con, r); state.statements.add(q); q.execute(con); } catch (SQLException e) { if (!SQLite3PivotedQuerySynthesizer.shouldIgnoreException(e)) { throw new AssertionError(e); } } nrRows = randomTable.getNrRows(); } while (nrRows == 0 && counter-- != 0); return nrRows != 0; } @Override public Connection createDatabase(String databaseName, StateToReproduce state) throws SQLException { return DatabaseFacade.createDatabase(databaseName); } @Override public String getLogFileSubdirectoryName() { return "sqlite3"; } @Override public String toString() { return String.format("SQLite3Provider [database: %s]", databaseName); } @Override public void printDatabaseSpecificState(FileWriter writer, StateToReproduce state) { StringBuilder sb = new StringBuilder(); SQLite3StateToReproduce specificState = (SQLite3StateToReproduce) state; if (specificState.getRandomRowValues() != null) { List<Column> columnList = specificState.getRandomRowValues().keySet().stream().collect(Collectors.toList()); List<Table> tableList = columnList.stream().map(c -> c.getTable()).distinct().sorted() .collect(Collectors.toList()); for (Table t : tableList) { sb.append("-- " + t.getName() + "\n"); List<Column> columnsForTable = columnList.stream().filter(c -> c.getTable().equals(t)) .collect(Collectors.toList()); for (Column c : columnsForTable) { sb.append("--\t"); sb.append(c); sb.append("="); sb.append(specificState.getRandomRowValues().get(c)); sb.append("\n"); } } sb.append("-- expected values: \n"); String asExpectedValues = "-- " + SQLite3Visitor.asExpectedValues(specificState.getWhereClause()).replace("\n", "\n-- "); sb.append(asExpectedValues); } try { writer.write(sb.toString()); writer.flush(); } catch 
(IOException e) { throw new AssertionError(); } } @Override public StateToReproduce getStateToReproduce(String databaseName) { return new SQLite3StateToReproduce(databaseName); } @Override public Query checkIfRowIsStillContained(StateToReproduce state) { String checkRowIsInside = "SELECT " + state.queryTargetedColumnsString + " FROM " + state.queryTargetedTablesString + " INTERSECT SELECT " + state.values; return new QueryAdapter(checkRowIsInside); } }
Remove obsolete and commented-out code
src/lama/sqlite3/SQLite3Provider.java
Remove obsolete and commented-out code
<ide><path>rc/lama/sqlite3/SQLite3Provider.java <ide> import lama.sqlite3.gen.ddl.SQLite3TableGenerator; <ide> import lama.sqlite3.gen.ddl.SQLite3ViewGenerator; <ide> import lama.sqlite3.queries.SQLite3MetamorphicQuerySynthesizer; <del>import lama.sqlite3.queries.SQLite3PivotedQuerySynthesizer; <ide> import lama.sqlite3.schema.SQLite3Schema; <ide> import lama.sqlite3.schema.SQLite3Schema.Column; <ide> import lama.sqlite3.schema.SQLite3Schema.Table; <ide> return new QueryAdapter(String.format("SELECT rtreecheck('%s');", table.getName())); <ide> } <ide> }, <del>// TARGETED_SELECT { <del>// <del>// @Override <del>// public Query getQuery(SQLite3Schema newSchema, Connection con, StateToReproduce state, Randomly r) <del>// throws SQLException { <del>// return new SQLite3PivotedQuerySynthesizer(con, r).getQueryThatContainsAtLeastOneRow((SQLite3StateToReproduce) state); <del>// } <del>// <del>// }, <ide> VIRTUAL_TABLE_ACTION { <ide> @Override <ide> public Query getQuery(SQLite3GlobalState g) throws SQLException { <ide> <ide> public static final int NR_INSERT_ROW_TRIES = 30; <ide> private static final int NR_QUERIES_PER_TABLE = 100000; <del> private static final int MAX_INSERT_ROW_TRIES = 0; <ide> public static final int EXPRESSION_MAX_DEPTH = 3; <ide> public static final boolean ALLOW_FLOATING_POINT_FP = true; <ide> public static final boolean MUST_KNOW_RESULT = false; <ide> case MANIPULATE_STAT_TABLE: <ide> nrPerformed = r.getInteger(0, 5); <ide> break; <del>// case TARGETED_SELECT: <del>// nrPerformed = 0; // r.getInteger(0, 100); <del>// break; <ide> case INDEX: <ide> nrPerformed = r.getInteger(0, 20); <ide> break; <ide> query = new QueryAdapter("PRAGMA integrity_check;"); <ide> manager.execute(query); <ide> } <del>// for (Table t : newSchema.getDatabaseTables()) { <del>// if (t.getNrRows() == 0) { <del>// throw new IgnoreMeException(); <del>// } <del>// } <ide> <ide> newSchema = SQLite3Schema.fromConnection(con); <ide> <ide> } <ide> } <ide> <del> private boolean 
ensureTableHasRows(Connection con, Table randomTable, Randomly r) <del> throws AssertionError, SQLException { <del> int nrRows; <del> int counter = MAX_INSERT_ROW_TRIES; <del> do { <del> try { <del> Query q = SQLite3InsertGenerator.insertRow(randomTable, con, r); <del> state.statements.add(q); <del> q.execute(con); <del> <del> } catch (SQLException e) { <del> if (!SQLite3PivotedQuerySynthesizer.shouldIgnoreException(e)) { <del> throw new AssertionError(e); <del> } <del> } <del> nrRows = randomTable.getNrRows(); <del> } while (nrRows == 0 && counter-- != 0); <del> return nrRows != 0; <del> } <ide> <ide> @Override <ide> public Connection createDatabase(String databaseName, StateToReproduce state) throws SQLException {
JavaScript
mit
1b412e544d944aec573b5f5f7242ec4f29211552
0
Scya597/Caltcha,Scya597/Caltcha
/* class Project { constructor(Object project) {}; newOne(); vote(String userId, Array dates); update(String userId, Number deadline); remove(String userId); getStats(String userId); } */ import axios from 'axios'; class Project { constructor(project) { this.id = project.id; this.team = project.team; this.title = project.title; this.minDuration = project.minDuration; this.description = project.description; this.location = project.location; this.finaldate = project.finaldate; this.deadline = project.deadline; this.ended = project.ended; this.superuser = project.superuser; this.normaluser = project.normaluser; this.optionaluser = project.optionaluser; this.closedUser = project.closedUser; this.votes = project.votes; } newOne() { axios.post('/api/project/new', { id: this.id, team: this.team, title: this.title, minDuration: this.minDuration, description: this.description, location: this.location, finaldate: this.finaldate, deadline: this.deadline, ended: this.ended, superuser: this.superuser, normaluser: this.normaluser, optionaluser: this.optionaluser, closedUser: this.closedUser, votes: this.votes, }) .then((response) => { console.log(response); }) .catch((error) => { console.log(error); }); } vote(userId, dates) { axios.post(`/api/project/vote/${userId}`, { projectId: this.id, dates, }) .then((response) => { console.log(response); }) .catch((error) => { console.log(error); }); } update(userId, deadline) { axios.post(`/api/project/update/${userId}`, { projectId: this.id, ended: false, deadline, }) .then((response) => { console.log(response); }) .catch((error) => { console.log(error); }); } remove(userId) { axios.delete(`/api/project/remove/${userId}`, { projectId: this.id, }) .then((response) => { console.log(response); }) .catch((error) => { console.log(error); }); } getStats(userId) { axios.get(`/api/project/stats/${this.id}/${userId}`) .then((response) => { console.log(response); }) .catch((error) => { console.log(error); }); } } export default Project;
src/utils/Project.js
/* class Project { constructor(Object project) {}; newOne(); vote(String userId, Array dates); update(String userId, Number deadline); remove(String userId); getStats(String userId); } */ import axios from 'axios'; class Project { constructor(project) { this.id = project.id; this.team = project.team; this.title = project.title; this.minDuration = project.minDuration; this.description = project.description; this.location = project.location; this.finaldate = project.finaldate; this.deadline = project.deadline; this.ended = project.ended; this.superuser = project.superuser; this.normaluser = project.normaluser; this.optionaluser = project.optionaluser; this.closedUser = project.closedUser; this.votes = project.votes; } newOne() { axios.post('/api/project/new', { id: this.id, team: this.team, title: this.title, minDuration: this.minDuration, description: this.description, location: this.location, finaldate: this.finaldate, deadline: this.deadline, ended: this.ended, superuser: this.superuser, normaluser: this.normaluser, optionaluser: this.optionaluser, closedUser: this.closedUser, votes: this.votes, }) .then((response) => { console.log(response); }) .catch((error) => { console.log(error); }); } vote(userId, dates) { axios.post(`/api/project/vote/${userId}`, { projectId: this.id, dates, }) .then((response) => { console.log(response); }) .catch((error) => { console.log(error); }); } } export default Project;
:rocket: Add update, remove, getStats to Project Class
src/utils/Project.js
:rocket: Add update, remove, getStats to Project Class
<ide><path>rc/utils/Project.js <ide> console.log(error); <ide> }); <ide> } <add> update(userId, deadline) { <add> axios.post(`/api/project/update/${userId}`, { <add> projectId: this.id, <add> ended: false, <add> deadline, <add> }) <add> .then((response) => { <add> console.log(response); <add> }) <add> .catch((error) => { <add> console.log(error); <add> }); <add> } <add> remove(userId) { <add> axios.delete(`/api/project/remove/${userId}`, { <add> projectId: this.id, <add> }) <add> .then((response) => { <add> console.log(response); <add> }) <add> .catch((error) => { <add> console.log(error); <add> }); <add> } <add> getStats(userId) { <add> axios.get(`/api/project/stats/${this.id}/${userId}`) <add> .then((response) => { <add> console.log(response); <add> }) <add> .catch((error) => { <add> console.log(error); <add> }); <add> } <ide> } <ide> <ide> export default Project;
Java
apache-2.0
02cb4c34a04bf2d8520cb8cdb2676dde8756e45b
0
lefou/AsciidocFX,LightGuard/AsciidocFX,gastaldi/AsciidocFX,asciidocfx/AsciidocFX,lefou/AsciidocFX,jaredmorgs/AsciidocFX,gastaldi/AsciidocFX,jaredmorgs/AsciidocFX,asciidocfx/AsciidocFX,jaredmorgs/AsciidocFX,asciidocfx/AsciidocFX,asciidocfx/AsciidocFX,gastaldi/AsciidocFX,LightGuard/AsciidocFX,LightGuard/AsciidocFX
package com.kodcu.controller; import com.kodcu.other.Current; import com.kodcu.other.IOHelper; import com.kodcu.service.DirectoryService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseBody; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.nio.file.Path; import java.time.Duration; /** * Created by usta on 25.12.2014. */ @Controller public class ImageController { private final Current current; private final DirectoryService directoryService; @Autowired public ImageController(final Current current, final DirectoryService directoryService) { this.current = current; this.directoryService = directoryService; } @RequestMapping(value = {"/**/{extension:(?:\\w|\\W)+\\.(?:jpg|bmp|gif|jpeg|png|webp|svg)$}"}, method = RequestMethod.GET) @ResponseBody public ResponseEntity<byte[]> images(HttpServletRequest request, HttpServletResponse response, @PathVariable("extension") String extension) { response.setDateHeader("Expires", System.currentTimeMillis() + Duration.ofSeconds(10).toMillis()); Path imageFile; String uri = request.getRequestURI(); if (uri.startsWith("/")) uri = uri.substring(1); if (current.currentPath().isPresent()) { imageFile = current.currentPath().map(Path::getParent).get().resolve(uri); } else { imageFile = directoryService.getWorkingDirectory().get().resolve(uri); } byte[] temp = IOHelper.readAllBytes(imageFile); // if not found, try again in working dir if (temp.length == 0) { imageFile = directoryService.getWorkingDirectory().get().resolve(uri); temp = IOHelper.readAllBytes(imageFile); } return new 
ResponseEntity<>(temp, HttpStatus.OK); } }
src/main/java/com/kodcu/controller/ImageController.java
package com.kodcu.controller; import com.kodcu.other.Current; import com.kodcu.other.IOHelper; import com.kodcu.service.DirectoryService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseBody; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.nio.file.Path; import java.time.Duration; /** * Created by usta on 25.12.2014. */ @Controller public class ImageController { private final Current current; private final DirectoryService directoryService; @Autowired public ImageController(final Current current, final DirectoryService directoryService) { this.current = current; this.directoryService = directoryService; } @RequestMapping(value = {"/**/{extension:(?:\\w|\\W)+\\.(?:jpg|bmp|gif|jpeg|png|webp|svg)$}"}, method = RequestMethod.GET) @ResponseBody public ResponseEntity<byte[]> images(HttpServletRequest request, HttpServletResponse response, @PathVariable("extension") String extension) { response.setDateHeader("Expires", System.currentTimeMillis() + Duration.ofSeconds(10).toMillis()); Path imageFile; String uri = request.getRequestURI(); if (uri.startsWith("/")) uri = uri.substring(1); if (current.currentPath().isPresent()) { imageFile = current.currentPath().map(Path::getParent).get().resolve(uri); } else { imageFile = directoryService.getWorkingDirectory().get().resolve(uri); } byte[] temp = IOHelper.readAllBytes(imageFile); return new ResponseEntity<>(temp, HttpStatus.OK); } }
try working dir for images
src/main/java/com/kodcu/controller/ImageController.java
try working dir for images
<ide><path>rc/main/java/com/kodcu/controller/ImageController.java <ide> <ide> byte[] temp = IOHelper.readAllBytes(imageFile); <ide> <add> // if not found, try again in working dir <add> if (temp.length == 0) { <add> imageFile = directoryService.getWorkingDirectory().get().resolve(uri); <add> temp = IOHelper.readAllBytes(imageFile); <add> } <add> <ide> return new ResponseEntity<>(temp, HttpStatus.OK); <ide> } <ide> }
Java
mit
937e4d9019ed3806f44c32cd5fb4a1f0541bd7d9
0
glass2/EZFTP
/* * Created by JFormDesigner on Fri Jul 21 22:03:03 PDT 2017 */ package edu.pdx.cs510.agile.team3.FTP; import java.awt.*; import javax.swing.*; import javax.swing.GroupLayout; import javax.swing.event.TreeExpansionEvent; import javax.swing.event.TreeWillExpandListener; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.ExpandVetoException; import javax.swing.tree.TreeNode; /** * @author Susham Yerabolu */ public class FileListViewer extends JFrame implements TreeWillExpandListener { public FileListViewer() { initComponents(); } private TreeNode createNodes() { DefaultMutableTreeNode root; DefaultMutableTreeNode grandparent; DefaultMutableTreeNode parent; root = new DefaultMutableTreeNode("San Francisco"); grandparent = new DefaultMutableTreeNode("Potrero Hill"); root.add(grandparent); parent = new DefaultMutableTreeNode("Restaurants"); grandparent.add(parent); return root; } private void initComponents() { // JFormDesigner - Component initialization - DO NOT MODIFY //GEN-BEGIN:initComponents // Generated using JFormDesigner Evaluation license - Susham Yerabolu panel1 = new JPanel(); scrollPane1 = new JScrollPane(); tree1 = new JTree(); tree1.setDragEnabled(true); panel2 = new JPanel(); scrollPane2 = new JScrollPane(); tree2 = new JTree(); tree2.setDragEnabled(true); button1 = new JButton(); button2 = new JButton(); //======== this ======== Container contentPane = getContentPane(); //======== panel1 ======== { // JFormDesigner evaluation mark panel1.setBorder(new javax.swing.border.CompoundBorder( new javax.swing.border.TitledBorder(new javax.swing.border.EmptyBorder(0, 0, 0, 0), "JFormDesigner Evaluation", javax.swing.border.TitledBorder.CENTER, javax.swing.border.TitledBorder.BOTTOM, new java.awt.Font("Dialog", java.awt.Font.BOLD, 12), java.awt.Color.red), panel1.getBorder())); panel1.addPropertyChangeListener(new java.beans.PropertyChangeListener(){public void propertyChange(java.beans.PropertyChangeEvent 
e){if("border".equals(e.getPropertyName()))throw new RuntimeException();}}); //======== scrollPane1 ======== { scrollPane1.setViewportView(tree1); } GroupLayout panel1Layout = new GroupLayout(panel1); panel1.setLayout(panel1Layout); panel1Layout.setHorizontalGroup( panel1Layout.createParallelGroup() .addGroup(panel1Layout.createSequentialGroup() .addContainerGap() .addComponent(scrollPane1, GroupLayout.DEFAULT_SIZE, 324, Short.MAX_VALUE) .addContainerGap()) ); panel1Layout.setVerticalGroup( panel1Layout.createParallelGroup() .addGroup(panel1Layout.createSequentialGroup() .addContainerGap() .addComponent(scrollPane1, GroupLayout.DEFAULT_SIZE, 427, Short.MAX_VALUE) .addContainerGap()) ); } //======== panel2 ======== { //======== scrollPane2 ======== { scrollPane2.setViewportView(tree2); } GroupLayout panel2Layout = new GroupLayout(panel2); panel2.setLayout(panel2Layout); panel2Layout.setHorizontalGroup( panel2Layout.createParallelGroup() .addGroup(GroupLayout.Alignment.TRAILING, panel2Layout.createSequentialGroup() .addComponent(scrollPane2, GroupLayout.DEFAULT_SIZE, 362, Short.MAX_VALUE) .addContainerGap()) ); panel2Layout.setVerticalGroup( panel2Layout.createParallelGroup() .addGroup(panel2Layout.createSequentialGroup() .addContainerGap() .addComponent(scrollPane2, GroupLayout.DEFAULT_SIZE, 427, Short.MAX_VALUE) .addContainerGap()) ); } //---- button1 ---- button1.setText(">>"); //---- button2 ---- button2.setText("<<"); GroupLayout contentPaneLayout = new GroupLayout(contentPane); contentPane.setLayout(contentPaneLayout); contentPaneLayout.setHorizontalGroup( contentPaneLayout.createParallelGroup() .addGroup(contentPaneLayout.createSequentialGroup() .addContainerGap() .addComponent(panel1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addGroup(contentPaneLayout.createParallelGroup() .addGroup(contentPaneLayout.createSequentialGroup() .addGap(16, 16, 16) .addComponent(button1)) .addGroup(contentPaneLayout.createSequentialGroup() 
.addGap(18, 18, 18) .addComponent(button2))) .addGap(18, 18, 18) .addComponent(panel2, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addContainerGap()) ); contentPaneLayout.setVerticalGroup( contentPaneLayout.createParallelGroup() .addGroup(contentPaneLayout.createSequentialGroup() .addContainerGap() .addGroup(contentPaneLayout.createParallelGroup() .addComponent(panel1, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(panel2, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addContainerGap()) .addGroup(contentPaneLayout.createSequentialGroup() .addGap(122, 122, 122) .addComponent(button1) .addGap(42, 42, 42) .addComponent(button2) .addContainerGap(223, Short.MAX_VALUE)) ); pack(); setLocationRelativeTo(getOwner()); // JFormDesigner - End of component initialization //GEN-END:initComponents } // JFormDesigner - Variables declaration - DO NOT MODIFY //GEN-BEGIN:variables // Generated using JFormDesigner Evaluation license - Susham Yerabolu private JPanel panel1; private JScrollPane scrollPane1; private JTree tree1; private JPanel panel2; private JScrollPane scrollPane2; private JTree tree2; private JButton button1; private JButton button2; @Override public void treeWillExpand(TreeExpansionEvent event) throws ExpandVetoException { } @Override public void treeWillCollapse(TreeExpansionEvent event) throws ExpandVetoException { } // JFormDesigner - End of variables declaration //GEN-END:variables }
src/main/java/edu/pdx/cs510/agile/team3/FTP/FileListViewer.java
/* * Created by JFormDesigner on Fri Jul 21 22:03:03 PDT 2017 */ package edu.pdx.cs510.agile.team3.FTP; import java.awt.*; import javax.swing.*; import javax.swing.GroupLayout; import javax.swing.event.TreeExpansionEvent; import javax.swing.event.TreeWillExpandListener; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.ExpandVetoException; import javax.swing.tree.TreeNode; /** * @author Susham Yerabolu */ public class FileListViewer extends JFrame implements TreeWillExpandListener { public FileListViewer() { initComponents(); } private TreeNode createNodes() { DefaultMutableTreeNode root; DefaultMutableTreeNode grandparent; DefaultMutableTreeNode parent; root = new DefaultMutableTreeNode("San Francisco"); grandparent = new DefaultMutableTreeNode("Potrero Hill"); root.add(grandparent); parent = new DefaultMutableTreeNode("Restaurants"); grandparent.add(parent); return root; } private void initComponents() { // JFormDesigner - Component initialization - DO NOT MODIFY //GEN-BEGIN:initComponents // Generated using JFormDesigner Evaluation license - Susham Yerabolu panel1 = new JPanel(); scrollPane1 = new JScrollPane(); tree1 = new JTree(); panel2 = new JPanel(); scrollPane2 = new JScrollPane(); tree2 = new JTree(); button1 = new JButton(); button2 = new JButton(); //======== this ======== Container contentPane = getContentPane(); //======== panel1 ======== { // JFormDesigner evaluation mark panel1.setBorder(new javax.swing.border.CompoundBorder( new javax.swing.border.TitledBorder(new javax.swing.border.EmptyBorder(0, 0, 0, 0), "JFormDesigner Evaluation", javax.swing.border.TitledBorder.CENTER, javax.swing.border.TitledBorder.BOTTOM, new java.awt.Font("Dialog", java.awt.Font.BOLD, 12), java.awt.Color.red), panel1.getBorder())); panel1.addPropertyChangeListener(new java.beans.PropertyChangeListener(){public void propertyChange(java.beans.PropertyChangeEvent e){if("border".equals(e.getPropertyName()))throw new RuntimeException();}}); //======== 
scrollPane1 ======== { scrollPane1.setViewportView(tree1); } GroupLayout panel1Layout = new GroupLayout(panel1); panel1.setLayout(panel1Layout); panel1Layout.setHorizontalGroup( panel1Layout.createParallelGroup() .addGroup(panel1Layout.createSequentialGroup() .addContainerGap() .addComponent(scrollPane1, GroupLayout.DEFAULT_SIZE, 324, Short.MAX_VALUE) .addContainerGap()) ); panel1Layout.setVerticalGroup( panel1Layout.createParallelGroup() .addGroup(panel1Layout.createSequentialGroup() .addContainerGap() .addComponent(scrollPane1, GroupLayout.DEFAULT_SIZE, 427, Short.MAX_VALUE) .addContainerGap()) ); } //======== panel2 ======== { //======== scrollPane2 ======== { scrollPane2.setViewportView(tree2); } GroupLayout panel2Layout = new GroupLayout(panel2); panel2.setLayout(panel2Layout); panel2Layout.setHorizontalGroup( panel2Layout.createParallelGroup() .addGroup(GroupLayout.Alignment.TRAILING, panel2Layout.createSequentialGroup() .addComponent(scrollPane2, GroupLayout.DEFAULT_SIZE, 362, Short.MAX_VALUE) .addContainerGap()) ); panel2Layout.setVerticalGroup( panel2Layout.createParallelGroup() .addGroup(panel2Layout.createSequentialGroup() .addContainerGap() .addComponent(scrollPane2, GroupLayout.DEFAULT_SIZE, 427, Short.MAX_VALUE) .addContainerGap()) ); } //---- button1 ---- button1.setText(">>"); //---- button2 ---- button2.setText("<<"); GroupLayout contentPaneLayout = new GroupLayout(contentPane); contentPane.setLayout(contentPaneLayout); contentPaneLayout.setHorizontalGroup( contentPaneLayout.createParallelGroup() .addGroup(contentPaneLayout.createSequentialGroup() .addContainerGap() .addComponent(panel1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE) .addGroup(contentPaneLayout.createParallelGroup() .addGroup(contentPaneLayout.createSequentialGroup() .addGap(16, 16, 16) .addComponent(button1)) .addGroup(contentPaneLayout.createSequentialGroup() .addGap(18, 18, 18) .addComponent(button2))) .addGap(18, 18, 18) .addComponent(panel2, 
GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addContainerGap()) ); contentPaneLayout.setVerticalGroup( contentPaneLayout.createParallelGroup() .addGroup(contentPaneLayout.createSequentialGroup() .addContainerGap() .addGroup(contentPaneLayout.createParallelGroup() .addComponent(panel1, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(panel2, GroupLayout.DEFAULT_SIZE, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addContainerGap()) .addGroup(contentPaneLayout.createSequentialGroup() .addGap(122, 122, 122) .addComponent(button1) .addGap(42, 42, 42) .addComponent(button2) .addContainerGap(223, Short.MAX_VALUE)) ); pack(); setLocationRelativeTo(getOwner()); // JFormDesigner - End of component initialization //GEN-END:initComponents } // JFormDesigner - Variables declaration - DO NOT MODIFY //GEN-BEGIN:variables // Generated using JFormDesigner Evaluation license - Susham Yerabolu private JPanel panel1; private JScrollPane scrollPane1; private JTree tree1; private JPanel panel2; private JScrollPane scrollPane2; private JTree tree2; private JButton button1; private JButton button2; @Override public void treeWillExpand(TreeExpansionEvent event) throws ExpandVetoException { } @Override public void treeWillCollapse(TreeExpansionEvent event) throws ExpandVetoException { } // JFormDesigner - End of variables declaration //GEN-END:variables }
Enabling Drag feature from both the Local Machine File List(JTree) and Remote File List(JTree)
src/main/java/edu/pdx/cs510/agile/team3/FTP/FileListViewer.java
Enabling Drag feature from both the Local Machine File List(JTree) and Remote File List(JTree)
<ide><path>rc/main/java/edu/pdx/cs510/agile/team3/FTP/FileListViewer.java <ide> panel1 = new JPanel(); <ide> scrollPane1 = new JScrollPane(); <ide> tree1 = new JTree(); <add> tree1.setDragEnabled(true); <ide> panel2 = new JPanel(); <ide> scrollPane2 = new JScrollPane(); <ide> tree2 = new JTree(); <add> tree2.setDragEnabled(true); <ide> button1 = new JButton(); <ide> button2 = new JButton(); <ide>
Java
mit
7af76994ec0e5e2cbc9712a520b1952acd1ca1b8
0
MineLittlePony/MineLittlePony,MineLittlePony/MineLittlePony
package com.minelittlepony.model.ponies; import net.minecraft.entity.Entity; import net.minecraft.entity.monster.EntityZombieVillager; import net.minecraft.entity.passive.EntityVillager; import net.minecraft.util.math.MathHelper; import static com.minelittlepony.model.PonyModelConstants.*; import com.minelittlepony.model.player.ModelAlicorn; import com.minelittlepony.render.plane.PlaneRenderer; public class ModelVillagerPony extends ModelAlicorn { public PlaneRenderer bag, apron, trinket; public ModelVillagerPony() { super(false); } @Override public void setRotationAngles(float move, float swing, float ticks, float headYaw, float headPitch, float scale, Entity entity) { super.setRotationAngles(move, swing, ticks, headYaw, headPitch, scale, entity); float angleY = 0; if (swingProgress > -9990.0F && !canCast()) { angleY = MathHelper.sin(MathHelper.sqrt(swingProgress) * PI * 2) * 0.04F; } bag.rotateAngleY = angleY; apron.rotateAngleY = angleY; trinket.rotateAngleY = angleY; } @Override protected void renderBody(Entity entity, float move, float swing, float ticks, float headYaw, float headPitch, float scale) { super.renderBody(entity, move, swing, ticks, headYaw, headPitch, scale); int profession = getProfession(entity); if (profession > -1) { bipedBody.postRender(this.scale); if (profession < 2) { bag.render(scale); } else if (profession == 2) { trinket.render(scale); } else if (profession > 2) { apron.render(scale); } } } protected int getProfession(Entity entity) { if (entity instanceof EntityVillager) { return ((EntityVillager) entity).getProfession(); } if (entity instanceof EntityZombieVillager) { return ((EntityZombieVillager) entity).getProfession(); } return -1; } @Override protected void initTextures() { super.initTextures(); bag = new PlaneRenderer(this, 56, 19); apron = new PlaneRenderer(this, 56, 16); trinket = new PlaneRenderer(this, 0, 3); } @Override protected void initPositions(float yOffset, float stretch) { super.initPositions(yOffset, stretch); 
bag.offset(BODY_CENTRE_X, BODY_CENTRE_Y, BODY_CENTRE_Z) .around(HEAD_RP_X, HEAD_RP_Y + yOffset, HEAD_RP_Z) .tex(56, 25).addBackPlane(-7, -5, -4, 3, 6, stretch) //right bag front .addBackPlane( 4, -5, -4, 3, 6, stretch) //left bag front .tex(59, 25).addBackPlane(-7, -5, 4, 3, 6, stretch) //right bag back .addBackPlane( 4, -5, 4, 3, 6, stretch) //left bag back .tex(56, 19).addWestPlane(-7, -5, -4, 6, 8, stretch) //right bag outside .addWestPlane( 7, -5, -4, 6, 8, stretch) //left bag outside .addWestPlane(-4.01f, -5, -4, 6, 8, stretch) //right bag inside .addWestPlane( 4.01f, -5, -4, 6, 8, stretch) //left bag inside .tex(56, 31) .addTopPlane(-4, -4.5F, -1, 8, 1, stretch) //strap front .addTopPlane(-4, -4.5F, 0, 8, 1, stretch) //strap back .addBackPlane(-4, -4.5F, 0, 8, 1, stretch) .addFrontPlane(-4, -4.5F, 0, 8, 1, stretch) .child(0).tex(56, 16).flipZ().addTopPlane(2, -5, -13, 8, 3, stretch) //left bag top .flipZ().addTopPlane(2, -5, -2, 8, 3, stretch) //right bag top .tex(56, 22).flipZ().addBottomPlane(2, 1, -13, 8, 3, stretch) //left bag bottom .flipZ().addBottomPlane(2, 1, -2, 8, 3, stretch) //right bag bottom .rotateAngleY = 4.712389F; apron.offset(BODY_CENTRE_X, BODY_CENTRE_Y, BODY_CENTRE_Z) .around(HEAD_RP_X, HEAD_RP_Y + yOffset, HEAD_RP_Z) .addBackPlane(-4, -4, -9, 8, 10, stretch); trinket.offset(BODY_CENTRE_X, BODY_CENTRE_Y, BODY_CENTRE_Z) .around(HEAD_RP_X, HEAD_RP_Y + yOffset, HEAD_RP_Z) .addBackPlane(-2, -4, -9, 4, 5, stretch); } }
src/main/java/com/minelittlepony/model/ponies/ModelVillagerPony.java
package com.minelittlepony.model.ponies; import net.minecraft.entity.Entity; import net.minecraft.entity.monster.EntityZombieVillager; import net.minecraft.entity.passive.EntityVillager; import net.minecraft.util.math.MathHelper; import static com.minelittlepony.model.PonyModelConstants.*; import com.minelittlepony.model.player.ModelAlicorn; import com.minelittlepony.render.plane.PlaneRenderer; public class ModelVillagerPony extends ModelAlicorn { public PlaneRenderer bag, apron, trinket; public ModelVillagerPony() { super(false); } @Override public void setRotationAngles(float move, float swing, float ticks, float headYaw, float headPitch, float scale, Entity entity) { super.setRotationAngles(move, swing, ticks, headYaw, headPitch, scale, entity); float angleY = 0; if (swingProgress > -9990.0F && !canCast()) { angleY = MathHelper.sin(MathHelper.sqrt(swingProgress) * PI * 2) * 0.04F; } bag.rotateAngleY = angleY; apron.rotateAngleY = angleY; trinket.rotateAngleY = angleY; } @Override protected void renderBody(Entity entity, float move, float swing, float ticks, float headYaw, float headPitch, float scale) { super.renderBody(entity, move, swing, ticks, headYaw, headPitch, scale); int profession = getProfession(entity); if (profession > -1) { bipedBody.postRender(this.scale); if (profession < 2) { bag.render(scale); } else if (profession == 2) { trinket.render(scale); } else if (profession > 2) { apron.render(scale); } } } protected int getProfession(Entity entity) { if (entity instanceof EntityVillager) { return ((EntityVillager) entity).getProfession(); } if (entity instanceof EntityZombieVillager) { return ((EntityZombieVillager) entity).getProfession(); } return -1; } @Override protected void initTextures() { super.initTextures(); bag = new PlaneRenderer(this, 56, 19); apron = new PlaneRenderer(this, 56, 16); trinket = new PlaneRenderer(this, 0, 3); } @Override protected void initPositions(float yOffset, float stretch) { super.initPositions(yOffset, stretch); 
bag.offset(BODY_CENTRE_X, BODY_CENTRE_Y, BODY_CENTRE_Z) .around(HEAD_RP_X, HEAD_RP_Y + yOffset, HEAD_RP_Z) .tex(56, 25).addBackPlane(-7, -5, -4, 3, 6, stretch) //right bag front .addBackPlane( 4, -5, -4, 3, 6, stretch) //left bag front .tex(59, 25).addBackPlane(-7, -5, 4, 3, 6, stretch) //right bag back .addBackPlane( 4, -5, 4, 3, 6, stretch) //left bag back .tex(56, 29).addWestPlane(-7, -5, -4, 6, 8, stretch) //right bag outside .addWestPlane( 7, -5, -4, 6, 8, stretch) //left bag outside .addWestPlane(-4.01f, -5, -4, 6, 8, stretch) //right bag inside .addWestPlane( 4.01f, -5, -4, 6, 8, stretch) //left bag inside .tex(56, 31) .addTopPlane(-4, -4.5F, -1, 8, 1, stretch) //strap front .addTopPlane(-4, -4.5F, 0, 8, 1, stretch) //strap back .addBackPlane(-4, -4.5F, 0, 8, 1, stretch) .addFrontPlane(-4, -4.5F, 0, 8, 1, stretch) .child(0).tex(56, 16).flipZ().addTopPlane(2, -5, -13, 8, 3, stretch) //left bag top .flipZ().addTopPlane(2, -5, -2, 8, 3, stretch) //right bag top .tex(56, 22).flipZ().addBottomPlane(2, 1, -13, 8, 3, stretch) //left bag bottom .flipZ().addBottomPlane(2, 1, -2, 8, 3, stretch) //right bag bottom .rotateAngleY = 4.712389F; apron.offset(BODY_CENTRE_X, BODY_CENTRE_Y, BODY_CENTRE_Z) .around(HEAD_RP_X, HEAD_RP_Y + yOffset, HEAD_RP_Z) .addBackPlane(-4, -4, -9, 8, 10, stretch); trinket.offset(BODY_CENTRE_X, BODY_CENTRE_Y, BODY_CENTRE_Z) .around(HEAD_RP_X, HEAD_RP_Y + yOffset, HEAD_RP_Z) .addBackPlane(-2, -4, -9, 4, 5, stretch); } }
Fix texture alignment on saddlebags
src/main/java/com/minelittlepony/model/ponies/ModelVillagerPony.java
Fix texture alignment on saddlebags
<ide><path>rc/main/java/com/minelittlepony/model/ponies/ModelVillagerPony.java <ide> .addBackPlane( 4, -5, -4, 3, 6, stretch) //left bag front <ide> .tex(59, 25).addBackPlane(-7, -5, 4, 3, 6, stretch) //right bag back <ide> .addBackPlane( 4, -5, 4, 3, 6, stretch) //left bag back <del> .tex(56, 29).addWestPlane(-7, -5, -4, 6, 8, stretch) //right bag outside <add> .tex(56, 19).addWestPlane(-7, -5, -4, 6, 8, stretch) //right bag outside <ide> .addWestPlane( 7, -5, -4, 6, 8, stretch) //left bag outside <ide> .addWestPlane(-4.01f, -5, -4, 6, 8, stretch) //right bag inside <ide> .addWestPlane( 4.01f, -5, -4, 6, 8, stretch) //left bag inside
JavaScript
agpl-3.0
710c3d1e886fc103594b670c2c906e861c394843
0
bippum/listen-bot
class Trivia { constructor(questions, categories) { this.questions = questions; this.categories = categories; this.active_questions = {}; this.channels = []; this.hints = {}; this.hlock = {}; this.points = {}; this.streaks = {}; } clean(str) { return str.toString().toLowerCase().replace(/[+\-%s]/g, "").trim(); } notping(author) { return `**${author.username}#${author.discriminator}**`; } get_new_question(old_question, redis, channel, retries = 2) { let cat = this.categories[Math.floor(Math.random() * this.categories.length)]; let fil = this.questions.filter(question => question.category == cat); let res = fil[Math.floor(Math.random() * fil.length)]; let ret = old_question.question != "new" && old_question.question == res.question ? this.get_new_question(old_question) : res; if (redis && channel) { this.hlock[channel] = true; this.active_questions[channel] = ret; this.hints[channel] = ret.answer.replace(/[^+\-%s\. ]/g, "•"); if (ret.name) this.hints[channel] = this.hints[channel].replace(/[s]/g, "•"); redis.set(`trivia:${channel}:hint`, true, () => { redis.expire(`trivia:${channel}:hint`, 10, () => { redis.set(`trivia:${channel}:retries`, retries, () => { this.hlock[channel] = false; }); }); }); // AAAAAAAAAAAAAAAAAAAAAAAAAAAa } return ret; } init(client, channel) { this.channels.push(channel); client.createMessage(channel, "Trivia game started in this channel."); let question = this.get_new_question("new", client.redis, channel); client.createMessage(channel, `**${question.question}** (Hint: ${this.hints[channel]})`); } increment_user(client, user_id, score) { let sql = [ "INSERT INTO public.scores (id, score, streak, banned)", "VALUES ($1, $2, 1, false)", "ON CONFLICT (id) DO", "UPDATE SET score = public.scores.score + (SELECT CASE WHEN banned IS TRUE THEN 1 ELSE EXCLUDED.score END FROM public.scores WHERE id = $1)", "WHERE scores.id = $1;" ].join(" "); client.pg.query({ "text": sql, "values": [user_id, score] }).catch(err => client.helper.log("postgres", err)); } 
store_streak(client, user_id, streak) { client.pg.query({ "text": "UPDATE scores SET streak = $1 WHERE id = $2 AND streak <= $1;", "values": [streak, user_id] }).catch(err => client.helper.log("postgres", err)); } handle(message, client) { let question = this.active_questions[message.channel.id]; this.points[message.channel.id] = this.points[message.channel.id] || {}; this.points[message.channel.id][message.author.id] = this.points[message.channel.id][message.author.id] || 5; if (this.clean(message.content) == this.clean(question.answer)) { let new_question = this.get_new_question(question, client.redis, message.channel.id); this.increment_user(client, message.author.id, this.points[message.channel.id][message.author.id]); let streakstr = ""; if (!this.streaks[message.channel.id]) { this.streaks[message.channel.id] = { "user": message.author.id, "streak": 0 }; } if (this.streaks[message.channel.id].user == message.author.id) { this.streaks[message.channel.id].streak += 1; streakstr = `${this.notping(message.author)} is on a streak of ${this.streaks[message.channel.id].streak}! `; } else { if (this.streaks[message.channel.id].streak > 2) { streakstr = `${this.notping(message.author)} broke ${this.notping(client.users.get(this.streaks[message.channel.id].user))}'s streak of ${this.streaks[message.channel.id].streak}! `; } this.store_streak(client, this.streaks[message.channel.id].user, this.streaks[message.channel.id].streak); this.streaks[message.channel.id] = { "user": message.author.id, "streak": 1 }; } message.channel.createMessage(`(+${this.points[message.channel.id][message.author.id]}) ${this.notping(message.author)} is correct! The answer was **${question.answer}**. ${streakstr}New question:\n\n**${new_question.question}** (Hint: ${this.hints[message.channel.id]})`); delete this.points[message.channel.id]; } else { let pts = this.points[message.channel.id][message.author.id]; this.points[message.channel.id][message.author.id] = pts > 1 ? 
pts - 1 : 1; } } replace(str, orig) { if (str.split("•").length === 1) return orig; let index = Math.floor(Math.random() * str.length); if (str.charAt(index) == "•") { return `${str.substr(0, index)}${orig.charAt(index)}${str.substr(index + 1)}`; } else { return this.replace(str, orig); } } keyevent(message, client) { let split_content = message.split(":"); let channel = split_content[1], code = split_content[2]; if (!this.channels.includes(channel)) return; if (code == "hint") { if (!this.hlock[channel]) { let question = this.active_questions[channel]; this.hints[channel] = this.replace(this.hints[channel], question.answer); if (this.hints[channel].length > 10) this.hints[channel] = this.replace(this.hints[channel], question.answer); if (question.answer == this.hints[channel]) { client.redis.get(`trivia:${channel}:retries`, (err, reply) => { if (reply > 0) { let new_question = this.get_new_question(question, client.redis, channel, reply - 1); client.createMessage(channel, `Time's up! The answer was **${question.answer}**. New question:\n\n**${new_question.question}** (Hint: ${this.hints[channel]})`) .catch(err => client.helper.handle("trivia", err)); } else { this.channels.splice(this.channels.indexOf(channel), 1); client.createMessage(channel, `Time's up! The answer was **${question.answer}**. 
Not enough activity detected in this channel.\nUse \`--trivia start\` to start up a new game.`) .catch(err => client.helper.handle("trivia", err)); client.helper.log("trivia", `${channel}: trivia timed out`); } if (this.streaks[channel]) this.store_streak(client, this.streaks[channel].user, this.streaks[channel].streak); delete this.points[channel]; delete this.streaks[channel]; }); } else { client.redis.set(`trivia:${channel}:hint`, true); client.redis.expire(`trivia:${channel}:hint`, 10); client.createMessage(channel, `Hint: ${this.hints[channel]}`).catch(err => client.helper.handle("trivia", err)); } } else { client.helper.log("trivia", `lock jiggled in ${channel}`); } } } } module.exports = Trivia;
trivia/trivia.js
class Trivia { constructor(questions, categories) { this.questions = questions; this.categories = categories; this.active_questions = {}; this.channels = []; this.hints = {}; this.hlock = {}; this.points = {}; this.streaks = {}; } clean(str) { return str.toString().toLowerCase().replace(/[+\-%s]/g, "").trim(); } notping(author) { return `**${author.username}#${author.discriminator}**`; } get_new_question(old_question, redis, channel, retries = 2) { let cat = this.categories[Math.floor(Math.random() * this.categories.length)]; let fil = this.questions.filter(question => question.category == cat); let res = fil[Math.floor(Math.random() * fil.length)]; let ret = old_question.question != "new" && old_question.question == res.question ? this.get_new_question(old_question) : res; if (redis && channel) { this.hlock[channel] = true; this.active_questions[channel] = ret; this.hints[channel] = ret.answer.replace(/[^+\-%s\. ]/g, "•"); if (ret.name) this.hints[channel] = this.hints[channel].replace(/[s]/g, "•"); redis.set(`trivia:${channel}:hint`, true, () => { redis.expire(`trivia:${channel}:hint`, 10, () => { redis.set(`trivia:${channel}:retries`, retries, () => { this.hlock[channel] = false; }); }); }); // AAAAAAAAAAAAAAAAAAAAAAAAAAAa } return ret; } init(client, channel) { this.channels.push(channel); client.createMessage(channel, "Trivia game started in this channel."); let question = this.get_new_question("new", client.redis, channel); client.createMessage(channel, `**${question.question}** (Hint: ${this.hints[channel]})`); } increment_user(client, user_id, score) { let sql = [ "INSERT INTO public.scores (id, score, streak, banned)", "VALUES ($1, $2, 1, false)", "ON CONFLICT (id) DO", "UPDATE SET score = public.scores.score + (SELECT CASE WHEN banned IS TRUE THEN 1 ELSE EXCLUDED.score END FROM public.scores WHERE id = $1)", "WHERE scores.id = $1;" ].join(" "); client.pg.query({ "text": sql, "values": [user_id, score] }).catch(err => client.helper.log("postgres", err)); } 
store_streak(client, user_id, streak) { client.pg.query({ "text": "UPDATE scores SET streak = $1 WHERE id = $2 AND streak <= $1;", "values": [streak, user_id] }).catch(err => client.helper.log("postgres", err)); } handle(message, client) { let question = this.active_questions[message.channel.iLDd]; this.points[message.channel.id] = this.points[message.channel.id] || {}; this.points[message.channel.id][message.author.id] = this.points[message.channel.id][message.author.id] || 5; if (this.clean(message.content) == this.clean(question.answer)) { let new_question = this.get_new_question(question, client.redis, message.channel.id); this.increment_user(client, message.author.id, this.points[message.channel.id][message.author.id]); let streakstr = ""; if (!this.streaks[message.channel.id]) { this.streaks[message.channel.id] = { "user": message.author.id, "streak": 0 }; } if (this.streaks[message.channel.id].user == message.author.id) { this.streaks[message.channel.id].streak += 1; streakstr = `${this.notping(message.author)} is on a streak of ${this.streaks[message.channel.id].streak}! `; } else { if (this.streaks[message.channel.id].streak > 2) { streakstr = `${this.notping(message.author)} broke ${this.notping(client.users.get(this.streaks[message.channel.id].user))}'s streak of ${this.streaks[message.channel.id].streak}! `; } this.store_streak(client, this.streaks[message.channel.id].user, this.streaks[message.channel.id].streak); this.streaks[message.channel.id] = { "user": message.author.id, "streak": 1 }; } message.channel.createMessage(`(+${this.points[message.channel.id][message.author.id]}) ${this.notping(message.author)} is correct! The answer was **${question.answer}**. ${streakstr}New question:\n\n**${new_question.question}** (Hint: ${this.hints[message.channel.id]})`); delete this.points[message.channel.id]; } else { let pts = this.points[message.channel.id][message.author.id]; this.points[message.channel.id][message.author.id] = pts > 1 ? 
pts - 1 : 1; } } replace(str, orig) { if (str.split("•").length === 1) return orig; let index = Math.floor(Math.random() * str.length); if (str.charAt(index) == "•") { return `${str.substr(0, index)}${orig.charAt(index)}${str.substr(index + 1)}`; } else { return this.replace(str, orig); } } keyevent(message, client) { let split_content = message.split(":"); let channel = split_content[1], code = split_content[2]; if (!this.channels.includes(channel)) return; if (code == "hint") { if (!this.hlock[channel]) { let question = this.active_questions[channel]; this.hints[channel] = this.replace(this.hints[channel], question.answer); if (this.hints[channel].length > 10) this.hints[channel] = this.replace(this.hints[channel], question.answer); if (question.answer == this.hints[channel]) { client.redis.get(`trivia:${channel}:retries`, (err, reply) => { if (reply > 0) { let new_question = this.get_new_question(question, client.redis, channel, reply - 1); client.createMessage(channel, `Time's up! The answer was **${question.answer}**. New question:\n\n**${new_question.question}** (Hint: ${this.hints[channel]})`) .catch(err => client.helper.handle("trivia", err)); } else { this.channels.splice(this.channels.indexOf(channel), 1); client.createMessage(channel, `Time's up! The answer was **${question.answer}**. 
Not enough activity detected in this channel.\nUse \`--trivia start\` to start up a new game.`) .catch(err => client.helper.handle("trivia", err)); client.helper.log("trivia", `${channel}: trivia timed out`); } if (this.streaks[channel]) this.store_streak(client, this.streaks[channel].user, this.streaks[channel].streak); delete this.points[channel]; delete this.streaks[channel]; }); } else { client.redis.set(`trivia:${channel}:hint`, true); client.redis.expire(`trivia:${channel}:hint`, 10); client.createMessage(channel, `Hint: ${this.hints[channel]}`).catch(err => client.helper.handle("trivia", err)); } } else { client.helper.log("trivia", `lock jiggled in ${channel}`); } } } } module.exports = Trivia;
murder me
trivia/trivia.js
murder me
<ide><path>rivia/trivia.js <ide> } <ide> <ide> handle(message, client) { <del> let question = this.active_questions[message.channel.iLDd]; <add> let question = this.active_questions[message.channel.id]; <ide> this.points[message.channel.id] = this.points[message.channel.id] || {}; <ide> this.points[message.channel.id][message.author.id] = this.points[message.channel.id][message.author.id] || 5; <ide> if (this.clean(message.content) == this.clean(question.answer)) {
Java
apache-2.0
5ed1ee9d9e5d97776a04f7926923b7ec2641b899
0
j4velin/SystemAppMover
/* * Copyright 2012 Thomas Hoffmann * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.j4velin.systemappmover; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.app.ProgressDialog; import android.content.DialogInterface; import android.content.Intent; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.view.View; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.TextView; import com.stericson.RootTools.RootTools; import java.io.File; /** * The main activity. * <p/> * All the logic starts in the AppPicker, which is started from the checkForRoot * method if root is available */ public class MoverActivity extends Activity { public final static String SYSTEM_FOLDER_1 = "/system/priv-app/"; public final static String SYSTEM_FOLDER_2 = "/system/app/"; public final static String SYSTEM_DIR_TARGET = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT ? 
SYSTEM_FOLDER_1 : SYSTEM_FOLDER_2; public static boolean SHOW_SYSTEM_APPS = false; /** * Shows an error dialog with the specified text * * @param text the error text */ void showErrorDialog(final String text) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle("Error").setMessage(text) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, int id) { try { dialog.dismiss(); } catch (Exception e) { e.printStackTrace(); } } }); builder.create().show(); } /** * Shows another warning when enabling the 'show system apps' option */ void showSystemAppWarningDialog(final String text) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle("Warning").setMessage(text + " Did you make a backup?") .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, int id) { try { dialog.dismiss(); } catch (Exception e) { e.printStackTrace(); } } }).setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, int id) { try { dialog.dismiss(); showErrorDialog("You should!"); } catch (Exception e) { e.printStackTrace(); } } }); builder.create().show(); } /** * Shows the initial warning dialog */ void showWarningDialog() { final Dialog d = new Dialog(this); d.setTitle("Warning"); d.setCancelable(false); d.setContentView(R.layout.warningdialog); final CheckBox c = (CheckBox) d.findViewById(R.id.c); final Button b = (Button) d.findViewById(R.id.b); c.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton compoundButton, boolean checked) { b.setText(checked ? 
android.R.string.ok : android.R.string.cancel); } }); b.setText(android.R.string.cancel); b.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { if (c.isChecked()) { getSharedPreferences("settings", MODE_PRIVATE).edit() .putBoolean("warningRead", true).commit(); d.dismiss(); } else { d.dismiss(); finish(); } } }); d.show(); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); RootTools.debugMode = false; checkForRoot(); } /** * Uses the RootTools library to check for root and busybox */ private void checkForRoot() { final ProgressDialog progress = ProgressDialog.show(this, "", "Waiting for root access", true); progress.show(); final TextView error = (TextView) findViewById(R.id.error); final Handler h = new Handler(); new Thread(new Runnable() { @Override public void run() { boolean systemlessRoot = new File("/su").exists(); if (!systemlessRoot && !RootTools.isRootAvailable()) { if (progress == null || !progress.isShowing()) return; progress.cancel(); h.post(new Runnable() { @Override public void run() { error.setText( "Your device seems not to be rooted!\nThis app requires root access and does not work without.\n\nClick [here] to uninstall."); // ask user to delete app on non-rooted devices error.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { startActivity(new Intent(Intent.ACTION_DELETE, Uri.parse("package:de.j4velin.systemappmover"))); } }); } }); return; } final boolean root = systemlessRoot || RootTools.isAccessGiven(); if (progress == null || !progress.isShowing()) return; progress.cancel(); h.post(new Runnable() { @Override public void run() { if (root) { ((CheckBox) findViewById(R.id.root)).setChecked(true); } else { error.setText("No root access granted - click here to recheck"); error.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { checkForRoot(); } }); return; } 
if (new File("/su/xbin/busybox").exists() || RootTools.isBusyboxAvailable()) { CheckBox busyBox = (CheckBox) findViewById(R.id.busybox); busyBox.setChecked(true); busyBox.setText("BusyBox " + RootTools.getBusyBoxVersion()); } else { error.setText("No busybox found!\nClick here to download"); error.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { RootTools.offerBusyBox(MoverActivity.this); finish(); } }); } if (root) { new AppPicker(MoverActivity.this).execute(); if (!getSharedPreferences("settings", MODE_PRIVATE) .getBoolean("warningRead", false)) { showWarningDialog(); } error.setText( "Use at your own risk! I won't take responsibility for damages on your device! Make a backup first!"); final CheckBox showSystem = (CheckBox) findViewById(R.id.showsystem); showSystem.setOnCheckedChangeListener( new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged( final CompoundButton buttonView, boolean isChecked) { SHOW_SYSTEM_APPS = isChecked; new AppPicker(MoverActivity.this).execute(); if (isChecked) { String warning = "Moving system apps is NOT recommended and will most definitely damage something on your system when doing so."; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { warning += " On Android 5.0+, this feature is highly experimental and most system apps won\'t ever work again once moved!"; } showSystemAppWarningDialog(warning); } } }); } } } ); } } ). start(); } }
src/main/java/de/j4velin/systemappmover/MoverActivity.java
/* * Copyright 2012 Thomas Hoffmann * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.j4velin.systemappmover; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.app.ProgressDialog; import android.content.DialogInterface; import android.content.Intent; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.view.View; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.TextView; import com.stericson.RootTools.RootTools; import java.io.File; /** * The main activity. * <p/> * All the logic starts in the AppPicker, which is started from the checkForRoot * method if root is available */ public class MoverActivity extends Activity { public final static String SYSTEM_FOLDER_1 = "/system/priv-app/"; public final static String SYSTEM_FOLDER_2 = "/system/app/"; public final static String SYSTEM_DIR_TARGET = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT ? 
SYSTEM_FOLDER_1 : SYSTEM_FOLDER_2; public static boolean SHOW_SYSTEM_APPS = false; /** * Shows an error dialog with the specified text * * @param text the error text */ void showErrorDialog(final String text) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle("Error").setMessage(text) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, int id) { try { dialog.dismiss(); } catch (Exception e) { e.printStackTrace(); } } }); builder.create().show(); } /** * Shows another warning when enabling the 'show system apps' option */ void showSystemAppWarningDialog(final String text) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle("Warning").setMessage(text + " Did you make a backup?") .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, int id) { try { dialog.dismiss(); } catch (Exception e) { e.printStackTrace(); } } }).setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, int id) { try { dialog.dismiss(); showErrorDialog("You should!"); } catch (Exception e) { e.printStackTrace(); } } }); builder.create().show(); } /** * Shows the initial warning dialog */ void showWarningDialog() { final Dialog d = new Dialog(this); d.setTitle("Warning"); d.setCancelable(false); d.setContentView(R.layout.warningdialog); final CheckBox c = (CheckBox) d.findViewById(R.id.c); final Button b = (Button) d.findViewById(R.id.b); c.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton compoundButton, boolean checked) { b.setText(checked ? 
android.R.string.ok : android.R.string.cancel); } }); b.setText(android.R.string.cancel); b.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { if (c.isChecked()) { getSharedPreferences("settings", MODE_PRIVATE).edit() .putBoolean("warningRead", true).commit(); d.dismiss(); } else { d.dismiss(); finish(); } } }); d.show(); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); RootTools.debugMode = false; checkForRoot(); } /** * Uses the RootTools library to check for root and busybox */ private void checkForRoot() { final ProgressDialog progress = ProgressDialog.show(this, "", "Waiting for root access", true); progress.show(); final TextView error = (TextView) findViewById(R.id.error); final Handler h = new Handler(); new Thread(new Runnable() { @Override public void run() { boolean systemlessRoot = new File("/su").exists(); if (!systemlessRoot && !RootTools.isRootAvailable()) { if (progress == null || !progress.isShowing()) return; progress.cancel(); h.post(new Runnable() { @Override public void run() { error.setText( "Your device seems not to be rooted!\nThis app requires root access and does not work without.\n\nClick [here] to uninstall."); // ask user to delete app on non-rooted devices error.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { startActivity(new Intent(Intent.ACTION_DELETE, Uri.parse("package:de.j4velin.systemappmover"))); } }); } }); return; } final boolean root = systemlessRoot || RootTools.isAccessGiven(); if (progress == null || !progress.isShowing()) return; progress.cancel(); h.post(new Runnable() { @Override public void run() { if (root) { ((CheckBox) findViewById(R.id.root)).setChecked(true); } else { error.setText("No root access granted - click here to recheck"); error.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { checkForRoot(); } }); return; } 
if (RootTools.isBusyboxAvailable()) { CheckBox busyBox = (CheckBox) findViewById(R.id.busybox); busyBox.setChecked(true); busyBox.setText("BusyBox " + RootTools.getBusyBoxVersion()); } else { error.setText("No busybox found!\nClick here to download"); error.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { RootTools.offerBusyBox(MoverActivity.this); finish(); } }); } if (root) { new AppPicker(MoverActivity.this).execute(); if (!getSharedPreferences("settings", MODE_PRIVATE) .getBoolean("warningRead", false)) { showWarningDialog(); } error.setText( "Use at your own risk! I won't take responsibility for damages on your device! Make a backup first!"); final CheckBox showSystem = (CheckBox) findViewById(R.id.showsystem); showSystem.setOnCheckedChangeListener( new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(final CompoundButton buttonView, boolean isChecked) { SHOW_SYSTEM_APPS = isChecked; new AppPicker(MoverActivity.this).execute(); if (isChecked) { String warning = "Moving system apps is NOT recommended and will most definitely damage something on your system when doing so."; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { warning += " On Android 5.0+, this feature is highly experimental and most system apps won\'t ever work again once moved!"; } showSystemAppWarningDialog(warning); } } }); } } } ); } } ). start(); } }
look for /su/xbin/busybox binary when searching busybox
src/main/java/de/j4velin/systemappmover/MoverActivity.java
look for /su/xbin/busybox binary when searching busybox
<ide><path>rc/main/java/de/j4velin/systemappmover/MoverActivity.java <ide> return; <ide> } <ide> <del> if (RootTools.isBusyboxAvailable()) { <add> if (new File("/su/xbin/busybox").exists() || <add> RootTools.isBusyboxAvailable()) { <ide> CheckBox busyBox = (CheckBox) findViewById(R.id.busybox); <ide> busyBox.setChecked(true); <ide> busyBox.setText("BusyBox " + RootTools.getBusyBoxVersion()); <ide> showSystem.setOnCheckedChangeListener( <ide> new CompoundButton.OnCheckedChangeListener() { <ide> @Override <del> public void onCheckedChanged(final CompoundButton buttonView, boolean isChecked) { <add> public void onCheckedChanged( <add> final CompoundButton buttonView, <add> boolean isChecked) { <ide> SHOW_SYSTEM_APPS = isChecked; <ide> new AppPicker(MoverActivity.this).execute(); <ide> if (isChecked) {
JavaScript
apache-2.0
a7411368879f0e42aa57fd95e0e3f676b898f199
0
eric-isakson/listenmindfully.org,eric-isakson/listenmindfully.org
// This script will boot app.js with the number of workers // specified in WORKER_COUNT. // // The master will respond to SIGHUP, which will trigger // restarting all the workers and reloading the app. var cluster = require('cluster'); var workerCount = process.env.WORKER_COUNT || 2; // Defines what each worker needs to run // In this case, it's app.js a simple node http app cluster.setupMaster({ exec: 'bin/www' }); // Gets the count of active workers function numWorkers() { return Object.keys(cluster.workers).length; } var stopping = false; // Forks off the workers unless the server is stopping function forkNewWorkers() { if (!stopping) { for (var i = numWorkers(); i < workerCount; i++) { cluster.fork(); } } } // A list of workers queued for a restart var workersToStop = []; // Stops a single worker // Gives 60 seconds after disconnect before SIGTERM function stopWorker(worker) { console.log('stopping', worker.process.pid); worker.disconnect(); var killTimer = setTimeout(function() { worker.kill(); }, 5000); // Ensure we don't stay up just for this setTimeout killTimer.unref(); } // Tell the next worker queued to restart to disconnect // This will allow the process to finish it's work // for 60 seconds before sending SIGTERM function stopNextWorker() { var i = workersToStop.pop(); var worker = cluster.workers[i]; if (worker) stopWorker(worker); } // Stops all the works at once function stopAllWorkers() { stopping = true; console.log('stopping all workers'); for (var id in cluster.workers) { stopWorker(cluster.workers[id]); } } // Worker is now listening on a port // Once it is ready, we can signal the next worker to restart cluster.on('listening', stopNextWorker); // A worker has disconnected either because the process was killed // or we are processing the workersToStop array restarting each process // In either case, we will fork any workers needed cluster.on('disconnect', forkNewWorkers); // HUP signal sent to the master process to start restarting all the 
workers sequentially process.on('SIGHUP', function() { console.log('restarting all workers'); workersToStop = Object.keys(cluster.workers); stopNextWorker(); }); // Kill all the workers at once process.on('SIGTERM', stopAllWorkers); // Fork off the initial workers forkNewWorkers(); console.log('app master', process.pid, 'booted');
boot.js
// This script will boot app.js with the number of workers // specified in WORKER_COUNT. // // The master will respond to SIGHUP, which will trigger // restarting all the workers and reloading the app. var cluster = require('cluster'); var workerCount = process.env.WORKER_COUNT || 2; // Defines what each worker needs to run // In this case, it's app.js a simple node http app cluster.setupMaster({ exec: 'app.js' }); // Gets the count of active workers function numWorkers() { return Object.keys(cluster.workers).length; } var stopping = false; // Forks off the workers unless the server is stopping function forkNewWorkers() { if (!stopping) { for (var i = numWorkers(); i < workerCount; i++) { cluster.fork(); } } } // A list of workers queued for a restart var workersToStop = []; // Stops a single worker // Gives 60 seconds after disconnect before SIGTERM function stopWorker(worker) { console.log('stopping', worker.process.pid); worker.disconnect(); var killTimer = setTimeout(function() { worker.kill(); }, 5000); // Ensure we don't stay up just for this setTimeout killTimer.unref(); } // Tell the next worker queued to restart to disconnect // This will allow the process to finish it's work // for 60 seconds before sending SIGTERM function stopNextWorker() { var i = workersToStop.pop(); var worker = cluster.workers[i]; if (worker) stopWorker(worker); } // Stops all the works at once function stopAllWorkers() { stopping = true; console.log('stopping all workers'); for (var id in cluster.workers) { stopWorker(cluster.workers[id]); } } // Worker is now listening on a port // Once it is ready, we can signal the next worker to restart cluster.on('listening', stopNextWorker); // A worker has disconnected either because the process was killed // or we are processing the workersToStop array restarting each process // In either case, we will fork any workers needed cluster.on('disconnect', forkNewWorkers); // HUP signal sent to the master process to start restarting all the 
workers sequentially process.on('SIGHUP', function() { console.log('restarting all workers'); workersToStop = Object.keys(cluster.workers); stopNextWorker(); }); // Kill all the workers at once process.on('SIGTERM', stopAllWorkers); // Fork off the initial workers forkNewWorkers(); console.log('app master', process.pid, 'booted');
Update boot to run the express bin/www script.
boot.js
Update boot to run the express bin/www script.
<ide><path>oot.js <ide> <ide> // Defines what each worker needs to run <ide> // In this case, it's app.js a simple node http app <del>cluster.setupMaster({ exec: 'app.js' }); <add>cluster.setupMaster({ exec: 'bin/www' }); <ide> <ide> // Gets the count of active workers <ide> function numWorkers() { return Object.keys(cluster.workers).length; }
Java
apache-2.0
d2b441972546ea0ec5874b99925060921ab9a0e0
0
ST-DDT/CrazySpawner
package de.st_ddt.crazyspawner.entities; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.bukkit.Bukkit; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.World; import org.bukkit.command.CommandSender; import org.bukkit.command.ConsoleCommandSender; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.entity.*; import org.bukkit.inventory.ItemStack; import org.bukkit.material.Colorable; import org.bukkit.metadata.MetadataValue; import de.st_ddt.crazyplugin.exceptions.CrazyException; import de.st_ddt.crazyspawner.CrazySpawner; import de.st_ddt.crazyspawner.entities.properties.*; import de.st_ddt.crazyutil.ChatHelper; import de.st_ddt.crazyutil.ConfigurationSaveable; import de.st_ddt.crazyutil.EntitySpawner; import de.st_ddt.crazyutil.NamedEntitySpawner; import de.st_ddt.crazyutil.VersionComparator; import de.st_ddt.crazyutil.paramitrisable.NamedEntitySpawnerParamitrisable; import de.st_ddt.crazyutil.paramitrisable.Paramitrisable; import de.st_ddt.crazyutil.paramitrisable.StringParamitrisable; import de.st_ddt.crazyutil.paramitrisable.TabbedParamitrisable; import de.st_ddt.crazyutil.source.Localized; public class CustomEntitySpawner implements NamedEntitySpawner, MetadataValue, ConfigurationSaveable { public final static String METAHEADER = "CustomEntityMeta"; protected final static boolean v146OrLater = VersionComparator.compareVersions(ChatHelper.getMinecraftVersion(), "1.4.6") >= 0; protected final static boolean v150OrLater = VersionComparator.compareVersions(ChatHelper.getMinecraftVersion(), "1.5.0") >= 0; protected final static boolean v161OrLater = VersionComparator.compareVersions(ChatHelper.getMinecraftVersion(), "1.6.1") >= 0; protected final static boolean v162OrLater = 
VersionComparator.compareVersions(ChatHelper.getMinecraftVersion(), "1.6.2") >= 0; protected final static EntitySpawner[] ENTITYSPAWNER = new EntitySpawner[EntityType.values().length]; @SuppressWarnings("unchecked") protected final static Set<Class<? extends EntityPropertyInterface>>[] ENTITYPROPERTIES = new Set[EntityType.values().length]; static { // Spawner - Default for (final EntityType type : EntityType.values()) if (type.isSpawnable()) registerEntitySpawner(new DefaultSpawner(type)); // Spawner - Fixes registerEntitySpawner(new CenteredSpawner(EntityType.ENDER_CRYSTAL) { @Override public Entity spawn(final Location location) { final Entity entity = super.spawn(location); location.clone().add(0, 1, 0).getBlock().setType(Material.FIRE); location.getBlock().setType(Material.BEDROCK); return entity; } }); registerEntitySpawner(new BasicSpawner(EntityType.DROPPED_ITEM) { private final ItemStack item = new ItemStack(1); @Override public Entity spawn(final Location location) { return location.getWorld().dropItem(location, item); } }); registerEntitySpawner(new ClassSpawner(EntityType.FIREWORK)); registerEntitySpawner(new FallingBlockSpawner()); registerEntitySpawner(new LightningSpawner()); // Add Spawners to NamedEntitySpawnerParamitrisable for (final EntitySpawner spawner : ENTITYSPAWNER) if (spawner != null) if (spawner instanceof NamedEntitySpawner) NamedEntitySpawnerParamitrisable.registerNamedEntitySpawner((NamedEntitySpawner) spawner, spawner.getType().name(), spawner.getType().getName()); // Properties for (final EntityType type : EntityType.values()) ENTITYPROPERTIES[type.ordinal()] = new LinkedHashSet<Class<? extends EntityPropertyInterface>>(); // Properties - VIP required to be first! 
registerEntityProperty(FallingBlockProperty.class, FallingBlock.class); registerEntityProperty(LightningProperty.class, LightningStrike.class); // Properties - Sorted by EntityInterfaces registerEntityProperty(AgeProperty.class, Ageable.class); registerEntityProperty(BoatProperty.class, Boat.class); registerEntityProperty(ColorableProperty.class, Colorable.class); registerEntityProperty(AlarmProperty.class, Creature.class); registerEntityProperty(DetectionProperty.class, Creature.class); registerEntityProperty(CreeperProperty.class, Creeper.class); if (v146OrLater) registerEntityProperty(HealthProperty.class, LivingEntity.class); registerEntityProperty(EndermanProperty.class, Enderman.class); registerEntityProperty(DespawnProperty.class, Entity.class, LivingEntity.class); registerEntityProperty(BurningProperty.class, Entity.class); registerEntityProperty(InvulnerableProperty.class, Entity.class); registerEntityProperty(VelocityProperty.class, Entity.class); registerEntityProperty(PassengerProperty.class, Entity.class); registerEntityProperty(PeacefulProperty.class, Entity.class); registerEntityProperty(ExperienceOrbProperty.class, ExperienceOrb.class); registerEntityProperty(ExplosiveProperty.class, Explosive.class); registerEntityProperty(FallingBlockExtendedProperty.class, FallingBlock.class); // Fireball required? registerEntityProperty(FireworkProperty.class, Firework.class); // Hanging required? if (v162OrLater) registerEntityProperty(HorseProperty.class, Horse.class); // InventoryHolder required? registerEntityProperty(IronGolemProperty.class, IronGolem.class); registerEntityProperty(AlarmProperty.class, Item.class); registerEntityProperty(DroppedItemProperty.class, Item.class); // ItemFrame required? 
registerEntityProperty(DamageProperty.class, LivingEntity.class); registerEntityProperty(LivingDespawnProperty.class, LivingEntity.class); registerEntityProperty(EquipmentProperties.class, LivingEntity.class); if (v150OrLater) registerEntityProperty(NameProperty.class, LivingEntity.class); registerEntityProperty(PotionProterty.class, LivingEntity.class); registerEntityProperty(XPProperty.class, LivingEntity.class); // Minecard required? registerEntityProperty(OcelotProperty.class, Ocelot.class); // Painting required? registerEntityProperty(PigProperty.class, Pig.class); registerEntityProperty(PigZombieProperty.class, PigZombie.class); // Projectile required? registerEntityProperty(SheepProperty.class, Sheep.class); registerEntityProperty(SkeletonProperty.class, Skeleton.class); registerEntityProperty(SlimeProperty.class, Slime.class); registerEntityProperty(TameableProperty.class, Tameable.class); // TNTPrimed impossible? registerEntityProperty(VillagerProperty.class, Villager.class); registerEntityProperty(WolfProperty.class, Wolf.class); registerEntityProperty(ZombieProperty.class, Zombie.class); } public static void registerEntitySpawner(final EntitySpawner spawner) { ENTITYSPAWNER[spawner.getType().ordinal()] = spawner; } public static Set<EntityType> getSpawnableEntityTypes() { final Set<EntityType> res = new HashSet<EntityType>(); for (final EntityType type : EntityType.values()) if (ENTITYSPAWNER[type.ordinal()] != null) res.add(type); return res; } public static void registerEntityProperty(final Class<? extends EntityPropertyInterface> propertyClass, final Class<?> targetClass) { for (final EntityType type : EntityType.values()) if (type.getEntityClass() != null && targetClass.isAssignableFrom(type.getEntityClass())) ENTITYPROPERTIES[type.ordinal()].add(propertyClass); } public static void registerEntityProperty(final Class<? extends EntityPropertyInterface> propertyClass, final Class<?> targetClass, final Class<?>... 
ignoredClasses) { for (final EntityType type : EntityType.values()) if (type.getEntityClass() != null && targetClass.isAssignableFrom(type.getEntityClass())) { for (final Class<?> ignoredClass : ignoredClasses) if (ignoredClass.isAssignableFrom(type.getEntityClass())) return; ENTITYPROPERTIES[type.ordinal()].add(propertyClass); } } protected static List<EntityPropertyInterface> getDefaultEntityProperties(final EntityType type) { final Set<Class<? extends EntityPropertyInterface>> properties = ENTITYPROPERTIES[type.ordinal()]; final List<EntityPropertyInterface> res = new ArrayList<EntityPropertyInterface>(properties.size()); for (final Class<? extends EntityPropertyInterface> property : properties) try { res.add(property.newInstance()); } catch (final Exception e) { System.err.println("WARNING: Serious Bug detected, please report this!"); System.err.println("EntityType: " + type.name() + ", Property: " + property.getSimpleName()); e.printStackTrace(); } return res; } protected static List<EntityPropertyInterface> getEntityPropertiesFromConfig(final EntityType type, final ConfigurationSection config) { final Set<Class<? extends EntityPropertyInterface>> properties = ENTITYPROPERTIES[type.ordinal()]; final List<EntityPropertyInterface> res = new ArrayList<EntityPropertyInterface>(properties.size()); for (final Class<? extends EntityPropertyInterface> property : properties) try { res.add(property.getConstructor(ConfigurationSection.class).newInstance(config)); } catch (final Exception e) { System.err.println("WARNING: Serious Bug detected, please report this!"); System.err.println("EntityType: " + type.name() + ", Property: " + property.getSimpleName()); e.printStackTrace(); } return res; } protected static List<EntityPropertyInterface> getEntityPropertiesFromParams(final EntityType type, final Map<String, ? extends Paramitrisable> params) { final Set<Class<? 
extends EntityPropertyInterface>> properties = ENTITYPROPERTIES[type.ordinal()]; final List<EntityPropertyInterface> res = new ArrayList<EntityPropertyInterface>(properties.size()); for (final Class<? extends EntityPropertyInterface> property : properties) try { res.add(property.getConstructor(Map.class).newInstance(params)); } catch (final Exception e) { System.err.println("WARNING: Serious Bug detected, please report this!"); System.err.println("EntityType: " + type.name() + ", Property: " + property.getSimpleName()); e.printStackTrace(); } return res; } public static StringParamitrisable getCommandParams(final EntityType type, final Map<String, ? super TabbedParamitrisable> params, final CommandSender sender) { final StringParamitrisable nameParam = new StringParamitrisable(null); params.put("n", nameParam); params.put("name", nameParam); for (final EntityPropertyInterface property : getDefaultEntityProperties(type)) property.getCommandParams(params, sender); return nameParam; } public static int getTotalSpawnableEntityTypeCount() { return getSpawnableEntityTypes().size(); } public static int getTotalPropertiesCount() { return getAllPropertyClasses().size(); } protected static Set<Class<? extends EntityPropertyInterface>> getAllPropertyClasses() { final Set<Class<? extends EntityPropertyInterface>> properties = new HashSet<Class<? extends EntityPropertyInterface>>(); for (final EntityType type : getSpawnableEntityTypes()) properties.addAll(ENTITYPROPERTIES[type.ordinal()]); return properties; } public static int getTotalCommandParamsCount() { final Map<String, Paramitrisable> params = new HashMap<String, Paramitrisable>(); final ConsoleCommandSender console = Bukkit.getConsoleSender(); for (final Class<? 
extends EntityPropertyInterface> property : getAllPropertyClasses()) try { property.newInstance().getCommandParams(params, console); } catch (final Exception e) { System.err.println("WARNING: Serious Bug detected, please report this!"); System.err.println("Property: " + property.getSimpleName()); e.printStackTrace(); } return new HashSet<Paramitrisable>(params.values()).size(); } protected final String name; protected final EntityType type; protected final List<EntityPropertyInterface> properties; public CustomEntitySpawner(final EntityType type) { this(type.getName() == null ? type.name() : type.getName(), type); } public CustomEntitySpawner(final String name, final EntityType type) { super(); if (name == null) throw new IllegalArgumentException("Name cannot be null!"); if (name.length() == 0) throw new IllegalArgumentException("Name cannot be empty!"); this.name = name.toUpperCase(); if (type == null) throw new IllegalArgumentException("Type cannot be null!"); this.type = type; this.properties = getDefaultEntityProperties(type); } public CustomEntitySpawner(final ConfigurationSection config) { super(); if (config == null) throw new IllegalArgumentException("Config cannot be null!"); this.name = config.getString("name", "UNNAMED").toUpperCase(); final String typeName = config.getString("type", null); if (typeName == null) throw new IllegalArgumentException("Type cannot be null!"); this.type = EntityType.valueOf(typeName.toUpperCase()); if (type == null) throw new IllegalArgumentException("Type cannot be null!"); this.properties = getEntityPropertiesFromConfig(type, config); } public CustomEntitySpawner(final EntityType type, final Map<String, ? 
extends Paramitrisable> params) { super(); final StringParamitrisable nameParam = (StringParamitrisable) params.get("name"); this.name = nameParam.getValue().toUpperCase(); if (type == null) throw new IllegalArgumentException("EntityType cannot be null!"); this.type = type; this.properties = getEntityPropertiesFromParams(type, params); } /** * Creates a CustomEntitySpawner from args.<br> * This is a helper method for default custom entities. * * @param name * The name of the custom entity. * @param type * The entity type of this spawner. * @param sender * The CommandSender how creates this object. * @param args * The params to create this object. */ public CustomEntitySpawner(final String name, final EntityType type, final CommandSender sender, final String... args) { super(); this.name = name; if (type == null) throw new IllegalArgumentException("Type cannot be null!"); this.type = type; final Map<String, Paramitrisable> params = new HashMap<String, Paramitrisable>(); getCommandParams(type, params, sender); for (final String arg : args) { final String[] split = arg.split(":", 2); final Paramitrisable param = params.get(split[0]); if (param != null) try { param.setParameter(split[1]); } catch (final CrazyException e) { e.printStackTrace(); } } this.properties = getEntityPropertiesFromParams(type, params); } @Override public final String getName() { return name; } @Override public final EntityType getType() { return type; } @Override public final Class<? 
extends Entity> getEntityClass() { return type.getEntityClass(); } protected EntitySpawner getSpawner() { if (!properties.isEmpty()) { final EntityPropertyInterface property = properties.get(0); if (property instanceof EntitySpawner) return (EntitySpawner) property; } return ENTITYSPAWNER[type.ordinal()]; } public final boolean isSpawnable() { return getSpawner() != null; } @Override public final Entity spawn(final Location location) { final EntitySpawner spawner = getSpawner(); if (spawner == null) return null; final Entity entity = spawner.spawn(location); if (entity == null) return null; apply(entity); return entity; } @Localized({ "CRAZYSPAWNER.ENTITY.PROPERTY.NAME $Name$", "CRAZYSPAWNER.ENTITY.PROPERTY.TYPE $EntityType$" }) public void show(final CommandSender target) { CrazySpawner.getPlugin().sendLocaleMessage("ENTITY.PROPERTY.NAME", target, name); CrazySpawner.getPlugin().sendLocaleMessage("ENTITY.PROPERTY.TYPE", target, type.name()); for (final EntityPropertyInterface property : properties) property.show(target); } /** * Apply all features to the given entity.<br> * EntityType of this Spawner must match the EntityType of the given entity. * * @param entity * The entity the properties should be applied to. */ public final void apply(final Entity entity) { entity.setMetadata(METAHEADER, this); for (final EntityPropertyInterface property : properties) property.apply(entity); } @Override public Collection<? extends Entity> getEntities(final World world) { // EDIT include entity properties or check meta return world.getEntitiesByClass(type.getEntityClass()); } public final StringParamitrisable getCommandParams(final Map<String, ? 
super TabbedParamitrisable> params, final CommandSender sender) { final StringParamitrisable nameParam = new StringParamitrisable(name); params.put("n", nameParam); params.put("name", nameParam); for (final EntityPropertyInterface property : properties) property.getCommandParams(params, sender); return nameParam; } public final void addEntityProperty(final EntityPropertyInterface property) { if (property == null) return; for (int i = 0; i < properties.size(); i++) if (properties.get(i).getClass().getName().equals(property.getClass().getName())) { properties.set(i, property); return; } properties.add(property); } @Override public void save(final ConfigurationSection config, final String path) { config.set(path + "name", name.toUpperCase()); config.set(path + "type", type.name()); for (final EntityPropertyInterface property : properties) property.save(config, path); } public void dummySave(final ConfigurationSection config, final String path) { config.set(path + "name", "String"); config.set(path + "type", "EntityType"); for (final EntityPropertyInterface property : properties) property.dummySave(config, path); } private abstract static class BasicSpawner implements NamedEntitySpawner { protected final EntityType type; public BasicSpawner(final EntityType type) { this.type = type; } @Override public final EntityType getType() { return type; } @Override public String getName() { return type.getName(); } @Override public final Class<? extends Entity> getEntityClass() { return type.getEntityClass(); } @Override public abstract Entity spawn(Location location); @Override public Collection<? 
extends Entity> getEntities(final World world) { return world.getEntitiesByClass(type.getEntityClass()); } } private static class DefaultSpawner extends BasicSpawner { public DefaultSpawner(final EntityType type) { super(type); } @Override public Entity spawn(final Location location) { return location.getWorld().spawnEntity(location, type); } } private static class CenteredSpawner extends DefaultSpawner { public CenteredSpawner(final EntityType type) { super(type); } @Override public Entity spawn(final Location location) { location.setX(Math.floor(location.getX()) + 0.5); location.setY(Math.floor(location.getY())); location.setZ(Math.floor(location.getZ()) + 0.5); location.setYaw(0); location.setPitch(0); return super.spawn(location); } } private static class ClassSpawner extends DefaultSpawner { public ClassSpawner(final EntityType type) { super(type); } @Override public Entity spawn(final Location location) { try { return location.getWorld().spawn(location, type.getEntityClass()); } catch (final Exception e) { e.printStackTrace(); return null; } } } public static class FallingBlockSpawner extends DefaultSpawner { protected final Material material; protected final byte data; public FallingBlockSpawner() { super(EntityType.FALLING_BLOCK); this.material = Material.STONE; this.data = 0; } public FallingBlockSpawner(final Material material, final byte data) { super(EntityType.FALLING_BLOCK); if (material == null) throw new IllegalArgumentException("Material cannot be null!"); this.material = material; this.data = data; } @Override public final FallingBlock spawn(final Location location) { try { return location.getWorld().spawnFallingBlock(location, material, data); } catch (final Exception e) { e.printStackTrace(); return null; } } @Override public final Collection<FallingBlock> getEntities(final World world) { final Collection<FallingBlock> entities = world.getEntitiesByClass(FallingBlock.class); final Iterator<FallingBlock> it = entities.iterator(); while 
(it.hasNext()) if (it.next().getMaterial() != material) it.remove(); return entities; } } public static class LightningSpawner extends DefaultSpawner { protected final boolean effect; public LightningSpawner() { super(EntityType.LIGHTNING); this.effect = false; } public LightningSpawner(final boolean effect) { super(EntityType.LIGHTNING); this.effect = effect; } @Override public final String getName() { return "LIGHTNINGSTRIKE"; } @Override public final LightningStrike spawn(final Location location) { if (effect) return location.getWorld().strikeLightningEffect(location); else return location.getWorld().strikeLightning(location); } @Override public final Collection<LightningStrike> getEntities(final World world) { final Collection<LightningStrike> entities = world.getEntitiesByClass(LightningStrike.class); final Iterator<LightningStrike> it = entities.iterator(); while (it.hasNext()) if (it.next().isEffect() != effect) it.remove(); return entities; } } @Override public boolean equals(final Object obj) { if (obj instanceof CustomEntitySpawner) return name.equals(((CustomEntitySpawner) obj).name); else return false; } @Override public int hashCode() { return name.hashCode(); } @Override public final CustomEntitySpawner value() { return this; } @Override public final int asInt() { return 0; } @Override public final float asFloat() { return 0; } @Override public final double asDouble() { return 0; } @Override public final long asLong() { return 0; } @Override public final short asShort() { return 0; } @Override public final byte asByte() { return 0; } @Override public final boolean asBoolean() { return false; } @Override public final String asString() { return toString(); } @Override public final CrazySpawner getOwningPlugin() { return CrazySpawner.getPlugin(); } @Override public final void invalidate() { } }
src/de/st_ddt/crazyspawner/entities/CustomEntitySpawner.java
package de.st_ddt.crazyspawner.entities; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.bukkit.Bukkit; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.World; import org.bukkit.command.CommandSender; import org.bukkit.command.ConsoleCommandSender; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.entity.*; import org.bukkit.inventory.ItemStack; import org.bukkit.material.Colorable; import org.bukkit.metadata.MetadataValue; import de.st_ddt.crazyplugin.exceptions.CrazyException; import de.st_ddt.crazyspawner.CrazySpawner; import de.st_ddt.crazyspawner.entities.properties.*; import de.st_ddt.crazyutil.ChatHelper; import de.st_ddt.crazyutil.ConfigurationSaveable; import de.st_ddt.crazyutil.EntitySpawner; import de.st_ddt.crazyutil.NamedEntitySpawner; import de.st_ddt.crazyutil.VersionComparator; import de.st_ddt.crazyutil.paramitrisable.NamedEntitySpawnerParamitrisable; import de.st_ddt.crazyutil.paramitrisable.Paramitrisable; import de.st_ddt.crazyutil.paramitrisable.StringParamitrisable; import de.st_ddt.crazyutil.paramitrisable.TabbedParamitrisable; import de.st_ddt.crazyutil.source.Localized; public class CustomEntitySpawner implements NamedEntitySpawner, MetadataValue, ConfigurationSaveable { public final static String METAHEADER = "CustomEntityMeta"; protected final static boolean v146OrLater = VersionComparator.compareVersions(ChatHelper.getMinecraftVersion(), "1.4.6") >= 0; protected final static boolean v150OrLater = VersionComparator.compareVersions(ChatHelper.getMinecraftVersion(), "1.5.0") >= 0; protected final static boolean v161OrLater = VersionComparator.compareVersions(ChatHelper.getMinecraftVersion(), "1.6.1") >= 0; protected final static boolean v162OrLater = 
VersionComparator.compareVersions(ChatHelper.getMinecraftVersion(), "1.6.2") >= 0; protected final static EntitySpawner[] ENTITYSPAWNER = new EntitySpawner[EntityType.values().length]; @SuppressWarnings("unchecked") protected final static Set<Class<? extends EntityPropertyInterface>>[] ENTITYPROPERTIES = new Set[EntityType.values().length]; static { // Spawner - Default for (final EntityType type : EntityType.values()) if (type.isSpawnable()) registerEntitySpawner(new DefaultSpawner(type)); // Spawner - Fixes registerEntitySpawner(new CenteredSpawner(EntityType.ENDER_CRYSTAL) { @Override public Entity spawn(final Location location) { final Entity entity = super.spawn(location); location.clone().add(0, 1, 0).getBlock().setType(Material.FIRE); location.getBlock().setType(Material.BEDROCK); return entity; } }); registerEntitySpawner(new BasicSpawner(EntityType.DROPPED_ITEM) { private final ItemStack item = new ItemStack(1); @Override public Entity spawn(final Location location) { return location.getWorld().dropItem(location, item); } }); registerEntitySpawner(new ClassSpawner(EntityType.FIREWORK)); registerEntitySpawner(new FallingBlockSpawner()); registerEntitySpawner(new LightningSpawner()); // Add Spawners to NamedEntitySpawnerParamitrisable for (final EntitySpawner spawner : ENTITYSPAWNER) if (spawner != null) if (spawner instanceof NamedEntitySpawner) NamedEntitySpawnerParamitrisable.registerNamedEntitySpawner((NamedEntitySpawner) spawner, spawner.getType().name(), spawner.getType().getName()); // Properties for (final EntityType type : EntityType.values()) ENTITYPROPERTIES[type.ordinal()] = new LinkedHashSet<Class<? extends EntityPropertyInterface>>(); // Properties - VIP required to be first! 
registerEntityProperty(FallingBlockProperty.class, FallingBlock.class); registerEntityProperty(LightningProperty.class, LightningStrike.class); // Properties - Sorted by EntityInterfaces registerEntityProperty(AgeProperty.class, Ageable.class); registerEntityProperty(BoatProperty.class, Boat.class); registerEntityProperty(ColorableProperty.class, Colorable.class); registerEntityProperty(AlarmProperty.class, Creature.class); registerEntityProperty(DetectionProperty.class, Creature.class); registerEntityProperty(CreeperProperty.class, Creeper.class); if (v146OrLater) registerEntityProperty(HealthProperty.class, LivingEntity.class); registerEntityProperty(EndermanProperty.class, Enderman.class); registerEntityProperty(DespawnProperty.class, Entity.class, LivingEntity.class); registerEntityProperty(BurningProperty.class, Entity.class); registerEntityProperty(InvulnerableProperty.class, Entity.class); registerEntityProperty(VelocityProperty.class, Entity.class); registerEntityProperty(PassengerProperty.class, Entity.class); registerEntityProperty(PeacefulProperty.class, Entity.class); registerEntityProperty(ExperienceOrbProperty.class, ExperienceOrb.class); registerEntityProperty(ExplosiveProperty.class, Explosive.class); registerEntityProperty(FallingBlockExtendedProperty.class, FallingBlock.class); // Fireball required? registerEntityProperty(FireworkProperty.class, Firework.class); // Hanging required? if (v162OrLater) registerEntityProperty(HorseProperty.class, Horse.class); // InventoryHolder required? registerEntityProperty(IronGolemProperty.class, IronGolem.class); registerEntityProperty(AlarmProperty.class, Item.class); registerEntityProperty(DroppedItemProperty.class, Item.class); // ItemFrame required? 
registerEntityProperty(DamageProperty.class, LivingEntity.class); registerEntityProperty(LivingDespawnProperty.class, LivingEntity.class); registerEntityProperty(EquipmentProperties.class, LivingEntity.class); if (v150OrLater) registerEntityProperty(NameProperty.class, LivingEntity.class); registerEntityProperty(PotionProterty.class, LivingEntity.class); registerEntityProperty(XPProperty.class, LivingEntity.class); // Minecard required? registerEntityProperty(OcelotProperty.class, Ocelot.class); // Painting required? registerEntityProperty(PigProperty.class, Pig.class); registerEntityProperty(PigZombieProperty.class, PigZombie.class); // Projectile required? registerEntityProperty(SheepProperty.class, Sheep.class); registerEntityProperty(SkeletonProperty.class, Skeleton.class); registerEntityProperty(SlimeProperty.class, Slime.class); registerEntityProperty(TameableProperty.class, Tameable.class); // TNTPrimed impossible? registerEntityProperty(VillagerProperty.class, Villager.class); registerEntityProperty(WolfProperty.class, Wolf.class); registerEntityProperty(ZombieProperty.class, Zombie.class); } public static void registerEntitySpawner(final EntitySpawner spawner) { ENTITYSPAWNER[spawner.getType().ordinal()] = spawner; } public static Set<EntityType> getSpawnableEntityTypes() { final Set<EntityType> res = new HashSet<EntityType>(); for (final EntityType type : EntityType.values()) if (ENTITYSPAWNER[type.ordinal()] != null) res.add(type); return res; } public static void registerEntityProperty(final Class<? extends EntityPropertyInterface> propertyClass, final Class<?> targetClass) { for (final EntityType type : EntityType.values()) if (type.getEntityClass() != null && targetClass.isAssignableFrom(type.getEntityClass())) ENTITYPROPERTIES[type.ordinal()].add(propertyClass); } public static void registerEntityProperty(final Class<? extends EntityPropertyInterface> propertyClass, final Class<?> targetClass, final Class<?>... 
ignoredClasses) { for (final EntityType type : EntityType.values()) if (type.getEntityClass() != null && targetClass.isAssignableFrom(type.getEntityClass())) { for (final Class<?> ignoredClass : ignoredClasses) if (ignoredClass.isAssignableFrom(type.getEntityClass())) return; ENTITYPROPERTIES[type.ordinal()].add(propertyClass); } } protected static List<EntityPropertyInterface> getDefaultEntityProperties(final EntityType type) { final Set<Class<? extends EntityPropertyInterface>> properties = ENTITYPROPERTIES[type.ordinal()]; final List<EntityPropertyInterface> res = new ArrayList<EntityPropertyInterface>(properties.size()); for (final Class<? extends EntityPropertyInterface> property : properties) try { res.add(property.newInstance()); } catch (final Exception e) { System.err.println("WARNING: Serious Bug detected, please report this!"); System.err.println("EntityType: " + type.name() + ", Property: " + property.getSimpleName()); e.printStackTrace(); } return res; } protected static List<EntityPropertyInterface> getEntityPropertiesFromConfig(final EntityType type, final ConfigurationSection config) { final Set<Class<? extends EntityPropertyInterface>> properties = ENTITYPROPERTIES[type.ordinal()]; final List<EntityPropertyInterface> res = new ArrayList<EntityPropertyInterface>(properties.size()); for (final Class<? extends EntityPropertyInterface> property : properties) try { res.add(property.getConstructor(ConfigurationSection.class).newInstance(config)); } catch (final Exception e) { System.err.println("WARNING: Serious Bug detected, please report this!"); System.err.println("EntityType: " + type.name() + ", Property: " + property.getSimpleName()); e.printStackTrace(); } return res; } protected static List<EntityPropertyInterface> getEntityPropertiesFromParams(final EntityType type, final Map<String, ? extends Paramitrisable> params) { final Set<Class<? 
extends EntityPropertyInterface>> properties = ENTITYPROPERTIES[type.ordinal()]; final List<EntityPropertyInterface> res = new ArrayList<EntityPropertyInterface>(properties.size()); for (final Class<? extends EntityPropertyInterface> property : properties) try { res.add(property.getConstructor(Map.class).newInstance(params)); } catch (final Exception e) { System.err.println("WARNING: Serious Bug detected, please report this!"); System.err.println("EntityType: " + type.name() + ", Property: " + property.getSimpleName()); e.printStackTrace(); } return res; } public static StringParamitrisable getCommandParams(final EntityType type, final Map<String, ? super TabbedParamitrisable> params, final CommandSender sender) { final StringParamitrisable nameParam = new StringParamitrisable(null); params.put("n", nameParam); params.put("name", nameParam); for (final EntityPropertyInterface property : getDefaultEntityProperties(type)) property.getCommandParams(params, sender); return nameParam; } public static int getTotalSpawnableEntityTypeCount() { return getSpawnableEntityTypes().size(); } public static int getTotalPropertiesCount() { return getAllPropertyClasses().size(); } protected static Set<Class<? extends EntityPropertyInterface>> getAllPropertyClasses() { final Set<Class<? extends EntityPropertyInterface>> properties = new HashSet<Class<? extends EntityPropertyInterface>>(); for (final EntityType type : getSpawnableEntityTypes()) properties.addAll(ENTITYPROPERTIES[type.ordinal()]); return properties; } public static int getTotalCommandParamsCount() { final Map<String, Paramitrisable> params = new HashMap<String, Paramitrisable>(); final ConsoleCommandSender console = Bukkit.getConsoleSender(); for (final Class<? 
extends EntityPropertyInterface> property : getAllPropertyClasses()) try { property.newInstance().getCommandParams(params, console); } catch (final Exception e) { System.err.println("WARNING: Serious Bug detected, please report this!"); System.err.println("Property: " + property.getSimpleName()); e.printStackTrace(); } return new HashSet<Paramitrisable>(params.values()).size(); } protected final String name; protected final EntityType type; protected final List<EntityPropertyInterface> properties; public CustomEntitySpawner(final EntityType type) { this(type.getName() == null ? type.name() : type.getName(), type); } public CustomEntitySpawner(final String name, final EntityType type) { super(); if (name == null) throw new IllegalArgumentException("Name cannot be null!"); if (name.length() == 0) throw new IllegalArgumentException("Name cannot be empty!"); this.name = name.toUpperCase(); if (type == null) throw new IllegalArgumentException("Type cannot be null!"); this.type = type; this.properties = getDefaultEntityProperties(type); } public CustomEntitySpawner(final ConfigurationSection config) { super(); if (config == null) throw new IllegalArgumentException("Config cannot be null!"); this.name = config.getString("name", "UNNAMED").toUpperCase(); final String typeName = config.getString("type", null); if (typeName == null) throw new IllegalArgumentException("Type cannot be null!"); this.type = EntityType.valueOf(typeName.toUpperCase()); if (type == null) throw new IllegalArgumentException("Type cannot be null!"); this.properties = getEntityPropertiesFromConfig(type, config); } public CustomEntitySpawner(final EntityType type, final Map<String, ? 
extends Paramitrisable> params) { super(); final StringParamitrisable nameParam = (StringParamitrisable) params.get("name"); this.name = nameParam.getValue().toUpperCase(); if (type == null) throw new IllegalArgumentException("EntityType cannot be null!"); this.type = type; this.properties = getEntityPropertiesFromParams(type, params); } /** * Creates a CustomEntitySpawner from args.<br> * This is a helper method for default custom entities. * * @param name * The name of the custom entity. * @param type * The entity type of this spawner. * @param sender * The CommandSender how creates this object. * @param args * The params to create this object. */ public CustomEntitySpawner(final String name, final EntityType type, final CommandSender sender, final String... args) { super(); this.name = name; if (type == null) throw new IllegalArgumentException("Type cannot be null!"); this.type = type; final Map<String, Paramitrisable> params = new HashMap<String, Paramitrisable>(); getCommandParams(type, params, sender); for (final String arg : args) { final String[] split = arg.split(":", 2); final Paramitrisable param = params.get(split[0]); if (param != null) try { param.setParameter(split[1]); } catch (final CrazyException e) { e.printStackTrace(); } } this.properties = getEntityPropertiesFromParams(type, params); } @Override public final String getName() { return name; } @Override public final EntityType getType() { return type; } @Override public final Class<? 
extends Entity> getEntityClass() { return type.getEntityClass(); } protected EntitySpawner getSpawner() { if (!properties.isEmpty()) { final EntityPropertyInterface property = properties.get(0); if (property instanceof EntitySpawner) return (EntitySpawner) property; } return ENTITYSPAWNER[type.ordinal()]; } public final boolean isSpawnable() { return getSpawner() != null; } @Override public final Entity spawn(final Location location) { final EntitySpawner spawner = getSpawner(); if (spawner == null) return null; final Entity entity = spawner.spawn(location); if (entity == null) return null; entity.setMetadata(METAHEADER, this); apply(entity); return entity; } @Localized({ "CRAZYSPAWNER.ENTITY.PROPERTY.NAME $Name$", "CRAZYSPAWNER.ENTITY.PROPERTY.TYPE $EntityType$" }) public void show(final CommandSender target) { CrazySpawner.getPlugin().sendLocaleMessage("ENTITY.PROPERTY.NAME", target, name); CrazySpawner.getPlugin().sendLocaleMessage("ENTITY.PROPERTY.TYPE", target, type.name()); for (final EntityPropertyInterface property : properties) property.show(target); } /** * Apply all features to the given entity.<br> * EntityType of this Spawner must match the EntityType of the given entity. * * @param entity * The entity the properties should be applied to. */ public final void apply(final Entity entity) { for (final EntityPropertyInterface property : properties) property.apply(entity); } @Override public Collection<? extends Entity> getEntities(final World world) { // EDIT include entity properties or check meta return world.getEntitiesByClass(type.getEntityClass()); } public final StringParamitrisable getCommandParams(final Map<String, ? 
super TabbedParamitrisable> params, final CommandSender sender) { final StringParamitrisable nameParam = new StringParamitrisable(name); params.put("n", nameParam); params.put("name", nameParam); for (final EntityPropertyInterface property : properties) property.getCommandParams(params, sender); return nameParam; } public final void addEntityProperty(final EntityPropertyInterface property) { if (property == null) return; for (int i = 0; i < properties.size(); i++) if (properties.get(i).getClass().getName().equals(property.getClass().getName())) { properties.set(i, property); return; } properties.add(property); } @Override public void save(final ConfigurationSection config, final String path) { config.set(path + "name", name.toUpperCase()); config.set(path + "type", type.name()); for (final EntityPropertyInterface property : properties) property.save(config, path); } public void dummySave(final ConfigurationSection config, final String path) { config.set(path + "name", "String"); config.set(path + "type", "EntityType"); for (final EntityPropertyInterface property : properties) property.dummySave(config, path); } private abstract static class BasicSpawner implements NamedEntitySpawner { protected final EntityType type; public BasicSpawner(final EntityType type) { this.type = type; } @Override public final EntityType getType() { return type; } @Override public String getName() { return type.getName(); } @Override public final Class<? extends Entity> getEntityClass() { return type.getEntityClass(); } @Override public abstract Entity spawn(Location location); @Override public Collection<? 
extends Entity> getEntities(final World world) { return world.getEntitiesByClass(type.getEntityClass()); } } private static class DefaultSpawner extends BasicSpawner { public DefaultSpawner(final EntityType type) { super(type); } @Override public Entity spawn(final Location location) { return location.getWorld().spawnEntity(location, type); } } private static class CenteredSpawner extends DefaultSpawner { public CenteredSpawner(final EntityType type) { super(type); } @Override public Entity spawn(final Location location) { location.setX(Math.floor(location.getX()) + 0.5); location.setY(Math.floor(location.getY())); location.setZ(Math.floor(location.getZ()) + 0.5); location.setYaw(0); location.setPitch(0); return super.spawn(location); } } private static class ClassSpawner extends DefaultSpawner { public ClassSpawner(final EntityType type) { super(type); } @Override public Entity spawn(final Location location) { try { return location.getWorld().spawn(location, type.getEntityClass()); } catch (final Exception e) { e.printStackTrace(); return null; } } } public static class FallingBlockSpawner extends DefaultSpawner { protected final Material material; protected final byte data; public FallingBlockSpawner() { super(EntityType.FALLING_BLOCK); this.material = Material.STONE; this.data = 0; } public FallingBlockSpawner(final Material material, final byte data) { super(EntityType.FALLING_BLOCK); if (material == null) throw new IllegalArgumentException("Material cannot be null!"); this.material = material; this.data = data; } @Override public final FallingBlock spawn(final Location location) { try { return location.getWorld().spawnFallingBlock(location, material, data); } catch (final Exception e) { e.printStackTrace(); return null; } } @Override public final Collection<FallingBlock> getEntities(final World world) { final Collection<FallingBlock> entities = world.getEntitiesByClass(FallingBlock.class); final Iterator<FallingBlock> it = entities.iterator(); while 
(it.hasNext()) if (it.next().getMaterial() != material) it.remove(); return entities; } } public static class LightningSpawner extends DefaultSpawner { protected final boolean effect; public LightningSpawner() { super(EntityType.LIGHTNING); this.effect = false; } public LightningSpawner(final boolean effect) { super(EntityType.LIGHTNING); this.effect = effect; } @Override public final String getName() { return "LIGHTNINGSTRIKE"; } @Override public final LightningStrike spawn(final Location location) { if (effect) return location.getWorld().strikeLightningEffect(location); else return location.getWorld().strikeLightning(location); } @Override public final Collection<LightningStrike> getEntities(final World world) { final Collection<LightningStrike> entities = world.getEntitiesByClass(LightningStrike.class); final Iterator<LightningStrike> it = entities.iterator(); while (it.hasNext()) if (it.next().isEffect() != effect) it.remove(); return entities; } } @Override public boolean equals(final Object obj) { if (obj instanceof CustomEntitySpawner) return name.equals(((CustomEntitySpawner) obj).name); else return false; } @Override public int hashCode() { return name.hashCode(); } @Override public final CustomEntitySpawner value() { return this; } @Override public final int asInt() { return 0; } @Override public final float asFloat() { return 0; } @Override public final double asDouble() { return 0; } @Override public final long asLong() { return 0; } @Override public final short asShort() { return 0; } @Override public final byte asByte() { return 0; } @Override public final boolean asBoolean() { return false; } @Override public final String asString() { return toString(); } @Override public final CrazySpawner getOwningPlugin() { return CrazySpawner.getPlugin(); } @Override public final void invalidate() { } }
CrazySpawner: also apply Spawner Metadata to applied entities.
src/de/st_ddt/crazyspawner/entities/CustomEntitySpawner.java
CrazySpawner: also apply Spawner Metadata to applied entities.
<ide><path>rc/de/st_ddt/crazyspawner/entities/CustomEntitySpawner.java <ide> final Entity entity = spawner.spawn(location); <ide> if (entity == null) <ide> return null; <del> entity.setMetadata(METAHEADER, this); <ide> apply(entity); <ide> return entity; <ide> } <ide> */ <ide> public final void apply(final Entity entity) <ide> { <add> entity.setMetadata(METAHEADER, this); <ide> for (final EntityPropertyInterface property : properties) <ide> property.apply(entity); <ide> }
Java
bsd-2-clause
d6b1699804af8f65a0703eb0d925d7cafcadd616
0
GandhiCorn/Ryhmae57-OHTU-miniprojekti
package ryhma57.references; import java.util.EnumSet; import ryhma57.backend.BibtexReferenceField; import static ryhma57.backend.BibtexReferenceField.*; public class Book extends Reference { private static EnumSet<BibtexReferenceField> existingFields; private static EnumSet<BibtexReferenceField> requiredFields, optionalFields; static { Book.requiredFields = Reference.createFieldSet(AUTHOR, EDITOR, TITLE, YEAR, PUBLISHER); Book.optionalFields = Reference.createFieldSet(VOLUME, NUMBER, SERIES, ADDRESS, EDITION, MONTH, NOTE); Book.existingFields = Reference.createExistingSet( Book.requiredFields, Book.optionalFields); } public Book() { super(existingFields, requiredFields, "book"); } }
Ryhma57/src/main/java/ryhma57/references/Book.java
package ryhma57.references; import java.util.EnumSet; import ryhma57.backend.BibtexReferenceField; import static ryhma57.backend.BibtexReferenceField.*; public class Book extends Reference { private static EnumSet<BibtexReferenceField> existingFields; private static EnumSet<BibtexReferenceField> requiredFields, optionalFields; static { Book.requiredFields = Reference.createFieldSet(AUTHOR, EDITOR, TITLE, YEAR, PUBLISHER); Book.optionalFields = Reference.createFieldSet(VOLUME, NUMBER, SERIES, ADDRESS, EDITION, MONTH, NOTE); Book.existingFields = Reference.createExistingSet( Book.requiredFields, Book.optionalFields); } public Book(String id, String author, String title, String year, String publisher) { super(existingFields, requiredFields, "book"); setID(id); setField(AUTHOR, author); setField(TITLE, title); setField(YEAR, year); setField(PUBLISHER, publisher); } public Book() { super(existingFields, requiredFields, "book"); } }
Remove unused function.
Ryhma57/src/main/java/ryhma57/references/Book.java
Remove unused function.
<ide><path>yhma57/src/main/java/ryhma57/references/Book.java <ide> Book.requiredFields, Book.optionalFields); <ide> } <ide> <del> public Book(String id, String author, String title, String year, String publisher) { <del> super(existingFields, requiredFields, "book"); <del> setID(id); <del> setField(AUTHOR, author); <del> setField(TITLE, title); <del> setField(YEAR, year); <del> setField(PUBLISHER, publisher); <del> } <ide> public Book() { <ide> super(existingFields, requiredFields, "book"); <ide> }
JavaScript
mit
16bc06d710f3cf481c83349b5258e87b53207b8d
0
tstumpenhusen/JSLoader,tstumpenhusen/JSLoader
define([], function() { return function() { var _config = {}; /** * Splits Selectors from Configurations * @param {object} config * @private */ function _parseConfig(config) { for (var selector in config) { _parseConfigNode(selector, config[selector]); } } /** * Validate Configuration and Call Config Save * @param {string} selector * @param {object} config * @private */ function _parseConfigNode(selector, config) { var configType = Object.prototype.toString.call(config); if (configType === "[object Array]") { for (var index in config) { _parseConfigNode(selector, config[index]); } } else if (configType === "[object Object]" && config.hasOwnProperty("extensions")) { // Validate Callback config.callback = (typeof config.callback === "function" ? config.callback : undefined); _addConfigToSelector(selector, config); } else { console.error("Invalid Configuration given for " + selector + "!", config); } } /** * Saves Configuration in Config-Array for Selector * @param {string} selector * @param {object} config * @private */ function _addConfigToSelector(selector, config) { if (!_config.hasOwnProperty(selector)) { _config[selector] = []; } _config[selector].push(config); } /** * Calls load for all Selectors * @private */ function _loadAll() { for (var index in _config) { _load(index, _config[index]); } } /** * Call loading of extensions for each configuration in selector * @param {string} selector * @param {...object} configArray * @private */ function _load(selector, configArray) { var result = false; for (var index in configArray) { result = _loadIfElementExists(selector, configArray[index], result) if (!result) { break; } } } /** * Uses RequireJs to load Extensions if element exists * @param {string} element * @param {object} config * @param {boolean} elementAlreadyFound * @returns {boolean} true = success | false = element not found, load aborted * @private */ function _loadIfElementExists(element, config, elementAlreadyFound) { if (elementAlreadyFound === true || 
document.querySelectorAll(element).length >= element.split(",").length) { requirejs(config.extensions, config.callback); return true; } return false; } return { getConfig: function() { return _config; }, /** * Parses and saved Configuration for later loading * @param {object} config * @returns {object} */ addConfig: function(config) { _parseConfig(config); return this; }, /** * Loads all Extensions when Dom is loaded * @see _loadAll */ load: function() { if (document.readyState !== "complete") { window.onload = _loadAll; } else { _loadAll(); } } } }; });
assets/scripts/lib/ExtensionLoader.js
define([], function() { return function() { var _config = {}; /** * Splits Selectors from Configurations * @param {object} config * @private */ function _parseConfig(config) { for (var selector in config) { _parseConfigNode(selector, config[selector]); } } /** * Validate Configuration and Call Config Save * @param {string} selector * @param {object} config * @private */ function _parseConfigNode(selector, config) { var configType = Object.prototype.toString.call(config); if (configType === "[object Array]") { for (var index in config) { _parseConfigNode(selector, config[index]); } } else if (configType === "[object Object]" && config.hasOwnProperty("extensions")) { // Validate Callback config.callback = (typeof config.callback === "function" ? config.callback : undefined); _addConfigToSelector(selector, config); } else { console.error("Invalid Configuration given for " + selector + "!", config); } } /** * Saves Configuration in Config-Array for Selector * @param {string} selector * @param {object} config * @private */ function _addConfigToSelector(selector, config) { if (!_config.hasOwnProperty(selector)) { _config[selector] = []; } _config[selector].push(config); } /** * Calls load for all Selectors * @private */ function _loadAll() { for (var index in _config) { _load(index, _config[index]); } } /** * Call loading of extensions for each configuration in selector * @param {string} selector * @param {...object} configArray * @private */ function _load(selector, configArray) { for (var index in configArray) { if (!_loadIfElementExists(selector, configArray[index])) { break; } } } /** * Uses RequireJs to load Extensions if element exists * @param {string} element * @param {object} config * @returns {boolean} true = success | false = element not found, load aborted * @private */ function _loadIfElementExists(element, config) { if (document.querySelectorAll(element).length >= element.split(",").length) { requirejs(config.extensions, config.callback); return true; } 
return false; } return { getConfig: function() { return _config; }, /** * Parses and saved Configuration for later loading * @param {object} config * @returns {object} */ addConfig: function(config) { _parseConfig(config); return this; }, /** * Loads all Extensions when Dom is loaded * @see _loadAll */ load: function() { if (document.readyState !== "complete") { window.onload = _loadAll; } else { _loadAll(); } } } }; });
Performance-Update on loading extensions
assets/scripts/lib/ExtensionLoader.js
Performance-Update on loading extensions
<ide><path>ssets/scripts/lib/ExtensionLoader.js <ide> * @private <ide> */ <ide> function _load(selector, configArray) { <add> var result = false; <ide> for (var index in configArray) { <del> if (!_loadIfElementExists(selector, configArray[index])) { <add> result = _loadIfElementExists(selector, configArray[index], result) <add> if (!result) { <ide> break; <ide> } <ide> } <ide> * Uses RequireJs to load Extensions if element exists <ide> * @param {string} element <ide> * @param {object} config <add> * @param {boolean} elementAlreadyFound <ide> * @returns {boolean} true = success | false = element not found, load aborted <ide> * @private <ide> */ <del> function _loadIfElementExists(element, config) { <del> if (document.querySelectorAll(element).length >= element.split(",").length) { <add> function _loadIfElementExists(element, config, elementAlreadyFound) { <add> if (elementAlreadyFound === true || document.querySelectorAll(element).length >= element.split(",").length) { <ide> requirejs(config.extensions, config.callback); <ide> return true; <ide> }
JavaScript
agpl-3.0
7edbe58ecb4d24ade8a1f060dc6738a47ed0e7cd
0
3drepo/3drepo.io,3drepo/3drepo.io,3drepo/3drepo.io,3drepo/3drepo.io,3drepo/3drepo.io,3drepo/3drepo.io
/** * Copyright (C) 2016 3D Repo Ltd * * This program is free software: you can redistribute it and/or modify * it under the issuesPin of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ (function () { "use strict"; angular.module("3drepo") .component( "issuesPin", { controller: IssuesPinCtrl, bindings: { account: "<", project: "<", sendEvent: "&", event: "<", setPin: "&", clearPin: "<" } } ); IssuesPinCtrl.$inject = ["EventService"]; function IssuesPinCtrl (EventService) { var self = this, newPinId = "newPinId", pinDropMode = false; // Init this.setPin({data: null}); /** * Monitor changes to parameters * @param {Object} changes */ this.$onChanges = function (changes) { var data, position = [], normal = [], pickedPos = null, pickedNorm = null; if (changes.hasOwnProperty("event") && (changes.event.currentValue !== null)) { if ((changes.event.currentValue.type === EventService.EVENT.VIEWER.PICK_POINT) && (changes.event.currentValue.value.hasOwnProperty("id")) && pinDropMode) { removePin(); var trans = changes.event.currentValue.value.trans; position = changes.event.currentValue.value.position; normal = changes.event.currentValue.value.normal; if(trans) { position = trans.inverse().multMatrixPnt(position); } data = { id: newPinId, account: self.account, project: self.project, position: position.toGL(), norm: normal.toGL(), selectedObjectId: changes.event.currentValue.value.id, pickedPos: position, pickedNorm: normal, colours: [[1.0, 0.7, 0]] }; self.sendEvent({type: 
EventService.EVENT.VIEWER.ADD_PIN, value: data}); this.setPin({data: data}); } else if (changes.event.currentValue.type === EventService.EVENT.VIEWER.BACKGROUND_SELECTED) { removePin(); } else if (changes.event.currentValue.type === EventService.EVENT.PIN_DROP_MODE) { pinDropMode = changes.event.currentValue.value; } } if (changes.hasOwnProperty("clearPin") && changes.clearPin.currentValue) { removePin(); } }; /** * Remove pin when component is destroyed */ this.$onDestroy = function () { removePin(); }; function removePin () { self.sendEvent({type: EventService.EVENT.VIEWER.REMOVE_PIN, value: {id: newPinId}}); self.sendEvent({type: EventService.EVENT.VIEWER.HIGHLIGHT_OBJECTS, value: []}); self.setPin({data: null}); } } }());
frontend/issues/js/issuePinComponent.js
/** * Copyright (C) 2016 3D Repo Ltd * * This program is free software: you can redistribute it and/or modify * it under the issuesPin of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ (function () { "use strict"; angular.module("3drepo") .component( "issuesPin", { controller: IssuesPinCtrl, bindings: { account: "<", project: "<", sendEvent: "&", event: "<", setPin: "&", clearPin: "<" } } ); IssuesPinCtrl.$inject = ["EventService"]; function IssuesPinCtrl (EventService) { var self = this, newPinId = "newPinId", pinDropMode = false; // Init this.setPin({data: null}); /** * Monitor changes to parameters * @param {Object} changes */ this.$onChanges = function (changes) { var data, position = [], normal = [], pickedPos = null, pickedNorm = null; if (changes.hasOwnProperty("event") && (changes.event.currentValue !== null)) { if ((changes.event.currentValue.type === EventService.EVENT.VIEWER.PICK_POINT) && (changes.event.currentValue.value.hasOwnProperty("id")) && pinDropMode) { removePin(); var trans = changes.event.currentValue.value.trans; position = changes.event.currentValue.value.position; normal = changes.event.currentValue.value.normal; if(trans) { position = trans.inverse().multMatrixPnt(position); } data = { id: newPinId, account: self.account, project: self.project, position: position.toGL(), norm: normal.toGL(), selectedObjectId: changes.event.currentValue.value.id, pickedPos: position, pickedNorm: normal, colours: [[0.5, 0, 0]] }; self.sendEvent({type: 
EventService.EVENT.VIEWER.ADD_PIN, value: data}); this.setPin({data: data}); } else if (changes.event.currentValue.type === EventService.EVENT.VIEWER.BACKGROUND_SELECTED) { removePin(); } else if (changes.event.currentValue.type === EventService.EVENT.PIN_DROP_MODE) { pinDropMode = changes.event.currentValue.value; } } if (changes.hasOwnProperty("clearPin") && changes.clearPin.currentValue) { removePin(); } }; /** * Remove pin when component is destroyed */ this.$onDestroy = function () { removePin(); }; function removePin () { self.sendEvent({type: EventService.EVENT.VIEWER.REMOVE_PIN, value: {id: newPinId}}); self.sendEvent({type: EventService.EVENT.VIEWER.HIGHLIGHT_OBJECTS, value: []}); self.setPin({data: null}); } } }());
#267 pin created when creating issue should already be in highlighted colour
frontend/issues/js/issuePinComponent.js
#267 pin created when creating issue should already be in highlighted colour
<ide><path>rontend/issues/js/issuePinComponent.js <ide> selectedObjectId: changes.event.currentValue.value.id, <ide> pickedPos: position, <ide> pickedNorm: normal, <del> colours: [[0.5, 0, 0]] <add> colours: [[1.0, 0.7, 0]] <ide> }; <ide> self.sendEvent({type: EventService.EVENT.VIEWER.ADD_PIN, value: data}); <ide> this.setPin({data: data});
JavaScript
apache-2.0
5678c34f1b130ab512aeae9a003119c1a04e5399
0
Thorium-Sim/thorium,Thorium-Sim/thorium,Thorium-Sim/thorium,Thorium-Sim/thorium
import React, {Fragment} from "react"; import { Button, Row, Col, Card, CardBody, ListGroup, ListGroupItem, } from "helpers/reactstrap"; import {titleCase} from "change-case"; const ProbeEquipment = ({ selectedProbeType, probes, equipment, cancelProbe, prepareProbe, }) => { const [shownDescription, setShownDescription] = React.useState(null); const [savedEquipment, setSavedEquipment] = React.useState(equipment || []); const [selectedEquipment, setSelectedEquipment] = React.useState(null); const [selectedLoadedEquipment, setSelectedLoadedEquipment] = React.useState( null, ); const [selectedScienceType, setSelectedScienceType] = React.useState(null); const addToProbe = () => { let slicedEquipment = savedEquipment.slice(0); const e = selectedEquipment; const type = probes.types.find(p => p.id === selectedProbeType); const used = slicedEquipment.reduce((prev, next) => { return prev + next.count * next.size; }, 0); let eq = slicedEquipment.find(eq => eq.id === e.id); if (eq) { //Update the equipment if (used + eq.size < type.size) { eq.count += 1; } } else { //Add the equipment to the list if (used + e.size <= type.size) { slicedEquipment.push({ id: e.id, name: e.name, description: e.description, size: e.size, count: 1, }); } } setSavedEquipment(slicedEquipment); }; const removeFromProbe = () => { const e = selectedLoadedEquipment; setSavedEquipment(equipment => { return equipment .map(eq => { if (eq.id === e.id) { return {...eq, count: eq.count - 1}; } return eq; }) .filter(eq => eq.count > 0); }); }; const type = probes.types.find(p => p.id === selectedProbeType); const used = savedEquipment.reduce((prev, next) => { return prev + next.count * next.size; }, 0); return ( <Fragment> <Row className="probeEquipment"> {selectedProbeType === "science" && ( <Col sm={3} className="science-probe"> <h3>Configuration Options</h3> <ListGroup style={{height: "20vh", overflowY: "auto"}}> {probes.scienceTypes .concat() .sort((a, b) => { if (a.id > b.id) return 1; if (b.id > a.id) 
return -1; return 0; }) .map(s => ( <ListGroupItem key={s.id} active={selectedScienceType === s.id} onClick={() => setSelectedScienceType(s.id)} > {titleCase(`${s.name} ${s.type}`)} </ListGroupItem> ))} </ListGroup> <h3>Description</h3> <Card style={{height: "15vh", marginTop: "5px", overflowY: "auto"}}> <CardBody> {selectedScienceType && probes.scienceTypes.find(c => c.id === selectedScienceType) .description} </CardBody> </Card> <h3>Required Equipment</h3> <Card style={{height: "16vh", marginTop: "5px", overflowY: "auto"}}> <CardBody style={{whiteSpace: "pre-line"}}> {selectedScienceType && probes.scienceTypes .find(c => c.id === selectedScienceType) .equipment.map( e => probes.types .find(t => t.id === "science") .availableEquipment.find(q => q.id === e).name, ) .join("\n")} </CardBody> </Card> </Col> )} <Col sm={selectedProbeType === "science" ? 5 : 8}> <Row> <Col sm="12"> <h2>Available Equipment:</h2> <Card> <CardBody> <Row> <Col sm="8"> <strong>Name</strong> </Col> <Col sm="2"> <strong>Size</strong> </Col> <Col sm="2"> <strong>Qty</strong> </Col> </Row> </CardBody> <CardBody className="equipmentList"> {type.availableEquipment.map(e => { const used = equipment.find(eq => eq.id === e.id) || { count: 0, }; return ( <Row key={e.id} onClick={() => { setSelectedEquipment(e); setShownDescription(e.description); }} className="equipmentItem" style={{ backgroundColor: selectedEquipment && e.id === selectedEquipment.id ? 
"rgba(255,255,0,0.3)" : null, }} > <Col sm="8"> <p>{e.name}</p> </Col> <Col sm="2"> <p>{e.size}</p> </Col> <Col sm="2"> <p>{e.count - used.count}</p> </Col> </Row> ); })} </CardBody> </Card> </Col> </Row> <Row style={{marginTop: "10px"}}> <Col sm={6}> <Button color="danger" block disabled={!selectedLoadedEquipment} onClick={removeFromProbe} > Remove Equipment </Button> </Col> <Col sm={6}> <Button color="success" block disabled={!selectedEquipment} onClick={addToProbe} > Add Equipment </Button> </Col> </Row> <Row style={{marginTop: "20px"}}> <Col sm="12"> <h2>Loaded Equipment:</h2> <Card> <CardBody> <Row> <Col sm="6"> <strong>Name</strong> </Col> <Col sm="3"> <strong>Size</strong> </Col> <Col sm="3"> <strong>Qty</strong> </Col> </Row> </CardBody> <CardBody className="equipmentList"> {savedEquipment.map(e => ( <Row key={e.id} onClick={() => { setSelectedLoadedEquipment(e); setShownDescription(e.description); }} className="equipmentItem" style={{ backgroundColor: selectedLoadedEquipment && e.id === selectedLoadedEquipment.id ? "rgba(255,255,0,0.3)" : null, }} > <Col sm="8"> <p>{e.name}</p> </Col> <Col sm="2"> <p>{e.size}</p> </Col> <Col sm="2"> <p>{e.count}</p> </Col> </Row> ))} </CardBody> </Card> </Col> </Row> </Col> <Col sm="4" className="probe-control-buttons"> <p> <strong>Total Space: {type.size}</strong> </p> <p> <strong>Space Used: {used}</strong> </p> <p> <strong>Space Remaining: {type.size - used}</strong> </p> <Button block color="primary" onClick={() => prepareProbe(savedEquipment)} > Prepare Probe </Button> <Button block color="danger" onClick={cancelProbe}> Cancel Probe </Button> {shownDescription && ( <p className="description">{shownDescription}</p> )} </Col> </Row> </Fragment> ); }; export default ProbeEquipment;
src/components/views/ProbeConstruction/probeEquipment.js
import React, {Fragment} from "react"; import { Button, Row, Col, Card, CardBody, ListGroup, ListGroupItem, } from "helpers/reactstrap"; import {titleCase} from "change-case"; const ProbeEquipment = ({ selectedProbeType, probes, equipment, cancelProbe, prepareProbe, }) => { const [shownDescription, setShownDescription] = React.useState(null); const [savedEquipment, setSavedEquipment] = React.useState(equipment || []); const [selectedEquipment, setSelectedEquipment] = React.useState(null); const [selectedLoadedEquipment, setSelectedLoadedEquipment] = React.useState( null, ); const [selectedScienceType, setSelectedScienceType] = React.useState(null); const addToProbe = () => { let slicedEquipment = savedEquipment.slice(0); const e = selectedEquipment; const type = probes.types.find(p => p.id === selectedProbeType); const used = slicedEquipment.reduce((prev, next) => { return prev + next.count * next.size; }, 0); let eq = slicedEquipment.find(eq => eq.id === e.id); if (eq) { //Update the equipment if (used + eq.size < type.size) { eq.count += 1; } } else { //Add the equipment to the list if (used + e.size <= type.size) { slicedEquipment.push({ id: e.id, name: e.name, description: e.description, size: e.size, count: 1, }); } } setSavedEquipment(slicedEquipment); }; const removeFromProbe = () => { const e = selectedLoadedEquipment; setSavedEquipment(equipment => { return equipment .map(eq => { if (eq.id === e.id) { return {...eq, count: eq.count - 1}; } return eq; }) .filter(eq => eq.count > 0); }); }; const type = probes.types.find(p => p.id === selectedProbeType); const used = equipment.reduce((prev, next) => { return prev + next.count * next.size; }, 0); return ( <Fragment> <Row className="probeEquipment"> {selectedProbeType === "science" && ( <Col sm={3} className="science-probe"> <h3>Configuration Options</h3> <ListGroup style={{height: "20vh", overflowY: "auto"}}> {probes.scienceTypes .concat() .sort((a, b) => { if (a.id > b.id) return 1; if (b.id > a.id) return 
-1; return 0; }) .map(s => ( <ListGroupItem key={s.id} active={selectedScienceType === s.id} onClick={() => setSelectedScienceType(s.id)} > {titleCase(`${s.name} ${s.type}`)} </ListGroupItem> ))} </ListGroup> <h3>Description</h3> <Card style={{height: "15vh", marginTop: "5px", overflowY: "auto"}}> <CardBody> {selectedScienceType && probes.scienceTypes.find(c => c.id === selectedScienceType) .description} </CardBody> </Card> <h3>Required Equipment</h3> <Card style={{height: "16vh", marginTop: "5px", overflowY: "auto"}}> <CardBody style={{whiteSpace: "pre-line"}}> {selectedScienceType && probes.scienceTypes .find(c => c.id === selectedScienceType) .equipment.map( e => probes.types .find(t => t.id === "science") .availableEquipment.find(q => q.id === e).name, ) .join("\n")} </CardBody> </Card> </Col> )} <Col sm={selectedProbeType === "science" ? 5 : 8}> <Row> <Col sm="12"> <h2>Available Equipment:</h2> <Card> <CardBody> <Row> <Col sm="8"> <strong>Name</strong> </Col> <Col sm="2"> <strong>Size</strong> </Col> <Col sm="2"> <strong>Qty</strong> </Col> </Row> </CardBody> <CardBody className="equipmentList"> {type.availableEquipment.map(e => { const used = equipment.find(eq => eq.id === e.id) || { count: 0, }; return ( <Row key={e.id} onClick={() => { setSelectedEquipment(e); setShownDescription(e.description); }} className="equipmentItem" style={{ backgroundColor: selectedEquipment && e.id === selectedEquipment.id ? 
"rgba(255,255,0,0.3)" : null, }} > <Col sm="8"> <p>{e.name}</p> </Col> <Col sm="2"> <p>{e.size}</p> </Col> <Col sm="2"> <p>{e.count - used.count}</p> </Col> </Row> ); })} </CardBody> </Card> </Col> </Row> <Row style={{marginTop: "10px"}}> <Col sm={6}> <Button color="danger" block disabled={!selectedLoadedEquipment} onClick={removeFromProbe} > Remove Equipment </Button> </Col> <Col sm={6}> <Button color="success" block disabled={!selectedEquipment} onClick={addToProbe} > Add Equipment </Button> </Col> </Row> <Row style={{marginTop: "20px"}}> <Col sm="12"> <h2>Loaded Equipment:</h2> <Card> <CardBody> <Row> <Col sm="6"> <strong>Name</strong> </Col> <Col sm="3"> <strong>Size</strong> </Col> <Col sm="3"> <strong>Qty</strong> </Col> </Row> </CardBody> <CardBody className="equipmentList"> {savedEquipment.map(e => ( <Row key={e.id} onClick={() => { setSelectedLoadedEquipment(e); setShownDescription(e.description); }} className="equipmentItem" style={{ backgroundColor: selectedLoadedEquipment && e.id === selectedLoadedEquipment.id ? "rgba(255,255,0,0.3)" : null, }} > <Col sm="8"> <p>{e.name}</p> </Col> <Col sm="2"> <p>{e.size}</p> </Col> <Col sm="2"> <p>{e.count}</p> </Col> </Row> ))} </CardBody> </Card> </Col> </Row> </Col> <Col sm="4" className="probe-control-buttons"> <p> <strong>Total Space: {type.size}</strong> </p> <p> <strong>Space Used: {used}</strong> </p> <p> <strong>Space Remaining: {type.size - used}</strong> </p> <Button block color="primary" onClick={() => prepareProbe(savedEquipment)} > Prepare Probe </Button> <Button block color="danger" onClick={cancelProbe}> Cancel Probe </Button> {shownDescription && ( <p className="description">{shownDescription}</p> )} </Col> </Row> </Fragment> ); }; export default ProbeEquipment;
fix(Probes): When adding equipment to a probe, it now shows the correct space used total. Closes #2630
src/components/views/ProbeConstruction/probeEquipment.js
fix(Probes): When adding equipment to a probe, it now shows the correct space used total. Closes #2630
<ide><path>rc/components/views/ProbeConstruction/probeEquipment.js <ide> }; <ide> <ide> const type = probes.types.find(p => p.id === selectedProbeType); <del> const used = equipment.reduce((prev, next) => { <add> const used = savedEquipment.reduce((prev, next) => { <ide> return prev + next.count * next.size; <ide> }, 0); <ide>
Java
apache-2.0
7b157973cd96b40f4cc60e30cea9867709ca8d8c
0
IHTSDO/OTF-Mapping-Service,IHTSDO/OTF-Mapping-Service,IHTSDO/OTF-Mapping-Service,IHTSDO/OTF-Mapping-Service,IHTSDO/OTF-Mapping-Service,IHTSDO/OTF-Mapping-Service
package org.ihtsdo.otf.mapping.jpa.algo; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.InputStreamReader; import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.log4j.Logger; import org.ihtsdo.otf.mapping.algo.Algorithm; import org.ihtsdo.otf.mapping.helpers.PfsParameterJpa; import org.ihtsdo.otf.mapping.helpers.SearchResult; import org.ihtsdo.otf.mapping.helpers.SearchResultList; import org.ihtsdo.otf.mapping.jpa.algo.helpers.SimpleMetadataHelper; import org.ihtsdo.otf.mapping.jpa.helpers.LoggerUtility; import org.ihtsdo.otf.mapping.jpa.services.ContentServiceJpa; import org.ihtsdo.otf.mapping.jpa.services.RootServiceJpa; import org.ihtsdo.otf.mapping.rf2.Concept; import org.ihtsdo.otf.mapping.rf2.Description; import org.ihtsdo.otf.mapping.rf2.jpa.ConceptJpa; import org.ihtsdo.otf.mapping.rf2.jpa.DescriptionJpa; import org.ihtsdo.otf.mapping.services.ContentService; import org.ihtsdo.otf.mapping.services.helpers.ConfigUtility; import org.ihtsdo.otf.mapping.services.helpers.ProgressListener; // TODO: Auto-generated Javadoc /** * The Class SimpleLoaderAlgorithm. */ public class SimpleLoaderAlgorithm extends RootServiceJpa implements Algorithm, AutoCloseable { /** Listeners. */ private List<ProgressListener> listeners = new ArrayList<>(); /** Name of terminology to be loaded. */ private String terminology; /** The input directory. */ private String inputDir; /** Terminology version. */ private String version; /** Metadata counter. */ private int metadataCounter; /** The date format. */ final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd"); /** The log. */ private static Logger log; /** The log file. 
*/ private File logFile; /** The concept file name. */ final private String CONCEPT_FILE_NAME = "concepts.txt"; /** The parent child file name. */ final private String PARENT_CHILD_FILE_NAME = "parent-child.txt"; /** The concept attributes file name. */ final private String CONCEPT_ATTRIBUTES_FILE_NAME = "concept-attributes.txt"; /** The concept relationships file name. */ final private String CONCEPT_RELATIONSHIPS_FILE_NAME = "concept-relationships.txt"; /** The simple refsets file name. */ final private String SIMPLE_REFSETS_FILE_NAME = "simple-refset-members.txt"; /** The obj ct. */ private int objCt = 1001; Map<String, Integer> terminologyIds = new HashMap<>(); /** * Instantiates a {@link SimpleLoaderAlgorithm} from the specified parameters. * * @param terminology the terminology * @param version the version * @param inputDir the input dir * @param metadataCounter the metadata counter * @throws Exception the exception */ public SimpleLoaderAlgorithm(String terminology, String version, String inputDir, String metadataCounter) throws Exception { super(); this.terminology = terminology; this.version = version; this.inputDir = inputDir; this.metadataCounter = metadataCounter == null ? 1 : Integer.parseInt(metadataCounter); // initialize logger String rootPath = ConfigUtility.getConfigProperties().getProperty("map.principle.source.document.dir"); if (!rootPath.endsWith("/") && !rootPath.endsWith("\\")) { rootPath += "/"; } rootPath += "logs"; File logDirectory = new File(rootPath); if (!logDirectory.exists()) { logDirectory.mkdir(); } logFile = new File(logDirectory, "load_" + terminology + ".log"); LoggerUtility.setConfiguration("load", logFile.getAbsolutePath()); SimpleLoaderAlgorithm.log = LoggerUtility.getLogger("load"); } /** * Compute. 
* * @throws Exception the exception */ /* see superclass */ @SuppressWarnings("resource") /* see superclass */ @Override public void compute() throws Exception { // clear log before starting process PrintWriter writer = new PrintWriter(logFile); writer.print(""); writer.close(); boolean parChdFileExists = false; boolean conAttrFileExists = false; boolean conRelFileExists = false; boolean simpleRefsetFileExists = false; // Check the input directory File inputDirFile = new File(this.inputDir); if (!inputDirFile.exists()) { throw new Exception("Specified input directory does not exist"); } if (!new File(this.inputDir, CONCEPT_FILE_NAME).exists()) { throw new Exception( "The " + CONCEPT_FILE_NAME + " file of the input directory does not exist"); } if (!new File(this.inputDir, PARENT_CHILD_FILE_NAME).exists()) { log.info("The " + PARENT_CHILD_FILE_NAME + " file of the input directory does not exist. Making default isa relationships to root."); parChdFileExists = false; } else { parChdFileExists = true; } if (new File(this.inputDir, CONCEPT_ATTRIBUTES_FILE_NAME).exists()) { conAttrFileExists = true; } if (new File(this.inputDir, CONCEPT_RELATIONSHIPS_FILE_NAME).exists()) { conRelFileExists = true; } if (new File(this.inputDir, SIMPLE_REFSETS_FILE_NAME).exists()) { simpleRefsetFileExists = true; } log.info("Starting loading simple data"); log.info(" terminology = " + terminology); log.info(" version = " + version); log.info(" inputDir = " + inputDir); try { final ContentService contentService = new ContentServiceJpa(); contentService.setTransactionPerOperation(false); contentService.beginTransaction(); final Date now = new Date(); SimpleMetadataHelper helper = new SimpleMetadataHelper(terminology, version, dateFormat.format(now), contentService); log.info(" Create concept metadata"); helper.setMetadataCounter(metadataCounter); Map<String, Concept> conceptMap = helper.createMetadata(); // Create the root concept log.info(" Create the root concept"); Concept rootConcept = 
new ConceptJpa(); rootConcept.setTerminologyId("root"); rootConcept.setEffectiveTime(now); // assume active rootConcept.setActive(true); rootConcept.setModuleId(Long.parseLong(conceptMap.get("defaultModule").getTerminologyId())); rootConcept.setDefinitionStatusId( Long.parseLong(conceptMap.get("defaultDefinitionStatus").getTerminologyId())); rootConcept.setTerminology(terminology); rootConcept.setTerminologyVersion(version); rootConcept.setDefaultPreferredName(terminology + " Root Concept"); final Description rootDesc = new DescriptionJpa(); rootDesc.setTerminologyId("root"); rootDesc.setEffectiveTime(now); rootDesc.setActive(true); rootDesc.setModuleId(Long.parseLong(conceptMap.get("defaultModule").getTerminologyId())); rootDesc.setTerminology(terminology); rootDesc.setTerminologyVersion(version); rootDesc.setTerm(terminology + " Root Concept"); rootDesc.setConcept(rootConcept); rootDesc.setCaseSignificanceId( Long.valueOf(conceptMap.get("defaultCaseSignificance").getTerminologyId())); rootDesc.setLanguageCode("en"); rootDesc.setTypeId(Long.parseLong(conceptMap.get("preferred").getTerminologyId())); rootConcept.addDescription(rootDesc); rootConcept = contentService.addConcept(rootConcept); conceptMap.put(rootConcept.getTerminologyId(), rootConcept); // // Open the file and process the data // code\tpreferred\t[synonym\t,..] 
log.info(" Load concepts"); String line; FileInputStream fis = new FileInputStream(new File(inputDir, CONCEPT_FILE_NAME)); InputStreamReader isr = new InputStreamReader(fis, StandardCharsets.UTF_8); BufferedReader concepts = new BufferedReader(isr); //final BufferedReader concepts = // new BufferedReader(new FileReader(new File(inputDir, CONCEPT_FILE_NAME))); while ((line = concepts.readLine()) != null) { final String[] fields = parseLine(line); // skip header if (fields[0].equals("code")) { continue; } if (fields.length < 2) { throw new Exception("Unexpected line, not enough fields: " + line); } final String code = fields[0]; final String preferred = fields[1]; Concept concept = new ConceptJpa(); concept.setTerminologyId(code); concept.setEffectiveTime(now); // assume active concept.setActive(true); concept.setModuleId(Long.parseLong(conceptMap.get("defaultModule").getTerminologyId())); concept.setDefinitionStatusId( Long.parseLong(conceptMap.get("defaultDefinitionStatus").getTerminologyId())); concept.setTerminology(terminology); concept.setTerminologyVersion(version); concept.setDefaultPreferredName(preferred); final Description pref = new DescriptionJpa(); pref.setTerminologyId(objCt++ + ""); pref.setEffectiveTime(now); pref.setActive(true); pref.setModuleId(Long.parseLong(conceptMap.get("defaultModule").getTerminologyId())); pref.setTerminology(terminology); pref.setTerminologyVersion(version); pref.setTerm(preferred); pref.setConcept(concept); pref.setCaseSignificanceId( Long.valueOf(conceptMap.get("defaultCaseSignificance").getTerminologyId())); pref.setLanguageCode("en"); pref.setTypeId(Long.parseLong(conceptMap.get("preferred").getTerminologyId())); concept.addDescription(pref); for (int i = 2; i < fields.length; i++) { helper.createAttributeValue(concept, Long.parseLong(conceptMap.get("synonym").getTerminologyId()), fields[i], version, objCt++, now); } log.info( " concept = " + concept.getTerminologyId() + ", " + concept.getDefaultPreferredName()); 
concept = contentService.addConcept(concept); conceptMap.put(concept.getTerminologyId(), concept); concept = contentService.getConcept(concept.getId()); // If no par/chd file, make isa relationships to the root if (!parChdFileExists) { helper.createIsaRelationship(rootConcept, concept, objCt++ + "", terminology, version, dateFormat.format(now)); } } // If there is a par/chd file, need to create all those // relationships now if (parChdFileExists) { log.info(" Load par/chd relationships"); final BufferedReader parentChild = new BufferedReader(new FileReader(new File(inputDir, PARENT_CHILD_FILE_NAME))); while ((line = parentChild.readLine()) != null) { final String[] fields = parseLine(line); if (fields.length != 2) { throw new Exception("Unexpected number of fields: " + fields.length); } final Concept par = conceptMap.get(fields[0]); if (par == null) { throw new Exception("Unable to find parent concept " + line); } final Concept chd = conceptMap.get(fields[1]); if (chd == null) { throw new Exception("Unable to find child concept " + line); } helper.createIsaRelationship(par, chd, objCt++ + "", terminology, version, dateFormat.format(now)); } parentChild.close(); } // Create terminology-specific metadata concept - for any metadata created // with attr/rel/refsets final Concept targetTerminologyMetadataConcept = helper.createNewActiveConcept(terminology + " metadata", conceptMap.get("Metadata")); conceptMap.put(terminology + " metadata", targetTerminologyMetadataConcept); final Concept refsetsConcept = helper.createNewActiveConcept("Refsets", targetTerminologyMetadataConcept); conceptMap.put("Refsets", refsetsConcept); final Concept simpleRefsetsConcept = helper.createNewActiveConcept("Simple refsets", refsetsConcept); conceptMap.put("Simple refsets", simpleRefsetsConcept); final Concept asteriskRefsetConcept = helper.createNewActiveConcept("Asterisk refset", simpleRefsetsConcept); conceptMap.put("Asterisk refset", asteriskRefsetConcept); final Concept 
daggerRefsetConcept = helper.createNewActiveConcept("Dagger refset", simpleRefsetsConcept); conceptMap.put("Dagger refset", daggerRefsetConcept); // // Relationship types // Concept relationshipTypeConcept = conceptMap.get("relationshipType"); final Concept asteriskToDaggerConcept = helper.createNewActiveConcept("Asterisk to dagger", relationshipTypeConcept); conceptMap.put("Asterisk to dagger", asteriskToDaggerConcept); final Concept daggerToAsteriskConcept = helper.createNewActiveConcept("Dagger to asterisk", relationshipTypeConcept); conceptMap.put("Dagger to asterisk", daggerToAsteriskConcept); final Concept asteriskToAsteriskConcept = helper.createNewActiveConcept("Asterisk to asterisk", relationshipTypeConcept); conceptMap.put("Asterisk to asterisk", asteriskToAsteriskConcept); final Concept daggerToDaggerConcept = helper.createNewActiveConcept("Dagger to dagger", relationshipTypeConcept); conceptMap.put("Dagger to dagger", daggerToDaggerConcept); final Concept referenceConcept = helper.createNewActiveConcept("Reference", relationshipTypeConcept); conceptMap.put("Reference", referenceConcept); // If there is a concept attributes file, need to create all those // attributes now if (conAttrFileExists) { loadConceptAttributes(helper, conceptMap, now); } // If there is a concept relationships file, load now if (conRelFileExists) { loadConceptRelationships(helper, conceptMap, now); for (Concept cpt : conceptMap.values()) { for (Description d : cpt.getDescriptions()) { if (d.getTerm().contains("(") && d.getTerm().contains(")")) { d.setTerm(d.getTerm().substring(0, d.getTerm().lastIndexOf("("))); } } } } // If there is a simple refsets file , load now if (simpleRefsetFileExists) { loadSimpleRefsets(helper, conceptMap, now); } concepts.close(); contentService.commit(); // Tree position computation String isaRelType = conceptMap.get("isa").getTerminologyId(); log.info("Start creating tree positions root, " + isaRelType); 
contentService.computeTreePositions(terminology, version, isaRelType, "root"); // Clean-up log.info("done ..."); contentService.close(); } catch (Exception e) { e.printStackTrace(); log.error(e.getMessage(), e); throw new Exception("Unexpected exception:", e); } } /** * Load concept attributes. * * @param helper the helper * @param conceptMap the concept map * @param now the now * @throws Exception the exception */ private void loadConceptAttributes(SimpleMetadataHelper helper, Map<String, Concept> conceptMap, Date now) throws Exception { log.info(" Load concept attributes"); String line; try (FileInputStream fis = new FileInputStream(new File(inputDir, CONCEPT_ATTRIBUTES_FILE_NAME)); InputStreamReader isr = new InputStreamReader(fis, StandardCharsets.UTF_8); BufferedReader conAttr = new BufferedReader(isr)) { final Concept targetTerminologyMetadataConcept = conceptMap.get(terminology + " metadata"); while ((line = conAttr.readLine()) != null) { final String[] fields = parseLine(line); if (fields.length == 1) { addConceptAttributesMetadata(helper, targetTerminologyMetadataConcept, conceptMap, fields); } else if (fields.length != 3) { throw new Exception("Unexpected number of fields: " + fields.length); } else { final Concept con = conceptMap.get(fields[0]); if (con == null) { throw new Exception("Unable to find parent concept " + line); } final String attrType = fields[1]; final String value = fields[2]; helper.createAttributeValue(con, Long.parseLong(conceptMap.get(attrType).getTerminologyId()), value, version, objCt++, now); } } } } /** * Adds the concept attributes metadata. 
* * @param helper the helper * @param parentConcept the parent concept * @param conceptMap the concept map * @param fields the fields * @throws Exception the exception */ private void addConceptAttributesMetadata(SimpleMetadataHelper helper, Concept parentConcept, Map<String, Concept> conceptMap, String[] fields) throws Exception { // Create metadata concept from terminology input file final Concept newConcept = helper.createNewActiveConcept(fields[0], parentConcept); conceptMap.put(fields[0], newConcept); } /** * Load concept relationships. * * @param helper the helper * @param conceptMap the concept map * @param now the now * @throws Exception the exception */ private void loadConceptRelationships(SimpleMetadataHelper helper, Map<String, Concept> conceptMap, Date now) throws Exception { log.info(" Load concept relationships"); String line; try (final BufferedReader conAttr = new BufferedReader(new FileReader(new File(inputDir, CONCEPT_RELATIONSHIPS_FILE_NAME)));) { final Concept targetTerminologyMetadataConcept = conceptMap.get(terminology + " metadata"); while ((line = conAttr.readLine()) != null) { final String[] fields = parseLine(line); /* * if (fields.length == 1) { addConceptRelationshipsMetadata(helper, * targetTerminologyMetadataConcept, conceptMap, fields); } else if * (fields.length != 4) { throw new Exception("Unexpected number of fields: " + * fields.length); } else */ if (fields.length == 4) { final Concept sourceCon = conceptMap.get(fields[0]); if (sourceCon == null) { throw new Exception("Unable to find source concept " + line); } final Concept destinationCon = conceptMap.get(fields[1]); if (destinationCon == null) { //throw new Exception("Unable to find source concept " + line); System.out.println("Unable to find source concept " + fields[0] + " " + fields[1]); continue; } // relationship's terminologyId must reference the terminology of the matching description String terminologyId = ""; Set<Description> descriptions = 
sourceCon.getDescriptions(); final String type = fields[2]; final String label = fields[3]; for (Description d : descriptions) { if (d.getTerm().contains(label)) { if (terminologyIds.containsKey(d.getTerminologyId())) { terminologyId = d.getTerminologyId() + "~" + ((Integer)terminologyIds.get(d.getTerminologyId())) + 1; terminologyIds.put(d.getTerminologyId(), ((Integer)terminologyIds.get(d.getTerminologyId())) + 1); } else { terminologyId = d.getTerminologyId() + "~1"; terminologyIds.put(d.getTerminologyId(), Integer.parseInt("1")); } } } if (terminologyId.contentEquals("")) { terminologyId = objCt++ + ""; } helper.createRelationship(sourceCon, destinationCon, label, type, terminologyId, terminology, version, dateFormat.format(now)); } } } } /** * Adds the concept relationships metadata. * * @param helper the helper * @param parentConcept the parent concept * @param conceptMap the concept map * @param fields the fields * @throws Exception the exception */ private void addConceptRelationshipsMetadata(SimpleMetadataHelper helper, Concept parentConcept, Map<String, Concept> conceptMap, String[] fields) throws Exception { // Create metadata concept from terminology input file final Concept newConcept = helper.createNewActiveConcept(fields[0], parentConcept); conceptMap.put(fields[0], newConcept); } private void loadSimpleRefsets(SimpleMetadataHelper helper, Map<String, Concept> conceptMap, Date now) throws Exception { log.info(" Load simple refsets and members"); String line; try (final BufferedReader conAttr = new BufferedReader(new FileReader(new File(inputDir, SIMPLE_REFSETS_FILE_NAME)));) { while ((line = conAttr.readLine()) != null) { final String[] fields = parseLine(line); if (fields.length == 2) { final Concept con = conceptMap.get(fields[0]); if (con == null) { throw new Exception("Unable to find concept " + line); } final String refsetName = fields[1]; helper.createRefsetMember(con, refsetName, "" + objCt++, terminology, version, dateFormat.format(now)); } } 
} } private void addSimpleRefsetMetadata(SimpleMetadataHelper helper, Concept parentConcept, Map<String, Concept> conceptMap, String[] fields) throws Exception { // Create metadata concept from terminology input file final Concept newConcept = helper.createNewActiveConcept(fields[0], parentConcept); conceptMap.put(fields[0], newConcept); } /** * Parses the line. * * @param line the line * @return the string[] */ private String[] parseLine(String line) { line = line.replace("\r", ""); return line.indexOf('\t') != -1 ? line.split("\t") : line.split("\\|"); } /** * Adds the progress listener. * * @param l the l */ @Override public void addProgressListener(ProgressListener l) { listeners.add(l); } /** * Removes the progress listener. * * @param l the l */ @Override public void removeProgressListener(ProgressListener l) { listeners.remove(l); } /** * Reset. * * @throws Exception the exception */ @Override public void reset() throws Exception { // n/a } /** * Check preconditions. * * @throws Exception the exception */ @Override public void checkPreconditions() throws Exception { // n/a } /** * Cancel. * * @throws Exception the exception */ @Override public void cancel() throws Exception { // n/a } }
jpa-services/src/main/java/org/ihtsdo/otf/mapping/jpa/algo/SimpleLoaderAlgorithm.java
package org.ihtsdo.otf.mapping.jpa.algo; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.InputStreamReader; import java.io.PrintWriter; import java.nio.charset.StandardCharsets; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.log4j.Logger; import org.ihtsdo.otf.mapping.algo.Algorithm; import org.ihtsdo.otf.mapping.helpers.PfsParameterJpa; import org.ihtsdo.otf.mapping.helpers.SearchResult; import org.ihtsdo.otf.mapping.helpers.SearchResultList; import org.ihtsdo.otf.mapping.jpa.algo.helpers.SimpleMetadataHelper; import org.ihtsdo.otf.mapping.jpa.helpers.LoggerUtility; import org.ihtsdo.otf.mapping.jpa.services.ContentServiceJpa; import org.ihtsdo.otf.mapping.jpa.services.RootServiceJpa; import org.ihtsdo.otf.mapping.rf2.Concept; import org.ihtsdo.otf.mapping.rf2.Description; import org.ihtsdo.otf.mapping.rf2.jpa.ConceptJpa; import org.ihtsdo.otf.mapping.rf2.jpa.DescriptionJpa; import org.ihtsdo.otf.mapping.services.ContentService; import org.ihtsdo.otf.mapping.services.helpers.ConfigUtility; import org.ihtsdo.otf.mapping.services.helpers.ProgressListener; // TODO: Auto-generated Javadoc /** * The Class SimpleLoaderAlgorithm. */ public class SimpleLoaderAlgorithm extends RootServiceJpa implements Algorithm, AutoCloseable { /** Listeners. */ private List<ProgressListener> listeners = new ArrayList<>(); /** Name of terminology to be loaded. */ private String terminology; /** The input directory. */ private String inputDir; /** Terminology version. */ private String version; /** Metadata counter. */ private int metadataCounter; /** The date format. */ final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd"); /** The log. */ private static Logger log; /** The log file. */ private File logFile; /** The concept file name. 
*/ final private String CONCEPT_FILE_NAME = "concepts.txt"; /** The parent child file name. */ final private String PARENT_CHILD_FILE_NAME = "parent-child.txt"; /** The concept attributes file name. */ final private String CONCEPT_ATTRIBUTES_FILE_NAME = "concept-attributes.txt"; /** The concept relationships file name. */ final private String CONCEPT_RELATIONSHIPS_FILE_NAME = "concept-relationships.txt"; /** The simple refsets file name. */ final private String SIMPLE_REFSETS_FILE_NAME = "simple-refset-members.txt"; /** The obj ct. */ private int objCt = 1001; /** * Instantiates a {@link SimpleLoaderAlgorithm} from the specified parameters. * * @param terminology the terminology * @param version the version * @param inputDir the input dir * @param metadataCounter the metadata counter * @throws Exception the exception */ public SimpleLoaderAlgorithm(String terminology, String version, String inputDir, String metadataCounter) throws Exception { super(); this.terminology = terminology; this.version = version; this.inputDir = inputDir; this.metadataCounter = metadataCounter == null ? 1 : Integer.parseInt(metadataCounter); // initialize logger String rootPath = ConfigUtility.getConfigProperties().getProperty("map.principle.source.document.dir"); if (!rootPath.endsWith("/") && !rootPath.endsWith("\\")) { rootPath += "/"; } rootPath += "logs"; File logDirectory = new File(rootPath); if (!logDirectory.exists()) { logDirectory.mkdir(); } logFile = new File(logDirectory, "load_" + terminology + ".log"); LoggerUtility.setConfiguration("load", logFile.getAbsolutePath()); SimpleLoaderAlgorithm.log = LoggerUtility.getLogger("load"); } /** * Compute. 
* * @throws Exception the exception */ /* see superclass */ @SuppressWarnings("resource") /* see superclass */ @Override public void compute() throws Exception { // clear log before starting process PrintWriter writer = new PrintWriter(logFile); writer.print(""); writer.close(); boolean parChdFileExists = false; boolean conAttrFileExists = false; boolean conRelFileExists = false; boolean simpleRefsetFileExists = false; // Check the input directory File inputDirFile = new File(this.inputDir); if (!inputDirFile.exists()) { throw new Exception("Specified input directory does not exist"); } if (!new File(this.inputDir, CONCEPT_FILE_NAME).exists()) { throw new Exception( "The " + CONCEPT_FILE_NAME + " file of the input directory does not exist"); } if (!new File(this.inputDir, PARENT_CHILD_FILE_NAME).exists()) { log.info("The " + PARENT_CHILD_FILE_NAME + " file of the input directory does not exist. Making default isa relationships to root."); parChdFileExists = false; } else { parChdFileExists = true; } if (new File(this.inputDir, CONCEPT_ATTRIBUTES_FILE_NAME).exists()) { conAttrFileExists = true; } if (new File(this.inputDir, CONCEPT_RELATIONSHIPS_FILE_NAME).exists()) { conRelFileExists = true; } if (new File(this.inputDir, SIMPLE_REFSETS_FILE_NAME).exists()) { simpleRefsetFileExists = true; } log.info("Starting loading simple data"); log.info(" terminology = " + terminology); log.info(" version = " + version); log.info(" inputDir = " + inputDir); try { final ContentService contentService = new ContentServiceJpa(); contentService.setTransactionPerOperation(false); contentService.beginTransaction(); final Date now = new Date(); SimpleMetadataHelper helper = new SimpleMetadataHelper(terminology, version, dateFormat.format(now), contentService); log.info(" Create concept metadata"); helper.setMetadataCounter(metadataCounter); Map<String, Concept> conceptMap = helper.createMetadata(); // Create the root concept log.info(" Create the root concept"); Concept rootConcept = 
new ConceptJpa(); rootConcept.setTerminologyId("root"); rootConcept.setEffectiveTime(now); // assume active rootConcept.setActive(true); rootConcept.setModuleId(Long.parseLong(conceptMap.get("defaultModule").getTerminologyId())); rootConcept.setDefinitionStatusId( Long.parseLong(conceptMap.get("defaultDefinitionStatus").getTerminologyId())); rootConcept.setTerminology(terminology); rootConcept.setTerminologyVersion(version); rootConcept.setDefaultPreferredName(terminology + " Root Concept"); final Description rootDesc = new DescriptionJpa(); rootDesc.setTerminologyId("root"); rootDesc.setEffectiveTime(now); rootDesc.setActive(true); rootDesc.setModuleId(Long.parseLong(conceptMap.get("defaultModule").getTerminologyId())); rootDesc.setTerminology(terminology); rootDesc.setTerminologyVersion(version); rootDesc.setTerm(terminology + " Root Concept"); rootDesc.setConcept(rootConcept); rootDesc.setCaseSignificanceId( Long.valueOf(conceptMap.get("defaultCaseSignificance").getTerminologyId())); rootDesc.setLanguageCode("en"); rootDesc.setTypeId(Long.parseLong(conceptMap.get("preferred").getTerminologyId())); rootConcept.addDescription(rootDesc); rootConcept = contentService.addConcept(rootConcept); conceptMap.put(rootConcept.getTerminologyId(), rootConcept); // // Open the file and process the data // code\tpreferred\t[synonym\t,..] 
log.info(" Load concepts"); String line; FileInputStream fis = new FileInputStream(new File(inputDir, CONCEPT_FILE_NAME)); InputStreamReader isr = new InputStreamReader(fis, StandardCharsets.UTF_8); BufferedReader concepts = new BufferedReader(isr); //final BufferedReader concepts = // new BufferedReader(new FileReader(new File(inputDir, CONCEPT_FILE_NAME))); while ((line = concepts.readLine()) != null) { final String[] fields = parseLine(line); // skip header if (fields[0].equals("code")) { continue; } if (fields.length < 2) { throw new Exception("Unexpected line, not enough fields: " + line); } final String code = fields[0]; final String preferred = fields[1]; Concept concept = new ConceptJpa(); concept.setTerminologyId(code); concept.setEffectiveTime(now); // assume active concept.setActive(true); concept.setModuleId(Long.parseLong(conceptMap.get("defaultModule").getTerminologyId())); concept.setDefinitionStatusId( Long.parseLong(conceptMap.get("defaultDefinitionStatus").getTerminologyId())); concept.setTerminology(terminology); concept.setTerminologyVersion(version); concept.setDefaultPreferredName(preferred); final Description pref = new DescriptionJpa(); pref.setTerminologyId(objCt++ + ""); pref.setEffectiveTime(now); pref.setActive(true); pref.setModuleId(Long.parseLong(conceptMap.get("defaultModule").getTerminologyId())); pref.setTerminology(terminology); pref.setTerminologyVersion(version); pref.setTerm(preferred); pref.setConcept(concept); pref.setCaseSignificanceId( Long.valueOf(conceptMap.get("defaultCaseSignificance").getTerminologyId())); pref.setLanguageCode("en"); pref.setTypeId(Long.parseLong(conceptMap.get("preferred").getTerminologyId())); concept.addDescription(pref); for (int i = 2; i < fields.length; i++) { helper.createAttributeValue(concept, Long.parseLong(conceptMap.get("synonym").getTerminologyId()), fields[i], version, objCt++, now); } log.info( " concept = " + concept.getTerminologyId() + ", " + concept.getDefaultPreferredName()); 
concept = contentService.addConcept(concept); conceptMap.put(concept.getTerminologyId(), concept); concept = contentService.getConcept(concept.getId()); // If no par/chd file, make isa relationships to the root if (!parChdFileExists) { helper.createIsaRelationship(rootConcept, concept, objCt++ + "", terminology, version, dateFormat.format(now)); } } // If there is a par/chd file, need to create all those // relationships now if (parChdFileExists) { log.info(" Load par/chd relationships"); final BufferedReader parentChild = new BufferedReader(new FileReader(new File(inputDir, PARENT_CHILD_FILE_NAME))); while ((line = parentChild.readLine()) != null) { final String[] fields = parseLine(line); if (fields.length != 2) { throw new Exception("Unexpected number of fields: " + fields.length); } final Concept par = conceptMap.get(fields[0]); if (par == null) { throw new Exception("Unable to find parent concept " + line); } final Concept chd = conceptMap.get(fields[1]); if (chd == null) { throw new Exception("Unable to find child concept " + line); } helper.createIsaRelationship(par, chd, objCt++ + "", terminology, version, dateFormat.format(now)); } parentChild.close(); } // Create terminology-specific metadata concept - for any metadata created // with attr/rel/refsets final Concept targetTerminologyMetadataConcept = helper.createNewActiveConcept(terminology + " metadata", conceptMap.get("Metadata")); conceptMap.put(terminology + " metadata", targetTerminologyMetadataConcept); final Concept refsetsConcept = helper.createNewActiveConcept("Refsets", targetTerminologyMetadataConcept); conceptMap.put("Refsets", refsetsConcept); final Concept simpleRefsetsConcept = helper.createNewActiveConcept("Simple refsets", refsetsConcept); conceptMap.put("Simple refsets", simpleRefsetsConcept); final Concept asteriskRefsetConcept = helper.createNewActiveConcept("Asterisk refset", simpleRefsetsConcept); conceptMap.put("Asterisk refset", asteriskRefsetConcept); final Concept 
daggerRefsetConcept = helper.createNewActiveConcept("Dagger refset", simpleRefsetsConcept); conceptMap.put("Dagger refset", daggerRefsetConcept); // // Relationship types // Concept relationshipTypeConcept = conceptMap.get("relationshipType"); final Concept asteriskToDaggerConcept = helper.createNewActiveConcept("Asterisk to dagger", relationshipTypeConcept); conceptMap.put("Asterisk to dagger", asteriskToDaggerConcept); final Concept daggerToAsteriskConcept = helper.createNewActiveConcept("Dagger to asterisk", relationshipTypeConcept); conceptMap.put("Dagger to asterisk", daggerToAsteriskConcept); final Concept asteriskToAsteriskConcept = helper.createNewActiveConcept("Asterisk to asterisk", relationshipTypeConcept); conceptMap.put("Asterisk to asterisk", asteriskToAsteriskConcept); final Concept daggerToDaggerConcept = helper.createNewActiveConcept("Dagger to dagger", relationshipTypeConcept); conceptMap.put("Dagger to dagger", daggerToDaggerConcept); final Concept referenceConcept = helper.createNewActiveConcept("Reference", relationshipTypeConcept); conceptMap.put("Reference", referenceConcept); // If there is a concept attributes file, need to create all those // attributes now if (conAttrFileExists) { loadConceptAttributes(helper, conceptMap, now); } // If there is a concept relationships file, load now if (conRelFileExists) { loadConceptRelationships(helper, conceptMap, now); for (Concept cpt : conceptMap.values()) { for (Description d : cpt.getDescriptions()) { if (d.getTerm().contains("(") && d.getTerm().contains(")")) { d.setTerm(d.getTerm().substring(0, d.getTerm().lastIndexOf("("))); } } } } // If there is a simple refsets file , load now if (simpleRefsetFileExists) { loadSimpleRefsets(helper, conceptMap, now); } concepts.close(); contentService.commit(); // Tree position computation String isaRelType = conceptMap.get("isa").getTerminologyId(); log.info("Start creating tree positions root, " + isaRelType); 
contentService.computeTreePositions(terminology, version, isaRelType, "root"); // Clean-up log.info("done ..."); contentService.close(); } catch (Exception e) { e.printStackTrace(); log.error(e.getMessage(), e); throw new Exception("Unexpected exception:", e); } } /** * Load concept attributes. * * @param helper the helper * @param conceptMap the concept map * @param now the now * @throws Exception the exception */ private void loadConceptAttributes(SimpleMetadataHelper helper, Map<String, Concept> conceptMap, Date now) throws Exception { log.info(" Load concept attributes"); String line; try (FileInputStream fis = new FileInputStream(new File(inputDir, CONCEPT_ATTRIBUTES_FILE_NAME)); InputStreamReader isr = new InputStreamReader(fis, StandardCharsets.UTF_8); BufferedReader conAttr = new BufferedReader(isr)) { final Concept targetTerminologyMetadataConcept = conceptMap.get(terminology + " metadata"); while ((line = conAttr.readLine()) != null) { final String[] fields = parseLine(line); if (fields.length == 1) { addConceptAttributesMetadata(helper, targetTerminologyMetadataConcept, conceptMap, fields); } else if (fields.length != 3) { throw new Exception("Unexpected number of fields: " + fields.length); } else { final Concept con = conceptMap.get(fields[0]); if (con == null) { throw new Exception("Unable to find parent concept " + line); } final String attrType = fields[1]; final String value = fields[2]; helper.createAttributeValue(con, Long.parseLong(conceptMap.get(attrType).getTerminologyId()), value, version, objCt++, now); } } } } /** * Adds the concept attributes metadata. 
* * @param helper the helper * @param parentConcept the parent concept * @param conceptMap the concept map * @param fields the fields * @throws Exception the exception */ private void addConceptAttributesMetadata(SimpleMetadataHelper helper, Concept parentConcept, Map<String, Concept> conceptMap, String[] fields) throws Exception { // Create metadata concept from terminology input file final Concept newConcept = helper.createNewActiveConcept(fields[0], parentConcept); conceptMap.put(fields[0], newConcept); } /** * Load concept relationships. * * @param helper the helper * @param conceptMap the concept map * @param now the now * @throws Exception the exception */ private void loadConceptRelationships(SimpleMetadataHelper helper, Map<String, Concept> conceptMap, Date now) throws Exception { log.info(" Load concept relationships"); String line; try (final BufferedReader conAttr = new BufferedReader(new FileReader(new File(inputDir, CONCEPT_RELATIONSHIPS_FILE_NAME)));) { final Concept targetTerminologyMetadataConcept = conceptMap.get(terminology + " metadata"); while ((line = conAttr.readLine()) != null) { final String[] fields = parseLine(line); /* * if (fields.length == 1) { addConceptRelationshipsMetadata(helper, * targetTerminologyMetadataConcept, conceptMap, fields); } else if * (fields.length != 4) { throw new Exception("Unexpected number of fields: " + * fields.length); } else */ if (fields.length == 4) { final Concept sourceCon = conceptMap.get(fields[0]); if (sourceCon == null) { throw new Exception("Unable to find source concept " + line); } final Concept destinationCon = conceptMap.get(fields[2]); if (destinationCon == null) { throw new Exception("Unable to find source concept " + line); } // relationship's terminologyId must reference the terminology of the matching description String terminologyId = ""; Set<String> terminologyIds = new HashSet<>(); Set<Description> descriptions = sourceCon.getDescriptions(); final String type = fields[2]; final String 
label = fields[3]; for (Description d : descriptions) { if (d.getTerm().contains(label)) { if (!terminologyIds.contains(terminologyId)) { terminologyId = d.getTerminologyId() + "~1"; terminologyIds.add(terminologyId); } } } if (terminologyId.contentEquals("")) { terminologyId = objCt++ + ""; } helper.createRelationship(sourceCon, destinationCon, label, type, terminologyId, terminology, version, dateFormat.format(now)); } } } } /** * Adds the concept relationships metadata. * * @param helper the helper * @param parentConcept the parent concept * @param conceptMap the concept map * @param fields the fields * @throws Exception the exception */ private void addConceptRelationshipsMetadata(SimpleMetadataHelper helper, Concept parentConcept, Map<String, Concept> conceptMap, String[] fields) throws Exception { // Create metadata concept from terminology input file final Concept newConcept = helper.createNewActiveConcept(fields[0], parentConcept); conceptMap.put(fields[0], newConcept); } private void loadSimpleRefsets(SimpleMetadataHelper helper, Map<String, Concept> conceptMap, Date now) throws Exception { log.info(" Load simple refsets and members"); String line; try (final BufferedReader conAttr = new BufferedReader(new FileReader(new File(inputDir, SIMPLE_REFSETS_FILE_NAME)));) { while ((line = conAttr.readLine()) != null) { final String[] fields = parseLine(line); if (fields.length == 2) { final Concept con = conceptMap.get(fields[0]); if (con == null) { throw new Exception("Unable to find concept " + line); } final String refsetName = fields[1]; helper.createRefsetMember(con, refsetName, "" + objCt++, terminology, version, dateFormat.format(now)); } } } } private void addSimpleRefsetMetadata(SimpleMetadataHelper helper, Concept parentConcept, Map<String, Concept> conceptMap, String[] fields) throws Exception { // Create metadata concept from terminology input file final Concept newConcept = helper.createNewActiveConcept(fields[0], parentConcept); 
conceptMap.put(fields[0], newConcept); } /** * Parses the line. * * @param line the line * @return the string[] */ private String[] parseLine(String line) { line = line.replace("\r", ""); return line.indexOf('\t') != -1 ? line.split("\t") : line.split("\\|"); } /** * Adds the progress listener. * * @param l the l */ @Override public void addProgressListener(ProgressListener l) { listeners.add(l); } /** * Removes the progress listener. * * @param l the l */ @Override public void removeProgressListener(ProgressListener l) { listeners.remove(l); } /** * Reset. * * @throws Exception the exception */ @Override public void reset() throws Exception { // n/a } /** * Check preconditions. * * @throws Exception the exception */ @Override public void checkPreconditions() throws Exception { // n/a } /** * Cancel. * * @throws Exception the exception */ @Override public void cancel() throws Exception { // n/a } }
Resolve incorrect links.
jpa-services/src/main/java/org/ihtsdo/otf/mapping/jpa/algo/SimpleLoaderAlgorithm.java
Resolve incorrect links.
<ide><path>pa-services/src/main/java/org/ihtsdo/otf/mapping/jpa/algo/SimpleLoaderAlgorithm.java <ide> import java.text.SimpleDateFormat; <ide> import java.util.ArrayList; <ide> import java.util.Date; <add>import java.util.HashMap; <ide> import java.util.HashSet; <ide> import java.util.List; <ide> import java.util.Map; <ide> <ide> /** The obj ct. */ <ide> private int objCt = 1001; <add> <add> <add> Map<String, Integer> terminologyIds = new HashMap<>(); <ide> <ide> /** <ide> * Instantiates a {@link SimpleLoaderAlgorithm} from the specified parameters. <ide> if (sourceCon == null) { <ide> throw new Exception("Unable to find source concept " + line); <ide> } <del> final Concept destinationCon = conceptMap.get(fields[2]); <add> final Concept destinationCon = conceptMap.get(fields[1]); <ide> if (destinationCon == null) { <del> throw new Exception("Unable to find source concept " + line); <add> //throw new Exception("Unable to find source concept " + line); <add> System.out.println("Unable to find source concept " + fields[0] + " " + fields[1]); <add> continue; <ide> } <ide> <ide> // relationship's terminologyId must reference the terminology of the matching description <ide> String terminologyId = ""; <del> Set<String> terminologyIds = new HashSet<>(); <ide> Set<Description> descriptions = sourceCon.getDescriptions(); <ide> final String type = fields[2]; <ide> final String label = fields[3]; <ide> for (Description d : descriptions) { <ide> if (d.getTerm().contains(label)) { <del> if (!terminologyIds.contains(terminologyId)) { <add> if (terminologyIds.containsKey(d.getTerminologyId())) { <add> terminologyId = d.getTerminologyId() + "~" + ((Integer)terminologyIds.get(d.getTerminologyId())) + 1; <add> terminologyIds.put(d.getTerminologyId(), ((Integer)terminologyIds.get(d.getTerminologyId())) + 1); <add> } else { <ide> terminologyId = d.getTerminologyId() + "~1"; <del> terminologyIds.add(terminologyId); <add> terminologyIds.put(d.getTerminologyId(), Integer.parseInt("1")); 
<ide> } <ide> } <ide> }